mirror of
https://github.com/aaronpo97/the-biergarten-app.git
synced 2026-04-05 18:09:04 +00:00
Compare commits
124 Commits
main-1.0
...
077f6ab4ae
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
077f6ab4ae | ||
|
|
534403734a | ||
|
|
3af053f0eb | ||
|
|
ba165d8aa7 | ||
|
|
eb9a2767b4 | ||
|
|
29ea47fdb6 | ||
|
|
52e2333304 | ||
|
|
a1f0ca5b20 | ||
|
|
2ea8aa52b4 | ||
|
|
98083ab40c | ||
|
|
ac136f7179 | ||
|
|
280c9c61bd | ||
|
|
248a51b35f | ||
|
|
35aa7bc0df | ||
|
|
581863d69b | ||
|
|
9238036042 | ||
|
|
431e11e052 | ||
|
|
f1194d3da8 | ||
|
|
17eb04e20c | ||
|
|
50c2f5dfda | ||
|
|
c5683df4b6 | ||
|
|
2cad88e3f6 | ||
|
|
0d52c937ce | ||
|
|
6b66f5680f | ||
|
|
82f0d26200 | ||
|
|
7129e5679e | ||
|
|
584fe6282f | ||
|
|
8c61069b7d | ||
|
|
674f91cbdf | ||
|
|
a54d2a6da0 | ||
|
|
954c9c389c | ||
|
|
d942d92db5 | ||
|
|
c80eae694f | ||
|
|
94061c6d84 | ||
|
|
caf13de36e | ||
|
|
2cb8f1d918 | ||
|
|
f728514a7c | ||
|
|
4f92741b4f | ||
|
|
a038a12fca | ||
|
|
74c5528ea2 | ||
|
|
f48b8452d3 | ||
|
|
2411841bdc | ||
|
|
215824d4b6 | ||
|
|
99b13e2742 | ||
|
|
3a32f326bf | ||
|
|
b2cf21399b | ||
|
|
109ade474c | ||
|
|
07a62a0c99 | ||
|
|
31e67ebad8 | ||
|
|
c74b20079b | ||
|
|
2b0f9876bc | ||
|
|
8a4b833943 | ||
|
|
656981003b | ||
|
|
ff1ce15419 | ||
|
|
881a94893f | ||
|
|
8abacb5572 | ||
|
|
027e130fcd | ||
|
|
243931eb6a | ||
|
|
b22e1e5702 | ||
|
|
b07cec8c7e | ||
|
|
92628290da | ||
|
|
ca2d7c453f | ||
|
|
2076935ee2 | ||
|
|
5c49611bff | ||
|
|
ae6002bbe0 | ||
|
|
a1ea6391bc | ||
|
|
6d812638ba | ||
|
|
17bf29700a | ||
|
|
393e57af7f | ||
|
|
e0af25f17c | ||
|
|
9bfbed9b92 | ||
|
|
2ae99d5224 | ||
|
|
b994201a18 | ||
|
|
e4560f8d80 | ||
|
|
dbd3b6ce0a | ||
|
|
ee53cc60d8 | ||
|
|
954e224c34 | ||
|
|
9474fb7811 | ||
|
|
77bb1f6733 | ||
|
|
1af3d6f987 | ||
|
|
2332f9f9b5 | ||
|
|
0053d84de8 | ||
|
|
754578c84c | ||
|
|
ca49d19bf7 | ||
|
|
cf9f048daa | ||
|
|
a8c0ae6358 | ||
|
|
52643c1173 | ||
|
|
24b059ea3d | ||
|
|
97c093c4bc | ||
|
|
45f64f613d | ||
|
|
084f68da7a | ||
|
|
ea92735146 | ||
|
|
54788b1a6d | ||
|
|
7dc7ef4b1a | ||
|
|
a6702c89fd | ||
|
|
68ff549635 | ||
|
|
a56ea77861 | ||
|
|
14cb05e992 | ||
|
|
53a7569ed5 | ||
|
|
82db763951 | ||
|
|
fd544dbd34 | ||
|
|
89da531c48 | ||
|
|
c5aaf8cd05 | ||
|
|
b8cd855916 | ||
|
|
60ef65ec52 | ||
|
|
da84492aa4 | ||
|
|
b5ab6f6893 | ||
|
|
7fbdfbf542 | ||
|
|
43dcf0844d | ||
|
|
c928ddecb5 | ||
|
|
372aac897a | ||
|
|
8d6b903aa7 | ||
|
|
00a0f6c4ef | ||
|
|
afefdb9e3d | ||
|
|
fc2e8c9b6d | ||
|
|
b86607e37a | ||
|
|
a200164609 | ||
|
|
4e2c9836c9 | ||
|
|
b7f22fcc66 | ||
|
|
f0c9cff8be | ||
|
|
33db1368ec | ||
|
|
8975044034 | ||
|
|
738c055bf7 | ||
|
|
2f0bfd90b2 |
13
.config/dotnet-tools.json
Normal file
13
.config/dotnet-tools.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"version": 1,
|
||||
"isRoot": true,
|
||||
"tools": {
|
||||
"csharpier": {
|
||||
"version": "1.2.1",
|
||||
"commands": [
|
||||
"csharpier"
|
||||
],
|
||||
"rollForward": false
|
||||
}
|
||||
}
|
||||
}
|
||||
19
.csharpierrc.json
Normal file
19
.csharpierrc.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/csharpier.json",
|
||||
|
||||
"printWidth": 80,
|
||||
"useTabs": false,
|
||||
"indentSize": 4,
|
||||
"endOfLine": "lf",
|
||||
|
||||
"overrides": [
|
||||
{
|
||||
"files": "*.xml",
|
||||
"indentSize": 2
|
||||
},
|
||||
{
|
||||
"files": "*.csx",
|
||||
"printWidth": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
56
.env.example
Normal file
56
.env.example
Normal file
@@ -0,0 +1,56 @@
|
||||
# ==============================================
|
||||
# Biergarten App - Environment Variables Template
|
||||
# ==============================================
|
||||
#
|
||||
# This file contains backend/Docker environment variables.
|
||||
# Copy this to create environment-specific files:
|
||||
# - .env.dev (development)
|
||||
# - .env.test (testing)
|
||||
# - .env.prod (production)
|
||||
#
|
||||
# For frontend variables, create a separate .env.local file
|
||||
# in the Website/ directory. See README.md for complete docs.
|
||||
#
|
||||
# ==============================================
|
||||
|
||||
# ======================
|
||||
# Database Configuration
|
||||
# ======================
|
||||
|
||||
# SQL Server Connection Components (Recommended for Docker)
|
||||
# These are used to build connection strings dynamically
|
||||
DB_SERVER=sqlserver,1433
|
||||
DB_NAME=Biergarten
|
||||
DB_USER=sa
|
||||
DB_PASSWORD=YourStrong!Passw0rd
|
||||
|
||||
# Alternative: Full Connection String (Local Development)
|
||||
# If set, this overrides the component-based configuration above
|
||||
# DB_CONNECTION_STRING=Server=localhost,1433;Database=Biergarten;User Id=sa;Password=YourStrong!Passw0rd;TrustServerCertificate=True;
|
||||
|
||||
# ======================
|
||||
# JWT Configuration
|
||||
# ======================
|
||||
|
||||
# JWT Secret for signing tokens
|
||||
# IMPORTANT: Generate a secure secret (minimum 32 characters)
|
||||
# Command: openssl rand -base64 32
|
||||
ACCESS_TOKEN_SECRET=your-secure-jwt-secret-key
|
||||
REFRESH_TOKEN_SECRET=your-secure-jwt-refresh-secret-key
|
||||
CONFIRMATION_TOKEN_SECRET=your-secure-jwt-confirmation-secret-key
|
||||
|
||||
|
||||
# ======================
|
||||
# SMTP Configuration
|
||||
# ======================
|
||||
# SMTP settings for sending emails (e.g., password resets)
|
||||
# For development, you can use a local SMTP testing tool like Mailpit or MailHog
|
||||
# In production, set these to real SMTP server credentials from an email service
|
||||
# provider (e.g., SendGrid, Mailgun, Amazon SES).
|
||||
SMTP_HOST=mailpit
|
||||
SMTP_PORT=1025
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_USE_SSL=false
|
||||
SMTP_FROM_EMAIL=noreply@thebiergarten.app
|
||||
SMTP_FROM_NAME=The Biergarten App
|
||||
43
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
43
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
---
|
||||
name: Feature Request (BDD)
|
||||
about: Create a new feature using user story + BDD acceptance criteria
|
||||
title: "[Feature] "
|
||||
labels: ["feature", "BDD"]
|
||||
assignees: []
|
||||
---
|
||||
|
||||
## User Story
|
||||
**As a** (who wants to accomplish something)
|
||||
**I want to** (what they want to accomplish)
|
||||
**So that** (why they want to accomplish that thing)
|
||||
|
||||
## Acceptance Criteria (BDD)
|
||||
|
||||
### Scenario 1
|
||||
|
||||
|
||||
Given ...
|
||||
When ...
|
||||
Then ...
|
||||
|
||||
|
||||
### Scenario 2
|
||||
|
||||
|
||||
Given ...
|
||||
When ...
|
||||
Then ...
|
||||
|
||||
|
||||
### Scenario 3
|
||||
|
||||
|
||||
Given ...
|
||||
When ...
|
||||
Then ...
|
||||
|
||||
|
||||
## Subtasks
|
||||
- [ ] Task 1
|
||||
- [ ] Task 2
|
||||
- [ ] Task 3
|
||||
19
.github/workflows/github-actions-demo.yml
vendored
19
.github/workflows/github-actions-demo.yml
vendored
@@ -1,19 +0,0 @@
|
||||
name: Node.js CI
|
||||
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
457
.gitignore
vendored
457
.gitignore
vendored
@@ -15,6 +15,14 @@
|
||||
# production
|
||||
/build
|
||||
|
||||
# project-specific build artifacts
|
||||
/src/Website/build/
|
||||
/src/Website/storybook-static/
|
||||
/src/Website/.react-router/
|
||||
/src/Website/playwright-report/
|
||||
/src/Website/test-results/
|
||||
/test-results/
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
@@ -42,7 +50,454 @@ next-env.d.ts
|
||||
|
||||
# vscode
|
||||
.vscode
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
/cloudinary-images
|
||||
|
||||
.obsidian
|
||||
.obsidian
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
##
|
||||
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
|
||||
|
||||
# User-specific files
|
||||
*.rsuser
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
*.env
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Mono auto generated files
|
||||
mono_crash.*
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
|
||||
[Dd]ebug/x64/
|
||||
[Dd]ebugPublic/x64/
|
||||
[Rr]elease/x64/
|
||||
[Rr]eleases/x64/
|
||||
bin/x64/
|
||||
obj/x64/
|
||||
|
||||
[Dd]ebug/x86/
|
||||
[Dd]ebugPublic/x86/
|
||||
[Rr]elease/x86/
|
||||
[Rr]eleases/x86/
|
||||
bin/x86/
|
||||
obj/x86/
|
||||
|
||||
[Ww][Ii][Nn]32/
|
||||
[Aa][Rr][Mm]/
|
||||
[Aa][Rr][Mm]64/
|
||||
[Aa][Rr][Mm]64[Ee][Cc]/
|
||||
bld/
|
||||
[Oo]bj/
|
||||
[Oo]ut/
|
||||
[Ll]og/
|
||||
[Ll]ogs/
|
||||
|
||||
# Build results on 'Bin' directories
|
||||
**/[Bb]in/*
|
||||
# Uncomment if you have tasks that rely on *.refresh files to move binaries
|
||||
# (https://github.com/github/gitignore/pull/3736)
|
||||
#!**/[Bb]in/*.refresh
|
||||
|
||||
# Visual Studio 2015/2017 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# Visual Studio 2017 auto generated files
|
||||
Generated\ Files/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
*.trx
|
||||
|
||||
# NUnit
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
nunit-*.xml
|
||||
|
||||
# Approval Tests result files
|
||||
*.received.*
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# Benchmark Results
|
||||
BenchmarkDotNet.Artifacts/
|
||||
|
||||
# .NET Core
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
|
||||
# ASP.NET Scaffolding
|
||||
ScaffoldingReadMe.txt
|
||||
|
||||
# StyleCop
|
||||
StyleCopReport.xml
|
||||
|
||||
# Files built by Visual Studio
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_h.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.idb
|
||||
*.iobj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.ipdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
# but not Directory.Build.rsp, as it configures directory-level build defaults
|
||||
!Directory.Build.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*_wpftmp.csproj
|
||||
*.log
|
||||
*.tlog
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# Visual Studio Trace Files
|
||||
*.e2e
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# AxoCover is a Code Coverage Tool
|
||||
.axoCover/*
|
||||
!.axoCover/settings.json
|
||||
|
||||
# Coverlet is a free, cross platform Code Coverage Tool
|
||||
coverage*.json
|
||||
coverage*.xml
|
||||
coverage*.info
|
||||
|
||||
# Visual Studio code coverage results
|
||||
*.coverage
|
||||
*.coveragexml
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# Note: Comment the next line if you want to checkin your web deploy settings,
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# NuGet Symbol Packages
|
||||
*.snupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/[Pp]ackages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/[Pp]ackages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/[Pp]ackages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignorable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
*.appx
|
||||
*.appxbundle
|
||||
*.appxupload
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!?*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
orleans.codegen.cs
|
||||
|
||||
# Including strong name files can present a security risk
|
||||
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
|
||||
#*.snk
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
ServiceFabricBackup/
|
||||
*.rptproj.bak
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
*.ndf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
*.rptproj.rsuser
|
||||
*- [Bb]ackup.rdl
|
||||
*- [Bb]ackup ([0-9]).rdl
|
||||
*- [Bb]ackup ([0-9][0-9]).rdl
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
node_modules/
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||
*.vbw
|
||||
|
||||
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
|
||||
*.dsw
|
||||
*.dsp
|
||||
|
||||
# Visual Studio 6 technical files
|
||||
*.ncb
|
||||
*.aps
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
**/.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
**/.fake/
|
||||
|
||||
# CodeRush personal settings
|
||||
**/.cr/personal
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
**/__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
#tools/**
|
||||
#!tools/packages.config
|
||||
|
||||
# Tabs Studio
|
||||
*.tss
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# BizTalk build output
|
||||
*.btp.cs
|
||||
*.btm.cs
|
||||
*.odx.cs
|
||||
*.xsd.cs
|
||||
|
||||
# OpenCover UI analysis results
|
||||
OpenCover/
|
||||
|
||||
# Azure Stream Analytics local run output
|
||||
ASALocalRun/
|
||||
|
||||
# MSBuild Binary and Structured Log
|
||||
*.binlog
|
||||
MSBuild_Logs/
|
||||
|
||||
# AWS SAM Build and Temporary Artifacts folder
|
||||
.aws-sam
|
||||
|
||||
# NVidia Nsight GPU debugger configuration file
|
||||
*.nvuser
|
||||
|
||||
# MFractors (Xamarin productivity tool) working folder
|
||||
**/.mfractor/
|
||||
|
||||
# Local History for Visual Studio
|
||||
**/.localhistory/
|
||||
|
||||
# Visual Studio History (VSHistory) files
|
||||
.vshistory/
|
||||
|
||||
# BeatPulse healthcheck temp database
|
||||
healthchecksdb
|
||||
|
||||
# Backup folder for Package Reference Convert tool in Visual Studio 2017
|
||||
MigrationBackup/
|
||||
|
||||
# Ionide (cross platform F# VS Code tools) working folder
|
||||
**/.ionide/
|
||||
|
||||
# Fody - auto-generated XML schema
|
||||
FodyWeavers.xsd
|
||||
|
||||
# VS Code files for those working on multiple tools
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
!.vscode/*.code-snippets
|
||||
|
||||
# Local History for Visual Studio Code
|
||||
.history/
|
||||
|
||||
# Built Visual Studio Code Extensions
|
||||
*.vsix
|
||||
|
||||
# Windows Installer files from build outputs
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
.DS_Store
|
||||
|
||||
*/data_source/other
|
||||
.fake
|
||||
.idea
|
||||
|
||||
*.feature.cs
|
||||
|
||||
.env
|
||||
.env.dev
|
||||
.env.test
|
||||
.env.prod
|
||||
|
||||
*storybook.log
|
||||
storybook-static
|
||||
|
||||
959
LICENSE.md
959
LICENSE.md
File diff suppressed because it is too large
Load Diff
288
README.md
288
README.md
@@ -1,219 +1,159 @@
|
||||
# The Biergarten App
|
||||
|
||||
## About
|
||||
The Biergarten App is a multi-project monorepo with a .NET backend and an active React
|
||||
Router frontend in `src/Website`. The current website focuses on account flows, theme
|
||||
switching, shared UI components, Storybook coverage, and integration with the API.
|
||||
|
||||
The Biergarten App is a web application designed for beer lovers to share their favorite
|
||||
brews and breweries with like-minded people online.
|
||||
## Documentation
|
||||
|
||||
This application's stack consists of Next.js, Prisma and Neon Postgres. I'm motivated to
|
||||
learn more about these technologies while exploring my passion for beer.
|
||||
- [Getting Started](docs/getting-started.md) - Local setup for backend and active website
|
||||
- [Architecture](docs/architecture.md) - Current backend and frontend architecture
|
||||
- [Docker Guide](docs/docker.md) - Container-based backend development and testing
|
||||
- [Testing](docs/testing.md) - Backend and frontend test commands
|
||||
- [Environment Variables](docs/environment-variables.md) - Active configuration reference
|
||||
- [Token Validation](docs/token-validation.md) - JWT validation architecture
|
||||
- [Legacy Website Archive](docs/archive/legacy-website-v1.md) - Archived notes for the old Next.js frontend
|
||||
|
||||
I've also incorporated different APIs into the application, such as the Cloudinary API for
|
||||
image uploading, the SparkPost API for email services as well as Mapbox for geolocation
|
||||
and map data.
|
||||
## Diagrams
|
||||
|
||||
To handle serverless functions (API routes), I use the next-connect package.
|
||||
- [Architecture](docs/diagrams-out/architecture.svg) - Layered architecture
|
||||
- [Deployment](docs/diagrams-out/deployment.svg) - Docker topology
|
||||
- [Authentication Flow](docs/diagrams-out/authentication-flow.svg) - Auth sequence
|
||||
- [Database Schema](docs/diagrams-out/database-schema.svg) - Entity relationships
|
||||
|
||||
On the client-side, I use Tailwind CSS, Headless UI and Daisy UI for styling to create a
|
||||
visually appealing and user-friendly interface.
|
||||
## Current Status
|
||||
|
||||
I'm sharing my code publicly so that others can learn from it and use it as a reference
|
||||
for their own projects.
|
||||
Active areas in the repository:
|
||||
|
||||
### Some beer terminology
|
||||
- .NET 10 backend with layered architecture and SQL Server
|
||||
- React Router 7 website in `src/Website`
|
||||
- Shared Biergarten theme system with a theme guide route
|
||||
- Storybook stories and browser-based checks for shared UI
|
||||
- Auth demo flows for home, login, register, dashboard, logout, and confirmation
|
||||
- Toast-based feedback for auth outcomes
|
||||
|
||||
In this app you will encounter various beer related terms. Here is a list of terms used in
|
||||
this app and their definitions.
|
||||
Legacy area retained for reference:
|
||||
|
||||
#### ABV
|
||||
- `src/Website-v1` contains the archived Next.js frontend and is no longer the active website
|
||||
|
||||
[Alcohol by volume](https://en.wikipedia.org/wiki/Alcohol_by_volume) (abbreviated as ABV)
|
||||
is a standard measure of how much alcohol (ethanol) is contained in a given volume of an
|
||||
alcoholic beverage (expressed as a volume percent).
|
||||
## Tech Stack
|
||||
|
||||
#### IBU
|
||||
- **Backend**: .NET 10, ASP.NET Core, SQL Server 2022, DbUp
|
||||
- **Frontend**: React 19, React Router 7, Vite 7, Tailwind CSS 4, DaisyUI 5
|
||||
- **UI Documentation**: Storybook 10, Vitest browser mode, Playwright
|
||||
- **Testing**: xUnit, Reqnroll (BDD), FluentAssertions, Moq
|
||||
- **Infrastructure**: Docker, Docker Compose
|
||||
- **Security**: Argon2id password hashing, JWT access/refresh/confirmation tokens
|
||||
|
||||
The
|
||||
[International Bitterness Units](https://en.wikipedia.org/wiki/Beer_measurement#Bitterness)
|
||||
scale, or IBU, is used to approximately quantify the bitterness of beer. This scale is not
|
||||
measured on the perceived bitterness of the beer, but rather the amount of a component of
|
||||
beer known as iso-alpha acids.
|
||||
## Quick Start
|
||||
|
||||
## Database Schema
|
||||
|
||||

|
||||
|
||||
## Technologies
|
||||
|
||||
### General
|
||||
|
||||
- [Next.js](https://nextjs.org/)
|
||||
- A React based framework for building web applications offering several features such
|
||||
as server side rendering, static site generation and API routes.
|
||||
|
||||
### Client
|
||||
|
||||
- [SWR](https://swr.vercel.app/)
|
||||
- A React Hooks library for fetching data with support for caching, revalidation and
|
||||
error handling.
|
||||
- [Tailwind CSS](https://tailwindcss.com/)
|
||||
- A popular open-source utility-first CSS framework that provides pre-defined classes to
|
||||
style HTML elements.
|
||||
- [Headless UI](https://headlessui.dev/)
|
||||
- A set of completely unstyled, fully accessible UI components, designed to integrate
|
||||
beautifully with Tailwind CSS.
|
||||
- [Daisy UI](https://daisyui.com/)
|
||||
- A component library for Tailwind CSS that provides ready-to-use components for
|
||||
building user interfaces.
|
||||
|
||||
### Server
|
||||
|
||||
- [Prisma](https://www.prisma.io/)
|
||||
- An open-source ORM for Node.js and TypeScript applications.
|
||||
- [Neon Postgres](https://neon.tech/)
|
||||
- A managed PostgreSQL database service powered by Neon.
|
||||
- [Cloudinary](https://cloudinary.com/)
|
||||
- A cloud-based image and video management service that provides developers with an easy
|
||||
way to upload, store, and manipulate media assets.
|
||||
- [SparkPost](https://www.sparkpost.com/)
|
||||
- A cloud-based email delivery service that provides developers with an easy way to send
|
||||
transactional and marketing emails.
|
||||
- [Mapbox](https://www.mapbox.com/)
|
||||
- A suite of open-source mapping tools that allows developers to add custom maps,
|
||||
search, and navigation into their applications.
|
||||
- [next-connect](https://github.com/hoangvvo/next-connect#readme)
|
||||
- A promise-based method routing and middleware layer for Next.js.
|
||||
|
||||
## How to run locally
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Before you can run this application locally, you will need to have the following installed
|
||||
on your machine:
|
||||
|
||||
- [Node.js](https://nodejs.org/en/)
|
||||
- [npm (version 8 or higher)](https://www.npmjs.com/get-npm)
|
||||
|
||||
You will also need to create a free account with the following services:
|
||||
|
||||
- [Cloudinary](https://cloudinary.com/users/register/free)
|
||||
- [SparkPost](https://www.sparkpost.com/)
|
||||
- [Neon Postgres](https://neon.tech/)
|
||||
- [Mapbox](https://account.mapbox.com/auth/signup/)
|
||||
|
||||
### Setup
|
||||
|
||||
1. Clone this repository and navigate to the project directory.
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
git clone https://github.com/aaronpo97/the-biergarten-app
|
||||
cd the-biergarten-app
|
||||
cp .env.example .env.dev
|
||||
docker compose -f docker-compose.dev.yaml up -d
|
||||
```
|
||||
|
||||
2. Run the following command to install the dependencies.
|
||||
Backend access:
|
||||
|
||||
- API Swagger: http://localhost:8080/swagger
|
||||
- Health Check: http://localhost:8080/health
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm install
|
||||
API_BASE_URL=http://localhost:8080 SESSION_SECRET=dev-secret npm run dev
|
||||
```
|
||||
|
||||
3. Run the following script to create a `.env` file in the root directory of the project
|
||||
and add the following environment variables. Update these variables with your own
|
||||
values.
|
||||
Optional frontend tools:
|
||||
|
||||
```bash
|
||||
echo "BASE_URL=
|
||||
NEXT_PUBLIC_CLOUDINARY_CLOUD_NAME=
|
||||
CLOUDINARY_KEY=
|
||||
CLOUDINARY_SECRET=
|
||||
CONFIRMATION_TOKEN_SECRET=
|
||||
RESET_PASSWORD_TOKEN_SECRET=
|
||||
SESSION_SECRET=
|
||||
SESSION_TOKEN_NAME=
|
||||
SESSION_MAX_AGE=
|
||||
NODE_ENV=
|
||||
|
||||
POSTGRES_PRISMA_URL=
|
||||
POSTGRES_URL_NON_POOLING=
|
||||
SHADOW_DATABASE_URL=
|
||||
|
||||
ADMIN_PASSWORD=
|
||||
|
||||
MAPBOX_ACCESS_TOKEN=
|
||||
|
||||
SPARKPOST_API_KEY=
|
||||
SPARKPOST_SENDER_ADDRESS=" > .env
|
||||
cd src/Website
|
||||
npm run storybook
|
||||
npm run test:storybook
|
||||
npm run test:storybook:playwright
|
||||
```
|
||||
|
||||
### Explanation of environment variables
|
||||
## Repository Structure
|
||||
|
||||
- `BASE_URL` is the base URL of the application.
|
||||
- For example, if you are running the application locally, you can set this to
|
||||
`http://localhost:3000`.
|
||||
- `NEXT_PUBLIC_CLOUDINARY_CLOUD_NAME`, `CLOUDINARY_KEY`, and `CLOUDINARY_SECRET` are the
|
||||
credentials for your Cloudinary account.
|
||||
- You can create a free account [here](https://cloudinary.com/users/register/free).
|
||||
- `CONFIRMATION_TOKEN_SECRET` is the secret used to sign the confirmation token used for
|
||||
email confirmation.
|
||||
- You can generate a random string using the`openssl rand -base64 127` command.
|
||||
- `RESET_PASSWORD_TOKEN_SECRET` is the secret used to sign the reset password token.
|
||||
- You can generate a random string using the `openssl rand -base64 127` command.
|
||||
- `SESSION_SECRET` is the secret used to sign the session cookie.
|
||||
- Use the same command as above to generate a random string.
|
||||
- `SESSION_TOKEN_NAME` is the name of the session cookie.
|
||||
- You can set this to `biergarten`.
|
||||
- `SESSION_MAX_AGE` is the maximum age of the session cookie in seconds.
|
||||
- You can set this to `604800` (1 week).
|
||||
- `POSTGRES_PRISMA_URL`is a pooled connection string for your Neon Postgres database.
|
||||
- `POSTGRES_URL_NON_POOLING` is a non-pooled connection string for your Neon Postgres
|
||||
database used for migrations.
|
||||
- `SHADOW_DATABASE_URL` is a connection string for a secondary database used for
|
||||
migrations to detect schema drift.
|
||||
- You can create a free account [here](https://neon.tech).
|
||||
- Consult the [docs](https://neon.tech/docs/guides/prisma) for more information.
|
||||
- `MAPBOX_ACCESS_TOKEN` is the access token for your Mapbox account.
|
||||
- You can create a free account [here](https://account.mapbox.com/auth/signup/).
|
||||
- `NODE_ENV` is the environment in which the application is running.
|
||||
- You can set this to `development` or `production`.
|
||||
- `SPARKPOST_API_KEY` is the API key for your SparkPost account.
|
||||
- You can create a free account [here](https://www.sparkpost.com/).
|
||||
- `SPARKPOST_SENDER_ADDRESS` is the email address that will be used to send emails.
|
||||
- `ADMIN_PASSWORD` is the password for the admin account created when seeding the
|
||||
database.
|
||||
```text
|
||||
src/Core/ Backend projects (.NET)
|
||||
src/Website/ Active React Router frontend
|
||||
src/Website-v1/ Archived legacy Next.js frontend
|
||||
docs/ Active project documentation
|
||||
docs/archive/ Archived legacy documentation
|
||||
```
|
||||
|
||||
1. Initialize the database and run the migrations.
|
||||
## Key Features
|
||||
|
||||
Implemented today:
|
||||
|
||||
- User registration and login against the API
|
||||
- JWT-based auth with access, refresh, and confirmation flows
|
||||
- SQL Server migrations and seed projects
|
||||
- Shared form components and auth screens
|
||||
- Theme switching with Lager, Stout, Cassis, and Weizen variants
|
||||
- Storybook documentation and automated story interaction tests
|
||||
- Toast feedback for auth-related outcomes
|
||||
|
||||
Planned next:
|
||||
|
||||
- Brewery discovery and management
|
||||
- Beer reviews and ratings
|
||||
- Social follow relationships
|
||||
- Geospatial brewery experiences
|
||||
- Additional frontend routes beyond the auth demo
|
||||
|
||||
## Testing
|
||||
|
||||
Backend suites:
|
||||
|
||||
- `API.Specs` - integration tests
|
||||
- `Infrastructure.Repository.Tests` - repository unit tests
|
||||
- `Service.Auth.Tests` - service unit tests
|
||||
|
||||
Frontend suites:
|
||||
|
||||
- Storybook interaction tests via Vitest
|
||||
- Storybook browser regression checks via Playwright
|
||||
|
||||
Run all backend tests with Docker:
|
||||
|
||||
```bash
|
||||
npx prisma generate
|
||||
npx prisma migrate dev
|
||||
docker compose -f docker-compose.test.yaml up --abort-on-container-exit
|
||||
```
|
||||
|
||||
5. Seed the database with some initial data.
|
||||
See [Testing](docs/testing.md) for the full command list.
|
||||
|
||||
```bash
|
||||
npm run seed
|
||||
```
|
||||
## Configuration
|
||||
|
||||
6. Start the application.
|
||||
Common active variables:
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
- Backend: `DB_SERVER`, `DB_NAME`, `DB_USER`, `DB_PASSWORD`, `ACCESS_TOKEN_SECRET`, `REFRESH_TOKEN_SECRET`, `CONFIRMATION_TOKEN_SECRET`
|
||||
- Frontend: `API_BASE_URL`, `SESSION_SECRET`, `NODE_ENV`
|
||||
|
||||
## License
|
||||
See [Environment Variables](docs/environment-variables.md) for details.
|
||||
|
||||
The Biergarten App is licensed under the GNU General Public License v3.0. This means that
|
||||
anyone is free to use, modify, and distribute the code as long as they also distribute
|
||||
their modifications under the same license.
|
||||
## Contributing
|
||||
|
||||
I encourage anyone who uses this code for educational purposes to attribute me as the
|
||||
original author, and to provide a link to this repository.
|
||||
1. Fork the repository
|
||||
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
||||
3. Commit your changes (`git commit -m 'Add amazing feature'`)
|
||||
4. Push to the branch (`git push origin feature/amazing-feature`)
|
||||
5. Open a Pull Request
|
||||
|
||||
By contributing to this repository, you agree to license your contributions under the same
|
||||
license as the project.
|
||||
### Development Workflow
|
||||
|
||||
If you have any questions or concerns about the license, please feel free to submit an
|
||||
issue to this repository.
|
||||
1. Start development environment: `docker compose -f docker-compose.dev.yaml up -d`
|
||||
2. Make changes to code
|
||||
3. Run tests: `docker compose -f docker-compose.test.yaml up --abort-on-container-exit`
|
||||
4. Rebuild if needed: `docker compose -f docker-compose.dev.yaml up -d --build api.core`
|
||||
|
||||
I hope that this project will be useful to other developers and beer enthusiasts who are
|
||||
interested in learning about web development with Next.js, Prisma, Postgres, and other
|
||||
technologies.
|
||||
## Support
|
||||
|
||||
- **Documentation**: [docs/](docs/)
|
||||
- **Architecture**: See [Architecture Guide](docs/architecture.md)
|
||||
|
||||
77
docker-compose.db.yaml
Normal file
77
docker-compose.db.yaml
Normal file
@@ -0,0 +1,77 @@
|
||||
services:
|
||||
sqlserver:
|
||||
env_file: ".env.dev"
|
||||
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||
platform: linux/amd64
|
||||
container_name: dev-env-sqlserver
|
||||
environment:
|
||||
ACCEPT_EULA: "Y"
|
||||
SA_PASSWORD: "${DB_PASSWORD}"
|
||||
MSSQL_PID: "Express"
|
||||
ports:
|
||||
- "1433:1433"
|
||||
volumes:
|
||||
- sqlserverdata-dev:/var/opt/mssql
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1' || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
networks:
|
||||
- devnet
|
||||
database.migrations:
|
||||
env_file: ".env.dev"
|
||||
image: database.migrations
|
||||
container_name: dev-env-database-migrations
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core/Database
|
||||
dockerfile: Database.Migrations/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
CLEAR_DATABASE: "true"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
|
||||
database.seed:
|
||||
env_file: ".env.dev"
|
||||
image: database.seed
|
||||
container_name: dev-env-database-seed
|
||||
depends_on:
|
||||
database.migrations:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Database/Database.Seed/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
volumes:
|
||||
sqlserverdata-dev:
|
||||
driver: local
|
||||
nuget-cache-dev:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
devnet:
|
||||
driver: bridge
|
||||
125
docker-compose.dev.yaml
Normal file
125
docker-compose.dev.yaml
Normal file
@@ -0,0 +1,125 @@
|
||||
services:
|
||||
sqlserver:
|
||||
env_file: ".env.dev"
|
||||
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||
platform: linux/amd64
|
||||
container_name: dev-env-sqlserver
|
||||
environment:
|
||||
ACCEPT_EULA: "Y"
|
||||
SA_PASSWORD: "${DB_PASSWORD}"
|
||||
MSSQL_PID: "Express"
|
||||
ports:
|
||||
- "1433:1433"
|
||||
volumes:
|
||||
- sqlserverdata-dev:/var/opt/mssql
|
||||
healthcheck:
|
||||
test: [ "CMD-SHELL", "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1' || exit 1" ]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
networks:
|
||||
- devnet
|
||||
database.migrations:
|
||||
env_file: ".env.dev"
|
||||
image: database.migrations
|
||||
container_name: dev-env-database-migrations
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core/Database
|
||||
dockerfile: Database.Migrations/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
CLEAR_DATABASE: "true"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
|
||||
database.seed:
|
||||
env_file: ".env.dev"
|
||||
image: database.seed
|
||||
container_name: dev-env-database-seed
|
||||
depends_on:
|
||||
database.migrations:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Database/Database.Seed/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
|
||||
api.core:
|
||||
env_file: ".env.dev"
|
||||
image: api.core
|
||||
container_name: dev-env-api-core
|
||||
depends_on:
|
||||
database.seed:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: API/API.Core/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
ports:
|
||||
- "8080:8080"
|
||||
- "8081:8081"
|
||||
environment:
|
||||
ASPNETCORE_ENVIRONMENT: "Development"
|
||||
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
ACCESS_TOKEN_SECRET: "${ACCESS_TOKEN_SECRET}"
|
||||
REFRESH_TOKEN_SECRET: "${REFRESH_TOKEN_SECRET}"
|
||||
CONFIRMATION_TOKEN_SECRET: "${CONFIRMATION_TOKEN_SECRET}"
|
||||
WEBSITE_BASE_URL: "${WEBSITE_BASE_URL}"
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- devnet
|
||||
volumes:
|
||||
- nuget-cache-dev:/root/.nuget/packages
|
||||
|
||||
mailpit:
|
||||
image: axllent/mailpit:latest
|
||||
container_name: dev-env-mailpit
|
||||
ports:
|
||||
- "8025:8025" # Web UI
|
||||
- "1025:1025" # SMTP server
|
||||
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MP_SMTP_AUTH_ACCEPT_ANY: 1
|
||||
MP_SMTP_AUTH_ALLOW_INSECURE: 1
|
||||
networks:
|
||||
- devnet
|
||||
volumes:
|
||||
sqlserverdata-dev:
|
||||
driver: local
|
||||
nuget-cache-dev:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
devnet:
|
||||
driver: bridge
|
||||
91
docker-compose.min.yaml
Normal file
91
docker-compose.min.yaml
Normal file
@@ -0,0 +1,91 @@
|
||||
services:
|
||||
sqlserver:
|
||||
env_file: ".env.local"
|
||||
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||
platform: linux/amd64
|
||||
container_name: dev-env-sqlserver
|
||||
environment:
|
||||
ACCEPT_EULA: "Y"
|
||||
SA_PASSWORD: "${DB_PASSWORD}"
|
||||
MSSQL_PID: "Express"
|
||||
ports:
|
||||
- "1433:1433"
|
||||
volumes:
|
||||
- sqlserverdata-dev:/var/opt/mssql
|
||||
healthcheck:
|
||||
test: [ "CMD-SHELL", "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1' || exit 1" ]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
networks:
|
||||
- devnet
|
||||
database.migrations:
|
||||
env_file: ".env.local"
|
||||
image: database.migrations
|
||||
container_name: dev-env-database-migrations
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core/Database
|
||||
dockerfile: Database.Migrations/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
CLEAR_DATABASE: "true"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
|
||||
mailpit:
|
||||
image: axllent/mailpit:latest
|
||||
container_name: dev-env-mailpit
|
||||
ports:
|
||||
- "8025:8025" # Web UI
|
||||
- "1025:1025" # SMTP server
|
||||
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MP_SMTP_AUTH_ACCEPT_ANY: 1
|
||||
MP_SMTP_AUTH_ALLOW_INSECURE: 1
|
||||
networks:
|
||||
- devnet
|
||||
|
||||
database.seed:
|
||||
env_file: ".env.local"
|
||||
image: database.seed
|
||||
container_name: dev-env-database-seed
|
||||
depends_on:
|
||||
database.migrations:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Database/Database.Seed/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
restart: "no"
|
||||
networks:
|
||||
- devnet
|
||||
volumes:
|
||||
sqlserverdata-dev:
|
||||
driver: local
|
||||
nuget-cache-dev:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
devnet:
|
||||
driver: bridge
|
||||
87
docker-compose.prod.yaml
Normal file
87
docker-compose.prod.yaml
Normal file
@@ -0,0 +1,87 @@
|
||||
services:
|
||||
sqlserver:
|
||||
env_file: ".env.prod"
|
||||
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||
platform: linux/amd64
|
||||
container_name: prod-env-sqlserver
|
||||
environment:
|
||||
ACCEPT_EULA: "Y"
|
||||
SA_PASSWORD: "${DB_PASSWORD}"
|
||||
MSSQL_PID: "Express"
|
||||
volumes:
|
||||
- sqlserverdata-prod:/var/opt/mssql
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1' || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
networks:
|
||||
- prodnet
|
||||
|
||||
database.migrations:
|
||||
env_file: ".env.prod"
|
||||
image: database.migrations
|
||||
container_name: prod-env-database-migrations
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core/Database
|
||||
dockerfile: Database.Migrations/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
restart: "no"
|
||||
networks:
|
||||
- prodnet
|
||||
|
||||
api.core:
|
||||
env_file: ".env.prod"
|
||||
image: api.core
|
||||
container_name: prod-env-api-core
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: API/API.Core/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
ports:
|
||||
- "8080:8080"
|
||||
- "8081:8081"
|
||||
environment:
|
||||
ASPNETCORE_ENVIRONMENT: "Production"
|
||||
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
ACCESS_TOKEN_SECRET: "${ACCESS_TOKEN_SECRET}"
|
||||
REFRESH_TOKEN_SECRET: "${REFRESH_TOKEN_SECRET}"
|
||||
CONFIRMATION_TOKEN_SECRET: "${CONFIRMATION_TOKEN_SECRET}"
|
||||
WEBSITE_BASE_URL: "${WEBSITE_BASE_URL}"
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- prodnet
|
||||
volumes:
|
||||
- nuget-cache-prod:/root/.nuget/packages
|
||||
|
||||
volumes:
|
||||
sqlserverdata-prod:
|
||||
driver: local
|
||||
nuget-cache-prod:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
prodnet:
|
||||
driver: bridge
|
||||
144
docker-compose.test.yaml
Normal file
144
docker-compose.test.yaml
Normal file
@@ -0,0 +1,144 @@
|
||||
services:
|
||||
sqlserver:
|
||||
env_file: ".env.test"
|
||||
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||
platform: linux/amd64
|
||||
container_name: test-env-sqlserver
|
||||
environment:
|
||||
ACCEPT_EULA: "Y"
|
||||
SA_PASSWORD: "${DB_PASSWORD}"
|
||||
MSSQL_PID: "Express"
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
volumes:
|
||||
- sqlserverdata-test:/var/opt/mssql
|
||||
healthcheck:
|
||||
test: [ "CMD-SHELL", "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1' || exit 1" ]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
database.migrations:
|
||||
env_file: ".env.test"
|
||||
image: database.migrations
|
||||
container_name: test-env-database-migrations
|
||||
depends_on:
|
||||
sqlserver:
|
||||
condition: service_healthy
|
||||
build:
|
||||
context: ./src/Core/Database
|
||||
dockerfile: Database.Migrations/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
CLEAR_DATABASE: "true"
|
||||
restart: "no"
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
database.seed:
|
||||
env_file: ".env.test"
|
||||
image: database.seed
|
||||
container_name: test-env-database-seed
|
||||
depends_on:
|
||||
database.migrations:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Database/Database.Seed/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
APP_UID: 1000
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
restart: "no"
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
api.specs:
|
||||
env_file: ".env.test"
|
||||
image: api.specs
|
||||
container_name: test-env-api-specs
|
||||
depends_on:
|
||||
database.seed:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: API/API.Specs/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
DB_SERVER: "${DB_SERVER}"
|
||||
DB_NAME: "${DB_NAME}"
|
||||
DB_USER: "${DB_USER}"
|
||||
DB_PASSWORD: "${DB_PASSWORD}"
|
||||
ACCESS_TOKEN_SECRET: "${ACCESS_TOKEN_SECRET}"
|
||||
REFRESH_TOKEN_SECRET: "${REFRESH_TOKEN_SECRET}"
|
||||
CONFIRMATION_TOKEN_SECRET: "${CONFIRMATION_TOKEN_SECRET}"
|
||||
WEBSITE_BASE_URL: "${WEBSITE_BASE_URL}"
|
||||
volumes:
|
||||
- ./test-results:/app/test-results
|
||||
restart: "no"
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
repository.tests:
|
||||
env_file: ".env.test"
|
||||
image: repository.tests
|
||||
container_name: test-env-repository-tests
|
||||
depends_on:
|
||||
database.seed:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Infrastructure/Infrastructure.Repository.Tests/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
volumes:
|
||||
- ./test-results:/app/test-results
|
||||
restart: "no"
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
service.auth.tests:
|
||||
env_file: ".env.test"
|
||||
image: service.auth.tests
|
||||
container_name: test-env-service-auth-tests
|
||||
depends_on:
|
||||
database.seed:
|
||||
condition: service_completed_successfully
|
||||
build:
|
||||
context: ./src/Core
|
||||
dockerfile: Service/Service.Auth.Tests/Dockerfile
|
||||
args:
|
||||
BUILD_CONFIGURATION: Release
|
||||
environment:
|
||||
DOTNET_RUNNING_IN_CONTAINER: "true"
|
||||
volumes:
|
||||
- ./test-results:/app/test-results
|
||||
restart: "no"
|
||||
networks:
|
||||
- testnet
|
||||
|
||||
volumes:
|
||||
sqlserverdata-test:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
testnet:
|
||||
driver: bridge
|
||||
427
docs/architecture.md
Normal file
427
docs/architecture.md
Normal file
@@ -0,0 +1,427 @@
|
||||
# Architecture
|
||||
|
||||
This document describes the active architecture of The Biergarten App.
|
||||
|
||||
## High-Level Overview
|
||||
|
||||
The Biergarten App is a monorepo with a clear split between the backend and the active
|
||||
website:
|
||||
|
||||
- **Backend**: .NET 10 Web API with SQL Server and a layered architecture
|
||||
- **Frontend**: React 19 + React Router 7 website in `src/Website`
|
||||
- **Architecture Style**: Layered backend plus server-rendered React frontend
|
||||
|
||||
The legacy Next.js frontend has been retained in `src/Website-v1` for reference only and is
|
||||
documented in [archive/legacy-website-v1.md](archive/legacy-website-v1.md).
|
||||
|
||||
## Diagrams
|
||||
|
||||
For visual representations, see:
|
||||
|
||||
- [architecture.svg](diagrams-out/architecture.svg) - Layered architecture diagram
|
||||
- [deployment.svg](diagrams-out/deployment.svg) - Docker deployment diagram
|
||||
- [authentication-flow.svg](diagrams-out/authentication-flow.svg) - Authentication workflow
|
||||
- [database-schema.svg](diagrams-out/database-schema.svg) - Database relationships
|
||||
|
||||
## Backend Architecture
|
||||
|
||||
### Layered Architecture Pattern
|
||||
|
||||
The backend follows a strict layered architecture:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────┐
|
||||
│ API Layer (Controllers) │
|
||||
│ - HTTP Endpoints │
|
||||
│ - Request/Response mapping │
|
||||
│ - Swagger/OpenAPI │
|
||||
└─────────────────────────────────────┘
|
||||
↓
|
||||
┌─────────────────────────────────────┐
|
||||
│ Service Layer (Business Logic) │
|
||||
│ - Authentication logic │
|
||||
│ - User management │
|
||||
│ - Validation & orchestration │
|
||||
└─────────────────────────────────────┘
|
||||
↓
|
||||
┌─────────────────────────────────────┐
|
||||
│ Infrastructure Layer (Tools) │
|
||||
│ - JWT token generation │
|
||||
│ - Password hashing (Argon2id) │
|
||||
│ - Email services │
|
||||
│ - Repository implementations │
|
||||
└─────────────────────────────────────┘
|
||||
↓
|
||||
┌─────────────────────────────────────┐
|
||||
│ Domain Layer (Entities) │
|
||||
│ - UserAccount, UserCredential │
|
||||
│ - Pure POCO classes │
|
||||
│ - No external dependencies │
|
||||
└─────────────────────────────────────┘
|
||||
↓
|
||||
┌─────────────────────────────────────┐
|
||||
│ Database (SQL Server) │
|
||||
│ - Stored procedures │
|
||||
│ - Tables & constraints │
|
||||
└─────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Layer Responsibilities
|
||||
|
||||
#### API Layer (`API.Core`)
|
||||
|
||||
**Purpose**: HTTP interface and request handling
|
||||
|
||||
**Components**:
|
||||
|
||||
- Controllers (`AuthController`, `UserController`)
|
||||
- Middleware for error handling
|
||||
- Swagger/OpenAPI documentation
|
||||
- Health check endpoints
|
||||
|
||||
**Dependencies**:
|
||||
|
||||
- Service layer
|
||||
- ASP.NET Core framework
|
||||
|
||||
**Rules**:
|
||||
|
||||
- No business logic
|
||||
- Only request/response transformation
|
||||
- Delegates all work to Service layer
|
||||
|
||||
#### Service Layer (`Service.Auth`, `Service.UserManagement`)
|
||||
|
||||
**Purpose**: Business logic and orchestration
|
||||
|
||||
**Components**:
|
||||
|
||||
- Authentication services (login, registration)
|
||||
- User management services
|
||||
- Business rule validation
|
||||
- Transaction coordination
|
||||
|
||||
**Dependencies**:
|
||||
|
||||
- Infrastructure layer (repositories, JWT, password hashing)
|
||||
- Domain entities
|
||||
|
||||
**Rules**:
|
||||
|
||||
- Contains all business logic
|
||||
- Coordinates multiple infrastructure components
|
||||
- No direct database access (uses repositories)
|
||||
- Returns domain models, not DTOs
|
||||
|
||||
#### Infrastructure Layer
|
||||
|
||||
**Purpose**: Technical capabilities and external integrations
|
||||
|
||||
**Components**:
|
||||
|
||||
- **Infrastructure.Repository**: Data access via stored procedures
|
||||
- **Infrastructure.Jwt**: JWT token generation and validation
|
||||
- **Infrastructure.PasswordHashing**: Argon2id password hashing
|
||||
- **Infrastructure.Email**: Email sending capabilities
|
||||
- **Infrastructure.Email.Templates**: Email template rendering
|
||||
|
||||
**Dependencies**:
|
||||
|
||||
- Domain entities
|
||||
- External libraries (ADO.NET, JWT, Argon2, etc.)
|
||||
|
||||
**Rules**:
|
||||
|
||||
- Implements technical concerns
|
||||
- No business logic
|
||||
- Reusable across services
|
||||
|
||||
#### Domain Layer (`Domain.Entities`)
|
||||
|
||||
**Purpose**: Core business entities and models
|
||||
|
||||
**Components**:
|
||||
|
||||
- `UserAccount` - User profile data
|
||||
- `UserCredential` - Authentication credentials
|
||||
- `UserVerification` - Account verification state
|
||||
|
||||
**Dependencies**:
|
||||
|
||||
- None (pure domain)
|
||||
|
||||
**Rules**:
|
||||
|
||||
- Plain Old CLR Objects (POCOs)
|
||||
- No framework dependencies
|
||||
- No infrastructure references
|
||||
- Represents business concepts
|
||||
|
||||
### Design Patterns
|
||||
|
||||
#### Repository Pattern
|
||||
|
||||
**Purpose**: Abstract database access behind interfaces
|
||||
|
||||
**Implementation**:
|
||||
|
||||
- `IAuthRepository` - Authentication queries
|
||||
- `IUserAccountRepository` - User account queries
|
||||
- `DefaultSqlConnectionFactory` - Connection management
|
||||
|
||||
**Benefits**:
|
||||
|
||||
- Testable (easy to mock)
|
||||
- SQL-first approach (stored procedures)
|
||||
- Centralized data access logic
|
||||
|
||||
**Example**:
|
||||
|
||||
```csharp
|
||||
public interface IAuthRepository
|
||||
{
|
||||
Task<UserCredential> GetUserCredentialAsync(string username);
|
||||
Task<int> CreateUserAccountAsync(UserAccount user, UserCredential credential);
|
||||
}
|
||||
```
|
||||
|
||||
#### Dependency Injection
|
||||
|
||||
**Purpose**: Loose coupling and testability
|
||||
|
||||
**Configuration**: `Program.cs` registers all services
|
||||
|
||||
**Lifetimes**:
|
||||
|
||||
- Scoped: Repositories, Services (per request)
|
||||
- Singleton: Connection factories, JWT configuration
|
||||
- Transient: Utilities, helpers
|
||||
|
||||
#### SQL-First Approach
|
||||
|
||||
**Purpose**: Leverage database capabilities
|
||||
|
||||
**Strategy**:
|
||||
|
||||
- All queries via stored procedures
|
||||
- No ORM (Entity Framework not used)
|
||||
- Database handles complex logic
|
||||
- Application focuses on orchestration
|
||||
|
||||
**Stored Procedure Examples**:
|
||||
|
||||
- `USP_RegisterUser` - User registration
|
||||
- `USP_GetUserAccountByUsername` - User lookup
|
||||
- `USP_RotateUserCredential` - Password update
|
||||
|
||||
## Frontend Architecture
|
||||
|
||||
### Active Website (`src/Website`)
|
||||
|
||||
The current website is a React Router 7 application with server-side rendering enabled.
|
||||
|
||||
```text
|
||||
src/Website/
|
||||
├── app/
|
||||
│ ├── components/ Shared UI such as Navbar, FormField, SubmitButton, ToastProvider
|
||||
│ ├── lib/ Auth helpers, schemas, and theme metadata
|
||||
│ ├── routes/ Route modules for home, login, register, dashboard, confirm, theme
|
||||
│ ├── root.tsx App shell and global providers
|
||||
│ └── app.css Theme tokens and global styling
|
||||
├── .storybook/ Storybook config and preview setup
|
||||
├── stories/ Storybook stories for shared UI and themes
|
||||
├── tests/playwright/ Storybook Playwright coverage
|
||||
└── package.json Frontend scripts and dependencies
|
||||
```
|
||||
|
||||
### Frontend Responsibilities
|
||||
|
||||
- Render the auth demo and theme guide routes
|
||||
- Manage cookie-backed website session state
|
||||
- Call the .NET API for login, registration, token refresh, and confirmation
|
||||
- Provide shared UI building blocks for forms, navigation, themes, and toasts
|
||||
- Supply Storybook documentation and browser-based component verification
|
||||
|
||||
### Theme System
|
||||
|
||||
The active website uses semantic DaisyUI theme tokens backed by four Biergarten themes:
|
||||
|
||||
- Biergarten Lager
|
||||
- Biergarten Stout
|
||||
- Biergarten Cassis
|
||||
- Biergarten Weizen
|
||||
|
||||
All component styling should prefer semantic tokens such as `primary`, `success`,
|
||||
`surface`, and `highlight` instead of hard-coded color values.
|
||||
|
||||
### Legacy Frontend
|
||||
|
||||
The previous Next.js frontend has been archived at `src/Website-v1`. Active product and
|
||||
engineering documentation should point to `src/Website`, while legacy notes live in
|
||||
[archive/legacy-website-v1.md](archive/legacy-website-v1.md).
|
||||
|
||||
## Security Architecture
|
||||
|
||||
### Authentication Flow
|
||||
|
||||
1. **Registration**:
|
||||
- User submits credentials
|
||||
- Password hashed with Argon2id
|
||||
- User account created
|
||||
- JWT token issued
|
||||
|
||||
2. **Login**:
|
||||
- User submits credentials
|
||||
- Password verified against hash
|
||||
- JWT token issued
|
||||
- Token stored client-side
|
||||
|
||||
3. **API Requests**:
|
||||
- Client sends JWT in Authorization header
|
||||
- Middleware validates token
|
||||
- Request proceeds if valid
|
||||
|
||||
### Password Security
|
||||
|
||||
**Algorithm**: Argon2id
|
||||
|
||||
- Memory: 64MB
|
||||
- Iterations: 4
|
||||
- Parallelism: CPU core count
|
||||
- Salt: 128-bit (16 bytes)
|
||||
- Hash: 256-bit (32 bytes)
|
||||
|
||||
### JWT Tokens
|
||||
|
||||
**Algorithm**: HS256 (HMAC-SHA256)
|
||||
|
||||
**Claims**:
|
||||
|
||||
- `sub` - User ID
|
||||
- `unique_name` - Username
|
||||
- `jti` - Unique token ID
|
||||
- `iat` - Issued at timestamp
|
||||
- `exp` - Expiration timestamp
|
||||
|
||||
**Configuration** (appsettings.json):
|
||||
|
||||
```json
|
||||
{
|
||||
"Jwt": {
|
||||
"ExpirationMinutes": 60,
|
||||
"Issuer": "biergarten-api",
|
||||
"Audience": "biergarten-users"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Database Architecture
|
||||
|
||||
### SQL-First Philosophy
|
||||
|
||||
**Principles**:
|
||||
|
||||
1. Database is source of truth
|
||||
2. Complex queries in stored procedures
|
||||
3. Database handles referential integrity
|
||||
4. Application orchestrates, database executes
|
||||
|
||||
**Benefits**:
|
||||
|
||||
- Performance optimization via execution plans
|
||||
- Centralized query logic
|
||||
- Version-controlled schema (migrations)
|
||||
- Easier query profiling and tuning
|
||||
|
||||
### Migration Strategy
|
||||
|
||||
**Tool**: DbUp
|
||||
|
||||
**Process**:
|
||||
|
||||
1. Write SQL migration script
|
||||
2. Embed in `Database.Migrations` project
|
||||
3. Run migrations on startup
|
||||
4. Idempotent and versioned
|
||||
|
||||
**Migration Files**:
|
||||
|
||||
```
|
||||
scripts/
|
||||
├── 001-CreateUserTables.sql
|
||||
├── 002-CreateLocationTables.sql
|
||||
├── 003-CreateBreweryTables.sql
|
||||
└── ...
|
||||
```
|
||||
|
||||
### Data Seeding
|
||||
|
||||
**Purpose**: Populate development/test databases
|
||||
|
||||
**Implementation**: `Database.Seed` project
|
||||
|
||||
**Seed Data**:
|
||||
|
||||
- Countries, states/provinces, cities
|
||||
- Test user accounts
|
||||
- Sample breweries (future)
|
||||
|
||||
## Deployment Architecture
|
||||
|
||||
### Docker Containerization
|
||||
|
||||
**Container Structure**:
|
||||
|
||||
- `sqlserver` - SQL Server 2022
|
||||
- `database.migrations` - Schema migration runner
|
||||
- `database.seed` - Data seeder
|
||||
- `api.core` - ASP.NET Core Web API
|
||||
|
||||
**Environments**:
|
||||
|
||||
- Development (`docker-compose.dev.yaml`)
|
||||
- Testing (`docker-compose.test.yaml`)
|
||||
- Production (`docker-compose.prod.yaml`)
|
||||
|
||||
For details, see [Docker Guide](docker.md).
|
||||
|
||||
### Health Checks
|
||||
|
||||
**SQL Server**: Validates database connectivity **API**: Checks service health and
|
||||
dependencies
|
||||
|
||||
**Configuration**:
|
||||
|
||||
```yaml
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sqlcmd health check"]
|
||||
interval: 10s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
```
|
||||
|
||||
## Testing Architecture
|
||||
|
||||
### Test Pyramid
|
||||
|
||||
```
|
||||
┌──────────────┐
|
||||
│ Integration │ ← API.Specs (Reqnroll)
|
||||
│ Tests │
|
||||
├──────────────┤
|
||||
│ Unit Tests │ ← Service.Auth.Tests
|
||||
│ (Service) │ Repository.Tests
|
||||
├──────────────┤
|
||||
│ Unit Tests │
|
||||
│ (Repository) │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
**Strategy**:
|
||||
|
||||
- Many unit tests (fast, isolated)
|
||||
- Fewer integration tests (slower, e2e)
|
||||
- Mock external dependencies
|
||||
- Test database for integration tests
|
||||
|
||||
For details, see [Testing Guide](testing.md).
|
||||
56
docs/archive/legacy-website-v1.md
Normal file
56
docs/archive/legacy-website-v1.md
Normal file
@@ -0,0 +1,56 @@
|
||||
# Legacy Website Archive (`src/Website-v1`)
|
||||
|
||||
This archive captures high-level notes about the previous Biergarten frontend so active
|
||||
project documentation can focus on the current website in `src/Website`.
|
||||
|
||||
## Status
|
||||
|
||||
- `src/Website-v1` is retained for historical reference only
|
||||
- It is not the active frontend used by current setup, docs, or testing guidance
|
||||
- New product and engineering work should target `src/Website`
|
||||
|
||||
## Legacy Stack Summary
|
||||
|
||||
The archived frontend used a different application model from the current website:
|
||||
|
||||
- Next.js 14
|
||||
- React 18
|
||||
- Prisma
|
||||
- Postgres / Neon-hosted database workflows
|
||||
- Next.js API routes and server-side controllers
|
||||
- Additional third-party integrations such as Cloudinary, Mapbox, and SparkPost
|
||||
|
||||
## Why It Was Archived
|
||||
|
||||
The active website moved to a React Router-based frontend that talks directly to the .NET
|
||||
API. As part of that shift, the main docs were updated to describe:
|
||||
|
||||
- `src/Website` as the active frontend
|
||||
- React Router route modules and server rendering
|
||||
- Storybook-based component documentation and tests
|
||||
- Current frontend runtime variables: `API_BASE_URL`, `SESSION_SECRET`, and `NODE_ENV`
|
||||
|
||||
## Legacy Documentation Topics Moved Out of Active Docs
|
||||
|
||||
The following categories were removed from active documentation and intentionally archived:
|
||||
|
||||
- Next.js application structure guidance
|
||||
- Prisma and Postgres frontend setup
|
||||
- Legacy frontend environment variables
|
||||
- External service setup that only applied to `src/Website-v1`
|
||||
- Old frontend local setup instructions
|
||||
|
||||
## When To Use This Archive
|
||||
|
||||
Use this file only if you need to:
|
||||
|
||||
- inspect the historical frontend implementation
|
||||
- compare old flows against the current website
|
||||
- migrate or recover legacy logic from `src/Website-v1`
|
||||
|
||||
For all active work, use:
|
||||
|
||||
- [Getting Started](../getting-started.md)
|
||||
- [Architecture](../architecture.md)
|
||||
- [Environment Variables](../environment-variables.md)
|
||||
- [Testing](../testing.md)
|
||||
1
docs/diagrams-out/architecture.svg
Normal file
1
docs/diagrams-out/architecture.svg
Normal file
File diff suppressed because one or more lines are too long
1
docs/diagrams-out/authentication-flow.svg
Normal file
1
docs/diagrams-out/authentication-flow.svg
Normal file
File diff suppressed because one or more lines are too long
1
docs/diagrams-out/database-schema.svg
Normal file
1
docs/diagrams-out/database-schema.svg
Normal file
File diff suppressed because one or more lines are too long
1
docs/diagrams-out/deployment.svg
Normal file
1
docs/diagrams-out/deployment.svg
Normal file
File diff suppressed because one or more lines are too long
75
docs/diagrams-src/architecture.puml
Normal file
75
docs/diagrams-src/architecture.puml
Normal file
@@ -0,0 +1,75 @@
|
||||
@startuml architecture
|
||||
!theme plain
|
||||
skinparam backgroundColor #FFFFFF
|
||||
skinparam defaultFontName Arial
|
||||
skinparam packageStyle rectangle
|
||||
|
||||
title The Biergarten App - Layered Architecture
|
||||
|
||||
package "API Layer" #E3F2FD {
|
||||
[API.Core\nASP.NET Core Web API] as API
|
||||
note right of API
|
||||
- Controllers (Auth, User)
|
||||
- Swagger/OpenAPI
|
||||
- Middleware
|
||||
- Health Checks
|
||||
end note
|
||||
}
|
||||
|
||||
package "Service Layer" #F3E5F5 {
|
||||
[Service.Auth] as AuthSvc
|
||||
[Service.UserManagement] as UserSvc
|
||||
note right of AuthSvc
|
||||
- Business Logic
|
||||
- Validation
|
||||
- Orchestration
|
||||
end note
|
||||
}
|
||||
|
||||
package "Infrastructure Layer" #FFF3E0 {
|
||||
[Infrastructure.Repository] as Repo
|
||||
[Infrastructure.Jwt] as JWT
|
||||
[Infrastructure.PasswordHashing] as PwdHash
|
||||
[Infrastructure.Email] as Email
|
||||
}
|
||||
|
||||
package "Domain Layer" #E8F5E9 {
|
||||
[Domain.Entities] as Domain
|
||||
note right of Domain
|
||||
- UserAccount
|
||||
- UserCredential
|
||||
- UserVerification
|
||||
end note
|
||||
}
|
||||
|
||||
database "SQL Server" {
|
||||
[Stored Procedures] as SP
|
||||
[Tables] as Tables
|
||||
}
|
||||
|
||||
' Relationships
|
||||
API --> AuthSvc
|
||||
API --> UserSvc
|
||||
|
||||
AuthSvc --> Repo
|
||||
AuthSvc --> JWT
|
||||
AuthSvc --> PwdHash
|
||||
AuthSvc --> Email
|
||||
|
||||
UserSvc --> Repo
|
||||
|
||||
Repo --> SP
|
||||
Repo --> Domain
|
||||
SP --> Tables
|
||||
|
||||
AuthSvc --> Domain
|
||||
UserSvc --> Domain
|
||||
|
||||
' Notes
|
||||
note left of Repo
|
||||
SQL-first approach
|
||||
All queries via
|
||||
stored procedures
|
||||
end note
|
||||
|
||||
@enduml
|
||||
298
docs/diagrams-src/authentication-flow.puml
Normal file
298
docs/diagrams-src/authentication-flow.puml
Normal file
@@ -0,0 +1,298 @@
|
||||
@startuml authentication-flow
|
||||
!theme plain
|
||||
skinparam backgroundColor #FFFFFF
|
||||
skinparam defaultFontName Arial
|
||||
skinparam sequenceMessageAlign center
|
||||
skinparam maxMessageSize 200
|
||||
|
||||
title User Authentication Flow - Expanded
|
||||
|
||||
actor User
|
||||
participant "API\nController" as API
|
||||
box "Service Layer" #LightBlue
|
||||
participant "RegisterService" as RegSvc
|
||||
participant "LoginService" as LoginSvc
|
||||
participant "TokenService" as TokenSvc
|
||||
participant "EmailService" as EmailSvc
|
||||
end box
|
||||
box "Infrastructure Layer" #LightGreen
|
||||
participant "Argon2\nInfrastructure" as Argon2
|
||||
participant "JWT\nInfrastructure" as JWT
|
||||
participant "Email\nProvider" as SMTP
|
||||
participant "Template\nProvider" as Template
|
||||
end box
|
||||
box "Repository Layer" #LightYellow
|
||||
participant "AuthRepository" as AuthRepo
|
||||
participant "UserAccount\nRepository" as UserRepo
|
||||
end box
|
||||
database "SQL Server\nStored Procedures" as DB
|
||||
|
||||
== Registration Flow ==
|
||||
|
||||
User -> API: POST /api/auth/register\n{username, firstName, lastName,\nemail, dateOfBirth, password}
|
||||
activate API
|
||||
|
||||
note right of API
|
||||
FluentValidation runs:
|
||||
- Username: 3-64 chars, alphanumeric + [._-]
|
||||
- Email: valid format, max 128 chars
|
||||
- Password: min 8 chars, uppercase,\n lowercase, number, special char
|
||||
- DateOfBirth: must be 19+ years old
|
||||
end note
|
||||
|
||||
API -> API: Validate request\n(FluentValidation)
|
||||
|
||||
alt Validation fails
|
||||
API -> User: 400 Bad Request\n{errors: {...}}
|
||||
else Validation succeeds
|
||||
API -> RegSvc: RegisterAsync(userAccount, password)
|
||||
activate RegSvc
|
||||
|
||||
RegSvc -> AuthRepo: GetUserByUsernameAsync(username)
|
||||
activate AuthRepo
|
||||
AuthRepo -> DB: EXEC usp_GetUserAccountByUsername
|
||||
activate DB
|
||||
DB --> AuthRepo: null (user doesn't exist)
|
||||
deactivate DB
|
||||
deactivate AuthRepo
|
||||
|
||||
RegSvc -> AuthRepo: GetUserByEmailAsync(email)
|
||||
activate AuthRepo
|
||||
AuthRepo -> DB: EXEC usp_GetUserAccountByEmail
|
||||
activate DB
|
||||
DB --> AuthRepo: null (email doesn't exist)
|
||||
deactivate DB
|
||||
deactivate AuthRepo
|
||||
|
||||
alt User/Email already exists
|
||||
RegSvc -> API: throw ConflictException
|
||||
API -> User: 409 Conflict\n"Username or email already exists"
|
||||
else User doesn't exist
|
||||
|
||||
RegSvc -> Argon2: Hash(password)
|
||||
activate Argon2
|
||||
note right of Argon2
|
||||
Argon2id parameters:
|
||||
- Salt: 16 bytes (128-bit)
|
||||
- Memory: 64MB
|
||||
- Iterations: 4
|
||||
- Parallelism: CPU count
|
||||
- Hash output: 32 bytes
|
||||
end note
|
||||
Argon2 -> Argon2: Generate random salt\n(16 bytes)
|
||||
Argon2 -> Argon2: Hash password with\nArgon2id algorithm
|
||||
Argon2 --> RegSvc: "base64(salt):base64(hash)"
|
||||
deactivate Argon2
|
||||
|
||||
RegSvc -> AuthRepo: RegisterUserAsync(\n username, firstName, lastName,\n email, dateOfBirth, hash)
|
||||
activate AuthRepo
|
||||
|
||||
AuthRepo -> DB: EXEC USP_RegisterUser
|
||||
activate DB
|
||||
note right of DB
|
||||
Transaction begins:
|
||||
1. INSERT UserAccount
|
||||
2. INSERT UserCredential
|
||||
(with hashed password)
|
||||
Transaction commits
|
||||
end note
|
||||
DB -> DB: BEGIN TRANSACTION
|
||||
DB -> DB: INSERT INTO UserAccount\n(Username, FirstName, LastName,\nEmail, DateOfBirth)
|
||||
DB -> DB: OUTPUT INSERTED.UserAccountID
|
||||
DB -> DB: INSERT INTO UserCredential\n(UserAccountId, Hash)
|
||||
DB -> DB: COMMIT TRANSACTION
|
||||
DB --> AuthRepo: UserAccountId (GUID)
|
||||
deactivate DB
|
||||
|
||||
AuthRepo --> RegSvc: UserAccount entity
|
||||
deactivate AuthRepo
|
||||
|
||||
RegSvc -> TokenSvc: GenerateAccessToken(userAccount)
|
||||
activate TokenSvc
|
||||
TokenSvc -> JWT: GenerateJwt(userId, username, expiry)
|
||||
activate JWT
|
||||
note right of JWT
|
||||
JWT Configuration:
|
||||
- Algorithm: HS256
|
||||
- Expires: 1 hour
|
||||
- Claims:
|
||||
* sub: userId
|
||||
* unique_name: username
|
||||
* jti: unique token ID
|
||||
end note
|
||||
JWT -> JWT: Create JWT with claims
|
||||
JWT -> JWT: Sign with secret key
|
||||
JWT --> TokenSvc: Access Token
|
||||
deactivate JWT
|
||||
TokenSvc --> RegSvc: Access Token
|
||||
deactivate TokenSvc
|
||||
|
||||
RegSvc -> TokenSvc: GenerateRefreshToken(userAccount)
|
||||
activate TokenSvc
|
||||
TokenSvc -> JWT: GenerateJwt(userId, username, expiry)
|
||||
activate JWT
|
||||
note right of JWT
|
||||
Refresh Token:
|
||||
- Expires: 21 days
|
||||
- Same structure as access token
|
||||
end note
|
||||
JWT --> TokenSvc: Refresh Token
|
||||
deactivate JWT
|
||||
TokenSvc --> RegSvc: Refresh Token
|
||||
deactivate TokenSvc
|
||||
|
||||
RegSvc -> EmailSvc: SendRegistrationEmailAsync(\n createdUser, confirmationToken)
|
||||
activate EmailSvc
|
||||
|
||||
EmailSvc -> Template: RenderUserRegisteredEmailAsync(\n firstName, confirmationLink)
|
||||
activate Template
|
||||
note right of Template
|
||||
Razor Component:
|
||||
- Header with branding
|
||||
- Welcome message
|
||||
- Confirmation button
|
||||
- Footer
|
||||
end note
|
||||
Template -> Template: Render Razor component\nto HTML
|
||||
Template --> EmailSvc: HTML email content
|
||||
deactivate Template
|
||||
|
||||
EmailSvc -> SMTP: SendAsync(email, subject, body)
|
||||
activate SMTP
|
||||
note right of SMTP
|
||||
SMTP Configuration:
|
||||
- Host: from env (SMTP_HOST)
|
||||
- Port: from env (SMTP_PORT)
|
||||
- TLS: StartTLS
|
||||
- Auth: username/password
|
||||
end note
|
||||
SMTP -> SMTP: Create MIME message
|
||||
SMTP -> SMTP: Connect to SMTP server
|
||||
SMTP -> SMTP: Authenticate
|
||||
SMTP -> SMTP: Send email
|
||||
SMTP -> SMTP: Disconnect
|
||||
SMTP --> EmailSvc: Success / Failure
|
||||
deactivate SMTP
|
||||
|
||||
alt Email sent successfully
|
||||
EmailSvc --> RegSvc: emailSent = true
|
||||
else Email failed
|
||||
EmailSvc --> RegSvc: emailSent = false\n(error suppressed)
|
||||
end
|
||||
deactivate EmailSvc
|
||||
|
||||
RegSvc --> API: RegisterServiceReturn(\n userAccount, accessToken,\n refreshToken, emailSent)
|
||||
deactivate RegSvc
|
||||
|
||||
API -> API: Create response body
|
||||
API -> User: 201 Created\n{\n message: "User registered successfully",\n payload: {\n userAccountId, username,\n accessToken, refreshToken,\n confirmationEmailSent\n }\n}
|
||||
end
|
||||
end
|
||||
deactivate API
|
||||
|
||||
== Login Flow ==
|
||||
|
||||
User -> API: POST /api/auth/login\n{username, password}
|
||||
activate API
|
||||
|
||||
API -> API: Validate request\n(FluentValidation)
|
||||
|
||||
alt Validation fails
|
||||
API -> User: 400 Bad Request\n{errors: {...}}
|
||||
else Validation succeeds
|
||||
|
||||
API -> LoginSvc: LoginAsync(username, password)
|
||||
activate LoginSvc
|
||||
|
||||
LoginSvc -> AuthRepo: GetUserByUsernameAsync(username)
|
||||
activate AuthRepo
|
||||
AuthRepo -> DB: EXEC usp_GetUserAccountByUsername
|
||||
activate DB
|
||||
DB -> DB: SELECT FROM UserAccount\nWHERE Username = @Username
|
||||
DB --> AuthRepo: UserAccount entity
|
||||
deactivate DB
|
||||
deactivate AuthRepo
|
||||
|
||||
alt User not found
|
||||
LoginSvc -> API: throw UnauthorizedException\n"Invalid username or password"
|
||||
API -> User: 401 Unauthorized
|
||||
else User found
|
||||
|
||||
LoginSvc -> AuthRepo: GetActiveCredentialByUserAccountIdAsync(userId)
|
||||
activate AuthRepo
|
||||
AuthRepo -> DB: EXEC USP_GetActiveUserCredentialByUserAccountId
|
||||
activate DB
|
||||
note right of DB
|
||||
SELECT FROM UserCredential
|
||||
WHERE UserAccountId = @UserAccountId
|
||||
AND IsRevoked = 0
|
||||
end note
|
||||
DB --> AuthRepo: UserCredential entity
|
||||
deactivate DB
|
||||
deactivate AuthRepo
|
||||
|
||||
alt No active credential
|
||||
LoginSvc -> API: throw UnauthorizedException
|
||||
API -> User: 401 Unauthorized
|
||||
else Active credential found
|
||||
|
||||
LoginSvc -> Argon2: Verify(password, storedHash)
|
||||
activate Argon2
|
||||
note right of Argon2
|
||||
1. Split stored hash: "salt:hash"
|
||||
2. Extract salt
|
||||
3. Hash provided password\n with same salt
|
||||
4. Constant-time comparison
|
||||
end note
|
||||
Argon2 -> Argon2: Parse salt from stored hash
|
||||
Argon2 -> Argon2: Hash provided password\nwith extracted salt
|
||||
Argon2 -> Argon2: FixedTimeEquals(\n computed, stored)
|
||||
Argon2 --> LoginSvc: true/false
|
||||
deactivate Argon2
|
||||
|
||||
alt Password invalid
|
||||
LoginSvc -> API: throw UnauthorizedException
|
||||
API -> User: 401 Unauthorized
|
||||
else Password valid
|
||||
|
||||
LoginSvc -> TokenSvc: GenerateAccessToken(user)
|
||||
activate TokenSvc
|
||||
TokenSvc -> JWT: GenerateJwt(...)
|
||||
activate JWT
|
||||
JWT --> TokenSvc: Access Token
|
||||
deactivate JWT
|
||||
TokenSvc --> LoginSvc: Access Token
|
||||
deactivate TokenSvc
|
||||
|
||||
LoginSvc -> TokenSvc: GenerateRefreshToken(user)
|
||||
activate TokenSvc
|
||||
TokenSvc -> JWT: GenerateJwt(...)
|
||||
activate JWT
|
||||
JWT --> TokenSvc: Refresh Token
|
||||
deactivate JWT
|
||||
TokenSvc --> LoginSvc: Refresh Token
|
||||
deactivate TokenSvc
|
||||
|
||||
LoginSvc --> API: LoginServiceReturn(\n userAccount, accessToken,\n refreshToken)
|
||||
deactivate LoginSvc
|
||||
|
||||
API -> User: 200 OK\n{\n message: "Logged in successfully",\n payload: {\n userAccountId, username,\n accessToken, refreshToken\n }\n}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
deactivate API
|
||||
|
||||
== Error Handling (Global Exception Filter) ==
|
||||
|
||||
note over API
|
||||
GlobalExceptionFilter catches:
|
||||
- ValidationException → 400 Bad Request
|
||||
- ConflictException → 409 Conflict
|
||||
- NotFoundException → 404 Not Found
|
||||
- UnauthorizedException → 401 Unauthorized
|
||||
- ForbiddenException → 403 Forbidden
|
||||
- All others → 500 Internal Server Error
|
||||
end note
|
||||
|
||||
@enduml
|
||||
104
docs/diagrams-src/database-schema.puml
Normal file
104
docs/diagrams-src/database-schema.puml
Normal file
@@ -0,0 +1,104 @@
|
||||
@startuml database-schema
|
||||
!theme plain
|
||||
skinparam backgroundColor #FFFFFF
|
||||
skinparam defaultFontName Arial
|
||||
skinparam linetype ortho
|
||||
|
||||
title Key Database Schema - User & Authentication
|
||||
|
||||
entity "UserAccount" as User {
|
||||
* UserAccountId: INT <<PK>>
|
||||
--
|
||||
* Username: NVARCHAR(30) <<UNIQUE>>
|
||||
* Email: NVARCHAR(255) <<UNIQUE>>
|
||||
* FirstName: NVARCHAR(50)
|
||||
* LastName: NVARCHAR(50)
|
||||
Bio: NVARCHAR(500)
|
||||
CreatedAt: DATETIME2
|
||||
UpdatedAt: DATETIME2
|
||||
LastLoginAt: DATETIME2
|
||||
}
|
||||
|
||||
entity "UserCredential" as Cred {
|
||||
* UserCredentialId: INT <<PK>>
|
||||
--
|
||||
* UserAccountId: INT <<FK>>
|
||||
* PasswordHash: VARBINARY(32)
|
||||
* PasswordSalt: VARBINARY(16)
|
||||
CredentialRotatedAt: DATETIME2
|
||||
CredentialExpiresAt: DATETIME2
|
||||
CredentialRevokedAt: DATETIME2
|
||||
* IsActive: BIT
|
||||
CreatedAt: DATETIME2
|
||||
}
|
||||
|
||||
entity "UserVerification" as Verify {
|
||||
* UserVerificationId: INT <<PK>>
|
||||
--
|
||||
* UserAccountId: INT <<FK>>
|
||||
* IsVerified: BIT
|
||||
VerifiedAt: DATETIME2
|
||||
VerificationToken: NVARCHAR(255)
|
||||
TokenExpiresAt: DATETIME2
|
||||
}
|
||||
|
||||
entity "UserAvatar" as Avatar {
|
||||
* UserAvatarId: INT <<PK>>
|
||||
--
|
||||
* UserAccountId: INT <<FK>>
|
||||
PhotoId: INT <<FK>>
|
||||
* IsActive: BIT
|
||||
CreatedAt: DATETIME2
|
||||
}
|
||||
|
||||
entity "UserFollow" as Follow {
|
||||
* UserFollowId: INT <<PK>>
|
||||
--
|
||||
* FollowerUserId: INT <<FK>>
|
||||
* FollowedUserId: INT <<FK>>
|
||||
CreatedAt: DATETIME2
|
||||
}
|
||||
|
||||
entity "Photo" as Photo {
|
||||
* PhotoId: INT <<PK>>
|
||||
--
|
||||
* Url: NVARCHAR(500)
|
||||
* CloudinaryPublicId: NVARCHAR(255)
|
||||
Width: INT
|
||||
Height: INT
|
||||
Format: NVARCHAR(10)
|
||||
CreatedAt: DATETIME2
|
||||
}
|
||||
|
||||
' Relationships
|
||||
User ||--o{ Cred : "has"
|
||||
User ||--o| Verify : "has"
|
||||
User ||--o{ Avatar : "has"
|
||||
User ||--o{ Follow : "follows"
|
||||
User ||--o{ Follow : "followed by"
|
||||
Avatar }o--|| Photo : "refers to"
|
||||
|
||||
note right of Cred
|
||||
Password hashing:
|
||||
- Algorithm: Argon2id
|
||||
- Memory: 64MB
|
||||
- Iterations: 4
|
||||
- Salt: 128-bit
|
||||
- Hash: 256-bit
|
||||
end note
|
||||
|
||||
note right of Verify
|
||||
Account verification
|
||||
via email token
|
||||
with expiry
|
||||
end note
|
||||
|
||||
note bottom of User
|
||||
Core stored procedures:
|
||||
- USP_RegisterUser
|
||||
- USP_GetUserAccountByUsername
|
||||
- USP_RotateUserCredential
|
||||
- USP_UpdateUserAccount
|
||||
end note
|
||||
|
||||
@enduml
|
||||
227
docs/diagrams-src/deployment.puml
Normal file
227
docs/diagrams-src/deployment.puml
Normal file
@@ -0,0 +1,227 @@
|
||||
@startuml deployment
|
||||
!theme plain
|
||||
skinparam backgroundColor #FFFFFF
|
||||
skinparam defaultFontName Arial
|
||||
skinparam linetype ortho
|
||||
|
||||
title Docker Deployment Architecture
|
||||
|
||||
' External systems
|
||||
actor Developer
|
||||
cloud "Docker Host" as Host
|
||||
|
||||
package "Development Environment\n(docker-compose.dev.yaml)" #E3F2FD {
|
||||
|
||||
node "SQL Server\n(mcr.microsoft.com/mssql/server:2022-latest)" as DevDB {
|
||||
database "Biergarten\nDatabase" as DevDBInner {
|
||||
portin "1433"
|
||||
}
|
||||
note right
|
||||
Environment:
|
||||
- ACCEPT_EULA=Y
|
||||
- SA_PASSWORD=***
|
||||
- MSSQL_PID=Developer
|
||||
|
||||
Volumes:
|
||||
- biergarten-dev-data
|
||||
end note
|
||||
}
|
||||
|
||||
node "API Container\n(API.Core)" as DevAPI {
|
||||
component "ASP.NET Core 10" as API1
|
||||
portin "8080:8080 (HTTP)" as DevPort1
|
||||
portin "8081:8081 (HTTPS)" as DevPort2
|
||||
|
||||
note right
|
||||
Environment:
|
||||
- ASPNETCORE_ENVIRONMENT=Development
|
||||
- DB_SERVER=sql-server
|
||||
- DB_NAME=Biergarten
|
||||
- DB_USER/PASSWORD
|
||||
- JWT_SECRET
|
||||
- SMTP_* (10+ variables)
|
||||
|
||||
Health Check:
|
||||
/health endpoint
|
||||
end note
|
||||
}
|
||||
|
||||
node "Migrations\n(run-once)" as DevMig {
|
||||
component "Database.Migrations" as Mig1
|
||||
note bottom
|
||||
Runs: DbUp migrations
|
||||
Environment:
|
||||
- CLEAR_DATABASE=false
|
||||
Depends on: sql-server
|
||||
end note
|
||||
}
|
||||
|
||||
node "Seed\n(run-once)" as DevSeed {
|
||||
component "Database.Seed" as Seed1
|
||||
note bottom
|
||||
Creates:
|
||||
- 100 test users
|
||||
- Location data (US/CA/MX)
|
||||
- test.user account
|
||||
Depends on: migrations
|
||||
end note
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
package "Test Environment\n(docker-compose.test.yaml)" #FFF3E0 {
|
||||
|
||||
node "SQL Server\n(isolated instance)" as TestDB {
|
||||
database "Biergarten\nTest Database" as TestDBInner {
|
||||
portin "1434"
|
||||
}
|
||||
note right
|
||||
Fresh instance each run
|
||||
CLEAR_DATABASE=true
|
||||
|
||||
Volumes:
|
||||
- biergarten-test-data
|
||||
(ephemeral)
|
||||
end note
|
||||
}
|
||||
|
||||
node "Migrations\n(test)" as TestMig {
|
||||
component "Database.Migrations"
|
||||
}
|
||||
|
||||
node "Seed\n(test)" as TestSeed {
|
||||
component "Database.Seed"
|
||||
note bottom
|
||||
Minimal seed:
|
||||
- test.user only
|
||||
- Essential data
|
||||
end note
|
||||
}
|
||||
|
||||
node "API.Specs\n(Integration Tests)" as Specs {
|
||||
component "Reqnroll + xUnit" as SpecsComp
|
||||
note right
|
||||
Tests:
|
||||
- Registration flow
|
||||
- Login flow
|
||||
- Validation rules
|
||||
- 404 handling
|
||||
|
||||
Uses: TestApiFactory
|
||||
Mocks: Email services
|
||||
end note
|
||||
}
|
||||
|
||||
node "Infrastructure.Repository.Tests\n(Unit Tests)" as RepoTests {
|
||||
component "xUnit + DbMocker" as RepoComp
|
||||
note right
|
||||
Tests:
|
||||
- AuthRepository
|
||||
- UserAccountRepository
|
||||
- SQL command building
|
||||
|
||||
Uses: Mock connections
|
||||
No real database needed
|
||||
end note
|
||||
}
|
||||
|
||||
node "Service.Auth.Tests\n(Unit Tests)" as SvcTests {
|
||||
component "xUnit + Moq" as SvcComp
|
||||
note right
|
||||
Tests:
|
||||
- RegisterService
|
||||
- LoginService
|
||||
- Token generation
|
||||
|
||||
Uses: Mocked dependencies
|
||||
No database or infrastructure
|
||||
end note
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
folder "test-results/\n(mounted volume)" as Results {
|
||||
file "api-specs/\n results.trx" as Result1
|
||||
file "repository-tests/\n results.trx" as Result2
|
||||
file "service-auth-tests/\n results.trx" as Result3
|
||||
|
||||
note bottom
|
||||
TRX format
|
||||
Readable by:
|
||||
- Visual Studio
|
||||
- Azure DevOps
|
||||
- GitHub Actions
|
||||
end note
|
||||
}
|
||||
|
||||
' External access
|
||||
Developer --> Host : docker compose up
|
||||
Host --> DevAPI : http://localhost:8080
|
||||
|
||||
' Development dependencies
|
||||
DevMig --> DevDB : 1. Run migrations
|
||||
DevSeed --> DevDB : 2. Seed data
|
||||
DevAPI --> DevDB : 3. Connect & serve
|
||||
DevMig .up.> DevDB : depends_on
|
||||
DevSeed .up.> DevMig : depends_on
|
||||
DevAPI .up.> DevSeed : depends_on
|
||||
|
||||
' Test dependencies
|
||||
TestMig --> TestDB : 1. Migrate
|
||||
TestSeed --> TestDB : 2. Seed
|
||||
Specs --> TestDB : 3. Integration test
|
||||
RepoTests ..> TestDB : Mock (no connection)
|
||||
SvcTests ..> TestDB : Mock (no connection)
|
||||
|
||||
TestMig .up.> TestDB : depends_on
|
||||
TestSeed .up.> TestMig : depends_on
|
||||
Specs .up.> TestSeed : depends_on
|
||||
|
||||
' Test results export
|
||||
Specs --> Results : Export TRX
|
||||
RepoTests --> Results : Export TRX
|
||||
SvcTests --> Results : Export TRX
|
||||
|
||||
' Network notes
|
||||
note bottom of DevDB
|
||||
<b>Dev Network (bridge: biergarten-dev)</b>
|
||||
Internal DNS:
|
||||
- sql-server (resolves to SQL container)
|
||||
- api (resolves to API container)
|
||||
end note
|
||||
|
||||
note bottom of TestDB
|
||||
<b>Test Network (bridge: biergarten-test)</b>
|
||||
All test components isolated
|
||||
end note
|
||||
|
||||
' Startup sequence notes
|
||||
note top of DevMig
|
||||
Startup Order:
|
||||
1. SQL Server (health check)
|
||||
2. Migrations (run-once)
|
||||
3. Seed (run-once)
|
||||
4. API (long-running)
|
||||
end note
|
||||
|
||||
note top of Specs
|
||||
Test Execution:
|
||||
All tests run in parallel
|
||||
Results aggregated
|
||||
end note
|
||||
|
||||
' Production note
|
||||
note as ProductionNote
|
||||
<b>Production Deployment (not shown):</b>
|
||||
|
||||
Would include:
|
||||
• Azure SQL Database / AWS RDS
|
||||
• Azure Container Apps / ECS
|
||||
• Azure Key Vault for secrets
|
||||
• Application Insights / CloudWatch
|
||||
• Load balancer
|
||||
• HTTPS termination
|
||||
• CDN for static assets
|
||||
end note
|
||||
|
||||
@enduml
|
||||
327
docs/docker.md
Normal file
327
docs/docker.md
Normal file
@@ -0,0 +1,327 @@
|
||||
# Docker Guide
|
||||
|
||||
This document covers Docker deployment, configuration, and troubleshooting for The
|
||||
Biergarten App.
|
||||
|
||||
## Overview
|
||||
|
||||
The project uses Docker Compose to orchestrate multiple services:
|
||||
|
||||
- SQL Server 2022 database
|
||||
- Database migrations runner (DbUp)
|
||||
- Database seeder
|
||||
- .NET API
|
||||
- Test runners
|
||||
|
||||
See the [deployment diagram](diagrams-out/deployment.svg) for a visual representation.
|
||||
|
||||
## Docker Compose Environments
|
||||
|
||||
### 1. Development (`docker-compose.dev.yaml`)
|
||||
|
||||
**Purpose**: Local development with persistent data
|
||||
|
||||
**Features**:
|
||||
|
||||
- Persistent SQL Server volume
|
||||
- Hot reload support
|
||||
- Swagger UI enabled
|
||||
- Seed data included
|
||||
- `CLEAR_DATABASE=false` (schema and data persist between runs)
|
||||
|
||||
**Services**:
|
||||
|
||||
```yaml
|
||||
sqlserver # SQL Server 2022 (port 1433)
|
||||
database.migrations # DbUp migrations
|
||||
database.seed # Seed initial data
|
||||
api.core # Web API (ports 8080, 8081)
|
||||
```
|
||||
|
||||
**Start Development Environment**:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.dev.yaml up -d
|
||||
```
|
||||
|
||||
**Access**:
|
||||
|
||||
- API Swagger: http://localhost:8080/swagger
|
||||
- Health Check: http://localhost:8080/health
|
||||
- SQL Server: localhost:1433 (sa credentials from .env.dev)
|
||||
|
||||
**Stop Environment**:
|
||||
|
||||
```bash
|
||||
# Stop services (keep volumes)
|
||||
docker compose -f docker-compose.dev.yaml down
|
||||
|
||||
# Stop and remove volumes (fresh start)
|
||||
docker compose -f docker-compose.dev.yaml down -v
|
||||
```
|
||||
|
||||
### 2. Testing (`docker-compose.test.yaml`)
|
||||
|
||||
**Purpose**: Automated CI/CD testing in isolated environment
|
||||
|
||||
**Features**:
|
||||
|
||||
- Fresh database each run
|
||||
- All test suites execute in parallel
|
||||
- Test results exported to `./test-results/`
|
||||
- Containers auto-exit after completion
|
||||
- Fully isolated testnet network
|
||||
|
||||
**Services**:
|
||||
|
||||
```yaml
|
||||
sqlserver # Test database
|
||||
database.migrations # Fresh schema
|
||||
database.seed # Test data
|
||||
api.specs # Reqnroll BDD tests
|
||||
repository.tests # Repository unit tests
|
||||
service.auth.tests # Service unit tests
|
||||
```
|
||||
|
||||
**Run Tests**:
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
docker compose -f docker-compose.test.yaml up --abort-on-container-exit
|
||||
|
||||
# View results
|
||||
ls -la test-results/
|
||||
cat test-results/api-specs/results.trx
|
||||
cat test-results/repository-tests/results.trx
|
||||
cat test-results/service-auth-tests/results.trx
|
||||
|
||||
# Clean up
|
||||
docker compose -f docker-compose.test.yaml down -v
|
||||
```
|
||||
|
||||
### 3. Production (`docker-compose.prod.yaml`)
|
||||
|
||||
**Purpose**: Production-ready deployment
|
||||
|
||||
**Features**:
|
||||
|
||||
- Production logging levels
|
||||
- No database clearing
|
||||
- Optimized build configurations
|
||||
- Health checks enabled
|
||||
- Restart policies (unless-stopped)
|
||||
- Security hardening
|
||||
|
||||
**Services**:
|
||||
|
||||
```yaml
|
||||
sqlserver # Production SQL Server
|
||||
database.migrations # Schema updates only
|
||||
api.core # Production API
|
||||
```
|
||||
|
||||
**Deploy Production**:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.prod.yaml up -d
|
||||
```
|
||||
|
||||
## Service Dependencies
|
||||
|
||||
Docker Compose manages startup order using health checks:
|
||||
|
||||
```text
|
||||
sqlserver (health check)
|
||||
↓
|
||||
database.migrations (completes successfully)
|
||||
↓
|
||||
database.seed (completes successfully)
|
||||
↓
|
||||
api.core / tests (start when ready)
|
||||
```
|
||||
|
||||
**Health Check Example** (SQL Server):
|
||||
|
||||
```yaml
|
||||
healthcheck:
|
||||
  test: ['CMD-SHELL', "/opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P '${DB_PASSWORD}' -C -Q 'SELECT 1'"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 12
|
||||
start_period: 30s
|
||||
```
|
||||
|
||||
**Dependency Configuration**:
|
||||
|
||||
```yaml
|
||||
api.core:
|
||||
depends_on:
|
||||
database.seed:
|
||||
condition: service_completed_successfully
|
||||
```
|
||||
|
||||
## Volumes
|
||||
|
||||
### Persistent Volumes
|
||||
|
||||
**Development**:
|
||||
|
||||
- `sqlserverdata-dev` - Database files persist between restarts
|
||||
- `nuget-cache-dev` - NuGet package cache (speeds up builds)
|
||||
|
||||
**Testing**:
|
||||
|
||||
- `sqlserverdata-test` - Temporary, typically removed after tests
|
||||
|
||||
**Production**:
|
||||
|
||||
- `sqlserverdata-prod` - Production database files
|
||||
- `nuget-cache-prod` - Production NuGet cache
|
||||
|
||||
### Mounted Volumes
|
||||
|
||||
**Test Results**:
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- ./test-results:/app/test-results
|
||||
```
|
||||
|
||||
Test results are written to host filesystem for CI/CD integration.
|
||||
|
||||
**Code Volumes** (development only):
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- ./src:/app/src # Hot reload for development
|
||||
```
|
||||
|
||||
## Networks
|
||||
|
||||
Each environment uses isolated bridge networks:
|
||||
|
||||
- `devnet` - Development network
|
||||
- `testnet` - Testing network (fully isolated)
|
||||
- `prodnet` - Production network
|
||||
|
||||
## Environment Variables
|
||||
|
||||
All containers are configured via environment variables from `.env` files:
|
||||
|
||||
```yaml
|
||||
env_file: '.env.dev' # or .env.test, .env.prod
|
||||
|
||||
environment:
|
||||
ASPNETCORE_ENVIRONMENT: 'Development'
|
||||
DOTNET_RUNNING_IN_CONTAINER: 'true'
|
||||
DB_SERVER: '${DB_SERVER}'
|
||||
DB_NAME: '${DB_NAME}'
|
||||
DB_USER: '${DB_USER}'
|
||||
DB_PASSWORD: '${DB_PASSWORD}'
|
||||
JWT_SECRET: '${JWT_SECRET}'
|
||||
```
|
||||
|
||||
For complete list, see [Environment Variables](environment-variables.md).
|
||||
|
||||
## Common Commands
|
||||
|
||||
### View Services
|
||||
|
||||
```bash
|
||||
# Running services
|
||||
docker compose -f docker-compose.dev.yaml ps
|
||||
|
||||
# All containers (including stopped)
|
||||
docker ps -a
|
||||
```
|
||||
|
||||
### View Logs
|
||||
|
||||
```bash
|
||||
# All services
|
||||
docker compose -f docker-compose.dev.yaml logs -f
|
||||
|
||||
# Specific service
|
||||
docker compose -f docker-compose.dev.yaml logs -f api.core
|
||||
|
||||
# Last 100 lines
|
||||
docker compose -f docker-compose.dev.yaml logs --tail=100 api.core
|
||||
```
|
||||
|
||||
### Execute Commands in Container
|
||||
|
||||
```bash
|
||||
# Interactive shell
|
||||
docker exec -it dev-env-api-core bash
|
||||
|
||||
# Run command
|
||||
docker exec dev-env-sqlserver /opt/mssql-tools18/bin/sqlcmd -S localhost -U sa -P 'password' -C
|
||||
```
|
||||
|
||||
### Restart Services
|
||||
|
||||
```bash
|
||||
# Restart all services
|
||||
docker compose -f docker-compose.dev.yaml restart
|
||||
|
||||
# Restart specific service
|
||||
docker compose -f docker-compose.dev.yaml restart api.core
|
||||
|
||||
# Rebuild and restart
|
||||
docker compose -f docker-compose.dev.yaml up -d --build api.core
|
||||
```
|
||||
|
||||
### Build Images
|
||||
|
||||
```bash
|
||||
# Build all images
|
||||
docker compose -f docker-compose.dev.yaml build
|
||||
|
||||
# Build specific service
|
||||
docker compose -f docker-compose.dev.yaml build api.core
|
||||
|
||||
# Build without cache
|
||||
docker compose -f docker-compose.dev.yaml build --no-cache
|
||||
```
|
||||
|
||||
### Clean Up
|
||||
|
||||
```bash
|
||||
# Stop and remove containers
|
||||
docker compose -f docker-compose.dev.yaml down
|
||||
|
||||
# Remove containers and volumes
|
||||
docker compose -f docker-compose.dev.yaml down -v
|
||||
|
||||
# Remove containers, volumes, and images
|
||||
docker compose -f docker-compose.dev.yaml down -v --rmi all
|
||||
|
||||
# System-wide cleanup
|
||||
docker system prune -af --volumes
|
||||
```
|
||||
|
||||
## Dockerfile Structure
|
||||
|
||||
### Multi-Stage Build
|
||||
|
||||
```dockerfile
|
||||
# Stage 1: Build
|
||||
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||
WORKDIR /src
|
||||
COPY ["Project/Project.csproj", "Project/"]
|
||||
RUN dotnet restore
|
||||
COPY . .
|
||||
RUN dotnet publish -c Release -o /app/publish
|
||||
|
||||
# Stage 2: Runtime
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS final
|
||||
WORKDIR /app
|
||||
COPY --from=build /app/publish .
|
||||
ENTRYPOINT ["dotnet", "Project.dll"]
|
||||
```
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Docker Compose Documentation](https://docs.docker.com/compose/)
|
||||
- [.NET Docker Images](https://hub.docker.com/_/microsoft-dotnet)
|
||||
- [SQL Server Docker Images](https://hub.docker.com/_/microsoft-mssql-server)
|
||||
304
docs/environment-variables.md
Normal file
304
docs/environment-variables.md
Normal file
@@ -0,0 +1,304 @@
|
||||
# Environment Variables
|
||||
|
||||
This document covers the active environment variables used by the current Biergarten
|
||||
stack.
|
||||
|
||||
## Overview
|
||||
|
||||
The application uses environment variables for:
|
||||
|
||||
- **.NET API backend** - database connections, token secrets, runtime settings
|
||||
- **React Router website** - API base URL and session signing
|
||||
- **Docker containers** - environment-specific orchestration
|
||||
|
||||
## Configuration Patterns
|
||||
|
||||
### Backend (.NET API)
|
||||
|
||||
Direct environment variable access via `Environment.GetEnvironmentVariable()`.
|
||||
|
||||
### Frontend (`src/Website`)
|
||||
|
||||
The active website reads runtime values from the server environment for its auth and API
|
||||
integration.
|
||||
|
||||
### Docker
|
||||
|
||||
Environment-specific `.env` files loaded via `env_file:` in docker-compose.yaml:
|
||||
|
||||
- `.env.dev` - Development
|
||||
- `.env.test` - Testing
|
||||
- `.env.prod` - Production
|
||||
|
||||
## Backend Variables (.NET API)
|
||||
|
||||
### Database Connection
|
||||
|
||||
**Option 1: Component-Based (Recommended for Docker)**
|
||||
|
||||
Build connection string from individual components:
|
||||
|
||||
```bash
|
||||
DB_SERVER=sqlserver,1433 # SQL Server host and port
|
||||
DB_NAME=Biergarten # Database name
|
||||
DB_USER=sa # SQL Server username
|
||||
DB_PASSWORD=YourStrong!Passw0rd # SQL Server password
|
||||
DB_TRUST_SERVER_CERTIFICATE=True # Optional, defaults to True
|
||||
```
|
||||
|
||||
**Option 2: Full Connection String (Local Development)**
|
||||
|
||||
Provide complete connection string:
|
||||
|
||||
```bash
|
||||
DB_CONNECTION_STRING="Server=localhost,1433;Database=Biergarten;User Id=sa;Password=YourStrong!Passw0rd;TrustServerCertificate=True;"
|
||||
```
|
||||
|
||||
**Priority**: `DB_CONNECTION_STRING` is checked first. If not found, connection string is
|
||||
built from components.
|
||||
|
||||
**Implementation**: See `DefaultSqlConnectionFactory.cs`
|
||||
|
||||
### JWT Authentication Secrets (Backend)
|
||||
|
||||
The backend uses separate secrets for different token types to enable independent key rotation and validation isolation.
|
||||
|
||||
```bash
|
||||
# Access token secret (1-hour tokens)
|
||||
ACCESS_TOKEN_SECRET=<generated-secret> # Signs short-lived access tokens
|
||||
|
||||
# Refresh token secret (21-day tokens)
|
||||
REFRESH_TOKEN_SECRET=<generated-secret> # Signs long-lived refresh tokens
|
||||
|
||||
# Confirmation token secret (30-minute tokens)
|
||||
CONFIRMATION_TOKEN_SECRET=<generated-secret> # Signs email confirmation tokens
|
||||
|
||||
# Website base URL (used in confirmation emails)
|
||||
WEBSITE_BASE_URL=https://thebiergarten.app # Base URL for the website
|
||||
```
|
||||
|
||||
**Security Requirements**:
|
||||
|
||||
- Each secret should be minimum 32 characters
|
||||
- Recommend 127+ characters for production
|
||||
- Generate using cryptographically secure random functions
|
||||
- Never reuse secrets across token types or environments
|
||||
- Rotate secrets periodically in production
|
||||
|
||||
**Generate Secrets**:
|
||||
|
||||
```bash
|
||||
# macOS/Linux - Generate 127-character base64 secret
|
||||
openssl rand -base64 127
|
||||
|
||||
# Windows PowerShell
|
||||
[Convert]::ToBase64String([byte[]](1..127 | %{Get-Random -Max 256}))
|
||||
```
|
||||
|
||||
**Token Expiration**:
|
||||
|
||||
- **Access tokens**: 1 hour
|
||||
- **Refresh tokens**: 21 days
|
||||
- **Confirmation tokens**: 30 minutes
|
||||
|
||||
(Defined in `TokenServiceExpirationHours` class)
|
||||
|
||||
**JWT Implementation**:
|
||||
|
||||
- **Algorithm**: HS256 (HMAC-SHA256)
|
||||
- **Handler**: Microsoft.IdentityModel.JsonWebTokens.JsonWebTokenHandler
|
||||
- **Validation**: Token signature, expiration, and malformed token checks
|
||||
|
||||
### Migration Control
|
||||
|
||||
```bash
|
||||
CLEAR_DATABASE=true
|
||||
```
|
||||
|
||||
- **Required**: No
|
||||
- **Default**: false
|
||||
- **Effect**: If "true", drops and recreates database during migrations
|
||||
- **Usage**: Development and testing environments ONLY
|
||||
- **Warning**: NEVER use in production
|
||||
|
||||
### ASP.NET Core Configuration
|
||||
|
||||
```bash
|
||||
ASPNETCORE_ENVIRONMENT=Development # Development, Production, Staging
|
||||
ASPNETCORE_URLS=http://0.0.0.0:8080 # Binding address and port
|
||||
DOTNET_RUNNING_IN_CONTAINER=true # Flag for container execution
|
||||
```
|
||||
|
||||
## Frontend Variables (`src/Website`)
|
||||
|
||||
The active website does not use the old Next.js/Prisma environment model. Its core runtime
|
||||
variables are:
|
||||
|
||||
```bash
|
||||
API_BASE_URL=http://localhost:8080 # Base URL for the .NET API
|
||||
SESSION_SECRET=<generated-secret> # Cookie session signing secret
|
||||
NODE_ENV=development # Standard Node runtime mode
|
||||
```
|
||||
|
||||
### Frontend Variable Details
|
||||
|
||||
#### `API_BASE_URL`
|
||||
|
||||
- **Required**: Yes for local development
|
||||
- **Default in code**: `http://localhost:8080`
|
||||
- **Used by**: `src/Website/app/lib/auth.server.ts`
|
||||
- **Purpose**: Routes website auth actions to the .NET API
|
||||
|
||||
#### `SESSION_SECRET`
|
||||
|
||||
- **Required**: Strongly recommended in all environments
|
||||
- **Default in local code path**: `dev-secret-change-me`
|
||||
- **Used by**: React Router cookie session storage in `auth.server.ts`
|
||||
- **Purpose**: Signs and validates the website session cookie
|
||||
|
||||
#### `NODE_ENV`
|
||||
|
||||
- **Required**: No
|
||||
- **Typical values**: `development`, `production`, `test`
|
||||
- **Purpose**: Controls secure cookie behavior and runtime mode
|
||||
|
||||
### Admin Account (Seeding)
|
||||
|
||||
```bash
|
||||
ADMIN_PASSWORD=SecureAdminPassword123! # Initial admin password for seeding
|
||||
```
|
||||
|
||||
- **Required**: No (only needed for seeding)
|
||||
- **Purpose**: Sets admin account password during database seeding
|
||||
- **Security**: Use strong password, change immediately in production
|
||||
|
||||
## Docker-Specific Variables
|
||||
|
||||
### SQL Server Container
|
||||
|
||||
```bash
|
||||
SA_PASSWORD=YourStrong!Passw0rd # SQL Server SA password
|
||||
ACCEPT_EULA=Y # Accept SQL Server EULA (required)
|
||||
MSSQL_PID=Express # SQL Server edition (Express, Developer, Enterprise)
|
||||
```
|
||||
|
||||
**Password Requirements**:
|
||||
|
||||
- Minimum 8 characters
|
||||
- Uppercase, lowercase, digits, and special characters
|
||||
- Maps to `DB_PASSWORD` for application containers
|
||||
|
||||
## Environment File Structure
|
||||
|
||||
### Backend/Docker (Root Directory)
|
||||
|
||||
```
|
||||
.env.example # Template (tracked in Git)
|
||||
.env.dev # Development config (gitignored)
|
||||
.env.test # Testing config (gitignored)
|
||||
.env.prod # Production config (gitignored)
|
||||
```
|
||||
|
||||
**Setup**:
|
||||
|
||||
```bash
|
||||
cp .env.example .env.dev
|
||||
# Edit .env.dev with your values
|
||||
```
|
||||
|
||||
## Legacy Frontend Variables
|
||||
|
||||
Variables for the archived Next.js frontend (`src/Website-v1`) have been removed from this
|
||||
active reference. See [archive/legacy-website-v1.md](archive/legacy-website-v1.md) if you
|
||||
need the legacy Prisma, Cloudinary, Mapbox, or SparkPost notes.
|
||||
|
||||
**Docker Compose Mapping**:
|
||||
|
||||
- `docker-compose.dev.yaml` → `.env.dev`
|
||||
- `docker-compose.test.yaml` → `.env.test`
|
||||
- `docker-compose.prod.yaml` → `.env.prod`
|
||||
|
||||
## Variable Reference Table
|
||||
|
||||
| Variable | Backend | Frontend | Docker | Required | Notes |
|
||||
| ----------------------------- | :-----: | :------: | :----: | :------: | -------------------------- |
|
||||
| `DB_SERVER` | ✓ | | ✓ | Yes\* | SQL Server address |
|
||||
| `DB_NAME` | ✓ | | ✓ | Yes\* | Database name |
|
||||
| `DB_USER` | ✓ | | ✓ | Yes\* | SQL username |
|
||||
| `DB_PASSWORD` | ✓ | | ✓ | Yes\* | SQL password |
|
||||
| `DB_CONNECTION_STRING` | ✓ | | | Yes\* | Alternative to components |
|
||||
| `DB_TRUST_SERVER_CERTIFICATE` | ✓ | | ✓ | No | Defaults to `True` |
|
||||
| `ACCESS_TOKEN_SECRET` | ✓ | | ✓ | Yes | Access token signing |
|
||||
| `REFRESH_TOKEN_SECRET` | ✓ | | ✓ | Yes | Refresh token signing |
|
||||
| `CONFIRMATION_TOKEN_SECRET` | ✓ | | ✓ | Yes | Confirmation token signing |
|
||||
| `WEBSITE_BASE_URL` | ✓ | | | Yes | Website URL for emails |
|
||||
| `API_BASE_URL` | | ✓ | | Yes | Website-to-API base URL |
|
||||
| `SESSION_SECRET` | | ✓ | | Yes | Website session signing |
|
||||
| `NODE_ENV` | | ✓ | | No | Runtime mode |
|
||||
| `CLEAR_DATABASE` | ✓ | | ✓ | No | Dev/test reset flag |
|
||||
| `ASPNETCORE_ENVIRONMENT` | ✓ | | ✓ | Yes | ASP.NET environment |
|
||||
| `ASPNETCORE_URLS` | ✓ | | ✓ | Yes | API binding address |
|
||||
| `SA_PASSWORD` | | | ✓ | Yes | SQL Server container |
|
||||
| `ACCEPT_EULA` | | | ✓ | Yes | SQL Server EULA |
|
||||
| `MSSQL_PID` | | | ✓ | No | SQL Server edition |
|
||||
| `DOTNET_RUNNING_IN_CONTAINER` | ✓ | | ✓ | No | Container flag |
|
||||
|
||||
\* Either `DB_CONNECTION_STRING` OR the component variables (`DB_SERVER`, `DB_NAME`,
|
||||
`DB_USER`, `DB_PASSWORD`) must be provided.
|
||||
|
||||
## Validation
|
||||
|
||||
### Backend Validation
|
||||
|
||||
Variables are validated at startup:
|
||||
|
||||
- Missing required variables cause application to fail
|
||||
- Token secret lengths (`ACCESS_TOKEN_SECRET`, `REFRESH_TOKEN_SECRET`, `CONFIRMATION_TOKEN_SECRET`) are enforced (min 32 chars)
|
||||
- Connection string format is validated
|
||||
|
||||
### Frontend Validation
|
||||
|
||||
The active website relies on runtime defaults for local development and the surrounding
|
||||
server environment in deployed environments.
|
||||
|
||||
- `API_BASE_URL` defaults to `http://localhost:8080`
|
||||
- `SESSION_SECRET` falls back to a development-only local secret
|
||||
- `NODE_ENV` controls secure cookie behavior
|
||||
|
||||
## Example Configuration Files
|
||||
|
||||
### `.env.dev` (Backend/Docker)
|
||||
|
||||
```bash
|
||||
# Database
|
||||
DB_SERVER=sqlserver,1433
|
||||
DB_NAME=Biergarten
|
||||
DB_USER=sa
|
||||
DB_PASSWORD=Dev_Password_123!
|
||||
|
||||
# JWT Authentication Secrets
|
||||
ACCESS_TOKEN_SECRET=<generated-with-openssl>
|
||||
REFRESH_TOKEN_SECRET=<generated-with-openssl>
|
||||
CONFIRMATION_TOKEN_SECRET=<generated-with-openssl>
|
||||
WEBSITE_BASE_URL=http://localhost:3000
|
||||
|
||||
# Migration
|
||||
CLEAR_DATABASE=true
|
||||
|
||||
# ASP.NET Core
|
||||
ASPNETCORE_ENVIRONMENT=Development
|
||||
ASPNETCORE_URLS=http://0.0.0.0:8080
|
||||
|
||||
# SQL Server Container
|
||||
SA_PASSWORD=Dev_Password_123!
|
||||
ACCEPT_EULA=Y
|
||||
MSSQL_PID=Express
|
||||
```
|
||||
|
||||
### Frontend local runtime example
|
||||
|
||||
```bash
|
||||
API_BASE_URL=http://localhost:8080
|
||||
SESSION_SECRET=<generated-with-openssl>
|
||||
NODE_ENV=development
|
||||
```
|
||||
138
docs/getting-started.md
Normal file
138
docs/getting-started.md
Normal file
@@ -0,0 +1,138 @@
|
||||
# Getting Started
|
||||
|
||||
This guide covers local setup for the current Biergarten stack: the .NET backend in
|
||||
`src/Core` and the active React Router frontend in `src/Website`.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- **.NET SDK 10+**
|
||||
- **Node.js 18+**
|
||||
- **Docker Desktop** or equivalent Docker Engine setup
|
||||
- **Java 8+** if you want to regenerate PlantUML diagrams
|
||||
|
||||
## Recommended Path: Docker for Backend, Node for Frontend
|
||||
|
||||
### 1. Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd the-biergarten-app
|
||||
```
|
||||
|
||||
### 2. Configure Backend Environment Variables
|
||||
|
||||
```bash
|
||||
cp .env.example .env.dev
|
||||
```
|
||||
|
||||
At minimum, ensure `.env.dev` includes valid database and token values:
|
||||
|
||||
```bash
|
||||
DB_SERVER=sqlserver,1433
|
||||
DB_NAME=Biergarten
|
||||
DB_USER=sa
|
||||
DB_PASSWORD=YourStrong!Passw0rd
|
||||
ACCESS_TOKEN_SECRET=<generated>
|
||||
REFRESH_TOKEN_SECRET=<generated>
|
||||
CONFIRMATION_TOKEN_SECRET=<generated>
|
||||
WEBSITE_BASE_URL=http://localhost:3000
|
||||
```
|
||||
|
||||
See [Environment Variables](environment-variables.md) for the full list.
|
||||
|
||||
### 3. Start the Backend Stack
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.dev.yaml up -d
|
||||
```
|
||||
|
||||
This starts SQL Server, migrations, seeding, and the API.
|
||||
|
||||
Available endpoints:
|
||||
|
||||
- API Swagger: http://localhost:8080/swagger
|
||||
- Health Check: http://localhost:8080/health
|
||||
|
||||
### 4. Start the Active Frontend
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm install
|
||||
API_BASE_URL=http://localhost:8080 SESSION_SECRET=dev-secret-change-me npm run dev
|
||||
```
|
||||
|
||||
The website will be available at the local address printed by React Router dev.
|
||||
|
||||
Required frontend runtime variables for local work:
|
||||
|
||||
- `API_BASE_URL` - Base URL for the .NET API
|
||||
- `SESSION_SECRET` - Cookie session signing secret for the website server
|
||||
|
||||
### 5. Optional: Run Storybook
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm run storybook
|
||||
```
|
||||
|
||||
Storybook runs at http://localhost:6006 by default.
|
||||
|
||||
## Useful Commands
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.dev.yaml logs -f
|
||||
docker compose -f docker-compose.dev.yaml down
|
||||
docker compose -f docker-compose.dev.yaml down -v
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm run lint
|
||||
npm run typecheck
|
||||
npm run format:check
|
||||
npm run test:storybook
|
||||
npm run test:storybook:playwright
|
||||
```
|
||||
|
||||
## Manual Backend Setup
|
||||
|
||||
If you do not want to use Docker, you can run the backend locally.
|
||||
|
||||
### 1. Set Environment Variables
|
||||
|
||||
```bash
|
||||
export DB_CONNECTION_STRING="Server=localhost,1433;Database=Biergarten;User Id=sa;Password=YourStrong!Passw0rd;TrustServerCertificate=True;"
|
||||
export ACCESS_TOKEN_SECRET="<generated>"
|
||||
export REFRESH_TOKEN_SECRET="<generated>"
|
||||
export CONFIRMATION_TOKEN_SECRET="<generated>"
|
||||
export WEBSITE_BASE_URL="http://localhost:3000"
|
||||
```
|
||||
|
||||
### 2. Run Migrations and Seed
|
||||
|
||||
```bash
|
||||
cd src/Core
|
||||
dotnet run --project Database/Database.Migrations/Database.Migrations.csproj
|
||||
dotnet run --project Database/Database.Seed/Database.Seed.csproj
|
||||
```
|
||||
|
||||
### 3. Start the API
|
||||
|
||||
```bash
|
||||
dotnet run --project API/API.Core/API.Core.csproj
|
||||
```
|
||||
|
||||
## Legacy Frontend Note
|
||||
|
||||
The previous Next.js frontend now lives in `src/Website-v1` and is not the active website.
|
||||
Legacy setup details have been moved to [docs/archive/legacy-website-v1.md](archive/legacy-website-v1.md).
|
||||
|
||||
## Next Steps
|
||||
|
||||
- Review [Architecture](architecture.md)
|
||||
- Run backend and frontend checks from [Testing](testing.md)
|
||||
- Use [Docker Guide](docker.md) for container troubleshooting
|
||||
340
docs/testing.md
Normal file
340
docs/testing.md
Normal file
@@ -0,0 +1,340 @@
|
||||
# Testing
|
||||
|
||||
This document describes the testing strategy and how to run tests for The Biergarten App.
|
||||
|
||||
## Overview
|
||||
|
||||
The project uses a multi-layered testing approach across backend and frontend:
|
||||
|
||||
- **API.Specs** - BDD integration tests using Reqnroll (Gherkin)
|
||||
- **Infrastructure.Repository.Tests** - Unit tests for data access layer
|
||||
- **Service.Auth.Tests** - Unit tests for authentication business logic
|
||||
- **Storybook Vitest project** - Browser-based interaction tests for shared website stories
|
||||
- **Storybook Playwright suite** - Browser checks against Storybook-rendered components
|
||||
|
||||
## Running Tests with Docker (Recommended)
|
||||
|
||||
The easiest way to run all tests is using Docker Compose, which sets up an isolated test
|
||||
environment:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.test.yaml up --abort-on-container-exit
|
||||
```
|
||||
|
||||
This command:
|
||||
|
||||
1. Starts a fresh SQL Server instance
|
||||
2. Runs database migrations
|
||||
3. Seeds test data
|
||||
4. Executes all test suites in parallel
|
||||
5. Exports results to `./test-results/`
|
||||
6. Exits when tests complete
|
||||
|
||||
### View Test Results
|
||||
|
||||
```bash
|
||||
# List test result files
|
||||
ls -la test-results/
|
||||
|
||||
# View specific test results
|
||||
cat test-results/api-specs/results.trx
|
||||
cat test-results/repository-tests/results.trx
|
||||
cat test-results/service-auth-tests/results.trx
|
||||
```
|
||||
|
||||
### Clean Up
|
||||
|
||||
```bash
|
||||
# Remove test containers and volumes
|
||||
docker compose -f docker-compose.test.yaml down -v
|
||||
```
|
||||
|
||||
## Running Tests Locally
|
||||
|
||||
You can run individual test projects locally without Docker:
|
||||
|
||||
### Integration Tests (API.Specs)
|
||||
|
||||
```bash
|
||||
cd src/Core
|
||||
dotnet test API/API.Specs/API.Specs.csproj
|
||||
```
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- SQL Server instance running
|
||||
- Database migrated and seeded
|
||||
- Environment variables set (DB connection, token secrets)
|
||||
|
||||
### Repository Tests
|
||||
|
||||
```bash
|
||||
cd src/Core
|
||||
dotnet test Infrastructure/Infrastructure.Repository.Tests/Infrastructure.Repository.Tests.csproj
|
||||
```
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- SQL Server instance running (uses mock data)
|
||||
|
||||
### Service Tests
|
||||
|
||||
```bash
|
||||
cd src/Core
|
||||
dotnet test Service/Service.Auth.Tests/Service.Auth.Tests.csproj
|
||||
```
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- No database required (uses Moq for mocking)
|
||||
|
||||
### Frontend Storybook Tests
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm install
|
||||
npm run test:storybook
|
||||
```
|
||||
|
||||
**Purpose**:
|
||||
|
||||
- Verifies shared stories such as form fields, submit buttons, navbar states, toasts, and the theme gallery
|
||||
- Runs in browser mode via Vitest and Storybook integration
|
||||
|
||||
### Frontend Playwright Storybook Tests
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm install
|
||||
npm run test:storybook:playwright
|
||||
```
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- Storybook dependencies installed
|
||||
- Playwright browser dependencies installed
|
||||
- The command will start or reuse the Storybook server defined in `playwright.storybook.config.ts`
|
||||
|
||||
## Test Coverage
|
||||
|
||||
### Current Coverage
|
||||
|
||||
**Authentication & User Management**:
|
||||
|
||||
- User registration with validation
|
||||
- User login with JWT token generation
|
||||
- Password hashing and verification (Argon2id)
|
||||
- JWT token generation and claims
|
||||
- Invalid credentials handling
|
||||
- 404 error responses
|
||||
|
||||
**Repository Layer**:
|
||||
|
||||
- User account creation
|
||||
- User credential management
|
||||
- GetUserByUsername queries
|
||||
- Stored procedure execution
|
||||
|
||||
**Service Layer**:
|
||||
|
||||
- Login service with password verification
|
||||
- Register service with validation
|
||||
- Business logic for authentication flow
|
||||
|
||||
**Frontend UI Coverage**:
|
||||
|
||||
- Shared submit button states
|
||||
- Form field happy path and error presentation
|
||||
- Navbar guest, authenticated, and mobile behavior
|
||||
- Theme gallery rendering across Biergarten themes
|
||||
- Toast interactions and themed notification display
|
||||
|
||||
### Planned Coverage
|
||||
|
||||
- [ ] Email verification workflow
|
||||
- [ ] Password reset functionality
|
||||
- [ ] Token refresh mechanism
|
||||
- [ ] Brewery data management
|
||||
- [ ] Beer post operations
|
||||
- [ ] User follow/unfollow
|
||||
- [ ] Image upload service
|
||||
- [ ] Frontend route integration coverage beyond Storybook stories
|
||||
|
||||
## Testing Frameworks & Tools
|
||||
|
||||
### xUnit
|
||||
|
||||
- Primary unit testing framework
|
||||
- Used for Repository and Service layer tests
|
||||
- Supports parallel test execution
|
||||
|
||||
### Reqnroll (Gherkin/BDD)
|
||||
|
||||
- Behavior-driven development framework
|
||||
- Used for API integration tests
|
||||
- Human-readable test scenarios in `.feature` files
|
||||
|
||||
### FluentAssertions
|
||||
|
||||
- Expressive assertion library
|
||||
- Makes test assertions more readable
|
||||
- Used across all test projects
|
||||
|
||||
### Moq
|
||||
|
||||
- Mocking framework for .NET
|
||||
- Used in Service layer tests
|
||||
- Enables isolated unit testing
|
||||
|
||||
### DbMocker
|
||||
|
||||
- Database mocking for repository tests
|
||||
- Simulates SQL Server responses
|
||||
- No real database required for unit tests
|
||||
|
||||
## Test Structure
|
||||
|
||||
### API.Specs (Integration Tests)
|
||||
|
||||
```
|
||||
API.Specs/
|
||||
├── Features/
|
||||
│ ├── Authentication.feature # Login/register scenarios
|
||||
│ └── UserManagement.feature # User CRUD scenarios
|
||||
├── Steps/
|
||||
│ ├── AuthenticationSteps.cs # Step definitions
|
||||
│ └── UserManagementSteps.cs
|
||||
└── Mocks/
|
||||
└── TestApiFactory.cs # Test server setup
|
||||
```
|
||||
|
||||
**Example Feature**:
|
||||
|
||||
```gherkin
|
||||
Feature: User Authentication
|
||||
As a user
|
||||
I want to register and login
|
||||
So that I can access the platform
|
||||
|
||||
Scenario: Successful user registration
|
||||
Given I have valid registration details
|
||||
When I register a new account
|
||||
Then I should receive a JWT token
|
||||
And my account should be created
|
||||
```
|
||||
|
||||
### Infrastructure.Repository.Tests
|
||||
|
||||
```
|
||||
Infrastructure.Repository.Tests/
|
||||
├── AuthRepositoryTests.cs # Auth repository tests
|
||||
├── UserAccountRepositoryTests.cs # User account tests
|
||||
└── TestFixtures/
|
||||
└── DatabaseFixture.cs # Shared test setup
|
||||
```
|
||||
|
||||
### Service.Auth.Tests
|
||||
|
||||
```
|
||||
Service.Auth.Tests/
|
||||
├── LoginService.test.cs # Login business logic tests
|
||||
└── RegisterService.test.cs # Registration business logic tests
|
||||
```
|
||||
|
||||
## Writing Tests
|
||||
|
||||
### Unit Test Example (xUnit)
|
||||
|
||||
```csharp
|
||||
public class LoginServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task LoginAsync_ValidCredentials_ReturnsToken()
|
||||
{
|
||||
// Arrange
|
||||
var mockRepo = new Mock<IAuthRepository>();
|
||||
var mockJwt = new Mock<IJwtService>();
|
||||
var service = new AuthService(mockRepo.Object, mockJwt.Object);
|
||||
|
||||
// Act
|
||||
var result = await service.LoginAsync("testuser", "password123");
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result.Token.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Integration Test Example (Reqnroll)
|
||||
|
||||
```gherkin
|
||||
Scenario: User login with valid credentials
|
||||
Given a registered user with username "testuser"
|
||||
When I POST to "/api/auth/login" with valid credentials
|
||||
Then the response status should be 200
|
||||
And the response should contain a JWT token
|
||||
```
|
||||
|
||||
## Continuous Integration
|
||||
|
||||
Tests run automatically in CI/CD pipelines using the test Docker Compose configuration:
|
||||
|
||||
```bash
|
||||
# CI/CD command
|
||||
docker compose -f docker-compose.test.yaml build
|
||||
docker compose -f docker-compose.test.yaml up --abort-on-container-exit
|
||||
docker compose -f docker-compose.test.yaml down -v
|
||||
```
|
||||
|
||||
Exit codes:
|
||||
|
||||
- `0` - All tests passed
|
||||
- Non-zero - Test failures occurred
|
||||
|
||||
Frontend UI checks should also be included in CI for the active website workspace:
|
||||
|
||||
```bash
|
||||
cd src/Website
|
||||
npm ci
|
||||
npm run test:storybook
|
||||
npm run test:storybook:playwright
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Tests Failing Due to Database Connection
|
||||
|
||||
Ensure SQL Server is running and environment variables are set:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.test.yaml ps
|
||||
```
|
||||
|
||||
### Port Conflicts
|
||||
|
||||
If port 1433 is in use, stop other SQL Server instances or modify the port in
|
||||
`docker-compose.test.yaml`.
|
||||
|
||||
### Stale Test Data
|
||||
|
||||
Clean up test database:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.test.yaml down -v
|
||||
```
|
||||
|
||||
### View Container Logs
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.test.yaml logs <service-name>
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Isolation**: Each test should be independent and not rely on other tests
|
||||
2. **Cleanup**: Use fixtures and dispose patterns for resource cleanup
|
||||
3. **Mocking**: Mock external dependencies in unit tests
|
||||
4. **Descriptive Names**: Use clear, descriptive test method names
|
||||
5. **Arrange-Act-Assert**: Follow AAA pattern in unit tests
|
||||
6. **Given-When-Then**: Follow GWT pattern in BDD scenarios
|
||||
205
docs/token-validation.md
Normal file
205
docs/token-validation.md
Normal file
@@ -0,0 +1,205 @@
|
||||
# Token Validation Architecture
|
||||
|
||||
## Overview
|
||||
|
||||
The Core project implements comprehensive JWT token validation across three token types:
|
||||
|
||||
- **Access Tokens**: Short-lived (1 hour) tokens for API authentication
|
||||
- **Refresh Tokens**: Long-lived (21 days) tokens for obtaining new access tokens
|
||||
- **Confirmation Tokens**: Short-lived (30 minutes) tokens for email confirmation
|
||||
|
||||
## Components
|
||||
|
||||
### Infrastructure Layer
|
||||
|
||||
#### [ITokenInfrastructure](Infrastructure.Jwt/ITokenInfrastructure.cs)
|
||||
|
||||
Low-level JWT operations.
|
||||
|
||||
**Methods:**
|
||||
- `GenerateJwt()` - Creates signed JWT tokens
|
||||
- `ValidateJwtAsync()` - Validates token signature, expiration, and format
|
||||
|
||||
**Implementation:** [JwtInfrastructure.cs](Infrastructure.Jwt/JwtInfrastructure.cs)
|
||||
- Uses Microsoft.IdentityModel.JsonWebTokens.JsonWebTokenHandler
|
||||
- Algorithm: HS256 (HMAC-SHA256)
|
||||
- Validates token lifetime, signature, and well-formedness
|
||||
|
||||
### Service Layer
|
||||
|
||||
#### [ITokenValidationService](Service.Auth/ITokenValidationService.cs)
|
||||
|
||||
High-level token validation with context (token type, user extraction).
|
||||
|
||||
**Methods:**
|
||||
- `ValidateAccessTokenAsync(string token)` - Validates access tokens
|
||||
- `ValidateRefreshTokenAsync(string token)` - Validates refresh tokens
|
||||
- `ValidateConfirmationTokenAsync(string token)` - Validates confirmation tokens
|
||||
|
||||
**Returns:** `ValidatedToken` record containing:
|
||||
- `UserId` (Guid)
|
||||
- `Username` (string)
|
||||
- `Principal` (ClaimsPrincipal) - Full JWT claims
|
||||
|
||||
**Implementation:** [TokenValidationService.cs](Service.Auth/TokenValidationService.cs)
|
||||
- Reads token secrets from environment variables
|
||||
- Extracts and validates claims (Sub, UniqueName)
|
||||
- Throws `UnauthorizedException` on validation failure
|
||||
|
||||
#### [ITokenService](Service.Auth/ITokenService.cs)
|
||||
|
||||
Token generation (existing service extended).
|
||||
|
||||
**Methods:**
|
||||
- `GenerateAccessToken(UserAccount)` - Creates 1-hour access token
|
||||
- `GenerateRefreshToken(UserAccount)` - Creates 21-day refresh token
|
||||
- `GenerateConfirmationToken(UserAccount)` - Creates 30-minute confirmation token
|
||||
|
||||
### Integration Points
|
||||
|
||||
#### [ConfirmationService](Service.Auth/IConfirmationService.cs)
|
||||
|
||||
**Flow:**
|
||||
1. Receives confirmation token from user
|
||||
2. Calls `TokenValidationService.ValidateConfirmationTokenAsync()`
|
||||
3. Extracts user ID from validated token
|
||||
4. Calls `AuthRepository.ConfirmUserAccountAsync()` to update database
|
||||
5. Returns confirmation result
|
||||
|
||||
#### [RefreshTokenService](Service.Auth/RefreshTokenService.cs)
|
||||
|
||||
**Flow:**
|
||||
1. Receives refresh token from user
|
||||
2. Calls `TokenValidationService.ValidateRefreshTokenAsync()`
|
||||
3. Retrieves user account via `AuthRepository.GetUserByIdAsync()`
|
||||
4. Issues new access and refresh tokens via `TokenService`
|
||||
5. Returns new token pair
|
||||
|
||||
#### [AuthController](API.Core/Controllers/AuthController.cs)
|
||||
|
||||
**Endpoints:**
|
||||
- `POST /api/auth/register` - Register new user
|
||||
- `POST /api/auth/login` - Authenticate user
|
||||
- `POST /api/auth/confirm?token=...` - Confirm email
|
||||
- `POST /api/auth/refresh` - Refresh access token
|
||||
|
||||
## Validation Security
|
||||
|
||||
### Token Secrets
|
||||
|
||||
Three independent secrets enable:
|
||||
- **Key rotation** - Rotate each secret type independently
|
||||
- **Isolation** - Compromise of one secret doesn't affect others
|
||||
- **Different expiration** - Different token types can expire at different rates
|
||||
|
||||
**Environment Variables:**
|
||||
```bash
|
||||
ACCESS_TOKEN_SECRET=... # Signs 1-hour access tokens
|
||||
REFRESH_TOKEN_SECRET=... # Signs 21-day refresh tokens
|
||||
CONFIRMATION_TOKEN_SECRET=... # Signs 30-minute confirmation tokens
|
||||
```
|
||||
|
||||
### Validation Checks
|
||||
|
||||
Each token is validated for:
|
||||
|
||||
1. **Signature Verification** - Token must be signed with correct secret
|
||||
2. **Expiration** - Token must not be expired (checked against current time)
|
||||
3. **Claims Presence** - Required claims (Sub, UniqueName) must be present
|
||||
4. **Claims Format** - UserId claim must be a valid GUID
|
||||
|
||||
### Error Handling
|
||||
|
||||
Validation failures return HTTP 401 Unauthorized:
|
||||
- Invalid signature → "Invalid token"
|
||||
- Expired token → "Invalid token" (message doesn't reveal reason for security)
|
||||
- Missing claims → "Invalid token"
|
||||
- Malformed claims → "Invalid token"
|
||||
|
||||
## Token Lifecycle
|
||||
|
||||
### Access Token Lifecycle
|
||||
|
||||
1. **Generation**: During login (1-hour validity)
|
||||
2. **Usage**: Included in Authorization header on API requests
|
||||
3. **Validation**: Validated on protected endpoints
|
||||
4. **Expiration**: Token becomes invalid after 1 hour
|
||||
5. **Refresh**: Use refresh token to obtain new access token
|
||||
|
||||
### Refresh Token Lifecycle
|
||||
|
||||
1. **Generation**: During login (21-day validity)
|
||||
2. **Storage**: Client-side (secure storage)
|
||||
3. **Usage**: Posted to `/api/auth/refresh` endpoint
|
||||
4. **Validation**: Validated by RefreshTokenService
|
||||
5. **Rotation**: New refresh token issued on successful refresh
|
||||
6. **Expiration**: Token becomes invalid after 21 days
|
||||
|
||||
### Confirmation Token Lifecycle
|
||||
|
||||
1. **Generation**: During user registration (30-minute validity)
|
||||
2. **Delivery**: Emailed to user in confirmation link
|
||||
3. **Usage**: User clicks link, token posted to `/api/auth/confirm`
|
||||
4. **Validation**: Validated by ConfirmationService
|
||||
5. **Completion**: User account marked as confirmed
|
||||
6. **Expiration**: Token becomes invalid after 30 minutes
|
||||
|
||||
## Testing
|
||||
|
||||
### Unit Tests
|
||||
|
||||
**TokenValidationService.test.cs**
|
||||
- Happy path: Valid token extraction
|
||||
- Error cases: Invalid, expired, malformed tokens
|
||||
- Missing/invalid claims scenarios
|
||||
|
||||
**RefreshTokenService.test.cs**
|
||||
- Successful refresh with valid token
|
||||
- Invalid/expired refresh token rejection
|
||||
- Non-existent user handling
|
||||
|
||||
**ConfirmationService.test.cs**
|
||||
- Successful confirmation with valid token
|
||||
- Token validation failures
|
||||
- User not found scenarios
|
||||
|
||||
### BDD Tests (Reqnroll)
|
||||
|
||||
**TokenRefresh.feature**
|
||||
- Successful token refresh
|
||||
- Invalid/expired token rejection
|
||||
- Missing token validation
|
||||
|
||||
**Confirmation.feature**
|
||||
- Successful email confirmation
|
||||
- Expired/tampered token rejection
|
||||
- Missing token validation
|
||||
|
||||
**AccessTokenValidation.feature**
|
||||
- Protected endpoint access token validation
|
||||
- Invalid/expired access token rejection
|
||||
- Token type mismatch (refresh used as access token)
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
### Stretch Goals
|
||||
|
||||
1. **Middleware for Access Token Validation**
|
||||
- Automatically validate access tokens on protected routes
|
||||
- Populate HttpContext.User from token claims
|
||||
- Return 401 for invalid/missing tokens
|
||||
|
||||
2. **Token Blacklisting**
|
||||
- Implement token revocation (e.g., on logout)
|
||||
- Store blacklisted tokens in cache/database
|
||||
- Check blacklist during validation
|
||||
|
||||
3. **Refresh Token Rotation Strategy**
|
||||
- Detect token reuse (replay attacks)
|
||||
- Automatically invalidate entire token chain on reuse
|
||||
- Log suspicious activity
|
||||
|
||||
4. **Structured Logging**
|
||||
- Log token validation attempts
|
||||
- Track failed validation reasons
|
||||
- Alert on repeated validation failures (brute force detection)
|
||||
2411
misc/raw-data/beers.csv
Normal file
2411
misc/raw-data/beers.csv
Normal file
File diff suppressed because it is too large
Load Diff
559
misc/raw-data/breweries.csv
Normal file
559
misc/raw-data/breweries.csv
Normal file
@@ -0,0 +1,559 @@
|
||||
,name,city,state
|
||||
0,NorthGate Brewing ,Minneapolis, MN
|
||||
1,Against the Grain Brewery,Louisville, KY
|
||||
2,Jack's Abby Craft Lagers,Framingham, MA
|
||||
3,Mike Hess Brewing Company,San Diego, CA
|
||||
4,Fort Point Beer Company,San Francisco, CA
|
||||
5,COAST Brewing Company,Charleston, SC
|
||||
6,Great Divide Brewing Company,Denver, CO
|
||||
7,Tapistry Brewing,Bridgman, MI
|
||||
8,Big Lake Brewing,Holland, MI
|
||||
9,The Mitten Brewing Company,Grand Rapids, MI
|
||||
10,Brewery Vivant,Grand Rapids, MI
|
||||
11,Petoskey Brewing,Petoskey, MI
|
||||
12,Blackrocks Brewery,Marquette, MI
|
||||
13,Perrin Brewing Company,Comstock Park, MI
|
||||
14,Witch's Hat Brewing Company,South Lyon, MI
|
||||
15,Founders Brewing Company,Grand Rapids, MI
|
||||
16,Flat 12 Bierwerks,Indianapolis, IN
|
||||
17,Tin Man Brewing Company,Evansville, IN
|
||||
18,Black Acre Brewing Co.,Indianapolis, IN
|
||||
19,Brew Link Brewing,Plainfield, IN
|
||||
20,Bare Hands Brewery,Granger, IN
|
||||
21,Three Pints Brewing,Martinsville, IN
|
||||
22,Four Fathers Brewing ,Valparaiso, IN
|
||||
23,Indiana City Brewing,Indianapolis, IN
|
||||
24,Burn 'Em Brewing,Michigan City, IN
|
||||
25,Sun King Brewing Company,Indianapolis, IN
|
||||
26,Evil Czech Brewery,Mishawaka, IN
|
||||
27,450 North Brewing Company,Columbus, IN
|
||||
28,Taxman Brewing Company,Bargersville, IN
|
||||
29,Cedar Creek Brewery,Seven Points, TX
|
||||
30,SanTan Brewing Company,Chandler, AZ
|
||||
31,Boulevard Brewing Company,Kansas City, MO
|
||||
32,James Page Brewing Company,Stevens Point, WI
|
||||
33,The Dudes' Brewing Company,Torrance, CA
|
||||
34,Ballast Point Brewing Company,San Diego, CA
|
||||
35,Anchor Brewing Company,San Francisco, CA
|
||||
36,Figueroa Mountain Brewing Company,Buellton, CA
|
||||
37,Avery Brewing Company,Boulder, CO
|
||||
38,Twisted X Brewing Company,Dripping Springs, TX
|
||||
39,Gonzo's BiggDogg Brewing,Kalamazoo, MI
|
||||
40,Big Muddy Brewing,Murphysboro, IL
|
||||
41,Lost Nation Brewing,East Fairfield, VT
|
||||
42,Rising Tide Brewing Company,Portland, ME
|
||||
43,Rivertowne Brewing Company,Export, PA
|
||||
44,Revolution Brewing Company,Chicago, IL
|
||||
45,Tallgrass Brewing Company,Manhattan, KS
|
||||
46,Sixpoint Craft Ales,Brooklyn, NY
|
||||
47,White Birch Brewing,Hooksett, NH
|
||||
48,Firestone Walker Brewing Company,Paso Robles, CA
|
||||
49,SweetWater Brewing Company,Atlanta, GA
|
||||
50,Flying Mouse Brewery,Troutville, VA
|
||||
51,Upslope Brewing Company,Boulder, CO
|
||||
52,Pipeworks Brewing Company,Chicago, IL
|
||||
53,Bent Brewstillery,Roseville, MN
|
||||
54,Flesk Brewing Company,Lombard, IL
|
||||
55,Pollyanna Brewing Company,Lemont, IL
|
||||
56,BuckleDown Brewing,Lyons, IL
|
||||
57,Destihl Brewery,Bloomington, IL
|
||||
58,Summit Brewing Company,St. Paul, MN
|
||||
59,Latitude 42 Brewing Company,Portage, MI
|
||||
60,4 Hands Brewing Company,Saint Louis, MO
|
||||
61,Surly Brewing Company,Brooklyn Center, MN
|
||||
62,Against The Grain Brewery,Louisville, KY
|
||||
63,Crazy Mountain Brewing Company,Edwards, CO
|
||||
64,SlapShot Brewing Company,Chicago, IL
|
||||
65,Mikerphone Brewing,Chicago, IL
|
||||
66,Freetail Brewing Company,San Antonio, TX
|
||||
67,3 Daughters Brewing,St Petersburg, FL
|
||||
68,Red Shedman Farm Brewery and Hop...,Mt. Airy, MD
|
||||
69,Appalachian Mountain Brewery,Boone, NC
|
||||
70,Birdsong Brewing Company,Charlotte, NC
|
||||
71,Union Craft Brewing,Baltimore, MD
|
||||
72,Atwater Brewery,Detroit, MI
|
||||
73,Ale Asylum,Madison, WI
|
||||
74,Two Brothers Brewing Company,Warrenville, IL
|
||||
75,Bent Paddle Brewing Company,Duluth, MN
|
||||
76,Bell's Brewery,Kalamazoo, MI
|
||||
77,Blue Owl Brewing,Austin, TX
|
||||
78,Speakasy Ales & Lagers,San Francisco, CA
|
||||
79,Black Tooth Brewing Company,Sheridan, WY
|
||||
80,Hopworks Urban Brewery,Portland, OR
|
||||
81,Epic Brewing,Denver, CO
|
||||
82,New Belgium Brewing Company,Fort Collins, CO
|
||||
83,Sierra Nevada Brewing Company,Chico, CA
|
||||
84,Keweenaw Brewing Company,Houghton, MI
|
||||
85,Brewery Terra Firma,Traverse City, MI
|
||||
86,Grey Sail Brewing Company,Westerly, RI
|
||||
87,Kirkwood Station Brewing Company,Kirkwood, MO
|
||||
88,Goose Island Brewing Company,Chicago, IL
|
||||
89,Broad Brook Brewing LLC,East Windsor, CT
|
||||
90,The Lion Brewery,Wilkes-Barre, PA
|
||||
91,Madtree Brewing Company,Cincinnati, OH
|
||||
92,Jackie O's Pub & Brewery,Athens, OH
|
||||
93,Rhinegeist Brewery,Cincinnati, OH
|
||||
94,Warped Wing Brewing Company,Dayton, OH
|
||||
95,Blackrocks Brewery,Marquette, MA
|
||||
96,Catawba Valley Brewing Company,Morganton, NC
|
||||
97,Tröegs Brewing Company,Hershey, PA
|
||||
98,Mission Brewery,San Diego, CA
|
||||
99,Christian Moerlein Brewing Company,Cincinnati, OH
|
||||
100,West Sixth Brewing,Lexington, KY
|
||||
101,Coastal Extreme Brewing Company,Newport, RI
|
||||
102,King Street Brewing Company,Anchorage, AK
|
||||
103,Beer Works Brewery,Lowell, MA
|
||||
104,Lone Tree Brewing Company,Lone Tree, CO
|
||||
105,Four String Brewing Company,Columbus, OH
|
||||
106,Glabrous Brewing Company,Pineland, ME
|
||||
107,Bonfire Brewing Company,Eagle, CO
|
||||
108,Thomas Hooker Brewing Company,Bloomfield, CT
|
||||
109,"Woodstock Inn, Station & Brewery",North Woodstock, NH
|
||||
110,Renegade Brewing Company,Denver, CO
|
||||
111,Mother Earth Brew Company,Vista, CA
|
||||
112,Black Market Brewing Company,Temecula, CA
|
||||
113,Vault Brewing Company,Yardley, PA
|
||||
114,Jailbreak Brewing Company,Laurel, MD
|
||||
115,Smartmouth Brewing Company,Norfolk, VA
|
||||
116,Base Camp Brewing Co.,Portland, OR
|
||||
117,Alameda Brewing,Portland, OR
|
||||
118,Southern Star Brewing Company,Conroe, TX
|
||||
119,Steamworks Brewing Company,Durango, CO
|
||||
120,Horny Goat Brew Pub,Milwaukee, WI
|
||||
121,Cheboygan Brewing Company,Cheboygan, MI
|
||||
122,Center of the Universe Brewing C...,Ashland, VA
|
||||
123,Ipswich Ale Brewery,Ipswich, MA
|
||||
124,Griffin Claw Brewing Company,Birmingham, MI
|
||||
125,Karbach Brewing Company,Houston, TX
|
||||
126,Uncle Billy's Brewery and Smokeh...,Austin, TX
|
||||
127,Deep Ellum Brewing Company,Dallas, TX
|
||||
128,Real Ale Brewing Company,Blanco, TX
|
||||
129,Straub Brewery,St Mary's, PA
|
||||
130,Shebeen Brewing Company,Wolcott, CT
|
||||
131,Stevens Point Brewery,Stevens Point, WI
|
||||
132,Weston Brewing Company,Weston, MO
|
||||
133,Southern Prohibition Brewing Com...,Hattiesburg, MS
|
||||
134,Minhas Craft Brewery,Monroe, WI
|
||||
135,Pug Ryan's Brewery,Dillon, CO
|
||||
136,Hops & Grains Brewing Company,Austin, TX
|
||||
137,Sietsema Orchards and Cider Mill,Ada, MI
|
||||
138,Summit Brewing Company,St Paul, MN
|
||||
139,Core Brewing & Distilling Company,Springdale, AR
|
||||
140,Independence Brewing Company,Austin, TX
|
||||
141,Cigar City Brewing Company,Tampa, FL
|
||||
142,Third Street Brewhouse,Cold Spring, MN
|
||||
143,Narragansett Brewing Company,Providence, RI
|
||||
144,Grimm Brothers Brewhouse,Loveland, CO
|
||||
145,Cisco Brewers,Nantucket, MA
|
||||
146,Angry Minnow,Hayward, WI
|
||||
147,Platform Beer Company,Cleveland, OH
|
||||
148,Odyssey Beerwerks,Arvada, CO
|
||||
149,Lonerider Brewing Company,Raleigh, NC
|
||||
150,Oakshire Brewing,Eugene, OR
|
||||
151,Fort Pitt Brewing Company,Latrobe, PA
|
||||
152,Tin Roof Brewing Company,Baton Rouge, LA
|
||||
153,Three Creeks Brewing,Sisters, OR
|
||||
154,2 Towns Ciderhouse,Corvallis, OR
|
||||
155,Caldera Brewing Company,Ashland, OR
|
||||
156,Greenbrier Valley Brewing Company,Lewisburg, WV
|
||||
157,Phoenix Ale Brewery,Phoenix, AZ
|
||||
158,Lumberyard Brewing Company,Flagstaff, AZ
|
||||
159,Uinta Brewing Company,Salt Lake City, UT
|
||||
160,Four Peaks Brewing Company,Tempe, AZ
|
||||
161,Martin House Brewing Company,Fort Worth, TX
|
||||
162,Right Brain Brewery,Traverse City, MI
|
||||
163,Sly Fox Brewing Company,Phoenixville, PA
|
||||
164,Round Guys Brewing,Lansdale, PA
|
||||
165,Great Crescent Brewery,Aurora, IN
|
||||
166,Oskar Blues Brewery,Longmont, CO
|
||||
167,Boxcar Brewing Company,West Chester, PA
|
||||
168,High Hops Brewery,Windsor, CO
|
||||
169,Crooked Fence Brewing Company,Garden City, ID
|
||||
170,Everybody's Brewing,White Salmon, WA
|
||||
171,Anderson Valley Brewing Company,Boonville, CA
|
||||
172,Fiddlehead Brewing Company,Shelburne, VT
|
||||
173,Evil Twin Brewing,Brooklyn, NY
|
||||
174,New Orleans Lager & Ale Brewing ...,New Orleans, LA
|
||||
175,Spiteful Brewing Company,Chicago, IL
|
||||
176,Rahr & Sons Brewing Company,Fort Worth, TX
|
||||
177,18th Street Brewery,Gary, IN
|
||||
178,Cambridge Brewing Company,Cambridge, MA
|
||||
179,Carolina Brewery,Pittsboro, NC
|
||||
180,Frog Level Brewing Company,Waynesville, NC
|
||||
181,Wild Wolf Brewing Company,Nellysford, VA
|
||||
182,COOP Ale Works,Oklahoma City, OK
|
||||
183,Seventh Son Brewing Company,Columbus, OH
|
||||
184,Oasis Texas Brewing Company,Austin, TX
|
||||
185,Vander Mill Ciders,Spring Lake, MI
|
||||
186,St. Julian Winery,Paw Paw, MI
|
||||
187,Pedernales Brewing Company,Fredericksburg, TX
|
||||
188,Mother's Brewing,Springfield, MO
|
||||
189,Modern Monks Brewery,Lincoln, NE
|
||||
190,Two Beers Brewing Company,Seattle, WA
|
||||
191,Snake River Brewing Company,Jackson, WY
|
||||
192,Capital Brewery,Middleton, WI
|
||||
193,Anthem Brewing Company,Oklahoma City, OK
|
||||
194,Goodlife Brewing Co.,Bend, OR
|
||||
195,Breakside Brewery,Portland, OR
|
||||
196,Goose Island Brewery Company,Chicago, IL
|
||||
197,Burnside Brewing Co.,Portland, OR
|
||||
198,Hop Valley Brewing Company,Springfield, OR
|
||||
199,Worthy Brewing Company,Bend, OR
|
||||
200,Occidental Brewing Company,Portland, OR
|
||||
201,Fearless Brewing Company,Estacada, OR
|
||||
202,Upland Brewing Company,Bloomington, IN
|
||||
203,Mehana Brewing Co.,Hilo, HI
|
||||
204,Hawai'i Nui Brewing Co.,Hilo, HI
|
||||
205,People's Brewing Company,Lafayette, IN
|
||||
206,Fort George Brewery,Astoria, OR
|
||||
207,Branchline Brewing Company,San Antonio, TX
|
||||
208,Kalona Brewing Company,Kalona, IA
|
||||
209,Modern Times Beer,San Diego, CA
|
||||
210,Temperance Beer Company,Evanston, IL
|
||||
211,Wisconsin Brewing Company,Verona, WI
|
||||
212,Crow Peak Brewing Company,Spearfish, SD
|
||||
213,Grapevine Craft Brewery,Farmers Branch, TX
|
||||
214,Buffalo Bayou Brewing Company,Houston, TX
|
||||
215,Texian Brewing Co.,Richmond, TX
|
||||
216,Orpheus Brewing,Atlanta, GA
|
||||
217,Forgotten Boardwalk,Cherry Hill, NJ
|
||||
218,Laughing Dog Brewing Company,Ponderay, ID
|
||||
219,Bozeman Brewing Company,Bozeman, MT
|
||||
220,Big Choice Brewing,Broomfield, CO
|
||||
221,Big Storm Brewing Company,Odessa, FL
|
||||
222,Carton Brewing Company,Atlantic Highlands, NJ
|
||||
223,Midnight Sun Brewing Company,Anchorage, AK
|
||||
224,Fat Head's Brewery,Middleburg Heights, OH
|
||||
225,Refuge Brewery,Temecula, CA
|
||||
226,Chatham Brewing,Chatham, NY
|
||||
227,DC Brau Brewing Company,Washington, DC
|
||||
228,Geneva Lake Brewing Company,Lake Geneva, WI
|
||||
229,Rochester Mills Brewing Company,Rochester, MI
|
||||
230,Cape Ann Brewing Company,Gloucester, MA
|
||||
231,Borderlands Brewing Company,Tucson, AZ
|
||||
232,College Street Brewhouse and Pub,Lake Havasu City, AZ
|
||||
233,Joseph James Brewing Company,Henderson, NV
|
||||
234,Harpoon Brewery,Boston, MA
|
||||
235,Back East Brewing Company,Bloomfield, CT
|
||||
236,Champion Brewing Company,Charlottesville, VA
|
||||
237,Devil's Backbone Brewing Company,Lexington, VA
|
||||
238,Newburgh Brewing Company,Newburgh, NY
|
||||
239,Wiseacre Brewing Company,Memphis, TN
|
||||
240,Golden Road Brewing,Los Angeles, CA
|
||||
241,New Republic Brewing Company,College Station, TX
|
||||
242,Infamous Brewing Company,Austin, TX
|
||||
243,Two Henrys Brewing Company,Plant City, FL
|
||||
244,Lift Bridge Brewing Company,Stillwater, MN
|
||||
245,Lucky Town Brewing Company,Jackson, MS
|
||||
246,Quest Brewing Company,Greenville, SC
|
||||
247,Creature Comforts,Athens, GA
|
||||
248,Half Full Brewery,Stamford, CT
|
||||
249,Southampton Publick House,Southampton, NY
|
||||
250,Chapman's Brewing,Angola, IN
|
||||
251,Barrio Brewing Company,Tucson, AZ
|
||||
252,Santa Cruz Mountain Brewing,Santa Cruz, CA
|
||||
253,Frankenmuth Brewery,Frankenmuth, MI
|
||||
254,Meckley's Cidery,Somerset Center, MI
|
||||
255,Stillwater Artisanal Ales,Baltimore, MD
|
||||
256,Finch's Beer Company,Chicago, IL
|
||||
257,South Austin Brewery,South Austin, TX
|
||||
258,Bauhaus Brew Labs,Minneapolis, MN
|
||||
259,Ozark Beer Company,Rogers, AR
|
||||
260,Mountain Town Brewing Company ,Mount Pleasant, MI
|
||||
261,Otter Creek Brewing,Waterbury, VT
|
||||
262,The Brewer's Art,Baltimore, MD
|
||||
263,Denver Beer Company,Denver, CO
|
||||
264,Ska Brewing Company,Durango, CO
|
||||
265,Tractor Brewing Company,Albuquerque, NM
|
||||
266,Peak Organic Brewing Company,Portland, ME
|
||||
267,Cape Cod Beer,Hyannis, MA
|
||||
268,Long Trail Brewing Company,Bridgewater Corners, VT
|
||||
269,Great Raft Brewing Company,Shreveport, LA
|
||||
270,Alaskan Brewing Company,Juneau, AK
|
||||
271,Notch Brewing Company,Ipswich, MA
|
||||
272,The Alchemist,Waterbury, VT
|
||||
273,Three Notch'd Brewing Company,Charlottesville, VA
|
||||
274,Portside Brewery,Cleveland, OH
|
||||
275,Otter Creek Brewing,Middlebury, VT
|
||||
276,Montauk Brewing Company,Montauk, NY
|
||||
277,Indeed Brewing Company,Minneapolis, MN
|
||||
278,Berkshire Brewing Company,South Deerfield, MA
|
||||
279,Foolproof Brewing Company,Pawtucket, RI
|
||||
280,Headlands Brewing Company,Mill Valley, CA
|
||||
281,Bolero Snort Brewery,Ridgefield Park, NJ
|
||||
282,Thunderhead Brewing Company,Kearney, NE
|
||||
283,Defiance Brewing Company,Hays, KS
|
||||
284,Milwaukee Brewing Company,Milwaukee, WI
|
||||
285,Catawba Island Brewing,Port Clinton, OH
|
||||
286,Back Forty Beer Company,Gadsden, AL
|
||||
287,Four Corners Brewing Company,Dallas, TX
|
||||
288,Saint Archer Brewery,San Diego, CA
|
||||
289,Rogue Ales,Newport, OR
|
||||
290,Hale's Ales,Seattle, WA
|
||||
291,Tommyknocker Brewery,Idaho Springs, CO
|
||||
292,Baxter Brewing Company,Lewiston, ME
|
||||
293,Northampton Brewery,Northamtpon, MA
|
||||
294,Black Shirt Brewing Company,Denver, CO
|
||||
295,Wachusett Brewing Company,Westminster, MA
|
||||
296,Widmer Brothers Brewing Company,Portland, OR
|
||||
297,Hop Farm Brewing Company,Pittsburgh, PA
|
||||
298,Liquid Hero Brewery,York, PA
|
||||
299,Matt Brewing Company,Utica, NY
|
||||
300,Boston Beer Company,Boston, MA
|
||||
301,Old Forge Brewing Company,Danville, PA
|
||||
302,Utah Brewers Cooperative,Salt Lake City, UT
|
||||
303,Magic Hat Brewing Company,South Burlington, VT
|
||||
304,Blue Hills Brewery,Canton, MA
|
||||
305,Night Shift Brewing,Everett, MA
|
||||
306,Beach Brewing Company,Virginia Beach, VA
|
||||
307,Payette Brewing Company,Garden City, ID
|
||||
308,Brew Bus Brewing,Tampa, FL
|
||||
309,Sockeye Brewing Company,Boise, ID
|
||||
310,Pine Street Brewery,San Francisco, CA
|
||||
311,Dirty Bucket Brewing Company,Woodinville, WA
|
||||
312,Jackalope Brewing Company,Nashville, TN
|
||||
313,Slanted Rock Brewing Company,Meridian, ID
|
||||
314,Piney River Brewing Company,Bucryus, MO
|
||||
315,Cutters Brewing Company,Avon, IN
|
||||
316,Iron Hill Brewery & Restaurant,Wilmington, DE
|
||||
317,Marshall Wharf Brewing Company,Belfast, ME
|
||||
318,Banner Beer Company,Williamsburg, MA
|
||||
319,Dick's Brewing Company,Centralia, WA
|
||||
320,Claremont Craft Ales,Claremont, CA
|
||||
321,Rivertown Brewing Company,Lockland, OH
|
||||
322,Voodoo Brewery,Meadville, PA
|
||||
323,D.L. Geary Brewing Company,Portland, ME
|
||||
324,Pisgah Brewing Company,Black Mountain, NC
|
||||
325,Neshaminy Creek Brewing Company,Croydon, PA
|
||||
326,Morgan Street Brewery,Saint Louis, MO
|
||||
327,Half Acre Beer Company,Chicago, IL
|
||||
328,The Just Beer Project,Burlington, VT
|
||||
329,The Bronx Brewery,Bronx, NY
|
||||
330,Dead Armadillo Craft Brewing,Tulsa, OK
|
||||
331,Catawba Brewing Company,Morganton, NC
|
||||
332,La Cumbre Brewing Company,Albuquerque, NM
|
||||
333,David's Ale Works,Diamond Springs, CA
|
||||
334,The Traveler Beer Company,Burlington, VT
|
||||
335,Fargo Brewing Company,Fargo, ND
|
||||
336,Big Sky Brewing Company,Missoula, MT
|
||||
337,Nebraska Brewing Company,Papillion, NE
|
||||
338,Uncle John's Fruit House Winery,St. John's, MI
|
||||
339,Wormtown Brewery,Worcester, MA
|
||||
340,Due South Brewing Company,Boynton Beach, FL
|
||||
341,Palisade Brewing Company,Palisade, CO
|
||||
342,KelSo Beer Company,Brooklyn, NY
|
||||
343,Hardywood Park Craft Brewery,Richmond, VA
|
||||
344,Wolf Hills Brewing Company,Abingdon, VA
|
||||
345,Lavery Brewing Company,Erie, PA
|
||||
346,Manzanita Brewing Company,Santee, CA
|
||||
347,Fullsteam Brewery,Durham, NC
|
||||
348,Four Horsemen Brewing Company,South Bend, IN
|
||||
349,Hinterland Brewery,Green Bay, WI
|
||||
350,Central Coast Brewing Company,San Luis Obispo, CA
|
||||
351,Westfield River Brewing Company,Westfield, MA
|
||||
352,Elevator Brewing Company,Columbus, OH
|
||||
353,Aslan Brewing Company,Bellingham, WA
|
||||
354,Kulshan Brewery,Bellingham, WA
|
||||
355,Pikes Peak Brewing Company,Monument, CO
|
||||
356,Manayunk Brewing Company,Philadelphia, PA
|
||||
357,Buckeye Brewing,Cleveland, OH
|
||||
358,Daredevil Brewing Company,Shelbyville, IN
|
||||
359,NoDa Brewing Company,Charlotte, NC
|
||||
360,Aviator Brewing Company,Fuquay-Varina, NC
|
||||
361,Wild Onion Brewing Company,Lake Barrington, IL
|
||||
362,Hilliard's Beer,Seattle, WA
|
||||
363,Mikkeller,Pottstown, PA
|
||||
364,Bohemian Brewery,Midvale, UT
|
||||
365,Great River Brewery,Davenport, IA
|
||||
366,Mustang Brewing Company,Mustang, OK
|
||||
367,Airways Brewing Company,Kent, WA
|
||||
368,21st Amendment Brewery,San Francisco, CA
|
||||
369,Eddyline Brewery & Restaurant,Buena Vista, CO
|
||||
370,Pizza Port Brewing Company,Carlsbad, CA
|
||||
371,Sly Fox Brewing Company,Pottstown, PA
|
||||
372,Spring House Brewing Company,Conestoga, PA
|
||||
373,7venth Sun,Dunedin, FL
|
||||
374,Astoria Brewing Company,Astoria, OR
|
||||
375,Maui Brewing Company,Lahaina, HI
|
||||
376,RoughTail Brewing Company,Midwest City, OK
|
||||
377,Lucette Brewing Company,Menominee, WI
|
||||
378,Bold City Brewery,Jacksonville, FL
|
||||
379,Grey Sail Brewing of Rhode Island,Westerly, RI
|
||||
380,Blue Blood Brewing Company,Lincoln, NE
|
||||
381,Swashbuckler Brewing Company,Manheim, PA
|
||||
382,Blue Mountain Brewery,Afton, VA
|
||||
383,Starr Hill Brewery,Crozet, VA
|
||||
384,Westbrook Brewing Company,Mt. Pleasant, SC
|
||||
385,Shipyard Brewing Company,Portland, ME
|
||||
386,Revolution Brewing,Paonia, CO
|
||||
387,Natian Brewery,Portland, OR
|
||||
388,Alltech's Lexington Brewing Company,Lexington, KY
|
||||
389,Oskar Blues Brewery (North Carol...,Brevard, NC
|
||||
390,Orlison Brewing Company,Airway Heights, WA
|
||||
391,Breckenridge Brewery,Denver, CO
|
||||
392,Santa Fe Brewing Company,Santa Fe, NM
|
||||
393,Miami Brewing Company,Miami, FL
|
||||
394,Schilling & Company,Seattle, WA
|
||||
395,Hops & Grain Brewery,Austin, TX
|
||||
396,White Flame Brewing Company,Hudsonville, MI
|
||||
397,Ruhstaller Beer Company,Sacramento, CA
|
||||
398,Saugatuck Brewing Company,Douglas, MI
|
||||
399,Moab Brewery,Moab, UT
|
||||
400,Macon Beer Company,Macon, GA
|
||||
401,Amnesia Brewing Company,Washougal, WA
|
||||
402,Wolverine State Brewing Company,Ann Arbor, MI
|
||||
403,Red Tank Cider Company,Bend, OR
|
||||
404,Cascadia Ciderworks United,Portland, OR
|
||||
405,Fate Brewing Company,Boulder, CO
|
||||
406,Lazy Monk Brewing,Eau Claire, WI
|
||||
407,Bitter Root Brewing,Hamilton, MT
|
||||
408,10 Barrel Brewing Company,Bend, OR
|
||||
409,Tamarack Brewing Company,Lakeside, MT
|
||||
410,New England Brewing Company,Woodbridge, CT
|
||||
411,Seattle Cider Company,Seattle, WA
|
||||
412,Straight to Ale,Huntsville, AL
|
||||
413,Austin Beerworks,Austin, TX
|
||||
414,Blue Mountain Brewery,Arrington, VA
|
||||
415,Coastal Empire Beer Company,Savannah, GA
|
||||
416,Jack's Hard Cider (Hauser Estate...,Biglerville, PA
|
||||
417,Boulder Beer Company,Boulder, CO
|
||||
418,Coalition Brewing Company,Portland, OR
|
||||
419,Sanitas Brewing Company,Boulder, CO
|
||||
420,Gore Range Brewery,Edwards, CO
|
||||
421,Redstone Meadery,Boulder, CO
|
||||
422,Blue Dog Mead,Eugene, OR
|
||||
423,Hess Brewing Company,San Diego, CA
|
||||
424,Wynkoop Brewing Company,Denver, CO
|
||||
425,Ciderboys,Stevens Point, WI
|
||||
426,Armadillo Ale Works,Denton, TX
|
||||
427,Roanoke Railhouse Brewery,Roanoke, VA
|
||||
428,Schlafly Brewing Company,Saint Louis, MO
|
||||
429,Asher Brewing Company,Boulder, CO
|
||||
430,Lost Rhino Brewing Company,Ashburn, VA
|
||||
431,North Country Brewing Company,Slippery Rock, PA
|
||||
432,Seabright Brewery,Santa Cruz, CA
|
||||
433,French Broad Brewery,Asheville, NC
|
||||
434,Angry Orchard Cider Company,Cincinnati, OH
|
||||
435,Two Roads Brewing Company,Stratford, CT
|
||||
436,Southern Oregon Brewing Company,Medford, OR
|
||||
437,Brooklyn Brewery,Brooklyn, NY
|
||||
438,The Right Brain Brewery,Traverse City, MI
|
||||
439,Kona Brewing Company,Kona, HI
|
||||
440,MillKing It Productions,Royal Oak, MI
|
||||
441,Pateros Creek Brewing Company,Fort Collins, CO
|
||||
442,O'Fallon Brewery,O'Fallon, MO
|
||||
443,Marble Brewery,Albuquerque, NM
|
||||
444,Big Wood Brewery,Vadnais Heights, MN
|
||||
445,Howard Brewing Company,Lenoir, NC
|
||||
446,Downeast Cider House,Leominster, MA
|
||||
447,Swamp Head Brewery,Gainesville, FL
|
||||
448,Mavericks Beer Company,Half Moon Bay, CA
|
||||
449,TailGate Beer,San Diego, CA
|
||||
450,Northwest Brewing Company,Pacific, WA
|
||||
451,Dad & Dude's Breweria,Aurora, CO
|
||||
452,Centennial Beer Company,Edwards, CO
|
||||
453,Denali Brewing Company,Talkeetna, AK
|
||||
454,Deschutes Brewery,Bend, OR
|
||||
455,Sunken City Brewing Company,Hardy, VA
|
||||
456,Lucette Brewing Company,Menominie, WI
|
||||
457,The Black Tooth Brewing Company,Sheridan, WY
|
||||
458,Kenai River Brewing Company,Soldotna, AK
|
||||
459,River North Brewery,Denver, CO
|
||||
460,Fremont Brewing Company,Seattle, WA
|
||||
461,Armstrong Brewing Company,South San Francisco, CA
|
||||
462,AC Golden Brewing Company,Golden, CO
|
||||
463,Big Bend Brewing Company,Alpine, TX
|
||||
464,Good Life Brewing Company,Bend, OR
|
||||
465,Engine 15 Brewing,Jacksonville Beach, FL
|
||||
466,Green Room Brewing,Jacksonville, FL
|
||||
467,Brindle Dog Brewing Company,Tampa Bay, FL
|
||||
468,Peace Tree Brewing Company,Knoxville, IA
|
||||
469,Terrapin Brewing Company,Athens, GA
|
||||
470,Pete's Brewing Company,San Antonio, TX
|
||||
471,Okoboji Brewing Company,Spirit Lake, IA
|
||||
472,Crystal Springs Brewing Company,Boulder, CO
|
||||
473,Engine House 9,Tacoma, WA
|
||||
474,Tonka Beer Company,Minnetonka, MN
|
||||
475,Red Hare Brewing Company,Marietta, GA
|
||||
476,Hangar 24 Craft Brewery,Redlands, CA
|
||||
477,Big Elm Brewing,Sheffield, MA
|
||||
478,Good People Brewing Company,Birmingham, AL
|
||||
479,Heavy Seas Beer,Halethorpe, MD
|
||||
480,Telluride Brewing Company,Telluride, CO
|
||||
481,7 Seas Brewing Company,Gig Harbor, WA
|
||||
482,Confluence Brewing Company,Des Moines, IA
|
||||
483,Bale Breaker Brewing Company,Yakima, WA
|
||||
484,The Manhattan Brewing Company,New York, NY
|
||||
485,MacTarnahans Brewing Company,Portland, OR
|
||||
486,Stillmank Beer Company,Green Bay, WI
|
||||
487,Redhook Brewery,Woodinville, WA
|
||||
488,Dock Street Brewery,Philadelphia, PA
|
||||
489,Blue Point Brewing Company,Patchogue, NY
|
||||
490,Tampa Bay Brewing Company,Tampa, FL
|
||||
491,Devil's Canyon Brewery,Belmont, CA
|
||||
492,Stone Coast Brewing Company,Portland, ME
|
||||
493,Broken Tooth Brewing Company,Anchorage, AK
|
||||
494,Seven Brides Brewery,Silverton, OR
|
||||
495,Newburyport Brewing Company,Newburyport, MA
|
||||
496,Dry Dock Brewing Company,Aurora, CO
|
||||
497,Cans Bar and Canteen,Charlotte, NC
|
||||
498,Sprecher Brewing Company,Glendale, WI
|
||||
499,Wildwood Brewing Company,Stevensville, MT
|
||||
500,High Noon Saloon And Brewery,Leavenworth, KS
|
||||
501,Woodchuck Hard Cider,Middlebury, VT
|
||||
502,Sea Dog Brewing Company,Portland, ME
|
||||
503,Oskar Blues Brewery,Lyons, CO
|
||||
504,Carolina Beer & Beverage,Mooresville, NC
|
||||
505,Krebs Brewing Company (Pete's Pl...,Krebs, OK
|
||||
506,Warbird Brewing Company,Fort Wayne, IN
|
||||
507,Mudshark Brewing Company,Lake Havasu City, AZ
|
||||
508,Spilker Ales,Cortland, NE
|
||||
509,Wingman Brewers,Tacoma, WA
|
||||
510,Kettle House Brewing Company,Missoula, MT
|
||||
511,Sherwood Forest Brewers,Marlborough, MA
|
||||
512,Cottrell Brewing,Pawcatuck, CT
|
||||
513,Arctic Craft Brewery,Colorado Springs, CO
|
||||
514,Monkey Paw Pub & Brewery,San Diego, CA
|
||||
515,Crabtree Brewing Company,Greeley, CO
|
||||
516,Emerald City Beer Company,Seattle, WA
|
||||
517,Butcher's Brewing,Carlsbad, CA
|
||||
518,New South Brewing Company,Myrtle Beach, SC
|
||||
519,Big River Brewing Company,Chattanooga, TN
|
||||
520,Twisted Pine Brewing Company,Boulder, CO
|
||||
521,Flying Dog Brewery,Frederick, MD
|
||||
522,Uncommon Brewers,Santa Cruz, CA
|
||||
523,Aspen Brewing Company,Aspen, CO
|
||||
524,Triangle Brewing Company,Durham, NC
|
||||
525,Bomb Beer Company,New York, NY
|
||||
526,Churchkey Can Company,Seattle, WA
|
||||
527,Intuition Ale Works,Jacksonville, FL
|
||||
528,Asheville Brewing Company,Asheville, NC
|
||||
529,Northwoods Brewpub,Eau Claire, WI
|
||||
530,Buckbean Brewing Company,Reno, NV
|
||||
531,Dolores River Brewery,Dolores, CO
|
||||
532,Flat Rock Brewing Company,Smithton, PA
|
||||
533,Abita Brewing Company,Abita Springs, LA
|
||||
534,Mammoth Brewing Company,Mammoth Lakes, CA
|
||||
535,Harvest Moon Brewing Company,Belt, MT
|
||||
536,Grand Canyon Brewing Company,Williams, AZ
|
||||
537,Lewis and Clark Brewing Company,Helena, MT
|
||||
538,Dundee Brewing Company,Rochester, NY
|
||||
539,Twin Lakes Brewing Company,Greenville, DE
|
||||
540,Mother Earth Brewing Company,Kinston, NC
|
||||
541,Arcadia Brewing Company,Battle Creek, MI
|
||||
542,Angry Minnow Brewing Company,Hayward, WI
|
||||
543,Great Northern Brewing Company,Whitefish, MT
|
||||
544,Pyramid Breweries,Seattle, WA
|
||||
545,Lancaster Brewing Company,Lancaster, PA
|
||||
546,Upstate Brewing Company,Elmira, NY
|
||||
547,Moat Mountain Smoke House & Brew...,North Conway, NH
|
||||
548,Prescott Brewing Company,Prescott, AZ
|
||||
549,Mogollon Brewing Company,Flagstaff, AZ
|
||||
550,Wind River Brewing Company,Pinedale, WY
|
||||
551,Silverton Brewery,Silverton, CO
|
||||
552,Mickey Finn's Brewery,Libertyville, IL
|
||||
553,Covington Brewhouse,Covington, LA
|
||||
554,Dave's Brewfarm,Wilson, WI
|
||||
555,Ukiah Brewing Company,Ukiah, CA
|
||||
556,Butternuts Beer and Ale,Garrattsville, NY
|
||||
557,Sleeping Lady Brewing Company,Anchorage, AK
|
||||
|
162686
misc/raw-data/breweries.json
Normal file
162686
misc/raw-data/breweries.json
Normal file
File diff suppressed because it is too large
Load Diff
578
misc/raw-data/ontariobreweries.json
Normal file
578
misc/raw-data/ontariobreweries.json
Normal file
@@ -0,0 +1,578 @@
|
||||
[
|
||||
{
|
||||
"text": "100 Acre Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/100-acre-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "All My Friends Beer Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/all-my-friends-beer-co/"
|
||||
},
|
||||
{
|
||||
"text": "All or Nothing Brewhouse",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/all-or-nothing-brewhouse/"
|
||||
},
|
||||
{
|
||||
"text": "Anderson Craft Ales",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/anderson-craft-ales/"
|
||||
},
|
||||
{
|
||||
"text": "Badlands Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/badlands-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Bancroft Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bancroft-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Banded Goose Brewing Comany",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/banded-goose-brewing-comany/"
|
||||
},
|
||||
{
|
||||
"text": "Beau’s Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/beaus-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "BeerLab! London",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/beerlab-london/"
|
||||
},
|
||||
{
|
||||
"text": "Bellwoods Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bellwoods-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Bench Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bench-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Beyond The Pale Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/beyond-the-pale-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Bicycle Craft Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bicycle-craft-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Big Rig Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/big-rig-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Big Rock Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/big-rock-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Black Gold Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/black-gold-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Black Oak Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/black-oak-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Block 3 Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/block-3-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Blood Brothers Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/blood-brothers-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Bobcaygeon Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bobcaygeon-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Boshkung Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/boshkung-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Brauwerk Hoffman – Rockland",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/brauwerk-hoffman-rockland/"
|
||||
},
|
||||
{
|
||||
"text": "Bridge Masters Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/bridge-masters-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Broadhead Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/broadhead-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Broken Rail Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/broken-rail-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Burdock Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/burdock-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "C’est What Durham Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/cest-what-durham-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Calabogie Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/calabogie-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Cameron’s Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/camerons-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Canvas Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/canvas-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Caps Off Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/caps-off-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Century Barn Brewing & Beverage Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/century-barn-brewing-and-beverage-company/"
|
||||
},
|
||||
{
|
||||
"text": "Chronicle Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/chronicle-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Clifford Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/clifford-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Cold Bear Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/cold-bear-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Collective Arts Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/collective-arts-brewing-ltd/"
|
||||
},
|
||||
{
|
||||
"text": "Common Good Beer Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/common-good-beer-co/"
|
||||
},
|
||||
{
|
||||
"text": "Couchiching Craft Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/couchiching-craft-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Cowbell Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/cowbell-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Cured Craft Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/cured-craft-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Daft Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/daft-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Dog House Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/dog-house-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Dominion City Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/dominion-city-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Eastbound Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/eastbound-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Equals Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/equals-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Fairweather Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/fairweather-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Farm League Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/farm-league-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Fixed Gear Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/fixed-gear-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Flying Monkeys Craft Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/flying-monkeys-craft-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Focal Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/focal-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Foundry Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/foundry-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Four Fathers Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/four-fathers-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Frank Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/frank-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Freddy’s",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/freddys/"
|
||||
},
|
||||
{
|
||||
"text": "Full Beard Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/1068-2/"
|
||||
},
|
||||
{
|
||||
"text": "Furnace Room Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/furnace-room-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Gateway City Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/gateway-city-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Glasstown Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/glasstown-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Godspeed Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/godspeed-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Goldenfield Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/goldenfield-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Grand River Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/grand-river-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Granite Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/granite-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Great Lakes Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/great-lakes-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Haliburton Highlands Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/haliburton-highlands-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Imperial City Brew House",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/imperial-city-brew-house/"
|
||||
},
|
||||
{
|
||||
"text": "Indie Ale House",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/indie-ale-house/"
|
||||
},
|
||||
{
|
||||
"text": "Jobsite Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/jobsite-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Kichesippi Beer Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/kichesippi-beer-co/"
|
||||
},
|
||||
{
|
||||
"text": "Kick and Push Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/kick-and-push-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Lake of Bays Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/lake-of-bays-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Lake Of The Woods Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/lake-of-the-woods-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Left Field Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/left-field-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Lightcaster Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/lightcaster-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "MacKinnon Brothers Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/mackinnon-brothers-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Maclean’s Ales Inc.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/macleans-ales-inc/"
|
||||
},
|
||||
{
|
||||
"text": "Magnotta Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/magnotta-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Market Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/market-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Mascot Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/mascot-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Matron Fine Beer",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/matron-fine-beer/"
|
||||
},
|
||||
{
|
||||
"text": "Meyers Creek Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/meyers-creek-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Midtown Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/midtown-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Miski Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/miski-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Muddy York Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/muddy-york-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Muskoka Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/muskoka-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Natterjack Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/natterjack-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Newark Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/newark-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Niagara Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/niagara-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Niagara College Teaching Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/niagara-college-teaching-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Niagara Oast House Brewers",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/niagara-oast-house-brewers/"
|
||||
},
|
||||
{
|
||||
"text": "Nickel Brook Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/nickel-brook-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Northern Superior Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/northern-superior-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Old Credit Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/old-credit-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Old Flame Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/1239-2/"
|
||||
},
|
||||
{
|
||||
"text": "Orléans Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/orleans-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Overflow Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/overflow-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Parsons Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/parsons-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Perth Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/perth-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Prince Eddy’s Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/prince-eddys-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Quayle’s Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/quayles-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Quetico Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/quetico-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Railway City Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/railway-city-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Ramblin’ Road Brewery Farm",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/ramblin-road-brewery-farm/"
|
||||
},
|
||||
{
|
||||
"text": "Red Barn Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/red-barn-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Refined Fool Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/refined-fool-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Rouge River Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/rouge-river-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Royal City Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/royal-city-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Sassy Britches Brewing Co Ltd",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/sassy-britches-brewing-co-ltd/"
|
||||
},
|
||||
{
|
||||
"text": "Sawdust City Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/sawdust-city-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Shawn & Ed Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/shawn-ed-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Silversmith Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/silversmith-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Slake Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/slake-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Sleeping Giant Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/sleeping-giant-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Something in the Water Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/something-in-the-water-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Sonnen Hill Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/sonnen-hill-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Sons of Kent Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/sons-of-kent-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Spark Beer",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/spark-beer/"
|
||||
},
|
||||
{
|
||||
"text": "Split Rail Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/split-rail-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Stack Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/stack-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Steam Whistle Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/steam-whistle-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Steel Wheel Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/steel-wheel-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Stonehooker Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/stonehooker-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Stonepicker Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/stonepicker-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Stray Dog Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/stray-dog-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "The Exchange Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/the-exchange-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "The Grove Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/the-grove-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "The Second Wedge Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/the-second-wedge-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Thornbury Village Craft Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/thornbury-village-craft-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Three Sheets Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/three-sheets-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Tooth and Nail Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/tooth-and-nail-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Torched Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/torched-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Town Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/town-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Trestle Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/trestle-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "True History Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/true-history-brewing/"
|
||||
},
|
||||
{
|
||||
"text": "Upper Thames Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/upper-thames-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Vimy Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/vimy-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Walkerville Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/walkerville-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Wave Maker Craft Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/wave-maker-craft-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Wellington Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/wellington-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Whiprsnapr Brewing Co.",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/whiprsnapr-brewing-co/"
|
||||
},
|
||||
{
|
||||
"text": "Whiskeyjack Beer Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/whiskeyjack-beer-company/"
|
||||
},
|
||||
{
|
||||
"text": "Whitewater Brewing Company",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/whitewater-brewing-company/"
|
||||
},
|
||||
{
|
||||
"text": "Willibald Farm Distillery & Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/willibald-farm-distillery-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Windmill Brewery",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/windmill-brewery/"
|
||||
},
|
||||
{
|
||||
"text": "Wishbone Brewing",
|
||||
"href": "https://ontariocraftbrewers.com/brewery-profile/wishbone-brewing/"
|
||||
}
|
||||
]
|
||||
5
pipeline/.clang-format
Normal file
5
pipeline/.clang-format
Normal file
@@ -0,0 +1,5 @@
|
||||
---
|
||||
BasedOnStyle: Google
|
||||
ColumnLimit: 80
|
||||
IndentWidth: 3
|
||||
...
|
||||
17
pipeline/.clang-tidy
Normal file
17
pipeline/.clang-tidy
Normal file
@@ -0,0 +1,17 @@
|
||||
---
|
||||
Checks: >
|
||||
-*,
|
||||
bugprone-*,
|
||||
clang-analyzer-*,
|
||||
cppcoreguidelines-*,
|
||||
google-*,
|
||||
modernize-*,
|
||||
performance-*,
|
||||
readability-*,
|
||||
-cppcoreguidelines-avoid-magic-numbers,
|
||||
-cppcoreguidelines-owning-memory,
|
||||
-readability-magic-numbers,
|
||||
-google-readability-todo
|
||||
HeaderFilterRegex: "^(src|includes)/.*"
|
||||
FormatStyle: file
|
||||
...
|
||||
3
pipeline/.gitignore
vendored
Normal file
3
pipeline/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
dist
|
||||
build
|
||||
data
|
||||
169
pipeline/CMakeLists.txt
Normal file
169
pipeline/CMakeLists.txt
Normal file
@@ -0,0 +1,169 @@
|
||||
cmake_minimum_required(VERSION 3.20)
|
||||
project(biergarten-pipeline VERSION 0.1.0 LANGUAGES CXX)
|
||||
|
||||
# Allows older dependencies to configure on newer CMake.
|
||||
set(CMAKE_POLICY_VERSION_MINIMUM 3.5)
|
||||
|
||||
# Policies
|
||||
cmake_policy(SET CMP0167 NEW) # FindBoost improvements
|
||||
|
||||
# Global Settings
|
||||
set(CMAKE_CXX_STANDARD 23)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
|
||||
|
||||
option(ENABLE_CLANG_TIDY "Enable clang-tidy static analysis for project targets" ON)
|
||||
option(ENABLE_CLANG_FORMAT_TARGETS "Enable clang-format helper targets" ON)
|
||||
|
||||
if(ENABLE_CLANG_TIDY)
|
||||
find_program(CLANG_TIDY_EXE NAMES clang-tidy)
|
||||
if(CLANG_TIDY_EXE)
|
||||
set(BIERGARTEN_CLANG_TIDY_COMMAND
|
||||
"${CLANG_TIDY_EXE};--config-file=${CMAKE_CURRENT_SOURCE_DIR}/.clang-tidy")
|
||||
message(STATUS "clang-tidy enabled: ${CLANG_TIDY_EXE}")
|
||||
else()
|
||||
message(STATUS "clang-tidy not found; static analysis is disabled")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Compiler Options & Warnings (Interface Library)
|
||||
# -----------------------------------------------------------------------------
|
||||
add_library(project_options INTERFACE)
|
||||
target_compile_options(project_options INTERFACE
|
||||
$<$<CXX_COMPILER_ID:GNU,Clang>:
|
||||
-Wall -Wextra -Wpedantic -Wshadow -Wconversion -Wsign-conversion -Wunused
|
||||
>
|
||||
$<$<CXX_COMPILER_ID:MSVC>:
|
||||
/W4 /WX /permissive-
|
||||
>
|
||||
)
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Dependencies
|
||||
# -----------------------------------------------------------------------------
|
||||
find_package(CURL REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
find_package(Boost 1.75 REQUIRED COMPONENTS program_options json)
|
||||
|
||||
include(FetchContent)
|
||||
|
||||
# spdlog (Logging)
|
||||
FetchContent_Declare(
|
||||
spdlog
|
||||
GIT_REPOSITORY https://github.com/gabime/spdlog.git
|
||||
GIT_TAG v1.11.0
|
||||
)
|
||||
FetchContent_MakeAvailable(spdlog)
|
||||
|
||||
# llama.cpp (LLM Inference)
|
||||
set(LLAMA_BUILD_TESTS OFF CACHE BOOL "" FORCE)
|
||||
set(LLAMA_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE)
|
||||
set(LLAMA_BUILD_SERVER OFF CACHE BOOL "" FORCE)
|
||||
FetchContent_Declare(
|
||||
llama_cpp
|
||||
GIT_REPOSITORY https://github.com/ggerganov/llama.cpp.git
|
||||
GIT_TAG b8611
|
||||
)
|
||||
FetchContent_MakeAvailable(llama_cpp)
|
||||
|
||||
if(TARGET llama)
|
||||
target_compile_options(llama PRIVATE
|
||||
$<$<CXX_COMPILER_ID:AppleClang>:-include algorithm>
|
||||
)
|
||||
endif()
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Main Executable
|
||||
# -----------------------------------------------------------------------------
|
||||
set(PIPELINE_SOURCES
|
||||
src/biergarten_data_generator.cpp
|
||||
src/web_client/curl_web_client.cpp
|
||||
src/data_generation/data_downloader.cpp
|
||||
src/database/database.cpp
|
||||
src/json_handling/json_loader.cpp
|
||||
src/data_generation/llama/destructor.cpp
|
||||
src/data_generation/llama/set_sampling_options.cpp
|
||||
src/data_generation/llama/load.cpp
|
||||
src/data_generation/llama/infer.cpp
|
||||
src/data_generation/llama/generate_brewery.cpp
|
||||
src/data_generation/llama/generate_user.cpp
|
||||
src/data_generation/llama/helpers.cpp
|
||||
src/data_generation/mock/data.cpp
|
||||
src/data_generation/mock/deterministic_hash.cpp
|
||||
src/data_generation/mock/load.cpp
|
||||
src/data_generation/mock/generate_brewery.cpp
|
||||
src/data_generation/mock/generate_user.cpp
|
||||
src/json_handling/stream_parser.cpp
|
||||
src/wikipedia/wikipedia_service.cpp
|
||||
src/main.cpp
|
||||
)
|
||||
|
||||
add_executable(biergarten-pipeline ${PIPELINE_SOURCES})
|
||||
|
||||
if(BIERGARTEN_CLANG_TIDY_COMMAND)
|
||||
set_target_properties(biergarten-pipeline PROPERTIES
|
||||
CXX_CLANG_TIDY "${BIERGARTEN_CLANG_TIDY_COMMAND}"
|
||||
)
|
||||
endif()
|
||||
|
||||
target_include_directories(biergarten-pipeline
|
||||
PRIVATE
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/includes
|
||||
${llama_cpp_SOURCE_DIR}/include
|
||||
)
|
||||
|
||||
target_link_libraries(biergarten-pipeline
|
||||
PRIVATE
|
||||
project_options
|
||||
CURL::libcurl
|
||||
SQLite::SQLite3
|
||||
spdlog::spdlog
|
||||
llama
|
||||
Boost::program_options
|
||||
Boost::json
|
||||
)
|
||||
|
||||
if(ENABLE_CLANG_FORMAT_TARGETS)
|
||||
find_program(CLANG_FORMAT_EXE NAMES clang-format)
|
||||
if(CLANG_FORMAT_EXE)
|
||||
file(GLOB_RECURSE FORMAT_SOURCES CONFIGURE_DEPENDS
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/**/*.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/src/**/*.cc
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/includes/**/*.h
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/includes/**/*.hpp
|
||||
)
|
||||
|
||||
add_custom_target(format
|
||||
COMMAND ${CLANG_FORMAT_EXE} -style=file -i ${FORMAT_SOURCES}
|
||||
COMMENT "Formatting source files with clang-format (Google style)"
|
||||
VERBATIM
|
||||
)
|
||||
|
||||
add_custom_target(format-check
|
||||
COMMAND ${CLANG_FORMAT_EXE} -style=file --dry-run --Werror ${FORMAT_SOURCES}
|
||||
COMMENT "Checking source formatting with clang-format (Google style)"
|
||||
VERBATIM
|
||||
)
|
||||
else()
|
||||
message(STATUS "clang-format not found; format targets are disabled")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Post-Build Steps & Utilities
|
||||
# -----------------------------------------------------------------------------
|
||||
add_custom_command(TARGET biergarten-pipeline POST_BUILD
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_SOURCE_DIR}/output
|
||||
COMMENT "Ensuring output directory exists"
|
||||
)
|
||||
|
||||
find_program(VALGRIND valgrind)
|
||||
if(VALGRIND)
|
||||
add_custom_target(memcheck
|
||||
COMMAND ${VALGRIND} --leak-check=full --error-exitcode=1 $<TARGET_FILE:biergarten-pipeline> --help
|
||||
DEPENDS biergarten-pipeline
|
||||
COMMENT "Running Valgrind memory check"
|
||||
)
|
||||
endif()
|
||||
406
pipeline/README.md
Normal file
406
pipeline/README.md
Normal file
@@ -0,0 +1,406 @@
|
||||
# Biergarten Pipeline
|
||||
|
||||
A high-performance C++23 data pipeline for fetching, parsing, and storing geographic data (countries, states, cities) with brewery metadata generation capabilities. The system supports both mock and LLM-based (llama.cpp) generation modes.
|
||||
|
||||
## Overview
|
||||
|
||||
The pipeline orchestrates **four key stages**:
|
||||
|
||||
1. **Download** - Fetches `countries+states+cities.json` from a pinned GitHub commit with optional local filesystem caching
|
||||
2. **Parse** - Streams JSON using Boost.JSON's `basic_parser` to extract country/state/city records without loading the entire file into memory
|
||||
3. **Store** - Inserts records into a file-based SQLite database with all operations performed sequentially in a single thread
|
||||
4. **Generate** - Produces brewery metadata or user profiles (mock implementation; supports future LLM integration via llama.cpp)
|
||||
|
||||
## System Architecture
|
||||
|
||||
### Data Sources and Formats
|
||||
|
||||
- **Hierarchical Structure**: Countries array → states per country → cities per state
|
||||
- **Data Fields**:
|
||||
- `id` (integer)
|
||||
- `name` (string)
|
||||
- `iso2` / `iso3` (ISO country/state codes)
|
||||
- `latitude` / `longitude` (geographic coordinates)
|
||||
- **Source**: [dr5hn/countries-states-cities-database](https://github.com/dr5hn/countries-states-cities-database) on GitHub
|
||||
- **Output**: Structured SQLite file-based database (`biergarten-pipeline.db`) + structured logging via spdlog
|
||||
|
||||
### Concurrency Model
|
||||
|
||||
The pipeline currently operates **single-threaded** with sequential stage execution:
|
||||
|
||||
1. **Download Phase**: Main thread blocks while downloading the source JSON file (if not in cache)
|
||||
2. **Parse & Store Phase**: Main thread performs streaming JSON parse with immediate SQLite inserts
|
||||
|
||||
**Thread Safety**: While single-threaded, the `SqliteDatabase` component is **mutex-protected** using `std::mutex` (`dbMutex`) for all database operations. This design enables safe future parallelization without code modifications.
|
||||
|
||||
## Core Components
|
||||
|
||||
| Component | Purpose | Thread Safety | Dependencies |
|
||||
| ----------------------------- | ----------------------------------------------------------------------------------------------- | -------------------------------------------- | --------------------------------------------- |
|
||||
| **BiergartenDataGenerator** | Orchestrates pipeline execution; manages lifecycle of downloader, parser, and generator | Single-threaded coordinator | ApplicationOptions, WebClient, SqliteDatabase |
|
||||
| **DataDownloader** | HTTP fetch with curl; optional filesystem cache; ETag support and retries | Blocking I/O; safe for startup | IWebClient, filesystem |
|
||||
| **StreamingJsonParser** | Extends `boost::json::basic_parser`; emits country/state/city via callbacks; tracks parse depth | Single-threaded parse; callbacks thread-safe | Boost.JSON |
|
||||
| **JsonLoader** | Wraps parser; dispatches callbacks for country/state/city; manages WorkQueue lifecycle | Produces to WorkQueue; safe callbacks | StreamingJsonParser, SqliteDatabase |
|
||||
| **SqliteDatabase** | Manages schema initialization; insert/query methods for geographic data | Mutex-guarded all operations | SQLite3 |
|
||||
| **IDataGenerator** (Abstract) | Interface for brewery/user metadata generation | Stateless virtual methods | N/A |
|
||||
| **LlamaGenerator** | LLM-based generation via llama.cpp; configurable sampling (temperature, top-p, seed) | Manages llama_model* and llama_context* | llama.cpp, BreweryResult, UserResult |
|
||||
| **MockGenerator** | Deterministic mock generation using seeded randomization | Stateless; thread-safe | N/A |
|
||||
| **CURLWebClient** | HTTP client adapter; URL encoding; file downloads | cURL library bindings | libcurl |
|
||||
| **WikipediaService** | (Planned) Wikipedia data lookups for enrichment | N/A | IWebClient |
|
||||
|
||||
## Database Schema
|
||||
|
||||
SQLite file-based database with **three core tables** and **indexes for fast lookups**:
|
||||
|
||||
### Countries
|
||||
|
||||
```sql
|
||||
CREATE TABLE countries (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
iso2 TEXT,
|
||||
iso3 TEXT
|
||||
);
|
||||
CREATE INDEX idx_countries_iso2 ON countries(iso2);
|
||||
```
|
||||
|
||||
### States
|
||||
|
||||
```sql
|
||||
CREATE TABLE states (
|
||||
id INTEGER PRIMARY KEY,
|
||||
country_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
iso2 TEXT,
|
||||
FOREIGN KEY (country_id) REFERENCES countries(id)
|
||||
);
|
||||
CREATE INDEX idx_states_country ON states(country_id);
|
||||
```
|
||||
|
||||
### Cities
|
||||
|
||||
```sql
|
||||
CREATE TABLE cities (
|
||||
id INTEGER PRIMARY KEY,
|
||||
state_id INTEGER NOT NULL,
|
||||
country_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
latitude REAL,
|
||||
longitude REAL,
|
||||
FOREIGN KEY (state_id) REFERENCES states(id),
|
||||
FOREIGN KEY (country_id) REFERENCES countries(id)
|
||||
);
|
||||
CREATE INDEX idx_cities_state ON cities(state_id);
|
||||
CREATE INDEX idx_cities_country ON cities(country_id);
|
||||
```
|
||||
|
||||
## Architecture Diagram
|
||||
|
||||
```plantuml
|
||||
@startuml biergarten-pipeline
|
||||
!theme plain
|
||||
skinparam monochrome true
|
||||
skinparam classBackgroundColor #FFFFFF
|
||||
skinparam classBorderColor #000000
|
||||
|
||||
package "Application Layer" {
|
||||
class BiergartenDataGenerator {
|
||||
- options: ApplicationOptions
|
||||
- webClient: IWebClient
|
||||
- database: SqliteDatabase
|
||||
- generator: IDataGenerator
|
||||
--
|
||||
+ Run() : int
|
||||
}
|
||||
}
|
||||
|
||||
package "Data Acquisition" {
|
||||
class DataDownloader {
|
||||
- webClient: IWebClient
|
||||
--
|
||||
+ Download(url: string, filePath: string)
|
||||
+ DownloadWithCache(url: string, cachePath: string)
|
||||
}
|
||||
|
||||
interface IWebClient {
|
||||
+ DownloadToFile(url: string, filePath: string)
|
||||
+ Get(url: string) : string
|
||||
+ UrlEncode(value: string) : string
|
||||
}
|
||||
|
||||
class CURLWebClient {
|
||||
- globalState: CurlGlobalState
|
||||
--
|
||||
+ DownloadToFile(url: string, filePath: string)
|
||||
+ Get(url: string) : string
|
||||
+ UrlEncode(value: string) : string
|
||||
}
|
||||
}
|
||||
|
||||
package "JSON Processing" {
|
||||
class StreamingJsonParser {
|
||||
- depth: int
|
||||
--
|
||||
+ on_object_begin()
|
||||
+ on_object_end()
|
||||
+ on_array_begin()
|
||||
+ on_array_end()
|
||||
+ on_key(str: string)
|
||||
+ on_string(str: string)
|
||||
+ on_number(value: int)
|
||||
}
|
||||
|
||||
class JsonLoader {
|
||||
--
|
||||
+ LoadWorldCities(jsonPath: string, db: SqliteDatabase)
|
||||
}
|
||||
}
|
||||
|
||||
package "Data Storage" {
|
||||
class SqliteDatabase {
|
||||
- db: sqlite3*
|
||||
- dbMutex: std::mutex
|
||||
--
|
||||
+ Initialize(dbPath: string)
|
||||
+ InsertCountry(id: int, name: string, iso2: string, iso3: string)
|
||||
+ InsertState(id: int, countryId: int, name: string, iso2: string)
|
||||
+ InsertCity(id: int, stateId: int, countryId: int, name: string, lat: double, lon: double)
|
||||
+ QueryCountries(limit: int) : vector<Country>
|
||||
+ QueryStates(limit: int) : vector<State>
|
||||
+ QueryCities() : vector<City>
|
||||
+ BeginTransaction()
|
||||
+ CommitTransaction()
|
||||
# InitializeSchema()
|
||||
}
|
||||
|
||||
struct Country {
|
||||
id: int
|
||||
name: string
|
||||
iso2: string
|
||||
iso3: string
|
||||
}
|
||||
|
||||
struct State {
|
||||
id: int
|
||||
name: string
|
||||
iso2: string
|
||||
countryId: int
|
||||
}
|
||||
|
||||
struct City {
|
||||
id: int
|
||||
name: string
|
||||
countryId: int
|
||||
}
|
||||
}
|
||||
|
||||
package "Data Generation" {
|
||||
interface IDataGenerator {
|
||||
+ load(modelPath: string)
|
||||
+ generateBrewery(cityName: string, countryName: string, regionContext: string) : BreweryResult
|
||||
+ generateUser(locale: string) : UserResult
|
||||
}
|
||||
|
||||
class LlamaGenerator {
|
||||
- model: llama_model*
|
||||
- context: llama_context*
|
||||
- sampling_temperature: float
|
||||
- sampling_top_p: float
|
||||
- sampling_seed: uint32_t
|
||||
--
|
||||
+ load(modelPath: string)
|
||||
+ generateBrewery(...) : BreweryResult
|
||||
+ generateUser(locale: string) : UserResult
|
||||
+ setSamplingOptions(temperature: float, topP: float, seed: int)
|
||||
# infer(prompt: string) : string
|
||||
}
|
||||
|
||||
class MockGenerator {
|
||||
--
|
||||
+ load(modelPath: string)
|
||||
+ generateBrewery(...) : BreweryResult
|
||||
+ generateUser(locale: string) : UserResult
|
||||
}
|
||||
|
||||
struct BreweryResult {
|
||||
name: string
|
||||
description: string
|
||||
}
|
||||
|
||||
struct UserResult {
|
||||
username: string
|
||||
bio: string
|
||||
}
|
||||
}
|
||||
|
||||
package "Enrichment (Planned)" {
|
||||
class WikipediaService {
|
||||
- webClient: IWebClient
|
||||
--
|
||||
+ SearchCity(cityName: string, countryName: string) : string
|
||||
}
|
||||
}
|
||||
|
||||
' Relationships
|
||||
BiergartenDataGenerator --> DataDownloader
|
||||
BiergartenDataGenerator --> JsonLoader
|
||||
BiergartenDataGenerator --> SqliteDatabase
|
||||
BiergartenDataGenerator --> IDataGenerator
|
||||
|
||||
DataDownloader --> IWebClient
|
||||
CURLWebClient ..|> IWebClient
|
||||
|
||||
JsonLoader --> StreamingJsonParser
|
||||
JsonLoader --> SqliteDatabase
|
||||
|
||||
LlamaGenerator ..|> IDataGenerator
|
||||
MockGenerator ..|> IDataGenerator
|
||||
|
||||
SqliteDatabase --> Country
|
||||
SqliteDatabase --> State
|
||||
SqliteDatabase --> City
|
||||
|
||||
LlamaGenerator --> BreweryResult
|
||||
LlamaGenerator --> UserResult
|
||||
MockGenerator --> BreweryResult
|
||||
MockGenerator --> UserResult
|
||||
|
||||
WikipediaService --> IWebClient
|
||||
|
||||
@enduml
|
||||
```
|
||||
|
||||
## Configuration and Extensibility
|
||||
|
||||
### Command-Line Arguments
|
||||
|
||||
Boost.Program_options provides named CLI arguments. Running without arguments displays usage instructions.
|
||||
|
||||
```bash
|
||||
./biergarten-pipeline [options]
|
||||
```
|
||||
|
||||
**Requirement**: Exactly one of `--mocked` or `--model` must be specified.
|
||||
|
||||
| Argument | Short | Type | Purpose |
|
||||
| --------------- | ----- | ------ | --------------------------------------------------------------- |
|
||||
| `--mocked` | - | flag | Use mocked generator for brewery/user data |
|
||||
| `--model` | `-m` | string | Path to LLM model file (gguf); mutually exclusive with --mocked |
|
||||
| `--cache-dir` | `-c` | path | Directory for cached JSON (default: `/tmp`) |
|
||||
| `--temperature` | - | float | LLM sampling temperature 0.0-1.0 (default: `0.8`) |
|
||||
| `--top-p` | - | float | Nucleus sampling parameter 0.0-1.0 (default: `0.92`) |
|
||||
| `--seed` | - | int | Random seed: -1 for random (default: `-1`) |
|
||||
| `--help` | `-h` | flag | Show help message |
|
||||
|
||||
**Note**: The data source is always pinned to commit `c5eb7772` (stable 2026-03-28) and cannot be changed.
|
||||
|
||||
**Note**: When `--mocked` is used, any sampling parameters (`--temperature`, `--top-p`, `--seed`) are ignored with a warning.
|
||||
|
||||
### Usage Examples
|
||||
|
||||
```bash
|
||||
# Mocked generator (deterministic, no LLM required)
|
||||
./biergarten-pipeline --mocked
|
||||
|
||||
# With LLM model
|
||||
./biergarten-pipeline --model ./models/llama.gguf --cache-dir /var/cache
|
||||
|
||||
# Mocked with extra parameters provided (will be ignored with warning)
|
||||
./biergarten-pipeline --mocked --temperature 0.5 --top-p 0.8 --seed 42
|
||||
|
||||
# Show help
|
||||
./biergarten-pipeline --help
|
||||
```
|
||||
|
||||
## Building and Running
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- **C++23 compiler** (g++, clang, MSVC)
|
||||
- **CMake** 3.20+
|
||||
- **curl** (for HTTP downloads)
|
||||
- **sqlite3** (database backend)
|
||||
- **Boost** 1.75+ (requires Boost.JSON and Boost.Program_options)
|
||||
- **spdlog** v1.11.0 (fetched via CMake FetchContent)
|
||||
- **llama.cpp** (fetched via CMake FetchContent for LLM inference)
|
||||
|
||||
### Build
|
||||
|
||||
```bash
|
||||
mkdir -p build
|
||||
cd build
|
||||
cmake ..
|
||||
cmake --build . --target biergarten-pipeline -- -j
|
||||
```
|
||||
|
||||
### Run
|
||||
|
||||
```bash
|
||||
./build/biergarten-pipeline
|
||||
```
|
||||
|
||||
**Output**:
|
||||
|
||||
- Console logs with structured spdlog output
|
||||
- Cached JSON file: `/tmp/countries+states+cities.json`
|
||||
- SQLite database: `biergarten-pipeline.db` (in output directory)
|
||||
|
||||
## Code Quality and Static Analysis
|
||||
|
||||
### Formatting
|
||||
|
||||
This project uses **clang-format** with the **Google C++ style guide**:
|
||||
|
||||
```bash
|
||||
# Apply formatting to all source files
|
||||
cmake --build build --target format
|
||||
|
||||
# Check formatting without modifications
|
||||
cmake --build build --target format-check
|
||||
```
|
||||
|
||||
### Static Analysis
|
||||
|
||||
This project uses **clang-tidy** with configurations for Google, modernize, performance, and bug-prone rules (`.clang-tidy`):
|
||||
|
||||
Static analysis runs automatically during compilation if `clang-tidy` is available.
|
||||
|
||||
## Code Implementation Summary
|
||||
|
||||
### Key Achievements
|
||||
|
||||
✅ **Full pipeline implementation** - Download → Parse → Store → Generate
|
||||
✅ **Streaming JSON parser** - Memory-efficient processing via Boost.JSON callbacks
|
||||
✅ **Thread-safe SQLite wrapper** - Mutex-protected database for future parallelization
|
||||
✅ **Flexible data generation** - Abstract IDataGenerator interface supporting both mock and LLM modes
|
||||
✅ **Comprehensive CLI** - Boost.Program_options with sensible defaults
|
||||
✅ **Production-grade logging** - spdlog integration for structured output
|
||||
✅ **Build quality** - CMake with clang-format/clang-tidy integration
|
||||
|
||||
### Architecture Patterns
|
||||
|
||||
- **Interface-based design**: `IWebClient`, `IDataGenerator` abstract base classes enable substitution and testing
|
||||
- **Dependency injection**: Components receive dependencies via constructors (BiergartenDataGenerator)
|
||||
- **RAII principle**: SQLite connections and resources managed via destructors
|
||||
- **Callback-driven parsing**: Boost.JSON parser emits events to processing callbacks
|
||||
- **Transaction-scoped inserts**: BeginTransaction/CommitTransaction for batch performance
|
||||
|
||||
### External Dependencies
|
||||
|
||||
| Dependency | Version | Purpose | Type |
|
||||
| ---------- | ------- | ---------------------------------- | ------- |
|
||||
| Boost | 1.75+ | JSON parsing, CLI argument parsing | Library |
|
||||
| SQLite3 | - | Persistent data storage | System |
|
||||
| libcurl | - | HTTP downloads | System |
|
||||
| spdlog | v1.11.0 | Structured logging | Fetched |
|
||||
| llama.cpp | b8611 | LLM inference engine | Fetched |
|
||||
|
||||
Run the `format-check` target (`cmake --build build --target format-check`) to validate formatting without modifying files.
|
||||
|
||||
clang-tidy runs automatically on the biergarten-pipeline target when available. You can disable it at configure time:
|
||||
|
||||
cmake -DENABLE_CLANG_TIDY=OFF ..
|
||||
|
||||
You can also disable format helper targets:
|
||||
|
||||
cmake -DENABLE_CLANG_FORMAT_TARGETS=OFF ..
|
||||
153
pipeline/includes/biergarten_data_generator.h
Normal file
153
pipeline/includes/biergarten_data_generator.h
Normal file
@@ -0,0 +1,153 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_BIERGARTEN_DATA_GENERATOR_H_
|
||||
#define BIERGARTEN_PIPELINE_BIERGARTEN_DATA_GENERATOR_H_
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
#include "data_generation/data_generator.h"
|
||||
#include "database/database.h"
|
||||
#include "web_client/web_client.h"
|
||||
#include "wikipedia/wikipedia_service.h"
|
||||
|
||||
/**
 * @brief Program options for the Biergarten pipeline application.
 *
 * Populated from the command line via Boost.Program_options; per the README,
 * exactly one of model_path / use_mocked is expected to be set.
 */
struct ApplicationOptions {
  /// @brief Path to the LLM model file (gguf format); mutually exclusive with
  /// use_mocked.
  std::string model_path;

  /// @brief Use mocked generator instead of LLM; mutually exclusive with
  /// model_path.
  bool use_mocked = false;

  /// @brief Directory for cached JSON and database files.
  std::string cache_dir;

  /// @brief LLM sampling temperature (0.0 to 1.0, higher = more random).
  /// Ignored (with a warning) when use_mocked is set.
  float temperature = 0.8f;

  /// @brief LLM nucleus sampling top-p parameter (0.0 to 1.0, higher = more
  /// random). Ignored (with a warning) when use_mocked is set.
  float top_p = 0.92f;

  /// @brief Random seed for sampling (-1 for random, otherwise non-negative).
  int seed = -1;

  /// @brief Git commit hash for database consistency (always pinned to
  /// c5eb7772; not configurable from the CLI).
  std::string commit = "c5eb7772";
};
|
||||
|
||||
/**
 * @brief Main data generator class for the Biergarten pipeline.
 *
 * This class encapsulates the core logic for generating brewery data.
 * It handles database initialization, data loading/downloading, and brewery
 * generation. Dependencies are injected through the constructor; the class
 * acts as a single-threaded coordinator.
 */
class BiergartenDataGenerator {
 public:
  /**
   * @brief Construct a BiergartenDataGenerator with injected dependencies.
   *
   * @param options Application configuration options (copied into options_).
   * @param web_client HTTP client for downloading data (shared ownership).
   * @param database SQLite database instance; held by reference, so it must
   *                 outlive this object.
   */
  BiergartenDataGenerator(const ApplicationOptions& options,
                          std::shared_ptr<WebClient> web_client,
                          SqliteDatabase& database);

  /**
   * @brief Run the data generation pipeline.
   *
   * Performs the following steps:
   * 1. Initialize database
   * 2. Download geographic data if needed
   * 3. Initialize the generator (LLM or Mock)
   * 4. Generate brewery data for sample cities
   *
   * @return 0 on success, 1 on failure.
   */
  int Run();

 private:
  /// @brief Immutable application options (value copy taken at construction).
  const ApplicationOptions options_;

  /// @brief Shared HTTP client dependency.
  std::shared_ptr<WebClient> webClient_;

  /// @brief Database dependency (non-owning reference).
  SqliteDatabase& database_;

  /**
   * @brief Enriched city data with Wikipedia context.
   */
  struct EnrichedCity {
    int city_id;                 // City id from the source dataset.
    std::string city_name;       // City display name.
    std::string country_name;    // Resolved parent country name.
    std::string region_context;  // Free-form context (e.g. Wikipedia summary).
  };

  /**
   * @brief Initialize the data generator based on options.
   *
   * Creates either a MockGenerator (if no model path) or LlamaGenerator.
   *
   * @return A unique_ptr to the initialized generator.
   */
  std::unique_ptr<DataGenerator> InitializeGenerator();

  /**
   * @brief Download and load geographic data if not cached.
   */
  void LoadGeographicData();

  /**
   * @brief Query cities from database and build country name map.
   *
   * @return Vector of (City, country_name) pairs capped at 30 entries.
   */
  std::vector<std::pair<City, std::string>> QueryCitiesWithCountries();

  /**
   * @brief Enrich cities with Wikipedia summaries.
   *
   * @param cities Vector of (City, country_name) pairs.
   * @return Vector of enriched city data with context.
   */
  std::vector<EnrichedCity> EnrichWithWikipedia(
      const std::vector<std::pair<City, std::string>>& cities);

  /**
   * @brief Generate breweries for enriched cities.
   *
   * Presumably accumulates results into generatedBreweries_ for the later
   * LogResults() call — confirm in the .cpp.
   *
   * @param generator The data generator instance.
   * @param cities Vector of enriched city data.
   */
  void GenerateBreweries(DataGenerator& generator,
                         const std::vector<EnrichedCity>& cities);

  /**
   * @brief Log the generated brewery results.
   */
  void LogResults() const;

  /**
   * @brief Helper struct to store generated brewery data.
   */
  struct GeneratedBrewery {
    int city_id;            // City the brewery was generated for.
    std::string city_name;  // City display name (for logging).
    BreweryResult brewery;  // Generated name + description.
  };

  /// @brief Stores generated brewery data.
  std::vector<GeneratedBrewery> generatedBreweries_;
};
|
||||
#endif // BIERGARTEN_PIPELINE_BIERGARTEN_DATA_GENERATOR_H_
|
||||
31
pipeline/includes/data_generation/data_downloader.h
Normal file
31
pipeline/includes/data_generation/data_downloader.h
Normal file
@@ -0,0 +1,31 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_DOWNLOADER_H_
|
||||
#define BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_DOWNLOADER_H_
|
||||
|
||||
#include <memory>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
|
||||
#include "web_client/web_client.h"
|
||||
|
||||
/// @brief Downloads and caches source geography JSON payloads.
///
/// NOTE(review): the constructor/destructor comments below claim ownership of
/// global curl state, but CurlGlobalState (curl_web_client.h) documents
/// itself as the RAII owner created in main() — confirm which component
/// actually calls curl_global_init/cleanup.
class DataDownloader {
 public:
  /// @brief Initializes global curl state used by this downloader.
  explicit DataDownloader(std::shared_ptr<WebClient> web_client);

  /// @brief Cleans up global curl state.
  ~DataDownloader();

  /// @brief Returns a local JSON path, downloading it when cache is missing.
  ///
  /// @param cache_path Local cache location for the JSON payload.
  /// @param commit Git commit of the upstream dataset export to fetch;
  ///        defaults to the pinned stable commit.
  std::string DownloadCountriesDatabase(
      const std::string& cache_path,
      const std::string& commit =
          "c5eb7772"  // Stable commit: 2026-03-28 export
  );

 private:
  /// @brief Returns true when file_path exists on the local filesystem
  /// (inferred from the name — confirm in the .cpp).
  static bool FileExists(const std::string& file_path);
  /// @brief Injected HTTP client used for the actual fetch.
  std::shared_ptr<WebClient> web_client_;
};
|
||||
|
||||
#endif // BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_DOWNLOADER_H_
|
||||
29
pipeline/includes/data_generation/data_generator.h
Normal file
29
pipeline/includes/data_generation/data_generator.h
Normal file
@@ -0,0 +1,29 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_GENERATOR_H_
|
||||
#define BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_GENERATOR_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
/// @brief Generated brewery metadata: a name and a short description.
struct BreweryResult {
  std::string name;
  std::string description;
};

/// @brief Generated user profile: a username and a short bio.
struct UserResult {
  std::string username;
  std::string bio;
};

/// @brief Abstract interface for brewery/user metadata generation.
///
/// Known implementations in this project: LlamaGenerator (LLM-backed via
/// llama.cpp) and MockGenerator (deterministic, no model required).
class DataGenerator {
 public:
  virtual ~DataGenerator() = default;

  /// @brief Prepares any backing resources (e.g. a gguf model file).
  /// Mock implementations may ignore model_path.
  virtual void Load(const std::string& model_path) = 0;

  /// @brief Produces brewery metadata for a city.
  /// @param region_context Free-form context text about the region
  ///        (e.g. a Wikipedia summary) used to flavor the output.
  virtual BreweryResult GenerateBrewery(const std::string& city_name,
                                        const std::string& country_name,
                                        const std::string& region_context) = 0;

  /// @brief Produces a user profile for the given locale.
  virtual UserResult GenerateUser(const std::string& locale) = 0;
};
|
||||
|
||||
#endif // BIERGARTEN_PIPELINE_DATA_GENERATION_DATA_GENERATOR_H_
|
||||
44
pipeline/includes/data_generation/llama_generator.h
Normal file
44
pipeline/includes/data_generation/llama_generator.h
Normal file
@@ -0,0 +1,44 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_H_
|
||||
#define BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_H_
|
||||
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/data_generator.h"
|
||||
|
||||
struct llama_model;
|
||||
struct llama_context;
|
||||
|
||||
/// LLM-backed DataGenerator built on llama.cpp.
///
/// Owns raw llama_model* / llama_context* handles; per the destructor
/// declaration these are presumably released there — confirm in the .cpp.
class LlamaGenerator final : public DataGenerator {
 public:
  LlamaGenerator() = default;
  /// Releases the loaded model/context resources.
  ~LlamaGenerator() override;

  /// Configures sampling parameters. seed == -1 presumably maps to the
  /// 0xFFFFFFFF "random seed" sentinel stored below — confirm in the .cpp.
  void SetSamplingOptions(float temperature, float top_p, int seed = -1);

  /// Loads the gguf model at model_path (see DataGenerator::Load).
  void Load(const std::string& model_path) override;
  /// Generates brewery metadata via LLM inference.
  BreweryResult GenerateBrewery(const std::string& city_name,
                                const std::string& country_name,
                                const std::string& region_context) override;
  /// Generates a user profile via LLM inference.
  UserResult GenerateUser(const std::string& locale) override;

 private:
  /// Runs inference on a plain user prompt.
  std::string Infer(const std::string& prompt, int max_tokens = 10000);
  // Overload that allows passing a system message separately so chat-capable
  // models receive a proper system role instead of having the system text
  // concatenated into the user prompt (helps avoid revealing internal
  // reasoning or instructions in model output).
  std::string Infer(const std::string& system_prompt,
                    const std::string& prompt, int max_tokens = 10000);

  /// Runs inference on a prompt that has already been chat-template
  /// formatted (no further wrapping applied).
  std::string InferFormatted(const std::string& formatted_prompt,
                             int max_tokens = 10000);

  llama_model* model_ = nullptr;          // Set by Load(); nullptr before.
  llama_context* context_ = nullptr;      // Set by Load(); nullptr before.
  float sampling_temperature_ = 0.8f;     // Matches ApplicationOptions default.
  float sampling_top_p_ = 0.92f;          // Matches ApplicationOptions default.
  uint32_t sampling_seed_ = 0xFFFFFFFFu;  // Sentinel for "random seed".
};
|
||||
|
||||
#endif // BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_H_
|
||||
32
pipeline/includes/data_generation/llama_generator_helpers.h
Normal file
32
pipeline/includes/data_generation/llama_generator_helpers.h
Normal file
@@ -0,0 +1,32 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_
#define BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_

#include <cstddef>      // std::size_t (default argument below)
#include <cstdint>      // int32_t for the llama_token typedef
#include <string>
#include <string_view>  // std::string_view (previously relied on a transitive include)
#include <utility>

struct llama_model;
struct llama_vocab;
// Must match llama.h, which declares `typedef int32_t llama_token;`.
// The previous plain-`int` declaration risked a conflicting-typedef
// compile error on any platform where int is not int32_t.
typedef int32_t llama_token;

// Free-function helpers used by LlamaGenerator; exposed with the "...Public"
// suffix so they can be unit-tested without constructing a generator.

/// Normalizes/truncates region context text to at most max_chars characters
/// before it is embedded in a prompt.
std::string PrepareRegionContextPublic(std::string_view region_context,
                                       std::size_t max_chars = 700);

/// Splits a two-line model response into a (line1, line2) pair;
/// error_message is presumably used when the response is malformed —
/// confirm exact failure behavior in the .cpp.
std::pair<std::string, std::string> ParseTwoLineResponsePublic(
    const std::string& raw, const std::string& error_message);

/// Wraps a user prompt in the model's chat template.
std::string ToChatPromptPublic(const llama_model* model,
                               const std::string& user_prompt);

/// Wraps system + user prompts in the model's chat template, giving the
/// system text a proper system role.
std::string ToChatPromptPublic(const llama_model* model,
                               const std::string& system_prompt,
                               const std::string& user_prompt);

/// Appends the detokenized text piece for `token` to `output`.
void AppendTokenPiecePublic(const llama_vocab* vocab, llama_token token,
                            std::string& output);

/// Validates brewery JSON produced by the model and extracts the name and
/// description fields through the out-parameters.
std::string ValidateBreweryJsonPublic(const std::string& raw,
                                      std::string& name_out,
                                      std::string& description_out);

#endif  // BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_
|
||||
28
pipeline/includes/data_generation/mock_generator.h
Normal file
28
pipeline/includes/data_generation/mock_generator.h
Normal file
@@ -0,0 +1,28 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_MOCK_GENERATOR_H_
#define BIERGARTEN_PIPELINE_DATA_GENERATION_MOCK_GENERATOR_H_

#include <cstddef>  // std::size_t — previously only available transitively
#include <string>
#include <vector>

#include "data_generation/data_generator.h"

/// Deterministic DataGenerator used with --mocked: output is derived from a
/// hash of the inputs, so identical inputs always yield identical
/// breweries/users, with no LLM or model file required.
class MockGenerator final : public DataGenerator {
 public:
  /// Presumably a no-op that ignores model_path — confirm in the .cpp.
  void Load(const std::string& model_path) override;
  /// Deterministically assembles a brewery name/description from the static
  /// word pools below.
  BreweryResult GenerateBrewery(const std::string& city_name,
                                const std::string& country_name,
                                const std::string& region_context) override;
  /// Deterministically picks a username/bio from the static pools below.
  UserResult GenerateUser(const std::string& locale) override;

 private:
  /// Stable hash of (a, b) used to index the word pools. NOTE(review): the
  /// implementation must not rely on std::hash, whose results are not
  /// guaranteed stable across runs/implementations — confirm in the .cpp.
  static std::size_t DeterministicHash(const std::string& a,
                                       const std::string& b);

  // Word pools from which generated output is assembled.
  static const std::vector<std::string> kBreweryAdjectives;
  static const std::vector<std::string> kBreweryNouns;
  static const std::vector<std::string> kBreweryDescriptions;
  static const std::vector<std::string> kUsernames;
  static const std::vector<std::string> kBios;
};

#endif  // BIERGARTEN_PIPELINE_DATA_GENERATION_MOCK_GENERATOR_H_
|
||||
84
pipeline/includes/database/database.h
Normal file
84
pipeline/includes/database/database.h
Normal file
@@ -0,0 +1,84 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_DATABASE_DATABASE_H_
|
||||
#define BIERGARTEN_PIPELINE_DATABASE_DATABASE_H_
|
||||
|
||||
#include <sqlite3.h>
|
||||
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
/// @brief Country row as read back from the countries table.
struct Country {
  /// @brief Country identifier from the source dataset.
  int id;
  /// @brief Country display name.
  std::string name;
  /// @brief ISO 3166-1 alpha-2 code.
  std::string iso2;
  /// @brief ISO 3166-1 alpha-3 code.
  std::string iso3;
};

/// @brief State/province row as read back from the states table.
struct State {
  /// @brief State or province identifier from the source dataset.
  int id;
  /// @brief State or province display name.
  std::string name;
  /// @brief State or province short code.
  std::string iso2;
  /// @brief Parent country identifier.
  int country_id;
};

/// @brief City row as read back from the cities table.
///
/// NOTE(review): the cities table also stores state_id, latitude and
/// longitude (see InsertCity), but this readback struct carries only
/// id/name/country_id — callers needing state or coordinates must query
/// them separately.
struct City {
  /// @brief City identifier from the source dataset.
  int id;
  /// @brief City display name.
  std::string name;
  /// @brief Parent country identifier.
  int country_id;
};
|
||||
|
||||
/// @brief Thread-safe SQLite wrapper for pipeline writes and readbacks.
///
/// Thread safety is expected to come from db_mutex_ serializing all access to
/// the connection handle — confirm that each method actually locks it in the
/// .cpp.
class SqliteDatabase {
 private:
  sqlite3* db_ = nullptr;  // Owned connection handle; closed in destructor.
  std::mutex db_mutex_;    // Intended to guard all access to db_.

  /// @brief Creates the schema objects (tables/indexes) on the open
  /// connection; called from Initialize().
  void InitializeSchema();

 public:
  /// @brief Closes the SQLite connection if initialized.
  ~SqliteDatabase();

  /// @brief Opens the SQLite database at db_path and creates schema objects.
  /// Defaults to an in-memory database when no path is given.
  void Initialize(const std::string& db_path = ":memory:");

  /// @brief Starts a database transaction for batched writes.
  void BeginTransaction();

  /// @brief Commits the active database transaction.
  void CommitTransaction();

  /// @brief Inserts a country row.
  void InsertCountry(int id, const std::string& name, const std::string& iso2,
                     const std::string& iso3);

  /// @brief Inserts a state row linked to a country.
  void InsertState(int id, int country_id, const std::string& name,
                   const std::string& iso2);

  /// @brief Inserts a city row linked to state and country.
  void InsertCity(int id, int state_id, int country_id,
                  const std::string& name, double latitude, double longitude);

  /// @brief Returns city records including parent country id.
  std::vector<City> QueryCities();

  /// @brief Returns countries with optional row limit (0 presumably means
  /// unlimited — confirm in the .cpp).
  std::vector<Country> QueryCountries(int limit = 0);

  /// @brief Returns states with optional row limit (0 presumably means
  /// unlimited — confirm in the .cpp).
  std::vector<State> QueryStates(int limit = 0);
};
|
||||
|
||||
#endif // BIERGARTEN_PIPELINE_DATABASE_DATABASE_H_
|
||||
17
pipeline/includes/json_handling/json_loader.h
Normal file
17
pipeline/includes/json_handling/json_loader.h
Normal file
@@ -0,0 +1,17 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_JSON_HANDLING_JSON_LOADER_H_
#define BIERGARTEN_PIPELINE_JSON_HANDLING_JSON_LOADER_H_

#include <string>

#include "database/database.h"
#include "json_handling/stream_parser.h"

/// @brief Loads world-city JSON data into SQLite through streaming parsing.
///
/// Stateless facade: the single static entry point wires the streaming
/// parser (stream_parser.h) to the database, so the whole JSON file never
/// needs to be resident in memory at once.
class JsonLoader {
 public:
  /// @brief Parses a JSON file and writes country/state/city rows into db.
  ///
  /// @param json_path Path to the countries+states+cities JSON document.
  /// @param db Destination database receiving the parsed rows.
  static void LoadWorldCities(const std::string& json_path,
                              SqliteDatabase& db);
};

#endif  // BIERGARTEN_PIPELINE_JSON_HANDLING_JSON_LOADER_H_
|
||||
52
pipeline/includes/json_handling/stream_parser.h
Normal file
52
pipeline/includes/json_handling/stream_parser.h
Normal file
@@ -0,0 +1,52 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_JSON_HANDLING_STREAM_PARSER_H_
#define BIERGARTEN_PIPELINE_JSON_HANDLING_STREAM_PARSER_H_

#include <cstddef>  // std::size_t — previously used unqualified without this include
#include <functional>
#include <string>

#include "database/database.h"
// NOTE: the previous forward declaration of SqliteDatabase, commented as
// "to avoid circular dependency", was redundant and misleading — the full
// database.h header is included directly above, so no circularity was
// being avoided. The forward declaration has been removed.

/// @brief In-memory representation of one parsed city entry.
struct CityRecord {
  int id;            // City id from the source dataset.
  int state_id;      // Parent state id.
  int country_id;    // Parent country id.
  std::string name;  // City display name.
  double latitude;   // Geographic latitude.
  double longitude;  // Geographic longitude.
};

/// @brief Streaming SAX parser that emits city records during traversal.
class StreamingJsonParser {
 public:
  /// @brief Parses file_path and invokes callbacks for city rows and progress.
  ///
  /// @param file_path Path to the countries+states+cities JSON document.
  /// @param db Database receiving rows during traversal.
  /// @param on_city Invoked once per fully-parsed city record.
  /// @param on_progress Optional progress callback; presumably
  ///        (bytes_processed, total) — confirm argument semantics in the .cpp.
  static void Parse(
      const std::string& file_path, SqliteDatabase& db,
      std::function<void(const CityRecord&)> on_city,
      std::function<void(std::size_t, std::size_t)> on_progress = nullptr);

 private:
  /// @brief Mutable SAX handler state while traversing nested JSON arrays.
  struct ParseState {
    int current_country_id = 0;  // Country currently being traversed.
    int current_state_id = 0;    // State currently being traversed.

    CityRecord current_city = {};  // City object under construction.
    bool building_city = false;    // True while inside a city object.
    std::string current_key;       // Most recent JSON object key seen.

    // Traversal bookkeeping: current nesting and which array we are inside.
    int array_depth = 0;
    int object_depth = 0;
    bool in_countries_array = false;
    bool in_states_array = false;
    bool in_cities_array = false;

    std::function<void(const CityRecord&)> on_city;
    std::function<void(std::size_t, std::size_t)> on_progress;
    std::size_t bytes_processed = 0;  // Running byte count for progress.
  };
};

#endif  // BIERGARTEN_PIPELINE_JSON_HANDLING_STREAM_PARSER_H_
|
||||
30
pipeline/includes/web_client/curl_web_client.h
Normal file
30
pipeline/includes/web_client/curl_web_client.h
Normal file
@@ -0,0 +1,30 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_WEB_CLIENT_CURL_WEB_CLIENT_H_
#define BIERGARTEN_PIPELINE_WEB_CLIENT_CURL_WEB_CLIENT_H_

#include <memory>

#include "web_client/web_client.h"

// RAII for curl_global_init/cleanup.
// An instance of this class should be created in main() before any curl
// operations and exist for the lifetime of the application.
class CurlGlobalState {
 public:
  CurlGlobalState();
  ~CurlGlobalState();
  // Non-copyable: global curl state must be initialized/torn down once.
  CurlGlobalState(const CurlGlobalState&) = delete;
  CurlGlobalState& operator=(const CurlGlobalState&) = delete;
};

/// @brief libcurl-backed implementation of the WebClient interface.
class CURLWebClient : public WebClient {
 public:
  CURLWebClient();
  ~CURLWebClient() override;

  /// @brief Downloads url into file_path; throws on error (see WebClient).
  void DownloadToFile(const std::string& url,
                      const std::string& file_path) override;
  /// @brief Performs a GET request and returns the body; throws on error.
  std::string Get(const std::string& url) override;
  /// @brief URL-encodes value for safe use in query strings.
  std::string UrlEncode(const std::string& value) override;
};

#endif  // BIERGARTEN_PIPELINE_WEB_CLIENT_CURL_WEB_CLIENT_H_
|
||||
22
pipeline/includes/web_client/web_client.h
Normal file
22
pipeline/includes/web_client/web_client.h
Normal file
@@ -0,0 +1,22 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_WEB_CLIENT_WEB_CLIENT_H_
#define BIERGARTEN_PIPELINE_WEB_CLIENT_WEB_CLIENT_H_

#include <string>

/// @brief Abstract HTTP client interface used by the pipeline.
class WebClient {
 public:
  virtual ~WebClient() = default;

  /// @brief Downloads content from a URL to a file. Throws on error.
  virtual void DownloadToFile(const std::string& url,
                              const std::string& file_path) = 0;

  /// @brief Performs a GET request and returns the response body as a
  /// string. Throws on error.
  virtual std::string Get(const std::string& url) = 0;

  /// @brief URL-encodes a string.
  virtual std::string UrlEncode(const std::string& value) = 0;
};

#endif  // BIERGARTEN_PIPELINE_WEB_CLIENT_WEB_CLIENT_H_
|
||||
27
pipeline/includes/wikipedia/wikipedia_service.h
Normal file
27
pipeline/includes/wikipedia/wikipedia_service.h
Normal file
@@ -0,0 +1,27 @@
|
||||
#ifndef BIERGARTEN_PIPELINE_WIKIPEDIA_WIKIPEDIA_SERVICE_H_
#define BIERGARTEN_PIPELINE_WIKIPEDIA_WIKIPEDIA_SERVICE_H_

#include <memory>
#include <string>
#include <string_view>
#include <unordered_map>

#include "web_client/web_client.h"

/// @brief Provides cached Wikipedia summary lookups for city and country pairs.
class WikipediaService {
 public:
  /// @brief Creates a new Wikipedia service with the provided web client.
  explicit WikipediaService(std::shared_ptr<WebClient> client);

  /// @brief Returns the Wikipedia summary extract for city and country.
  [[nodiscard]] std::string GetSummary(std::string_view city,
                                       std::string_view country);

 private:
  /// @brief Fetches the raw extract for a search query via the web client.
  std::string FetchExtract(std::string_view query);
  std::shared_ptr<WebClient> client_;
  // In-memory result cache; presumably keyed by the city/country query
  // string -- confirm against the implementation.
  std::unordered_map<std::string, std::string> cache_;
};

#endif  // BIERGARTEN_PIPELINE_WIKIPEDIA_WIKIPEDIA_SERVICE_H_
|
||||
157
pipeline/src/biergarten_data_generator.cpp
Normal file
157
pipeline/src/biergarten_data_generator.cpp
Normal file
@@ -0,0 +1,157 @@
|
||||
#include "biergarten_data_generator.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <filesystem>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "data_generation/data_downloader.h"
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "data_generation/mock_generator.h"
|
||||
#include "json_handling/json_loader.h"
|
||||
#include "wikipedia/wikipedia_service.h"
|
||||
|
||||
// Stores the pipeline dependencies for later stages; the referenced options
// and database must outlive this generator.
BiergartenDataGenerator::BiergartenDataGenerator(
    const ApplicationOptions& options, std::shared_ptr<WebClient> web_client,
    SqliteDatabase& database)
    : options_(options), webClient_(web_client), database_(database) {}
|
||||
|
||||
/// @brief Chooses and loads the brewery text generator.
///
/// Falls back to MockGenerator when no model path is configured; otherwise
/// builds a LlamaGenerator configured with the CLI sampling options.
std::unique_ptr<DataGenerator> BiergartenDataGenerator::InitializeGenerator() {
  spdlog::info("Initializing brewery generator...");

  std::unique_ptr<DataGenerator> generator;
  if (options_.model_path.empty()) {
    generator = std::make_unique<MockGenerator>();
    spdlog::info("[Generator] Using MockGenerator (no model path provided)");
  } else {
    auto llama_generator = std::make_unique<LlamaGenerator>();
    llama_generator->SetSamplingOptions(options_.temperature, options_.top_p,
                                        options_.seed);
    spdlog::info(
        "[Generator] Using LlamaGenerator: {} (temperature={}, top-p={}, "
        "seed={})",
        options_.model_path, options_.temperature, options_.top_p,
        options_.seed);
    generator = std::move(llama_generator);
  }
  // NOTE(review): Load() is also reached with an empty path in the mock
  // branch -- presumably a no-op for MockGenerator; confirm.
  generator->Load(options_.model_path);

  return generator;
}
|
||||
|
||||
/// @brief Ensures the geographic SQLite cache exists and is populated.
///
/// Downloads and parses the world-cities JSON only when either the JSON file
/// or the database file is missing from the cache directory.
void BiergartenDataGenerator::LoadGeographicData() {
  std::string json_path = options_.cache_dir + "/countries+states+cities.json";
  std::string db_path = options_.cache_dir + "/biergarten-pipeline.db";

  bool has_json_cache = std::filesystem::exists(json_path);
  bool has_db_cache = std::filesystem::exists(db_path);

  spdlog::info("Initializing SQLite database at {}...", db_path);
  database_.Initialize(db_path);

  if (has_db_cache && has_json_cache) {
    spdlog::info("[Pipeline] Cache hit: skipping download and parse");
  } else {
    // NOTE(review): when the DB exists but the JSON is missing, this parses
    // into the already-initialized database -- confirm the insert path
    // tolerates duplicate rows.
    spdlog::info("\n[Pipeline] Downloading geographic data from GitHub...");
    DataDownloader downloader(webClient_);
    downloader.DownloadCountriesDatabase(json_path, options_.commit);

    JsonLoader::LoadWorldCities(json_path, database_);
  }
}
|
||||
|
||||
std::vector<std::pair<City, std::string>>
|
||||
BiergartenDataGenerator::QueryCitiesWithCountries() {
|
||||
spdlog::info("\n=== GEOGRAPHIC DATA OVERVIEW ===");
|
||||
|
||||
auto cities = database_.QueryCities();
|
||||
|
||||
// Build a quick map of country id -> name for per-city lookups.
|
||||
auto all_countries = database_.QueryCountries(0);
|
||||
std::unordered_map<int, std::string> country_map;
|
||||
for (const auto& c : all_countries) {
|
||||
country_map[c.id] = c.name;
|
||||
}
|
||||
|
||||
spdlog::info("\nTotal records loaded:");
|
||||
spdlog::info(" Countries: {}", database_.QueryCountries(0).size());
|
||||
spdlog::info(" States: {}", database_.QueryStates(0).size());
|
||||
spdlog::info(" Cities: {}", cities.size());
|
||||
|
||||
// Cap at 30 entries.
|
||||
const size_t sample_count = std::min(size_t(30), cities.size());
|
||||
std::vector<std::pair<City, std::string>> result;
|
||||
|
||||
for (size_t i = 0; i < sample_count; i++) {
|
||||
const auto& city = cities[i];
|
||||
std::string country_name;
|
||||
const auto country_it = country_map.find(city.country_id);
|
||||
if (country_it != country_map.end()) {
|
||||
country_name = country_it->second;
|
||||
}
|
||||
result.push_back({city, country_name});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// @brief Attaches a Wikipedia summary ("region context") to each city.
///
/// Performs one GetSummary call per city; the WikipediaService caches
/// lookups internally, per its header documentation.
std::vector<BiergartenDataGenerator::EnrichedCity>
BiergartenDataGenerator::EnrichWithWikipedia(
    const std::vector<std::pair<City, std::string>>& cities) {
  WikipediaService wikipedia_service(webClient_);
  std::vector<EnrichedCity> enriched;

  for (const auto& [city, country_name] : cities) {
    const std::string region_context =
        wikipedia_service.GetSummary(city.name, country_name);
    spdlog::debug("[Pipeline] Region context for {}: {}", city.name,
                  region_context);

    enriched.push_back({city.id, city.name, country_name, region_context});
  }

  return enriched;
}
|
||||
|
||||
/// @brief Generates one brewery per enriched city into generatedBreweries_.
///
/// Clears any previous results first, so the method is safe to re-run.
void BiergartenDataGenerator::GenerateBreweries(
    DataGenerator& generator, const std::vector<EnrichedCity>& cities) {
  spdlog::info("\n=== SAMPLE BREWERY GENERATION ===");
  generatedBreweries_.clear();

  for (const auto& enriched_city : cities) {
    auto brewery = generator.GenerateBrewery(enriched_city.city_name,
                                             enriched_city.country_name,
                                             enriched_city.region_context);
    generatedBreweries_.push_back(
        {enriched_city.city_id, enriched_city.city_name, brewery});
  }
}
|
||||
|
||||
/// @brief Dumps every generated brewery record to the log, numbered from 1.
void BiergartenDataGenerator::LogResults() const {
  spdlog::info("\n=== GENERATED DATA DUMP ===");
  size_t ordinal = 0;
  for (const auto& record : generatedBreweries_) {
    ++ordinal;
    spdlog::info("{}. city_id={} city=\"{}\"", ordinal, record.city_id,
                 record.city_name);
    spdlog::info(" brewery_name=\"{}\"", record.brewery.name);
    spdlog::info(" brewery_description=\"{}\"", record.brewery.description);
  }
}
|
||||
|
||||
/// @brief Executes the full pipeline end to end.
/// @return 0 on success, 1 if any stage throws.
int BiergartenDataGenerator::Run() {
  try {
    LoadGeographicData();
    auto generator = InitializeGenerator();
    auto cities = QueryCitiesWithCountries();
    auto enriched = EnrichWithWikipedia(cities);
    GenerateBreweries(*generator, enriched);
    LogResults();

    spdlog::info("\nOK: Pipeline completed successfully");
    return 0;
  } catch (const std::exception& e) {
    // Any stage failure aborts the run; the message is surfaced to the log.
    spdlog::error("ERROR: Pipeline failed: {}", e.what());
    return 1;
  }
}
|
||||
49
pipeline/src/data_generation/data_downloader.cpp
Normal file
49
pipeline/src/data_generation/data_downloader.cpp
Normal file
@@ -0,0 +1,49 @@
|
||||
#include "data_generation/data_downloader.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <filesystem>
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
#include <stdexcept>
|
||||
|
||||
#include "web_client/web_client.h"
|
||||
|
||||
// Takes shared ownership of the web client used for downloads.
DataDownloader::DataDownloader(std::shared_ptr<WebClient> web_client)
    : web_client_(std::move(web_client)) {}

// Out-of-line destructor definition (keeps the header light).
DataDownloader::~DataDownloader() {}
|
||||
|
||||
// Returns true when file_path names an existing filesystem entry.
bool DataDownloader::FileExists(const std::string& file_path) {
  return std::filesystem::exists(file_path);
}
|
||||
|
||||
std::string DataDownloader::DownloadCountriesDatabase(
|
||||
const std::string& cache_path, const std::string& commit) {
|
||||
if (FileExists(cache_path)) {
|
||||
spdlog::info("[DataDownloader] Cache hit: {}", cache_path);
|
||||
return cache_path;
|
||||
}
|
||||
|
||||
std::string short_commit = commit;
|
||||
if (commit.length() > 7) {
|
||||
short_commit = commit.substr(0, 7);
|
||||
}
|
||||
|
||||
std::string url =
|
||||
"https://raw.githubusercontent.com/dr5hn/"
|
||||
"countries-states-cities-database/" +
|
||||
short_commit + "/json/countries+states+cities.json";
|
||||
|
||||
spdlog::info("[DataDownloader] Downloading: {}", url);
|
||||
|
||||
web_client_->DownloadToFile(url, cache_path);
|
||||
|
||||
std::ifstream file_check(cache_path, std::ios::binary | std::ios::ate);
|
||||
std::streamsize size = file_check.tellg();
|
||||
file_check.close();
|
||||
|
||||
spdlog::info("[DataDownloader] OK: Download complete: {} ({:.2f} MB)",
|
||||
cache_path, (size / (1024.0 * 1024.0)));
|
||||
return cache_path;
|
||||
}
|
||||
16
pipeline/src/data_generation/llama/destructor.cpp
Normal file
16
pipeline/src/data_generation/llama/destructor.cpp
Normal file
@@ -0,0 +1,16 @@
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "llama.h"
|
||||
|
||||
// Releases llama.cpp resources in reverse acquisition order: the context is
// freed before the model it was created from, and both before the backend.
LlamaGenerator::~LlamaGenerator() {
  if (context_ != nullptr) {
    llama_free(context_);
    context_ = nullptr;
  }

  if (model_ != nullptr) {
    llama_model_free(model_);
    model_ = nullptr;
  }

  // NOTE(review): llama_backend_free() tears down global backend state --
  // presumably only one LlamaGenerator exists at a time; confirm.
  llama_backend_free();
}
|
||||
74
pipeline/src/data_generation/llama/generate_brewery.cpp
Normal file
74
pipeline/src/data_generation/llama/generate_brewery.cpp
Normal file
@@ -0,0 +1,74 @@
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "data_generation/llama_generator_helpers.h"
|
||||
|
||||
/// @brief Generates a brewery name/description for a city via the LLM.
///
/// Prompts the model for a single JSON object and validates it; on invalid
/// output, retries up to 3 times with a corrective prompt that echoes the
/// validation error. Throws std::runtime_error when all attempts fail.
BreweryResult LlamaGenerator::GenerateBrewery(
    const std::string& city_name, const std::string& country_name,
    const std::string& region_context) {
  // Normalize/truncate the Wikipedia context before embedding it in prompts.
  // (Called with the helper's default max_chars -- presumably set in the
  // header; confirm.)
  const std::string safe_region_context =
      PrepareRegionContextPublic(region_context);

  const std::string system_prompt =
      "You are the brewmaster and owner of a local craft brewery. "
      "Write a name and a short, soulful description for your brewery that "
      "reflects your pride in the local community and your craft. "
      "The tone should be authentic and welcoming, like a note on a "
      "chalkboard "
      "menu. Output ONLY a single JSON object with keys \"name\" and "
      "\"description\". "
      "Do not include markdown formatting or backticks.";

  std::string prompt =
      "Write a brewery name and place-specific long description for a craft "
      "brewery in " +
      city_name +
      (country_name.empty() ? std::string("")
                            : std::string(", ") + country_name) +
      (safe_region_context.empty()
           ? std::string(".")
           : std::string(". Regional context: ") + safe_region_context);

  const int max_attempts = 3;
  std::string raw;
  std::string last_error;
  for (int attempt = 0; attempt < max_attempts; ++attempt) {
    raw = Infer(system_prompt, prompt, 384);
    spdlog::debug("LlamaGenerator: raw output (attempt {}): {}", attempt + 1,
                  raw);

    std::string name;
    std::string description;
    const std::string validation_error =
        ValidateBreweryJsonPublic(raw, name, description);
    if (validation_error.empty()) {
      return {std::move(name), std::move(description)};
    }

    last_error = validation_error;
    spdlog::warn("LlamaGenerator: malformed brewery JSON (attempt {}): {}",
                 attempt + 1, validation_error);

    // Rebuild the prompt for the next attempt, feeding the validation error
    // back to the model along with the exact expected schema.
    prompt =
        "Your previous response was invalid. Error: " + validation_error +
        "\nReturn ONLY valid JSON with this exact schema: "
        "{\"name\": \"string\", \"description\": \"string\"}."
        "\nDo not include markdown, comments, or extra keys."
        "\n\nLocation: " +
        city_name +
        (country_name.empty() ? std::string("")
                              : std::string(", ") + country_name) +
        (safe_region_context.empty()
             ? std::string("")
             : std::string("\nRegional context: ") + safe_region_context);
  }

  spdlog::error(
      "LlamaGenerator: malformed brewery response after {} attempts: "
      "{}",
      max_attempts, last_error.empty() ? raw : last_error);
  throw std::runtime_error("LlamaGenerator: malformed brewery response");
}
|
||||
57
pipeline/src/data_generation/llama/generate_user.cpp
Normal file
57
pipeline/src/data_generation/llama/generate_user.cpp
Normal file
@@ -0,0 +1,57 @@
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "data_generation/llama_generator_helpers.h"
|
||||
|
||||
/// @brief Generates a beer-enthusiast user profile (username + bio).
///
/// Expects the model to answer with two lines: username then bio. Retries
/// up to 3 times on malformed output; throws std::runtime_error when all
/// attempts fail.
UserResult LlamaGenerator::GenerateUser(const std::string& locale) {
  const std::string system_prompt =
      "You generate plausible social media profiles for craft beer "
      "enthusiasts. "
      "Respond with exactly two lines: "
      "the first line is a username (lowercase, no spaces, 8-20 characters), "
      "the second line is a one-sentence bio (20-40 words). "
      "The profile should feel consistent with the locale. "
      "No preamble, no labels.";

  std::string prompt =
      "Generate a craft beer enthusiast profile. Locale: " + locale;

  const int max_attempts = 3;
  std::string raw;
  for (int attempt = 0; attempt < max_attempts; ++attempt) {
    raw = Infer(system_prompt, prompt, 128);
    spdlog::debug("LlamaGenerator (user): raw output (attempt {}): {}",
                  attempt + 1, raw);

    try {
      auto [username, bio] = ParseTwoLineResponsePublic(
          raw, "LlamaGenerator: malformed user response");

      // Usernames must not contain whitespace; strip any the model emitted.
      username.erase(
          std::remove_if(username.begin(), username.end(),
                         [](unsigned char ch) { return std::isspace(ch); }),
          username.end());

      if (username.empty() || bio.empty()) {
        throw std::runtime_error("LlamaGenerator: malformed user response");
      }

      // Hard cap the bio length.
      if (bio.size() > 200) bio = bio.substr(0, 200);

      return {username, bio};
    } catch (const std::exception& e) {
      // Parse failures are retried; the final failure is rethrown below.
      spdlog::warn(
          "LlamaGenerator: malformed user response (attempt {}): {}",
          attempt + 1, e.what());
    }
  }

  spdlog::error(
      "LlamaGenerator: malformed user response after {} attempts: {}",
      max_attempts, raw);
  throw std::runtime_error("LlamaGenerator: malformed user response");
}
|
||||
398
pipeline/src/data_generation/llama/helpers.cpp
Normal file
398
pipeline/src/data_generation/llama/helpers.cpp
Normal file
@@ -0,0 +1,398 @@
|
||||
#include <algorithm>
|
||||
#include <array>
|
||||
#include <boost/json.hpp>
|
||||
#include <cctype>
|
||||
#include <sstream>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "llama.h"
|
||||
|
||||
namespace {
|
||||
|
||||
// Returns value with leading and trailing whitespace removed.
std::string Trim(std::string value) {
  std::size_t first = 0;
  while (first < value.size() &&
         std::isspace(static_cast<unsigned char>(value[first]))) {
    ++first;
  }

  std::size_t last = value.size();
  while (last > first &&
         std::isspace(static_cast<unsigned char>(value[last - 1]))) {
    --last;
  }

  return value.substr(first, last - first);
}
|
||||
|
||||
std::string CondenseWhitespace(std::string text) {
|
||||
std::string out;
|
||||
out.reserve(text.size());
|
||||
|
||||
bool in_whitespace = false;
|
||||
for (unsigned char ch : text) {
|
||||
if (std::isspace(ch)) {
|
||||
if (!in_whitespace) {
|
||||
out.push_back(' ');
|
||||
in_whitespace = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
in_whitespace = false;
|
||||
out.push_back(static_cast<char>(ch));
|
||||
}
|
||||
|
||||
return Trim(std::move(out));
|
||||
}
|
||||
|
||||
// Normalizes region_context whitespace and truncates it to roughly
// max_chars, preferring to cut at a word boundary in the second half, then
// appends "...". Note: the returned string may exceed max_chars by the
// 3-char ellipsis.
std::string PrepareRegionContext(std::string_view region_context,
                                 std::size_t max_chars) {
  std::string normalized = CondenseWhitespace(std::string(region_context));
  if (normalized.size() <= max_chars) {
    return normalized;
  }

  normalized.resize(max_chars);
  // Back up to the last space, but only if that keeps at least half the
  // budget -- avoids degenerate truncation on very long words.
  const std::size_t last_space = normalized.find_last_of(' ');
  if (last_space != std::string::npos && last_space > max_chars / 2) {
    normalized.resize(last_space);
  }

  normalized += "...";
  return normalized;
}
|
||||
|
||||
// Removes common LLM-output decorations from a line: a leading bullet
// ('-'/'*') or numbered-list prefix ("1." / "2)"), then any of a fixed set
// of case-insensitive labels ("name:", "bio:", ...). Returns the trimmed
// remainder.
std::string StripCommonPrefix(std::string line) {
  line = Trim(std::move(line));

  if (!line.empty() && (line[0] == '-' || line[0] == '*')) {
    line = Trim(line.substr(1));
  } else {
    // Strip "<digits>." or "<digits>)" numbering.
    std::size_t i = 0;
    while (i < line.size() &&
           std::isdigit(static_cast<unsigned char>(line[i]))) {
      ++i;
    }
    if (i > 0 && i < line.size() && (line[i] == '.' || line[i] == ')')) {
      line = Trim(line.substr(i + 1));
    }
  }

  // Case-insensitive prefix stripper; mutates `line` in place on match.
  auto strip_label = [&line](const std::string& label) {
    if (line.size() >= label.size()) {
      bool matches = true;
      for (std::size_t i = 0; i < label.size(); ++i) {
        if (std::tolower(static_cast<unsigned char>(line[i])) !=
            std::tolower(static_cast<unsigned char>(label[i]))) {
          matches = false;
          break;
        }
      }
      if (matches) {
        line = Trim(line.substr(label.size()));
      }
    }
  };

  // Order matters only in that each label is tried against the current
  // (already-stripped) line; "name:" cannot match a "brewery name:" line.
  strip_label("name:");
  strip_label("brewery name:");
  strip_label("description:");
  strip_label("username:");
  strip_label("bio:");

  return Trim(std::move(line));
}
|
||||
|
||||
// Splits a raw LLM response into a (first-line, rest-joined) pair.
//
// Normalizes CR to LF, strips bullets/labels per line, drops empty lines,
// then filters out chatter: whole-line tags like "<think>" and lines
// starting with "okay," or "hmm". The first surviving line becomes .first;
// all remaining lines are joined with spaces into .second. Throws
// std::runtime_error(error_message) when fewer than two usable lines remain.
std::pair<std::string, std::string> ParseTwoLineResponse(
    const std::string& raw, const std::string& error_message) {
  std::string normalized = raw;
  std::replace(normalized.begin(), normalized.end(), '\r', '\n');

  std::vector<std::string> lines;
  std::stringstream stream(normalized);
  std::string line;
  while (std::getline(stream, line)) {
    line = StripCommonPrefix(std::move(line));
    if (!line.empty()) lines.push_back(std::move(line));
  }

  std::vector<std::string> filtered;
  for (auto& l : lines) {
    std::string low = l;
    std::transform(low.begin(), low.end(), low.begin(), [](unsigned char c) {
      return static_cast<char>(std::tolower(c));
    });
    // Drop lines that look like markup tags or thinking-out-loud preambles.
    if (!l.empty() && l.front() == '<' && low.back() == '>') continue;
    if (low.rfind("okay,", 0) == 0 || low.rfind("hmm", 0) == 0) continue;
    filtered.push_back(std::move(l));
  }

  if (filtered.size() < 2) throw std::runtime_error(error_message);

  std::string first = Trim(filtered.front());
  std::string second;
  for (size_t i = 1; i < filtered.size(); ++i) {
    if (!second.empty()) second += ' ';
    second += filtered[i];
  }
  second = Trim(std::move(second));

  if (first.empty() || second.empty()) throw std::runtime_error(error_message);
  return {first, second};
}
|
||||
|
||||
// Renders a single user message through the model's chat template. Falls
// back to the raw prompt when the model ships no template. Throws
// std::runtime_error if template application fails.
std::string ToChatPrompt(const llama_model* model,
                         const std::string& user_prompt) {
  const char* tmpl = llama_model_chat_template(model, nullptr);
  if (tmpl == nullptr) {
    return user_prompt;
  }

  const llama_chat_message message{"user", user_prompt.c_str()};

  // Initial guess for the rendered size; grown below if it falls short.
  std::vector<char> buffer(
      std::max<std::size_t>(1024, user_prompt.size() * 4));
  int32_t required =
      llama_chat_apply_template(tmpl, &message, 1, true, buffer.data(),
                                static_cast<int32_t>(buffer.size()));

  if (required < 0) {
    throw std::runtime_error("LlamaGenerator: failed to apply chat template");
  }

  // A return >= buffer size means the output was truncated: resize to the
  // reported requirement and render again.
  if (required >= static_cast<int32_t>(buffer.size())) {
    buffer.resize(static_cast<std::size_t>(required) + 1);
    required =
        llama_chat_apply_template(tmpl, &message, 1, true, buffer.data(),
                                  static_cast<int32_t>(buffer.size()));
    if (required < 0) {
      throw std::runtime_error(
          "LlamaGenerator: failed to apply chat template");
    }
  }

  return std::string(buffer.data(), static_cast<std::size_t>(required));
}
|
||||
|
||||
// Renders a system + user message pair through the model's chat template.
// Falls back to simple concatenation when the model ships no template.
// Throws std::runtime_error if template application fails.
std::string ToChatPrompt(const llama_model* model,
                         const std::string& system_prompt,
                         const std::string& user_prompt) {
  const char* tmpl = llama_model_chat_template(model, nullptr);
  if (tmpl == nullptr) {
    return system_prompt + "\n\n" + user_prompt;
  }

  const llama_chat_message messages[2] = {{"system", system_prompt.c_str()},
                                          {"user", user_prompt.c_str()}};

  // Initial guess for the rendered size; grown below if it falls short.
  std::vector<char> buffer(std::max<std::size_t>(
      1024, (system_prompt.size() + user_prompt.size()) * 4));
  int32_t required =
      llama_chat_apply_template(tmpl, messages, 2, true, buffer.data(),
                                static_cast<int32_t>(buffer.size()));

  if (required < 0) {
    throw std::runtime_error("LlamaGenerator: failed to apply chat template");
  }

  // A return >= buffer size means the output was truncated: resize to the
  // reported requirement and render again.
  if (required >= static_cast<int32_t>(buffer.size())) {
    buffer.resize(static_cast<std::size_t>(required) + 1);
    required =
        llama_chat_apply_template(tmpl, messages, 2, true, buffer.data(),
                                  static_cast<int32_t>(buffer.size()));
    if (required < 0) {
      throw std::runtime_error(
          "LlamaGenerator: failed to apply chat template");
    }
  }

  return std::string(buffer.data(), static_cast<std::size_t>(required));
}
|
||||
|
||||
// Decodes one token into its text piece and appends it to output. Uses a
// fixed stack buffer first; llama_token_to_piece returns a negative value
// whose magnitude is the required size, in which case a heap buffer of that
// size is used for the retry. Throws std::runtime_error on repeated failure.
void AppendTokenPiece(const llama_vocab* vocab, llama_token token,
                      std::string& output) {
  std::array<char, 256> buffer{};
  int32_t bytes =
      llama_token_to_piece(vocab, token, buffer.data(),
                           static_cast<int32_t>(buffer.size()), 0, true);

  if (bytes < 0) {
    // -bytes is the required buffer size; retry with an exact-sized buffer.
    std::vector<char> dynamic_buffer(static_cast<std::size_t>(-bytes));
    bytes = llama_token_to_piece(vocab, token, dynamic_buffer.data(),
                                 static_cast<int32_t>(dynamic_buffer.size()),
                                 0, true);
    if (bytes < 0) {
      throw std::runtime_error(
          "LlamaGenerator: failed to decode sampled token piece");
    }

    output.append(dynamic_buffer.data(), static_cast<std::size_t>(bytes));
    return;
  }

  output.append(buffer.data(), static_cast<std::size_t>(bytes));
}
|
||||
|
||||
// Scans text for the first balanced top-level {...} object, tracking JSON
// string literals and backslash escapes so braces inside strings are
// ignored. On success copies the object text into json_out and returns
// true; otherwise returns false and leaves json_out untouched.
bool ExtractFirstJsonObject(const std::string& text, std::string& json_out) {
  std::size_t object_start = std::string::npos;
  int brace_depth = 0;
  bool inside_string = false;
  bool pending_escape = false;

  for (std::size_t pos = 0; pos < text.size(); ++pos) {
    const char current = text[pos];

    if (inside_string) {
      if (pending_escape) {
        pending_escape = false;
      } else if (current == '\\') {
        pending_escape = true;
      } else if (current == '"') {
        inside_string = false;
      }
    } else if (current == '"') {
      inside_string = true;
    } else if (current == '{') {
      if (brace_depth == 0) object_start = pos;
      ++brace_depth;
    } else if (current == '}') {
      // A stray '}' before any '{' is ignored.
      if (brace_depth > 0) {
        --brace_depth;
        if (brace_depth == 0 && object_start != std::string::npos) {
          json_out = text.substr(object_start, pos - object_start + 1);
          return true;
        }
      }
    }
  }

  return false;
}
|
||||
|
||||
// Validates raw LLM output as a brewery JSON object.
//
// On success fills name_out/description_out with trimmed values and returns
// an empty string; on failure returns a human-readable error. If the whole
// payload does not parse, the first balanced {...} object embedded in it is
// extracted and parsed instead (handles models that wrap JSON in prose).
// Literal "string" values are rejected as schema-placeholder echoes.
std::string ValidateBreweryJson(const std::string& raw, std::string& name_out,
                                std::string& description_out) {
  // Shared structural check used for both parse paths below. Note: it
  // writes name_out/description_out before the emptiness checks, so the
  // outputs may hold partial data on failure -- callers only use them when
  // the returned error is empty.
  auto validate_object = [&](const boost::json::value& jv,
                             std::string& error_out) -> bool {
    if (!jv.is_object()) {
      error_out = "JSON root must be an object";
      return false;
    }

    const auto& obj = jv.get_object();
    if (!obj.contains("name") || !obj.at("name").is_string()) {
      error_out = "JSON field 'name' is missing or not a string";
      return false;
    }

    if (!obj.contains("description") || !obj.at("description").is_string()) {
      error_out = "JSON field 'description' is missing or not a string";
      return false;
    }

    name_out = Trim(std::string(obj.at("name").as_string().c_str()));
    description_out =
        Trim(std::string(obj.at("description").as_string().c_str()));

    if (name_out.empty()) {
      error_out = "JSON field 'name' must not be empty";
      return false;
    }

    if (description_out.empty()) {
      error_out = "JSON field 'description' must not be empty";
      return false;
    }

    // Lower-case copies for the placeholder check below.
    std::string name_lower = name_out;
    std::string description_lower = description_out;
    std::transform(
        name_lower.begin(), name_lower.end(), name_lower.begin(),
        [](unsigned char c) { return static_cast<char>(std::tolower(c)); });
    std::transform(description_lower.begin(), description_lower.end(),
                   description_lower.begin(), [](unsigned char c) {
                     return static_cast<char>(std::tolower(c));
                   });

    if (name_lower == "string" || description_lower == "string") {
      error_out = "JSON appears to be a schema placeholder, not content";
      return false;
    }

    error_out.clear();
    return true;
  };

  boost::system::error_code ec;
  boost::json::value jv = boost::json::parse(raw, ec);
  std::string validation_error;
  if (ec) {
    // Whole payload is not valid JSON: try the first embedded object.
    std::string extracted;
    if (!ExtractFirstJsonObject(raw, extracted)) {
      return "JSON parse error: " + ec.message();
    }

    ec.clear();
    jv = boost::json::parse(extracted, ec);
    if (ec) {
      return "JSON parse error: " + ec.message();
    }

    if (!validate_object(jv, validation_error)) {
      return validation_error;
    }

    return {};
  }

  if (!validate_object(jv, validation_error)) {
    return validation_error;
  }

  return {};
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// Out-of-line wrappers that expose the anonymous-namespace helpers above to
// other translation units (declared in llama_generator_helpers.h).
std::string PrepareRegionContextPublic(std::string_view region_context,
                                       std::size_t max_chars) {
  return PrepareRegionContext(region_context, max_chars);
}

std::pair<std::string, std::string> ParseTwoLineResponsePublic(
    const std::string& raw, const std::string& error_message) {
  return ParseTwoLineResponse(raw, error_message);
}

// User-message-only chat-template rendering.
std::string ToChatPromptPublic(const llama_model* model,
                               const std::string& user_prompt) {
  return ToChatPrompt(model, user_prompt);
}

// System + user chat-template rendering.
std::string ToChatPromptPublic(const llama_model* model,
                               const std::string& system_prompt,
                               const std::string& user_prompt) {
  return ToChatPrompt(model, system_prompt, user_prompt);
}

void AppendTokenPiecePublic(const llama_vocab* vocab, llama_token token,
                            std::string& output) {
  AppendTokenPiece(vocab, token, output);
}

std::string ValidateBreweryJsonPublic(const std::string& raw,
                                      std::string& name_out,
                                      std::string& description_out) {
  return ValidateBreweryJson(raw, name_out, description_out);
}
|
||||
111
pipeline/src/data_generation/llama/infer.cpp
Normal file
111
pipeline/src/data_generation/llama/infer.cpp
Normal file
@@ -0,0 +1,111 @@
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <memory>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "data_generation/llama_generator_helpers.h"
|
||||
#include "llama.h"
|
||||
|
||||
// Convenience overload: renders a user-only chat prompt, then runs
// InferFormatted.
std::string LlamaGenerator::Infer(const std::string& prompt, int max_tokens) {
  return InferFormatted(ToChatPromptPublic(model_, prompt), max_tokens);
}

// Renders system + user messages through the chat template, then runs
// InferFormatted.
std::string LlamaGenerator::Infer(const std::string& system_prompt,
                                  const std::string& prompt, int max_tokens) {
  return InferFormatted(ToChatPromptPublic(model_, system_prompt, prompt),
                        max_tokens);
}
|
||||
|
||||
// Core generation loop: tokenizes an already-chat-formatted prompt, decodes
// it, then samples up to `max_tokens` new tokens one at a time until an
// end-of-generation token is produced. Throws std::runtime_error on any
// model/context/decoding failure. The call order (clear memory -> tokenize
// -> decode prompt -> sample/decode loop) matches the llama.cpp API contract
// and must not be rearranged.
std::string LlamaGenerator::InferFormatted(const std::string& formatted_prompt,
                                           int max_tokens) {
  if (model_ == nullptr || context_ == nullptr)
    throw std::runtime_error("LlamaGenerator: model not loaded");

  const llama_vocab* vocab = llama_model_get_vocab(model_);
  if (vocab == nullptr)
    throw std::runtime_error("LlamaGenerator: vocab unavailable");

  // Reset the KV-cache/memory so this call starts from a clean context and
  // is not influenced by a previous generation.
  llama_memory_clear(llama_get_memory(context_), true);

  // Initial guess: one token per byte plus slack for special tokens. If the
  // guess is too small, llama_tokenize reports the required size as a
  // negative count and we retry below.
  std::vector<llama_token> prompt_tokens(formatted_prompt.size() + 8);
  int32_t token_count = llama_tokenize(
      vocab, formatted_prompt.c_str(),
      static_cast<int32_t>(formatted_prompt.size()), prompt_tokens.data(),
      static_cast<int32_t>(prompt_tokens.size()), true, true);

  if (token_count < 0) {
    // Buffer was too small: negative value is the needed capacity.
    prompt_tokens.resize(static_cast<std::size_t>(-token_count));
    token_count = llama_tokenize(
        vocab, formatted_prompt.c_str(),
        static_cast<int32_t>(formatted_prompt.size()), prompt_tokens.data(),
        static_cast<int32_t>(prompt_tokens.size()), true, true);
  }

  if (token_count < 0)
    throw std::runtime_error("LlamaGenerator: prompt tokenization failed");

  const int32_t n_ctx = static_cast<int32_t>(llama_n_ctx(context_));
  const int32_t n_batch = static_cast<int32_t>(llama_n_batch(context_));
  if (n_ctx <= 1 || n_batch <= 0)
    throw std::runtime_error("LlamaGenerator: invalid context or batch size");

  // Clamp the generation budget into [1, n_ctx - 1] so at least one prompt
  // token always fits alongside the generated tokens.
  const int32_t effective_max_tokens =
      std::max(1, std::min(max_tokens, n_ctx - 1));
  // The prompt must fit in a single decode batch AND leave room in the
  // context window for the generated tokens.
  int32_t prompt_budget = std::min(n_batch, n_ctx - effective_max_tokens);
  prompt_budget = std::max<int32_t>(1, prompt_budget);

  prompt_tokens.resize(static_cast<std::size_t>(token_count));
  if (token_count > prompt_budget) {
    // NOTE(review): truncation keeps the prompt *prefix*; the tail of a long
    // prompt (often the actual question) is dropped — confirm this is the
    // intended trade-off.
    spdlog::warn(
        "LlamaGenerator: prompt too long ({} tokens), truncating to {} "
        "tokens to fit n_batch/n_ctx limits",
        token_count, prompt_budget);
    prompt_tokens.resize(static_cast<std::size_t>(prompt_budget));
    token_count = prompt_budget;
  }

  // Decode the whole prompt in one batch to prime the model state.
  const llama_batch prompt_batch = llama_batch_get_one(
      prompt_tokens.data(), static_cast<int32_t>(prompt_tokens.size()));
  if (llama_decode(context_, prompt_batch) != 0)
    throw std::runtime_error("LlamaGenerator: prompt decode failed");

  // Build the sampler chain: temperature -> top-p -> seeded distribution
  // sampling. unique_ptr with llama_sampler_free guarantees cleanup on any
  // exit path, including the throws below.
  llama_sampler_chain_params sampler_params =
      llama_sampler_chain_default_params();
  using SamplerPtr =
      std::unique_ptr<llama_sampler, decltype(&llama_sampler_free)>;
  SamplerPtr sampler(llama_sampler_chain_init(sampler_params),
                     &llama_sampler_free);
  if (!sampler)
    throw std::runtime_error("LlamaGenerator: failed to initialize sampler");

  llama_sampler_chain_add(sampler.get(),
                          llama_sampler_init_temp(sampling_temperature_));
  llama_sampler_chain_add(sampler.get(),
                          llama_sampler_init_top_p(sampling_top_p_, 1));
  llama_sampler_chain_add(sampler.get(),
                          llama_sampler_init_dist(sampling_seed_));

  std::vector<llama_token> generated_tokens;
  generated_tokens.reserve(static_cast<std::size_t>(effective_max_tokens));

  // Autoregressive loop: sample the next token from the last decoded logits
  // (index -1), stop on end-of-generation, otherwise feed it back in.
  for (int i = 0; i < effective_max_tokens; ++i) {
    const llama_token next =
        llama_sampler_sample(sampler.get(), context_, -1);
    if (llama_vocab_is_eog(vocab, next)) break;
    generated_tokens.push_back(next);
    // llama_batch_get_one needs a mutable pointer, so copy to a local.
    llama_token token = next;
    const llama_batch one_token_batch = llama_batch_get_one(&token, 1);
    if (llama_decode(context_, one_token_batch) != 0)
      throw std::runtime_error(
          "LlamaGenerator: decode failed during generation");
  }

  // Detokenize the collected tokens into the final text.
  std::string output;
  for (const llama_token token : generated_tokens)
    AppendTokenPiecePublic(vocab, token, output);
  return output;
}
|
||||
42
pipeline/src/data_generation/llama/load.cpp
Normal file
42
pipeline/src/data_generation/llama/load.cpp
Normal file
@@ -0,0 +1,42 @@
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "llama.h"
|
||||
|
||||
// Loads a GGUF model from `model_path` and creates an inference context.
// Safe to call repeatedly: any previously loaded context/model is freed
// first (context before model, matching llama.cpp ownership order).
// Throws std::runtime_error on an empty path or any load failure; on
// context-creation failure the freshly loaded model is released so the
// object is left in a clean "not loaded" state.
void LlamaGenerator::Load(const std::string& model_path) {
  if (model_path.empty())
    throw std::runtime_error("LlamaGenerator: model path must not be empty");

  // Release previous resources before reloading (context depends on model,
  // so it must go first).
  if (context_ != nullptr) {
    llama_free(context_);
    context_ = nullptr;
  }
  if (model_ != nullptr) {
    llama_model_free(model_);
    model_ = nullptr;
  }

  // NOTE(review): called on every Load(); assumed safe to invoke multiple
  // times without a matching llama_backend_free — confirm against the
  // llama.cpp version in use.
  llama_backend_init();

  llama_model_params model_params = llama_model_default_params();
  model_ = llama_model_load_from_file(model_path.c_str(), model_params);
  if (model_ == nullptr) {
    throw std::runtime_error(
        "LlamaGenerator: failed to load model from path: " + model_path);
  }

  llama_context_params context_params = llama_context_default_params();
  // Fixed 2048-token context window for all generations.
  context_params.n_ctx = 2048;

  context_ = llama_init_from_model(model_, context_params);
  if (context_ == nullptr) {
    // Roll back the model load so we never hold a model without a context.
    llama_model_free(model_);
    model_ = nullptr;
    throw std::runtime_error("LlamaGenerator: failed to create context");
  }

  spdlog::info("[LlamaGenerator] Loaded model: {}", model_path);
}
|
||||
25
pipeline/src/data_generation/llama/set_sampling_options.cpp
Normal file
25
pipeline/src/data_generation/llama/set_sampling_options.cpp
Normal file
@@ -0,0 +1,25 @@
|
||||
#include <stdexcept>
|
||||
|
||||
#include "data_generation/llama_generator.h"
|
||||
#include "llama.h"
|
||||
|
||||
// Validates and stores the sampling parameters used by InferFormatted.
// temperature must be non-negative, top_p must lie in (0, 1], and seed is
// either a non-negative value or -1 to request llama.cpp's default seed.
// Throws std::runtime_error on any invalid argument; on success all three
// members are updated atomically from the caller's perspective.
void LlamaGenerator::SetSamplingOptions(float temperature, float top_p,
                                        int seed) {
  // Validation happens up front so a failed call leaves the previous
  // configuration untouched. (Comparisons are kept in this exact form so
  // NaN inputs behave the same as before.)
  if (temperature < 0.0f) {
    throw std::runtime_error(
        "LlamaGenerator: sampling temperature must be >= 0");
  }
  if (!(top_p > 0.0f && top_p <= 1.0f)) {
    throw std::runtime_error(
        "LlamaGenerator: sampling top-p must be in (0, 1]");
  }
  if (seed < -1) {
    throw std::runtime_error(
        "LlamaGenerator: seed must be >= 0, or -1 for random");
  }

  sampling_temperature_ = temperature;
  sampling_top_p_ = top_p;
  // -1 (the only negative value that passes validation) maps to the
  // library's default seed sentinel.
  if (seed < 0) {
    sampling_seed_ = static_cast<uint32_t>(LLAMA_DEFAULT_SEED);
  } else {
    sampling_seed_ = static_cast<uint32_t>(seed);
  }
}
|
||||
65
pipeline/src/data_generation/mock/data.cpp
Normal file
65
pipeline/src/data_generation/mock/data.cpp
Normal file
@@ -0,0 +1,65 @@
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "data_generation/mock_generator.h"
|
||||
|
||||
// Static word/phrase pools used by MockGenerator to deterministically build
// brewery names, descriptions, usernames, and bios from hashed inputs.
// Pool sizes matter only as modulus divisors; entries may be reordered or
// extended, but doing so changes which entry a given hash selects.

// Adjective half of a generated brewery name ("<adjective> <noun>").
const std::vector<std::string> MockGenerator::kBreweryAdjectives = {
    "Craft", "Heritage", "Local", "Artisan", "Pioneer", "Golden",
    "Modern", "Classic", "Summit", "Northern", "Riverstone", "Barrel",
    "Hinterland", "Harbor", "Wild", "Granite", "Copper", "Maple"};

// Noun half of a generated brewery name.
const std::vector<std::string> MockGenerator::kBreweryNouns = {
    "Brewing Co.", "Brewery", "Bier Haus", "Taproom", "Works",
    "House", "Fermentery", "Ale Co.", "Cellars", "Collective",
    "Project", "Foundry", "Malthouse", "Public House", "Co-op",
    "Lab", "Beer Hall", "Guild"};

// One-line brewery descriptions; selected independently of the name parts.
const std::vector<std::string> MockGenerator::kBreweryDescriptions = {
    "Handcrafted pale ales and seasonal IPAs with local ingredients.",
    "Traditional lagers and experimental sours in small batches.",
    "Award-winning stouts and wildly hoppy blonde ales.",
    "Craft brewery specializing in Belgian-style triples and dark porters.",
    "Modern brewery blending tradition with bold experimental flavors.",
    "Neighborhood-focused taproom pouring crisp pilsners and citrusy pale "
    "ales.",
    "Small-batch brewery known for barrel-aged releases and smoky lagers.",
    "Independent brewhouse pairing farmhouse ales with rotating food pop-ups.",
    "Community brewpub making balanced bitters, saisons, and hazy IPAs.",
    "Experimental nanobrewery exploring local yeast and regional grains.",
    "Family-run brewery producing smooth amber ales and robust porters.",
    "Urban brewery crafting clean lagers and bright, fruit-forward sours.",
    "Riverfront brewhouse featuring oak-matured ales and seasonal blends.",
    "Modern taproom focused on sessionable lagers and classic pub styles.",
    "Brewery rooted in tradition with a lineup of malty reds and crisp lagers.",
    "Creative brewery offering rotating collaborations and limited draft-only "
    "pours.",
    "Locally inspired brewery serving approachable ales with bold hop "
    "character.",
    "Destination taproom known for balanced IPAs and cocoa-rich stouts."};

// Mock usernames for generated users.
const std::vector<std::string> MockGenerator::kUsernames = {
    "hopseeker", "malttrail", "yeastwhisper", "lagerlane",
    "barrelbound", "foamfinder", "taphunter", "graingeist",
    "brewscout", "aleatlas", "caskcompass", "hopsandmaps",
    "mashpilot", "pintnomad", "fermentfriend", "stoutsignal",
    "sessionwander", "kettlekeeper"};

// Mock user bios; selected independently of the username.
const std::vector<std::string> MockGenerator::kBios = {
    "Always chasing balanced IPAs and crisp lagers across local taprooms.",
    "Weekend brewery explorer with a soft spot for dark, roasty stouts.",
    "Documenting tiny brewpubs, fresh pours, and unforgettable beer gardens.",
    "Fan of farmhouse ales, food pairings, and long tasting flights.",
    "Collecting favorite pilsners one city at a time.",
    "Hops-first drinker who still saves room for classic malt-forward styles.",
    "Finding hidden tap lists and sharing the best seasonal releases.",
    "Brewery road-tripper focused on local ingredients and clean fermentation.",
    "Always comparing house lagers and ranking patio pint vibes.",
    "Curious about yeast strains, barrel programs, and cellar experiments.",
    "Believes every neighborhood deserves a great community taproom.",
    "Looking for session beers that taste great from first sip to last.",
    "Belgian ale enthusiast who never skips a new saison.",
    "Hazy IPA critic with deep respect for a perfectly clear pilsner.",
    "Visits breweries for the stories, stays for the flagship pours.",
    "Craft beer fan mapping tasting notes and favorite brew routes.",
    "Always ready to trade recommendations for underrated local breweries.",
    "Keeping a running list of must-try collab releases and tap takeovers."};
|
||||
12
pipeline/src/data_generation/mock/deterministic_hash.cpp
Normal file
12
pipeline/src/data_generation/mock/deterministic_hash.cpp
Normal file
@@ -0,0 +1,12 @@
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/mock_generator.h"
|
||||
|
||||
// Combines the hashes of two strings into one deterministic value using a
// Boost-style hash_combine mix followed by a 13-bit left rotation. The same
// (a, b) pair always yields the same result on a given platform, which is
// what makes the mock data generation reproducible.
std::size_t MockGenerator::DeterministicHash(const std::string& a,
                                             const std::string& b) {
  const std::hash<std::string> hasher;
  std::size_t combined = hasher(a);
  // hash_combine step: golden-ratio constant plus shifted copies of the
  // accumulator decorrelate the two inputs.
  combined ^= hasher(b) + 0x9e3779b97f4a7c15ULL + (combined << 6) +
              (combined >> 2);
  // Rotate left by 13 bits (width-agnostic) to spread entropy across the word.
  const std::size_t word_bits = sizeof(std::size_t) * 8;
  combined = (combined << 13) | (combined >> (word_bits - 13));
  return combined;
}
|
||||
21
pipeline/src/data_generation/mock/generate_brewery.cpp
Normal file
21
pipeline/src/data_generation/mock/generate_brewery.cpp
Normal file
@@ -0,0 +1,21 @@
|
||||
#include <functional>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/mock_generator.h"
|
||||
|
||||
// Deterministically fabricates a brewery for a location. The city/country
// pair (plus optional region context) is hashed into a seed that indexes
// the static name/description pools, so identical inputs always produce
// the identical brewery.
BreweryResult MockGenerator::GenerateBrewery(
    const std::string& city_name, const std::string& country_name,
    const std::string& region_context) {
  // Key is "city" or "city,country" depending on what was supplied.
  std::string location_key = city_name;
  if (!country_name.empty()) {
    location_key += ",";
    location_key += country_name;
  }

  // With region context, mix it into the seed; otherwise hash the key alone.
  std::size_t seed;
  if (region_context.empty()) {
    seed = std::hash<std::string>{}(location_key);
  } else {
    seed = DeterministicHash(location_key, region_context);
  }

  // Divisors 7 and 13 decorrelate the three pool selections from the same seed.
  BreweryResult result;
  const std::string& adjective =
      kBreweryAdjectives[seed % kBreweryAdjectives.size()];
  const std::string& noun = kBreweryNouns[(seed / 7) % kBreweryNouns.size()];
  result.name = adjective + " " + noun;
  result.description =
      kBreweryDescriptions[(seed / 13) % kBreweryDescriptions.size()];
  return result;
}
|
||||
13
pipeline/src/data_generation/mock/generate_user.cpp
Normal file
13
pipeline/src/data_generation/mock/generate_user.cpp
Normal file
@@ -0,0 +1,13 @@
|
||||
#include <functional>
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/mock_generator.h"
|
||||
|
||||
// Deterministically fabricates a user from a locale string: the locale's
// hash indexes the static username and bio pools, so the same locale
// always yields the same user.
UserResult MockGenerator::GenerateUser(const std::string& locale) {
  const std::size_t seed = std::hash<std::string>{}(locale);

  UserResult user;
  user.username = kUsernames[seed % kUsernames.size()];
  // Divide by 11 so the bio pick is decorrelated from the username pick.
  user.bio = kBios[(seed / 11) % kBios.size()];
  return user;
}
|
||||
9
pipeline/src/data_generation/mock/load.cpp
Normal file
9
pipeline/src/data_generation/mock/load.cpp
Normal file
@@ -0,0 +1,9 @@
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "data_generation/mock_generator.h"
|
||||
|
||||
// No-op Load: the mock generator is purely deterministic and needs no model
// file. The parameter is accepted (and ignored) to satisfy the generator
// interface shared with LlamaGenerator.
void MockGenerator::Load(const std::string& /*modelPath*/) {
  spdlog::info("[MockGenerator] No model needed");
}
|
||||
253
pipeline/src/database/database.cpp
Normal file
253
pipeline/src/database/database.cpp
Normal file
@@ -0,0 +1,253 @@
|
||||
#include "database/database.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <stdexcept>
|
||||
|
||||
void SqliteDatabase::InitializeSchema() {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
const char* schema = R"(
|
||||
CREATE TABLE IF NOT EXISTS countries (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
iso2 TEXT,
|
||||
iso3 TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS states (
|
||||
id INTEGER PRIMARY KEY,
|
||||
country_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
iso2 TEXT,
|
||||
FOREIGN KEY(country_id) REFERENCES countries(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS cities (
|
||||
id INTEGER PRIMARY KEY,
|
||||
state_id INTEGER NOT NULL,
|
||||
country_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
latitude REAL,
|
||||
longitude REAL,
|
||||
FOREIGN KEY(state_id) REFERENCES states(id),
|
||||
FOREIGN KEY(country_id) REFERENCES countries(id)
|
||||
);
|
||||
)";
|
||||
|
||||
char* errMsg = nullptr;
|
||||
int rc = sqlite3_exec(db_, schema, nullptr, nullptr, &errMsg);
|
||||
if (rc != SQLITE_OK) {
|
||||
std::string error = errMsg ? std::string(errMsg) : "Unknown error";
|
||||
sqlite3_free(errMsg);
|
||||
throw std::runtime_error("Failed to create schema: " + error);
|
||||
}
|
||||
}
|
||||
|
||||
// Closes the SQLite connection if one was opened. sqlite3_close on the
// handle releases all resources associated with the connection.
SqliteDatabase::~SqliteDatabase() {
  if (db_) {
    sqlite3_close(db_);
  }
}
|
||||
|
||||
void SqliteDatabase::Initialize(const std::string& db_path) {
|
||||
int rc = sqlite3_open(db_path.c_str(), &db_);
|
||||
if (rc) {
|
||||
throw std::runtime_error("Failed to open SQLite database: " + db_path);
|
||||
}
|
||||
spdlog::info("OK: SQLite database opened: {}", db_path);
|
||||
InitializeSchema();
|
||||
}
|
||||
|
||||
void SqliteDatabase::BeginTransaction() {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
char* err = nullptr;
|
||||
if (sqlite3_exec(db_, "BEGIN TRANSACTION", nullptr, nullptr, &err) !=
|
||||
SQLITE_OK) {
|
||||
std::string msg = err ? err : "unknown";
|
||||
sqlite3_free(err);
|
||||
throw std::runtime_error("BeginTransaction failed: " + msg);
|
||||
}
|
||||
}
|
||||
|
||||
void SqliteDatabase::CommitTransaction() {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
char* err = nullptr;
|
||||
if (sqlite3_exec(db_, "COMMIT", nullptr, nullptr, &err) != SQLITE_OK) {
|
||||
std::string msg = err ? err : "unknown";
|
||||
sqlite3_free(err);
|
||||
throw std::runtime_error("CommitTransaction failed: " + msg);
|
||||
}
|
||||
}
|
||||
|
||||
void SqliteDatabase::InsertCountry(int id, const std::string& name,
|
||||
const std::string& iso2,
|
||||
const std::string& iso3) {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
const char* query = R"(
|
||||
INSERT OR IGNORE INTO countries (id, name, iso2, iso3)
|
||||
VALUES (?, ?, ?, ?)
|
||||
)";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(db_, query, -1, &stmt, nullptr);
|
||||
if (rc != SQLITE_OK)
|
||||
throw std::runtime_error("Failed to prepare country insert");
|
||||
|
||||
sqlite3_bind_int(stmt, 1, id);
|
||||
sqlite3_bind_text(stmt, 2, name.c_str(), -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 3, iso2.c_str(), -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 4, iso3.c_str(), -1, SQLITE_STATIC);
|
||||
|
||||
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||
throw std::runtime_error("Failed to insert country");
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
void SqliteDatabase::InsertState(int id, int country_id,
|
||||
const std::string& name,
|
||||
const std::string& iso2) {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
const char* query = R"(
|
||||
INSERT OR IGNORE INTO states (id, country_id, name, iso2)
|
||||
VALUES (?, ?, ?, ?)
|
||||
)";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(db_, query, -1, &stmt, nullptr);
|
||||
if (rc != SQLITE_OK)
|
||||
throw std::runtime_error("Failed to prepare state insert");
|
||||
|
||||
sqlite3_bind_int(stmt, 1, id);
|
||||
sqlite3_bind_int(stmt, 2, country_id);
|
||||
sqlite3_bind_text(stmt, 3, name.c_str(), -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 4, iso2.c_str(), -1, SQLITE_STATIC);
|
||||
|
||||
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||
throw std::runtime_error("Failed to insert state");
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
void SqliteDatabase::InsertCity(int id, int state_id, int country_id,
|
||||
const std::string& name, double latitude,
|
||||
double longitude) {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
const char* query = R"(
|
||||
INSERT OR IGNORE INTO cities (id, state_id, country_id, name, latitude, longitude)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
)";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(db_, query, -1, &stmt, nullptr);
|
||||
if (rc != SQLITE_OK)
|
||||
throw std::runtime_error("Failed to prepare city insert");
|
||||
|
||||
sqlite3_bind_int(stmt, 1, id);
|
||||
sqlite3_bind_int(stmt, 2, state_id);
|
||||
sqlite3_bind_int(stmt, 3, country_id);
|
||||
sqlite3_bind_text(stmt, 4, name.c_str(), -1, SQLITE_STATIC);
|
||||
sqlite3_bind_double(stmt, 5, latitude);
|
||||
sqlite3_bind_double(stmt, 6, longitude);
|
||||
|
||||
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||
throw std::runtime_error("Failed to insert city");
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
std::vector<City> SqliteDatabase::QueryCities() {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
std::vector<City> cities;
|
||||
sqlite3_stmt* stmt = nullptr;
|
||||
|
||||
const char* query = "SELECT id, name, country_id FROM cities ORDER BY name";
|
||||
int rc = sqlite3_prepare_v2(db_, query, -1, &stmt, nullptr);
|
||||
|
||||
if (rc != SQLITE_OK) {
|
||||
throw std::runtime_error("Failed to prepare query");
|
||||
}
|
||||
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int id = sqlite3_column_int(stmt, 0);
|
||||
const char* name =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 1));
|
||||
int country_id = sqlite3_column_int(stmt, 2);
|
||||
cities.push_back({id, name ? std::string(name) : "", country_id});
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
return cities;
|
||||
}
|
||||
|
||||
std::vector<Country> SqliteDatabase::QueryCountries(int limit) {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
std::vector<Country> countries;
|
||||
sqlite3_stmt* stmt = nullptr;
|
||||
|
||||
std::string query =
|
||||
"SELECT id, name, iso2, iso3 FROM countries ORDER BY name";
|
||||
if (limit > 0) {
|
||||
query += " LIMIT " + std::to_string(limit);
|
||||
}
|
||||
|
||||
int rc = sqlite3_prepare_v2(db_, query.c_str(), -1, &stmt, nullptr);
|
||||
|
||||
if (rc != SQLITE_OK) {
|
||||
throw std::runtime_error("Failed to prepare countries query");
|
||||
}
|
||||
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int id = sqlite3_column_int(stmt, 0);
|
||||
const char* name =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 1));
|
||||
const char* iso2 =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 2));
|
||||
const char* iso3 =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 3));
|
||||
countries.push_back({id, name ? std::string(name) : "",
|
||||
iso2 ? std::string(iso2) : "",
|
||||
iso3 ? std::string(iso3) : ""});
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
return countries;
|
||||
}
|
||||
|
||||
std::vector<State> SqliteDatabase::QueryStates(int limit) {
|
||||
std::lock_guard<std::mutex> lock(db_mutex_);
|
||||
|
||||
std::vector<State> states;
|
||||
sqlite3_stmt* stmt = nullptr;
|
||||
|
||||
std::string query =
|
||||
"SELECT id, name, iso2, country_id FROM states ORDER BY name";
|
||||
if (limit > 0) {
|
||||
query += " LIMIT " + std::to_string(limit);
|
||||
}
|
||||
|
||||
int rc = sqlite3_prepare_v2(db_, query.c_str(), -1, &stmt, nullptr);
|
||||
|
||||
if (rc != SQLITE_OK) {
|
||||
throw std::runtime_error("Failed to prepare states query");
|
||||
}
|
||||
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int id = sqlite3_column_int(stmt, 0);
|
||||
const char* name =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 1));
|
||||
const char* iso2 =
|
||||
reinterpret_cast<const char*>(sqlite3_column_text(stmt, 2));
|
||||
int country_id = sqlite3_column_int(stmt, 3);
|
||||
states.push_back({id, name ? std::string(name) : "",
|
||||
iso2 ? std::string(iso2) : "", country_id});
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
return states;
|
||||
}
|
||||
66
pipeline/src/json_handling/json_loader.cpp
Normal file
66
pipeline/src/json_handling/json_loader.cpp
Normal file
@@ -0,0 +1,66 @@
|
||||
#include "json_handling/json_loader.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <chrono>
|
||||
|
||||
#include "json_handling/stream_parser.h"
|
||||
|
||||
// Streams the world-cities JSON file into the database using the SAX parser,
// committing in batches of kBatchSize inserts to bound transaction size.
// Progress and a throughput summary are logged. Exceptions from parsing or
// insertion propagate to the caller after the open transaction is closed.
void JsonLoader::LoadWorldCities(const std::string& json_path,
                                 SqliteDatabase& db) {
  constexpr size_t kBatchSize = 10000;

  auto startTime = std::chrono::high_resolution_clock::now();
  spdlog::info("\nLoading {} (streaming RapidJSON SAX)...", json_path);

  db.BeginTransaction();
  bool transactionOpen = true;

  size_t citiesProcessed = 0;
  try {
    StreamingJsonParser::Parse(
        json_path, db,
        [&](const CityRecord& record) {
          db.InsertCity(record.id, record.state_id, record.country_id,
                        record.name, record.latitude, record.longitude);
          ++citiesProcessed;

          // Commit and reopen every kBatchSize rows so a single huge
          // transaction never accumulates.
          if (citiesProcessed % kBatchSize == 0) {
            db.CommitTransaction();
            db.BeginTransaction();
          }
        },
        [&](size_t current, size_t /*total*/) {
          if (current % kBatchSize == 0 && current > 0) {
            spdlog::info("  [Progress] Parsed {} cities...", current);
          }
        });

    spdlog::info("  OK: Parsed all cities from JSON");

    if (transactionOpen) {
      db.CommitTransaction();
      transactionOpen = false;
    }
  } catch (...) {
    // NOTE(review): on failure this COMMITS the partial batch rather than
    // rolling it back. INSERT OR IGNORE makes a re-run resumable, which may
    // be the intent, but confirm partial data on error is acceptable (no
    // Rollback API exists on SqliteDatabase as written).
    if (transactionOpen) {
      db.CommitTransaction();
    }
    throw;
  }

  auto endTime = std::chrono::high_resolution_clock::now();
  auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(
      endTime - startTime);

  spdlog::info("\n=== World City Data Loading Summary ===\n");
  spdlog::info("Cities inserted:   {}", citiesProcessed);
  spdlog::info("Elapsed time:      {} ms", duration.count());
  // Guard both operands so we never divide by a zero-millisecond duration.
  long long throughput =
      (citiesProcessed > 0 && duration.count() > 0)
          ? (1000LL * static_cast<long long>(citiesProcessed)) /
                static_cast<long long>(duration.count())
          : 0LL;
  spdlog::info("Throughput:        {} cities/sec", throughput);
  spdlog::info("=======================================\n");
}
|
||||
289
pipeline/src/json_handling/stream_parser.cpp
Normal file
289
pipeline/src/json_handling/stream_parser.cpp
Normal file
@@ -0,0 +1,289 @@
|
||||
#include "json_handling/stream_parser.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <boost/json.hpp>
|
||||
#include <boost/json/basic_parser_impl.hpp>
|
||||
#include <cstdio>
|
||||
#include <stdexcept>
|
||||
|
||||
#include "database/database.h"
|
||||
|
||||
class CityRecordHandler {
|
||||
friend class boost::json::basic_parser<CityRecordHandler>;
|
||||
|
||||
public:
|
||||
static constexpr std::size_t max_array_size = static_cast<std::size_t>(-1);
|
||||
static constexpr std::size_t max_object_size = static_cast<std::size_t>(-1);
|
||||
static constexpr std::size_t max_string_size = static_cast<std::size_t>(-1);
|
||||
static constexpr std::size_t max_key_size = static_cast<std::size_t>(-1);
|
||||
|
||||
struct ParseContext {
|
||||
SqliteDatabase* db = nullptr;
|
||||
std::function<void(const CityRecord&)> on_city;
|
||||
std::function<void(size_t, size_t)> on_progress;
|
||||
size_t cities_emitted = 0;
|
||||
size_t total_file_size = 0;
|
||||
int countries_inserted = 0;
|
||||
int states_inserted = 0;
|
||||
};
|
||||
|
||||
explicit CityRecordHandler(ParseContext& ctx) : context(ctx) {}
|
||||
|
||||
private:
|
||||
ParseContext& context;
|
||||
|
||||
int depth = 0;
|
||||
bool in_countries_array = false;
|
||||
bool in_country_object = false;
|
||||
bool in_states_array = false;
|
||||
bool in_state_object = false;
|
||||
bool in_cities_array = false;
|
||||
bool building_city = false;
|
||||
|
||||
int current_country_id = 0;
|
||||
int current_state_id = 0;
|
||||
CityRecord current_city = {};
|
||||
std::string current_key;
|
||||
std::string current_key_val;
|
||||
std::string current_string_val;
|
||||
|
||||
std::string country_info[3];
|
||||
std::string state_info[2];
|
||||
|
||||
// Boost.JSON SAX Hooks
|
||||
bool on_document_begin(boost::system::error_code&) { return true; }
|
||||
bool on_document_end(boost::system::error_code&) { return true; }
|
||||
|
||||
bool on_array_begin(boost::system::error_code&) {
|
||||
depth++;
|
||||
if (depth == 1) {
|
||||
in_countries_array = true;
|
||||
} else if (depth == 3 && current_key == "states") {
|
||||
in_states_array = true;
|
||||
} else if (depth == 5 && current_key == "cities") {
|
||||
in_cities_array = true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_array_end(std::size_t, boost::system::error_code&) {
|
||||
if (depth == 1) {
|
||||
in_countries_array = false;
|
||||
} else if (depth == 3) {
|
||||
in_states_array = false;
|
||||
} else if (depth == 5) {
|
||||
in_cities_array = false;
|
||||
}
|
||||
depth--;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_object_begin(boost::system::error_code&) {
|
||||
depth++;
|
||||
if (depth == 2 && in_countries_array) {
|
||||
in_country_object = true;
|
||||
current_country_id = 0;
|
||||
country_info[0].clear();
|
||||
country_info[1].clear();
|
||||
country_info[2].clear();
|
||||
} else if (depth == 4 && in_states_array) {
|
||||
in_state_object = true;
|
||||
current_state_id = 0;
|
||||
state_info[0].clear();
|
||||
state_info[1].clear();
|
||||
} else if (depth == 6 && in_cities_array) {
|
||||
building_city = true;
|
||||
current_city = {};
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_object_end(std::size_t, boost::system::error_code&) {
|
||||
if (depth == 6 && building_city) {
|
||||
if (current_city.id > 0 && current_state_id > 0 &&
|
||||
current_country_id > 0) {
|
||||
current_city.state_id = current_state_id;
|
||||
current_city.country_id = current_country_id;
|
||||
|
||||
try {
|
||||
context.on_city(current_city);
|
||||
context.cities_emitted++;
|
||||
|
||||
if (context.on_progress && context.cities_emitted % 10000 == 0) {
|
||||
context.on_progress(context.cities_emitted,
|
||||
context.total_file_size);
|
||||
}
|
||||
} catch (const std::exception& e) {
|
||||
spdlog::warn("Record parsing failed: {}", e.what());
|
||||
}
|
||||
}
|
||||
building_city = false;
|
||||
} else if (depth == 4 && in_state_object) {
|
||||
if (current_state_id > 0 && current_country_id > 0) {
|
||||
try {
|
||||
context.db->InsertState(current_state_id, current_country_id,
|
||||
state_info[0], state_info[1]);
|
||||
context.states_inserted++;
|
||||
} catch (const std::exception& e) {
|
||||
spdlog::warn("Record parsing failed: {}", e.what());
|
||||
}
|
||||
}
|
||||
in_state_object = false;
|
||||
} else if (depth == 2 && in_country_object) {
|
||||
if (current_country_id > 0) {
|
||||
try {
|
||||
context.db->InsertCountry(current_country_id, country_info[0],
|
||||
country_info[1], country_info[2]);
|
||||
context.countries_inserted++;
|
||||
} catch (const std::exception& e) {
|
||||
spdlog::warn("Record parsing failed: {}", e.what());
|
||||
}
|
||||
}
|
||||
in_country_object = false;
|
||||
}
|
||||
|
||||
depth--;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_key_part(boost::json::string_view s, std::size_t,
|
||||
boost::system::error_code&) {
|
||||
current_key_val.append(s.data(), s.size());
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_key(boost::json::string_view s, std::size_t,
|
||||
boost::system::error_code&) {
|
||||
current_key_val.append(s.data(), s.size());
|
||||
current_key = current_key_val;
|
||||
current_key_val.clear();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_string_part(boost::json::string_view s, std::size_t,
|
||||
boost::system::error_code&) {
|
||||
current_string_val.append(s.data(), s.size());
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_string(boost::json::string_view s, std::size_t,
|
||||
boost::system::error_code&) {
|
||||
current_string_val.append(s.data(), s.size());
|
||||
|
||||
if (building_city && current_key == "name") {
|
||||
current_city.name = current_string_val;
|
||||
} else if (in_state_object && current_key == "name") {
|
||||
state_info[0] = current_string_val;
|
||||
} else if (in_state_object && current_key == "iso2") {
|
||||
state_info[1] = current_string_val;
|
||||
} else if (in_country_object && current_key == "name") {
|
||||
country_info[0] = current_string_val;
|
||||
} else if (in_country_object && current_key == "iso2") {
|
||||
country_info[1] = current_string_val;
|
||||
} else if (in_country_object && current_key == "iso3") {
|
||||
country_info[2] = current_string_val;
|
||||
}
|
||||
|
||||
current_string_val.clear();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_number_part(boost::json::string_view, boost::system::error_code&) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_int64(int64_t i, boost::json::string_view,
|
||||
boost::system::error_code&) {
|
||||
if (building_city && current_key == "id") {
|
||||
current_city.id = static_cast<int>(i);
|
||||
} else if (in_state_object && current_key == "id") {
|
||||
current_state_id = static_cast<int>(i);
|
||||
} else if (in_country_object && current_key == "id") {
|
||||
current_country_id = static_cast<int>(i);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool on_uint64(uint64_t u, boost::json::string_view,
|
||||
boost::system::error_code& ec) {
|
||||
return on_int64(static_cast<int64_t>(u), "", ec);
|
||||
}
|
||||
|
||||
bool on_double(double d, boost::json::string_view,
|
||||
boost::system::error_code&) {
|
||||
if (building_city) {
|
||||
if (current_key == "latitude") {
|
||||
current_city.latitude = d;
|
||||
} else if (current_key == "longitude") {
|
||||
current_city.longitude = d;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// The remaining SAX events carry no data this handler needs;
// accept them so parsing continues.
bool on_bool(bool, boost::system::error_code&) { return true; }
bool on_null(boost::system::error_code&) { return true; }
bool on_comment_part(boost::json::string_view, boost::system::error_code&) {
  return true;
}
bool on_comment(boost::json::string_view, boost::system::error_code&) {
  return true;
}
|
||||
};
|
||||
|
||||
void StreamingJsonParser::Parse(
|
||||
const std::string& file_path, SqliteDatabase& db,
|
||||
std::function<void(const CityRecord&)> on_city,
|
||||
std::function<void(size_t, size_t)> on_progress) {
|
||||
spdlog::info(" Streaming parse of {} (Boost.JSON)...", file_path);
|
||||
|
||||
FILE* file = std::fopen(file_path.c_str(), "rb");
|
||||
if (!file) {
|
||||
throw std::runtime_error("Failed to open JSON file: " + file_path);
|
||||
}
|
||||
|
||||
size_t total_size = 0;
|
||||
if (std::fseek(file, 0, SEEK_END) == 0) {
|
||||
long file_size = std::ftell(file);
|
||||
if (file_size > 0) {
|
||||
total_size = static_cast<size_t>(file_size);
|
||||
}
|
||||
std::rewind(file);
|
||||
}
|
||||
|
||||
CityRecordHandler::ParseContext ctx{&db, on_city, on_progress, 0, total_size,
|
||||
0, 0};
|
||||
boost::json::basic_parser<CityRecordHandler> parser(
|
||||
boost::json::parse_options{}, ctx);
|
||||
|
||||
char buf[65536];
|
||||
size_t bytes_read;
|
||||
boost::system::error_code ec;
|
||||
|
||||
while ((bytes_read = std::fread(buf, 1, sizeof(buf), file)) > 0) {
|
||||
char const* p = buf;
|
||||
std::size_t remain = bytes_read;
|
||||
|
||||
while (remain > 0) {
|
||||
std::size_t consumed = parser.write_some(true, p, remain, ec);
|
||||
if (ec) {
|
||||
std::fclose(file);
|
||||
throw std::runtime_error("JSON parse error: " + ec.message());
|
||||
}
|
||||
p += consumed;
|
||||
remain -= consumed;
|
||||
}
|
||||
}
|
||||
|
||||
parser.write_some(false, nullptr, 0, ec); // Signal EOF
|
||||
std::fclose(file);
|
||||
|
||||
if (ec) {
|
||||
throw std::runtime_error("JSON parse error at EOF: " + ec.message());
|
||||
}
|
||||
|
||||
spdlog::info(" OK: Parsed {} countries, {} states, {} cities",
|
||||
ctx.countries_inserted, ctx.states_inserted,
|
||||
ctx.cities_emitted);
|
||||
}
|
||||
118
pipeline/src/main.cpp
Normal file
118
pipeline/src/main.cpp
Normal file
@@ -0,0 +1,118 @@
|
||||
#include <iostream>
|
||||
#include <memory>
|
||||
|
||||
#include <boost/program_options.hpp>
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include "biergarten_data_generator.h"
|
||||
#include "web_client/curl_web_client.h"
|
||||
#include "database/database.h"
|
||||
|
||||
namespace po = boost::program_options;
|
||||
|
||||
/**
|
||||
* @brief Parse command-line arguments into ApplicationOptions.
|
||||
*
|
||||
* @param argc Command-line argument count.
|
||||
* @param argv Command-line arguments.
|
||||
* @param options Output ApplicationOptions struct.
|
||||
* @return true if parsing succeeded and should proceed, false otherwise.
|
||||
*/
|
||||
bool ParseArguments(int argc, char **argv, ApplicationOptions &options) {
|
||||
// If no arguments provided, display usage and exit
|
||||
if (argc == 1) {
|
||||
std::cout << "Biergarten Pipeline - Geographic Data Pipeline with Brewery Generation\n\n";
|
||||
std::cout << "Usage: biergarten-pipeline [options]\n\n";
|
||||
std::cout << "Options:\n";
|
||||
std::cout << " --mocked Use mocked generator for brewery/user data\n";
|
||||
std::cout << " --model, -m PATH Path to LLM model file (gguf) for generation\n";
|
||||
std::cout << " --cache-dir, -c DIR Directory for cached JSON (default: /tmp)\n";
|
||||
std::cout << " --temperature TEMP LLM sampling temperature 0.0-1.0 (default: 0.8)\n";
|
||||
std::cout << " --top-p VALUE Nucleus sampling parameter 0.0-1.0 (default: 0.92)\n";
|
||||
std::cout << " --seed SEED Random seed: -1 for random (default: -1)\n";
|
||||
std::cout << " --help, -h Show this help message\n\n";
|
||||
std::cout << "Note: --mocked and --model are mutually exclusive. Exactly one must be provided.\n";
|
||||
std::cout << "Data source is always pinned to commit c5eb7772 (stable 2026-03-28).\n";
|
||||
return false;
|
||||
}
|
||||
|
||||
po::options_description desc("Pipeline Options");
|
||||
desc.add_options()("help,h", "Produce help message")(
|
||||
"mocked", po::bool_switch(),
|
||||
"Use mocked generator for brewery/user data")(
|
||||
"model,m", po::value<std::string>()->default_value(""),
|
||||
"Path to LLM model (gguf)")(
|
||||
"cache-dir,c", po::value<std::string>()->default_value("/tmp"),
|
||||
"Directory for cached JSON")(
|
||||
"temperature", po::value<float>()->default_value(0.8f),
|
||||
"Sampling temperature (higher = more random)")(
|
||||
"top-p", po::value<float>()->default_value(0.92f),
|
||||
"Nucleus sampling top-p in (0,1] (higher = more random)")(
|
||||
"seed", po::value<int>()->default_value(-1),
|
||||
"Sampler seed: -1 for random, otherwise non-negative integer");
|
||||
|
||||
po::variables_map vm;
|
||||
po::store(po::parse_command_line(argc, argv, desc), vm);
|
||||
po::notify(vm);
|
||||
|
||||
if (vm.count("help")) {
|
||||
std::cout << desc << "\n";
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for mutually exclusive --mocked and --model flags
|
||||
bool use_mocked = vm["mocked"].as<bool>();
|
||||
std::string model_path = vm["model"].as<std::string>();
|
||||
|
||||
if (use_mocked && !model_path.empty()) {
|
||||
spdlog::error("ERROR: --mocked and --model are mutually exclusive");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!use_mocked && model_path.empty()) {
|
||||
spdlog::error("ERROR: Either --mocked or --model must be specified");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Warn if sampling parameters are provided with --mocked
|
||||
if (use_mocked) {
|
||||
bool hasTemperature = vm["temperature"].defaulted() == false;
|
||||
bool hasTopP = vm["top-p"].defaulted() == false;
|
||||
bool hasSeed = vm["seed"].defaulted() == false;
|
||||
|
||||
if (hasTemperature || hasTopP || hasSeed) {
|
||||
spdlog::warn("WARNING: Sampling parameters (--temperature, --top-p, --seed) are ignored when using --mocked");
|
||||
}
|
||||
}
|
||||
|
||||
options.use_mocked = use_mocked;
|
||||
options.model_path = model_path;
|
||||
options.cache_dir = vm["cache-dir"].as<std::string>();
|
||||
options.temperature = vm["temperature"].as<float>();
|
||||
options.top_p = vm["top-p"].as<float>();
|
||||
options.seed = vm["seed"].as<int>();
|
||||
// commit is always pinned to c5eb7772
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
try {
|
||||
const CurlGlobalState curl_state;
|
||||
|
||||
ApplicationOptions options;
|
||||
if (!ParseArguments(argc, argv, options)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
auto webClient = std::make_shared<CURLWebClient>();
|
||||
SqliteDatabase database;
|
||||
|
||||
BiergartenDataGenerator generator(options, webClient, database);
|
||||
return generator.Run();
|
||||
|
||||
} catch (const std::exception &e) {
|
||||
spdlog::error("ERROR: Application failed: {}", e.what());
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
141
pipeline/src/web_client/curl_web_client.cpp
Normal file
141
pipeline/src/web_client/curl_web_client.cpp
Normal file
@@ -0,0 +1,141 @@
|
||||
#include "web_client/curl_web_client.h"
|
||||
|
||||
#include <curl/curl.h>
|
||||
|
||||
#include <cstdio>
|
||||
#include <fstream>
|
||||
#include <memory>
|
||||
#include <sstream>
|
||||
#include <stdexcept>
|
||||
|
||||
// RAII owner of libcurl's process-wide state. Construct one instance and
// keep it alive for the lifetime of all CURLWebClient usage; throws if the
// global initialization fails.
CurlGlobalState::CurlGlobalState() {
  if (curl_global_init(CURL_GLOBAL_DEFAULT) != CURLE_OK) {
    throw std::runtime_error(
        "[CURLWebClient] Failed to initialize libcurl globally");
  }
}

// Releases the global libcurl resources acquired in the constructor.
CurlGlobalState::~CurlGlobalState() { curl_global_cleanup(); }
|
||||
|
||||
namespace {
|
||||
// curl write callback that appends response data into a std::string
|
||||
size_t WriteCallbackString(void* contents, size_t size, size_t nmemb,
|
||||
void* userp) {
|
||||
size_t realsize = size * nmemb;
|
||||
auto* s = static_cast<std::string*>(userp);
|
||||
s->append(static_cast<char*>(contents), realsize);
|
||||
return realsize;
|
||||
}
|
||||
|
||||
// curl write callback that writes to a file stream
|
||||
size_t WriteCallbackFile(void* contents, size_t size, size_t nmemb,
|
||||
void* userp) {
|
||||
size_t realsize = size * nmemb;
|
||||
auto* outFile = static_cast<std::ofstream*>(userp);
|
||||
outFile->write(static_cast<char*>(contents), realsize);
|
||||
return realsize;
|
||||
}
|
||||
|
||||
// RAII wrapper for CURL handle using unique_ptr
|
||||
using CurlHandle = std::unique_ptr<CURL, decltype(&curl_easy_cleanup)>;
|
||||
|
||||
CurlHandle create_handle() {
|
||||
CURL* handle = curl_easy_init();
|
||||
if (!handle) {
|
||||
throw std::runtime_error(
|
||||
"[CURLWebClient] Failed to initialize libcurl handle");
|
||||
}
|
||||
return CurlHandle(handle, &curl_easy_cleanup);
|
||||
}
|
||||
|
||||
void set_common_get_options(CURL* curl, const std::string& url,
|
||||
long connect_timeout, long total_timeout) {
|
||||
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_USERAGENT, "biergarten-pipeline/0.1.0");
|
||||
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
|
||||
curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 5L);
|
||||
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, connect_timeout);
|
||||
curl_easy_setopt(curl, CURLOPT_TIMEOUT, total_timeout);
|
||||
curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "gzip");
|
||||
}
|
||||
} // namespace
|
||||
|
||||
// No per-instance state: each request creates its own easy handle via
// create_handle(), and global init/cleanup lives in CurlGlobalState.
CURLWebClient::CURLWebClient() {}

CURLWebClient::~CURLWebClient() {}
|
||||
|
||||
void CURLWebClient::DownloadToFile(const std::string& url,
|
||||
const std::string& file_path) {
|
||||
auto curl = create_handle();
|
||||
|
||||
std::ofstream outFile(file_path, std::ios::binary);
|
||||
if (!outFile.is_open()) {
|
||||
throw std::runtime_error(
|
||||
"[CURLWebClient] Cannot open file for writing: " + file_path);
|
||||
}
|
||||
|
||||
set_common_get_options(curl.get(), url, 30L, 300L);
|
||||
curl_easy_setopt(curl.get(), CURLOPT_WRITEFUNCTION, WriteCallbackFile);
|
||||
curl_easy_setopt(curl.get(), CURLOPT_WRITEDATA,
|
||||
static_cast<void*>(&outFile));
|
||||
|
||||
CURLcode res = curl_easy_perform(curl.get());
|
||||
outFile.close();
|
||||
|
||||
if (res != CURLE_OK) {
|
||||
std::remove(file_path.c_str());
|
||||
std::string error = std::string("[CURLWebClient] Download failed: ") +
|
||||
curl_easy_strerror(res);
|
||||
throw std::runtime_error(error);
|
||||
}
|
||||
|
||||
long httpCode = 0;
|
||||
curl_easy_getinfo(curl.get(), CURLINFO_RESPONSE_CODE, &httpCode);
|
||||
|
||||
if (httpCode != 200) {
|
||||
std::remove(file_path.c_str());
|
||||
std::stringstream ss;
|
||||
ss << "[CURLWebClient] HTTP error " << httpCode << " for URL " << url;
|
||||
throw std::runtime_error(ss.str());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CURLWebClient::Get(const std::string& url) {
|
||||
auto curl = create_handle();
|
||||
|
||||
std::string response_string;
|
||||
set_common_get_options(curl.get(), url, 10L, 20L);
|
||||
curl_easy_setopt(curl.get(), CURLOPT_WRITEFUNCTION, WriteCallbackString);
|
||||
curl_easy_setopt(curl.get(), CURLOPT_WRITEDATA, &response_string);
|
||||
|
||||
CURLcode res = curl_easy_perform(curl.get());
|
||||
|
||||
if (res != CURLE_OK) {
|
||||
std::string error =
|
||||
std::string("[CURLWebClient] GET failed: ") + curl_easy_strerror(res);
|
||||
throw std::runtime_error(error);
|
||||
}
|
||||
|
||||
long httpCode = 0;
|
||||
curl_easy_getinfo(curl.get(), CURLINFO_RESPONSE_CODE, &httpCode);
|
||||
|
||||
if (httpCode != 200) {
|
||||
std::stringstream ss;
|
||||
ss << "[CURLWebClient] HTTP error " << httpCode << " for URL " << url;
|
||||
throw std::runtime_error(ss.str());
|
||||
}
|
||||
|
||||
return response_string;
|
||||
}
|
||||
|
||||
std::string CURLWebClient::UrlEncode(const std::string& value) {
|
||||
// A NULL handle is fine for UTF-8 encoding according to libcurl docs.
|
||||
char* output = curl_easy_escape(nullptr, value.c_str(), 0);
|
||||
|
||||
if (output) {
|
||||
std::string result(output);
|
||||
curl_free(output);
|
||||
return result;
|
||||
}
|
||||
throw std::runtime_error("[CURLWebClient] curl_easy_escape failed");
|
||||
}
|
||||
78
pipeline/src/wikipedia/wikipedia_service.cpp
Normal file
78
pipeline/src/wikipedia/wikipedia_service.cpp
Normal file
@@ -0,0 +1,78 @@
|
||||
#include "wikipedia/wikipedia_service.h"
|
||||
|
||||
#include <spdlog/spdlog.h>
|
||||
|
||||
#include <boost/json.hpp>
|
||||
|
||||
// Takes shared ownership of the web client. A null client is tolerated:
// GetSummary() then returns (and caches) empty summaries.
WikipediaService::WikipediaService(std::shared_ptr<WebClient> client)
    : client_(std::move(client)) {}
|
||||
|
||||
std::string WikipediaService::FetchExtract(std::string_view query) {
|
||||
const std::string encoded = client_->UrlEncode(std::string(query));
|
||||
const std::string url =
|
||||
"https://en.wikipedia.org/w/api.php?action=query&titles=" + encoded +
|
||||
"&prop=extracts&explaintext=true&format=json";
|
||||
|
||||
const std::string body = client_->Get(url);
|
||||
|
||||
boost::system::error_code ec;
|
||||
boost::json::value doc = boost::json::parse(body, ec);
|
||||
|
||||
if (!ec && doc.is_object()) {
|
||||
auto& pages = doc.at("query").at("pages").get_object();
|
||||
if (!pages.empty()) {
|
||||
auto& page = pages.begin()->value().get_object();
|
||||
if (page.contains("extract") && page.at("extract").is_string()) {
|
||||
std::string extract(page.at("extract").as_string().c_str());
|
||||
spdlog::debug("WikipediaService fetched {} chars for '{}'",
|
||||
extract.size(), query);
|
||||
return extract;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string WikipediaService::GetSummary(std::string_view city,
|
||||
std::string_view country) {
|
||||
const std::string key = std::string(city) + "|" + std::string(country);
|
||||
const auto cacheIt = cache_.find(key);
|
||||
if (cacheIt != cache_.end()) {
|
||||
return cacheIt->second;
|
||||
}
|
||||
|
||||
std::string result;
|
||||
|
||||
if (!client_) {
|
||||
cache_.emplace(key, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string regionQuery(city);
|
||||
if (!country.empty()) {
|
||||
regionQuery += ", ";
|
||||
regionQuery += country;
|
||||
}
|
||||
|
||||
const std::string beerQuery = "beer in " + std::string(city);
|
||||
|
||||
try {
|
||||
const std::string regionExtract = FetchExtract(regionQuery);
|
||||
const std::string beerExtract = FetchExtract(beerQuery);
|
||||
|
||||
if (!regionExtract.empty()) {
|
||||
result += regionExtract;
|
||||
}
|
||||
if (!beerExtract.empty()) {
|
||||
if (!result.empty()) result += "\n\n";
|
||||
result += beerExtract;
|
||||
}
|
||||
} catch (const std::runtime_error& e) {
|
||||
spdlog::debug("WikipediaService lookup failed for '{}': {}", regionQuery,
|
||||
e.what());
|
||||
}
|
||||
|
||||
cache_.emplace(key, result);
|
||||
return result;
|
||||
}
|
||||
25
src/Core/.dockerignore
Normal file
25
src/Core/.dockerignore
Normal file
@@ -0,0 +1,25 @@
|
||||
**/.dockerignore
|
||||
**/.env
|
||||
**/.git
|
||||
**/.gitignore
|
||||
**/.project
|
||||
**/.settings
|
||||
**/.toolstarget
|
||||
**/.vs
|
||||
**/.vscode
|
||||
**/.idea
|
||||
**/*.*proj.user
|
||||
**/*.dbmdl
|
||||
**/*.jfm
|
||||
**/azds.yaml
|
||||
**/bin
|
||||
**/charts
|
||||
**/docker-compose*
|
||||
**/Dockerfile*
|
||||
**/node_modules
|
||||
**/npm-debug.log
|
||||
**/obj
|
||||
**/secrets.dev.yaml
|
||||
**/values.dev.yaml
|
||||
LICENSE
|
||||
README.md
|
||||
42
src/Core/API/API.Core/API.Core.csproj
Normal file
42
src/Core/API/API.Core/API.Core.csproj
Normal file
@@ -0,0 +1,42 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<RootNamespace>API.Core</RootNamespace>
|
||||
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference
|
||||
Include="Microsoft.AspNetCore.OpenApi"
|
||||
Version="9.0.11"
|
||||
/>
|
||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2" />
|
||||
<PackageReference
|
||||
Include="FluentValidation.AspNetCore"
|
||||
Version="11.3.0"
|
||||
/>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Folder Include="Infrastructure\" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\Domain\Domain.Entities\Domain.Entities.csproj" />
|
||||
<ProjectReference Include="..\..\Domain\Domain.Exceptions\Domain.Exceptions.csproj" />
|
||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Email\Infrastructure.Email.csproj" />
|
||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Email.Templates\Infrastructure.Email.Templates.csproj" />
|
||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Repository\Infrastructure.Repository.csproj" />
|
||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Jwt\Infrastructure.Jwt.csproj" />
|
||||
<ProjectReference Include="..\..\Service\Service.Auth\Service.Auth.csproj" />
|
||||
<ProjectReference Include="..\..\Service\Service.UserManagement\Service.UserManagement.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="..\..\.dockerignore">
|
||||
<Link>.dockerignore</Link>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,86 @@
|
||||
using System.Security.Claims;
|
||||
using System.Text.Encodings.Web;
|
||||
using System.Text.Json;
|
||||
using API.Core.Contracts.Common;
|
||||
using Infrastructure.Jwt;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace API.Core.Authentication;
|
||||
|
||||
/// <summary>
/// ASP.NET Core authentication handler that validates a bearer JWT from the
/// <c>Authorization</c> header using <see cref="ITokenInfrastructure"/>.
/// The signing secret comes from the <c>ACCESS_TOKEN_SECRET</c> environment
/// variable, falling back to the <c>Jwt:SecretKey</c> configuration value.
/// </summary>
public class JwtAuthenticationHandler(
    IOptionsMonitor<JwtAuthenticationOptions> options,
    ILoggerFactory logger,
    UrlEncoder encoder,
    ITokenInfrastructure tokenInfrastructure,
    IConfiguration configuration
) : AuthenticationHandler<JwtAuthenticationOptions>(options, logger, encoder)
{
    /// <summary>
    /// Validates the bearer token on the current request and produces a
    /// ticket on success; any failure (missing secret, missing/malformed
    /// header, invalid token) yields <see cref="AuthenticateResult.Fail(string)"/>.
    /// </summary>
    protected override async Task<AuthenticateResult> HandleAuthenticateAsync()
    {
        // Use the same access-token secret source as TokenService to avoid mismatched validation.
        var secret = Environment.GetEnvironmentVariable("ACCESS_TOKEN_SECRET");
        if (string.IsNullOrWhiteSpace(secret))
        {
            secret = configuration["Jwt:SecretKey"];
        }

        if (string.IsNullOrWhiteSpace(secret))
        {
            return AuthenticateResult.Fail("JWT secret is not configured");
        }

        // Check if Authorization header exists
        if (
            !Request.Headers.TryGetValue(
                "Authorization",
                out var authHeaderValue
            )
        )
        {
            return AuthenticateResult.Fail("Authorization header is missing");
        }

        // Only the "Bearer <token>" scheme is accepted (case-insensitive).
        var authHeader = authHeaderValue.ToString();
        if (
            !authHeader.StartsWith(
                "Bearer ",
                StringComparison.OrdinalIgnoreCase
            )
        )
        {
            return AuthenticateResult.Fail(
                "Invalid authorization header format"
            );
        }

        var token = authHeader.Substring("Bearer ".Length).Trim();

        try
        {
            // Delegate cryptographic validation; the resulting principal
            // carries the claims embedded in the token.
            var claimsPrincipal = await tokenInfrastructure.ValidateJwtAsync(
                token,
                secret
            );
            var ticket = new AuthenticationTicket(claimsPrincipal, Scheme.Name);
            return AuthenticateResult.Success(ticket);
        }
        catch (Exception ex)
        {
            // Any validation exception is mapped to an auth failure so the
            // challenge below returns a uniform 401 response.
            return AuthenticateResult.Fail(
                $"Token validation failed: {ex.Message}"
            );
        }
    }

    /// <summary>
    /// Writes a JSON 401 body instead of the default empty challenge.
    /// The <paramref name="properties"/> argument is intentionally unused.
    /// </summary>
    protected override async Task HandleChallengeAsync(AuthenticationProperties properties)
    {
        Response.ContentType = "application/json";
        Response.StatusCode = 401;

        var response = new ResponseBody { Message = "Unauthorized: Invalid or missing authentication token" };
        await Response.WriteAsJsonAsync(response);
    }
}
|
||||
|
||||
/// <summary>Scheme options for the JWT handler; no settings beyond the base class.</summary>
public class JwtAuthenticationOptions : AuthenticationSchemeOptions { }
|
||||
21
src/Core/API/API.Core/Contracts/Auth/AuthDTO.cs
Normal file
21
src/Core/API/API.Core/Contracts/Auth/AuthDTO.cs
Normal file
@@ -0,0 +1,21 @@
|
||||
using Domain.Entities;
|
||||
using Org.BouncyCastle.Asn1.Cms;
|
||||
|
||||
namespace API.Core.Contracts.Auth;
|
||||
|
||||
/// <summary>Response payload returned after a successful login.</summary>
public record LoginPayload(
    Guid UserAccountId,
    string Username,
    string RefreshToken,
    string AccessToken
);

/// <summary>
/// Response payload returned after registration; also reports whether the
/// confirmation email was dispatched.
/// </summary>
public record RegistrationPayload(
    Guid UserAccountId,
    string Username,
    string RefreshToken,
    string AccessToken,
    bool ConfirmationEmailSent
);

/// <summary>Response payload returned after an account is confirmed.</summary>
public record ConfirmationPayload(Guid UserAccountId, DateTime ConfirmedDate);
|
||||
20
src/Core/API/API.Core/Contracts/Auth/Login.cs
Normal file
20
src/Core/API/API.Core/Contracts/Auth/Login.cs
Normal file
@@ -0,0 +1,20 @@
|
||||
using API.Core.Contracts.Common;
|
||||
using FluentValidation;
|
||||
|
||||
namespace API.Core.Contracts.Auth;
|
||||
|
||||
/// <summary>Credentials submitted to the login endpoint.</summary>
public record LoginRequest
{
    public string Username { get; init; } = default!;
    public string Password { get; init; } = default!;
}

/// <summary>Requires both username and password to be non-empty.</summary>
public class LoginRequestValidator : AbstractValidator<LoginRequest>
{
    public LoginRequestValidator()
    {
        RuleFor(x => x.Username).NotEmpty().WithMessage("Username is required");

        RuleFor(x => x.Password).NotEmpty().WithMessage("Password is required");
    }
}
|
||||
19
src/Core/API/API.Core/Contracts/Auth/RefreshToken.cs
Normal file
19
src/Core/API/API.Core/Contracts/Auth/RefreshToken.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
using FluentValidation;
|
||||
|
||||
namespace API.Core.Contracts.Auth;
|
||||
|
||||
/// <summary>Body of a token-refresh request.</summary>
public record RefreshTokenRequest
{
    public string RefreshToken { get; init; } = default!;
}

/// <summary>Requires the refresh token to be non-empty.</summary>
public class RefreshTokenRequestValidator
    : AbstractValidator<RefreshTokenRequest>
{
    public RefreshTokenRequestValidator()
    {
        RuleFor(x => x.RefreshToken)
            .NotEmpty()
            .WithMessage("Refresh token is required");
    }
}
|
||||
71
src/Core/API/API.Core/Contracts/Auth/Register.cs
Normal file
71
src/Core/API/API.Core/Contracts/Auth/Register.cs
Normal file
@@ -0,0 +1,71 @@
|
||||
using API.Core.Contracts.Common;
|
||||
using FluentValidation;
|
||||
|
||||
namespace API.Core.Contracts.Auth;
|
||||
|
||||
/// <summary>Payload for creating a new user account.</summary>
public record RegisterRequest(
    string Username,
    string FirstName,
    string LastName,
    string Email,
    DateTime DateOfBirth,
    string Password
);

/// <summary>
/// Validates <see cref="RegisterRequest"/>: username shape and length,
/// name/email lengths, a minimum-age check, and password complexity.
/// </summary>
public class RegisterRequestValidator : AbstractValidator<RegisterRequest>
{
    public RegisterRequestValidator()
    {
        RuleFor(x => x.Username)
            .NotEmpty()
            .WithMessage("Username is required")
            .Length(3, 64)
            .WithMessage("Username must be between 3 and 64 characters")
            .Matches("^[a-zA-Z0-9._-]+$")
            .WithMessage(
                "Username can only contain letters, numbers, dots, underscores, and hyphens"
            );

        RuleFor(x => x.FirstName)
            .NotEmpty()
            .WithMessage("First name is required")
            .MaximumLength(128)
            .WithMessage("First name cannot exceed 128 characters");

        RuleFor(x => x.LastName)
            .NotEmpty()
            .WithMessage("Last name is required")
            .MaximumLength(128)
            .WithMessage("Last name cannot exceed 128 characters");

        RuleFor(x => x.Email)
            .NotEmpty()
            .WithMessage("Email is required")
            .EmailAddress()
            .WithMessage("Invalid email format")
            .MaximumLength(128)
            .WithMessage("Email cannot exceed 128 characters");

        // NOTE(review): LessThan is strict, so someone whose 19th birthday
        // is exactly today is rejected, and DateTime.Today is server-local
        // time — confirm both are intended.
        RuleFor(x => x.DateOfBirth)
            .NotEmpty()
            .WithMessage("Date of birth is required")
            .LessThan(DateTime.Today.AddYears(-19))
            .WithMessage("You must be at least 19 years old to register");

        RuleFor(x => x.Password)
            .NotEmpty()
            .WithMessage("Password is required")
            .MinimumLength(8)
            .WithMessage("Password must be at least 8 characters")
            .Matches("[A-Z]")
            .WithMessage("Password must contain at least one uppercase letter")
            .Matches("[a-z]")
            .WithMessage("Password must contain at least one lowercase letter")
            .Matches("[0-9]")
            .WithMessage("Password must contain at least one number")
            .Matches("[^a-zA-Z0-9]")
            .WithMessage(
                "Password must contain at least one special character"
            );
    }
}
|
||||
12
src/Core/API/API.Core/Contracts/Common/ResponseBody.cs
Normal file
12
src/Core/API/API.Core/Contracts/Common/ResponseBody.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
namespace API.Core.Contracts.Common;
|
||||
|
||||
/// <summary>Standard API envelope: a human-readable message plus a typed payload.</summary>
public record ResponseBody<T>
{
    public required string Message { get; init; }
    public required T Payload { get; init; }
}

/// <summary>Message-only API envelope for responses without a payload.</summary>
public record ResponseBody
{
    public required string Message { get; init; }
}
|
||||
111
src/Core/API/API.Core/Controllers/AuthController.cs
Normal file
111
src/Core/API/API.Core/Controllers/AuthController.cs
Normal file
@@ -0,0 +1,111 @@
|
||||
using API.Core.Contracts.Auth;
|
||||
using API.Core.Contracts.Common;
|
||||
using Domain.Entities;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Service.Auth;
|
||||
|
||||
namespace API.Core.Controllers
{
    /// <summary>
    /// Authentication endpoints: registration, login, email confirmation and
    /// refresh-token exchange. The class-level JWT policy applies to
    /// <see cref="Confirm"/>; register/login/refresh opt out via
    /// <see cref="AllowAnonymousAttribute"/>.
    /// </summary>
    [ApiController]
    [Route("api/[controller]")]
    [Authorize(AuthenticationSchemes = "JWT")]
    public class AuthController(
        IRegisterService registerService,
        ILoginService loginService,
        IConfirmationService confirmationService,
        ITokenService tokenService
    ) : ControllerBase
    {
        /// <summary>Create a new account and return its first token pair.</summary>
        [AllowAnonymous]
        [HttpPost("register")]
        public async Task<ActionResult<UserAccount>> Register(
            [FromBody] RegisterRequest req
        )
        {
            // UserAccountId is assigned by the service; Guid.Empty is a placeholder.
            var rtn = await registerService.RegisterAsync(
                new UserAccount
                {
                    UserAccountId = Guid.Empty,
                    Username = req.Username,
                    FirstName = req.FirstName,
                    LastName = req.LastName,
                    Email = req.Email,
                    DateOfBirth = req.DateOfBirth,
                },
                req.Password
            );

            var response = new ResponseBody<RegistrationPayload>
            {
                Message = "User registered successfully.",
                Payload = new RegistrationPayload(
                    rtn.UserAccount.UserAccountId,
                    rtn.UserAccount.Username,
                    rtn.RefreshToken,
                    rtn.AccessToken,
                    rtn.EmailSent
                ),
            };
            // NOTE(review): Location header is "/" — confirm whether a
            // resource URL (e.g. /api/user/{id}) was intended.
            return Created("/", response);
        }

        /// <summary>Exchange credentials for an access/refresh token pair.</summary>
        [AllowAnonymous]
        [HttpPost("login")]
        public async Task<ActionResult> Login([FromBody] LoginRequest req)
        {
            var rtn = await loginService.LoginAsync(req.Username, req.Password);

            return Ok(
                new ResponseBody<LoginPayload>
                {
                    Message = "Logged in successfully.",
                    Payload = new LoginPayload(
                        rtn.UserAccount.UserAccountId,
                        rtn.UserAccount.Username,
                        rtn.RefreshToken,
                        rtn.AccessToken
                    ),
                }
            );
        }

        /// <summary>
        /// Confirm the authenticated user's account via an emailed token
        /// (requires the class-level JWT scheme).
        /// </summary>
        [HttpPost("confirm")]
        public async Task<ActionResult> Confirm([FromQuery] string token)
        {
            var rtn = await confirmationService.ConfirmUserAsync(token);
            return Ok(
                new ResponseBody<ConfirmationPayload>
                {
                    Message = "User with ID " + rtn.UserId + " is confirmed.",
                    Payload = new ConfirmationPayload(
                        rtn.UserId,
                        rtn.ConfirmedAt
                    ),
                }
            );
        }

        /// <summary>Exchange a refresh token for a fresh token pair.</summary>
        [AllowAnonymous]
        [HttpPost("refresh")]
        public async Task<ActionResult> Refresh(
            [FromBody] RefreshTokenRequest req
        )
        {
            var rtn = await tokenService.RefreshTokenAsync(req.RefreshToken);

            return Ok(
                new ResponseBody<LoginPayload>
                {
                    Message = "Token refreshed successfully.",
                    Payload = new LoginPayload(
                        rtn.UserAccount.UserAccountId,
                        rtn.UserAccount.Username,
                        rtn.RefreshToken,
                        rtn.AccessToken
                    ),
                }
            );
        }
    }
}
|
||||
16
src/Core/API/API.Core/Controllers/NotFoundController.cs
Normal file
16
src/Core/API/API.Core/Controllers/NotFoundController.cs
Normal file
@@ -0,0 +1,16 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
|
||||
namespace API.Core.Controllers
{
    /// <summary>
    /// Fallback endpoint the app routes unmatched requests to; hidden from
    /// the API explorer so it never appears in generated documentation.
    /// </summary>
    [ApiController]
    [ApiExplorerSettings(IgnoreApi = true)]
    [Route("error")] // required
    public class NotFoundController : ControllerBase
    {
        /// <summary>Return a uniform JSON 404 body.</summary>
        [HttpGet("404")] //required
        public IActionResult Handle404()
        {
            return NotFound(new { message = "Route not found." });
        }
    }
}
|
||||
27
src/Core/API/API.Core/Controllers/ProtectedController.cs
Normal file
27
src/Core/API/API.Core/Controllers/ProtectedController.cs
Normal file
@@ -0,0 +1,27 @@
|
||||
using System.Security.Claims;
|
||||
using API.Core.Contracts.Common;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
|
||||
namespace API.Core.Controllers;
|
||||
|
||||
/// <summary>
/// Smoke-test endpoint for the custom "JWT" scheme: echoes the caller's
/// id and username claims back to prove authentication worked.
/// </summary>
[ApiController]
[Route("api/[controller]")]
[Authorize(AuthenticationSchemes = "JWT")]
public class ProtectedController : ControllerBase
{
    /// <summary>Return the authenticated caller's identifier claims.</summary>
    [HttpGet]
    public ActionResult<ResponseBody<object>> Get()
    {
        // Claims may be absent if the token omitted them; values are then null.
        var userId = User.FindFirst(ClaimTypes.NameIdentifier)?.Value;
        var username = User.FindFirst(ClaimTypes.Name)?.Value;

        return Ok(
            new ResponseBody<object>
            {
                Message = "Protected endpoint accessed successfully",
                Payload = new { userId, username },
            }
        );
    }
}
|
||||
28
src/Core/API/API.Core/Controllers/UserController.cs
Normal file
28
src/Core/API/API.Core/Controllers/UserController.cs
Normal file
@@ -0,0 +1,28 @@
|
||||
using Domain.Entities;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Service.UserManagement.User;
|
||||
|
||||
namespace API.Core.Controllers
{
    /// <summary>
    /// Read-only user account endpoints.
    /// NOTE(review): there is no [Authorize] attribute here, so account
    /// listing and lookup are publicly reachable — confirm this is intended.
    /// </summary>
    [ApiController]
    [Route("api/[controller]")]
    public class UserController(IUserService userService) : ControllerBase
    {
        /// <summary>List user accounts with optional paging.</summary>
        [HttpGet]
        public async Task<ActionResult<IEnumerable<UserAccount>>> GetAll(
            [FromQuery] int? limit,
            [FromQuery] int? offset
        )
        {
            var users = await userService.GetAllAsync(limit, offset);
            return Ok(users);
        }

        /// <summary>Fetch a single user account by its GUID.</summary>
        [HttpGet("{id:guid}")]
        public async Task<ActionResult<UserAccount>> GetById(Guid id)
        {
            var user = await userService.GetByIdAsync(id);
            return Ok(user);
        }
    }
}
|
||||
32
src/Core/API/API.Core/Dockerfile
Normal file
32
src/Core/API/API.Core/Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
||||
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
|
||||
ARG APP_UID=1000
|
||||
USER $APP_UID
|
||||
WORKDIR /app
|
||||
EXPOSE 8080
|
||||
EXPOSE 8081
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
COPY ["API/API.Core/API.Core.csproj", "API/API.Core/"]
|
||||
COPY ["Domain/Domain.Entities/Domain.Entities.csproj", "Domain.Entities/"]
|
||||
COPY ["Domain/Domain.Exceptions/Domain.Exceptions.csproj", "Domain.Exceptions/"]
|
||||
COPY ["Infrastructure/Infrastructure.Repository/Infrastructure.Repository.csproj", "Infrastructure/Infrastructure.Repository/"]
|
||||
COPY ["Infrastructure/Infrastructure.Jwt/Infrastructure.Jwt.csproj", "Infrastructure/Infrastructure.Jwt/"]
|
||||
COPY ["Infrastructure/Infrastructure.PasswordHashing/Infrastructure.PasswordHashing.csproj", "Infrastructure/Infrastructure.PasswordHashing/"]
|
||||
COPY ["Infrastructure/Infrastructure.Email/Infrastructure.Email.csproj", "Infrastructure/Infrastructure.Email/"]
|
||||
COPY ["Service/Service.Auth/Service.Auth.csproj", "Service/Service.Auth/"]
|
||||
COPY ["Service/Service.UserManagement/Service.UserManagement.csproj", "Service/Service.UserManagement/"]
|
||||
RUN dotnet restore "API/API.Core/API.Core.csproj"
|
||||
COPY . .
|
||||
WORKDIR "/src/API/API.Core"
|
||||
RUN dotnet build "./API.Core.csproj" -c $BUILD_CONFIGURATION -o /app/build
|
||||
|
||||
FROM build AS publish
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
RUN dotnet publish "./API.Core.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
|
||||
|
||||
FROM base AS final
|
||||
WORKDIR /app
|
||||
COPY --from=publish /app/publish .
|
||||
ENTRYPOINT ["dotnet", "API.Core.dll"]
|
||||
109
src/Core/API/API.Core/GlobalException.cs
Normal file
109
src/Core/API/API.Core/GlobalException.cs
Normal file
@@ -0,0 +1,109 @@
|
||||
// API.Core/Filters/GlobalExceptionFilter.cs
|
||||
|
||||
using API.Core.Contracts.Common;
|
||||
using Domain.Exceptions;
|
||||
using FluentValidation;
|
||||
using Microsoft.Data.SqlClient;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Mvc.Filters;
|
||||
|
||||
namespace API.Core;
|
||||
|
||||
public class GlobalExceptionFilter(ILogger<GlobalExceptionFilter> logger)
|
||||
: IExceptionFilter
|
||||
{
|
||||
public void OnException(ExceptionContext context)
|
||||
{
|
||||
logger.LogError(context.Exception, "Unhandled exception occurred");
|
||||
|
||||
switch (context.Exception)
|
||||
{
|
||||
case FluentValidation.ValidationException fluentValidationException:
|
||||
var errors = fluentValidationException
|
||||
.Errors.GroupBy(e => e.PropertyName)
|
||||
.ToDictionary(
|
||||
g => g.Key,
|
||||
g => g.Select(e => e.ErrorMessage).ToArray()
|
||||
);
|
||||
|
||||
context.Result = new BadRequestObjectResult(
|
||||
new { message = "Validation failed", errors }
|
||||
);
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case ConflictException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = ex.Message }
|
||||
)
|
||||
{
|
||||
StatusCode = 409,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case NotFoundException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = ex.Message }
|
||||
)
|
||||
{
|
||||
StatusCode = 404,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case UnauthorizedException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = ex.Message }
|
||||
)
|
||||
{
|
||||
StatusCode = 401,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case ForbiddenException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = ex.Message }
|
||||
)
|
||||
{
|
||||
StatusCode = 403,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case SqlException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = "A database error occurred." }
|
||||
)
|
||||
{
|
||||
StatusCode = 503,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
case Domain.Exceptions.ValidationException ex:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody { Message = ex.Message }
|
||||
)
|
||||
{
|
||||
StatusCode = 400,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
|
||||
default:
|
||||
context.Result = new ObjectResult(
|
||||
new ResponseBody
|
||||
{
|
||||
Message = "An unexpected error occurred",
|
||||
}
|
||||
)
|
||||
{
|
||||
StatusCode = 500,
|
||||
};
|
||||
context.ExceptionHandled = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
108
src/Core/API/API.Core/Program.cs
Normal file
108
src/Core/API/API.Core/Program.cs
Normal file
@@ -0,0 +1,108 @@
|
||||
using API.Core;
|
||||
using API.Core.Authentication;
|
||||
using API.Core.Contracts.Common;
|
||||
using Domain.Exceptions;
|
||||
using FluentValidation;
|
||||
using FluentValidation.AspNetCore;
|
||||
using Infrastructure.Email;
|
||||
using Infrastructure.Email.Templates;
|
||||
using Infrastructure.Email.Templates.Rendering;
|
||||
using Infrastructure.Jwt;
|
||||
using Infrastructure.PasswordHashing;
|
||||
using Infrastructure.Repository.Auth;
|
||||
using Infrastructure.Repository.Sql;
|
||||
using Infrastructure.Repository.UserAccount;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Mvc.Filters;
|
||||
using Service.Auth;
|
||||
using Service.Emails;
|
||||
using Service.UserManagement.User;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
|
||||
// Global Exception Filter
|
||||
builder.Services.AddControllers(options =>
|
||||
{
|
||||
options.Filters.Add<GlobalExceptionFilter>();
|
||||
});
|
||||
|
||||
builder.Services.AddEndpointsApiExplorer();
|
||||
builder.Services.AddSwaggerGen();
|
||||
builder.Services.AddOpenApi();
|
||||
|
||||
// Add FluentValidation
|
||||
builder.Services.AddValidatorsFromAssemblyContaining<Program>();
|
||||
builder.Services.AddFluentValidationAutoValidation();
|
||||
|
||||
// Add health checks
|
||||
builder.Services.AddHealthChecks();
|
||||
|
||||
// Configure logging for container output
|
||||
builder.Logging.ClearProviders();
|
||||
builder.Logging.AddConsole();
|
||||
if (!builder.Environment.IsProduction())
|
||||
{
|
||||
builder.Logging.AddDebug();
|
||||
}
|
||||
|
||||
// Configure Dependency Injection -------------------------------------------------------------------------------------
|
||||
|
||||
builder.Services.AddSingleton<
|
||||
ISqlConnectionFactory,
|
||||
DefaultSqlConnectionFactory
|
||||
>();
|
||||
|
||||
builder.Services.AddScoped<IUserAccountRepository, UserAccountRepository>();
|
||||
builder.Services.AddScoped<IAuthRepository, AuthRepository>();
|
||||
|
||||
builder.Services.AddScoped<IUserService, UserService>();
|
||||
builder.Services.AddScoped<ILoginService, LoginService>();
|
||||
builder.Services.AddScoped<IRegisterService, RegisterService>();
|
||||
builder.Services.AddScoped<ITokenService, TokenService>();
|
||||
|
||||
builder.Services.AddScoped<ITokenInfrastructure, JwtInfrastructure>();
|
||||
builder.Services.AddScoped<IPasswordInfrastructure, Argon2Infrastructure>();
|
||||
builder.Services.AddScoped<IEmailProvider, SmtpEmailProvider>();
|
||||
builder.Services.AddScoped<IEmailTemplateProvider, EmailTemplateProvider>();
|
||||
builder.Services.AddScoped<IEmailService, EmailService>();
|
||||
builder.Services.AddScoped<IConfirmationService, ConfirmationService>();
|
||||
|
||||
// Register the exception filter
|
||||
builder.Services.AddScoped<GlobalExceptionFilter>();
|
||||
|
||||
// Configure JWT Authentication
|
||||
builder
|
||||
.Services.AddAuthentication("JWT")
|
||||
.AddScheme<JwtAuthenticationOptions, JwtAuthenticationHandler>(
|
||||
"JWT",
|
||||
options => { }
|
||||
);
|
||||
|
||||
builder.Services.AddAuthorization();
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
app.UseSwagger();
|
||||
app.UseSwaggerUI();
|
||||
app.MapOpenApi();
|
||||
|
||||
app.UseHttpsRedirection();
|
||||
|
||||
app.UseAuthentication();
|
||||
app.UseAuthorization();
|
||||
|
||||
// Health check endpoint (used by Docker health checks and orchestrators)
|
||||
app.MapHealthChecks("/health");
|
||||
|
||||
app.MapControllers();
|
||||
app.MapFallbackToController("Handle404", "NotFound");
|
||||
|
||||
// Graceful shutdown handling
|
||||
var lifetime = app.Services.GetRequiredService<IHostApplicationLifetime>();
|
||||
lifetime.ApplicationStopping.Register(() =>
|
||||
{
|
||||
app.Logger.LogInformation("Application is shutting down gracefully...");
|
||||
});
|
||||
|
||||
app.Run();
|
||||
19
src/Core/API/API.Core/appsettings.Development.json
Normal file
19
src/Core/API/API.Core/appsettings.Development.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Information",
|
||||
"Microsoft.AspNetCore": "Information",
|
||||
"Microsoft.EntityFrameworkCore": "Information"
|
||||
},
|
||||
"Console": {
|
||||
"IncludeScopes": true,
|
||||
"TimestampFormat": "yyyy-MM-ddTHH:mm:ss.fffZ"
|
||||
}
|
||||
},
|
||||
"AllowedHosts": "*",
|
||||
"Jwt": {
|
||||
"ExpirationMinutes": 120,
|
||||
"Issuer": "biergarten-api",
|
||||
"Audience": "biergarten-users"
|
||||
}
|
||||
}
|
||||
19
src/Core/API/API.Core/appsettings.Production.json
Normal file
19
src/Core/API/API.Core/appsettings.Production.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Warning",
|
||||
"Microsoft.AspNetCore": "Warning",
|
||||
"Microsoft.EntityFrameworkCore": "Error"
|
||||
},
|
||||
"Console": {
|
||||
"IncludeScopes": false,
|
||||
"TimestampFormat": "yyyy-MM-ddTHH:mm:ss.fffZ"
|
||||
}
|
||||
},
|
||||
"AllowedHosts": "*",
|
||||
"Jwt": {
|
||||
"ExpirationMinutes": 60,
|
||||
"Issuer": "biergarten-api",
|
||||
"Audience": "biergarten-users"
|
||||
}
|
||||
}
|
||||
24
src/Core/API/API.Core/appsettings.json
Normal file
24
src/Core/API/API.Core/appsettings.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"Logging": {
|
||||
"LogLevel": {
|
||||
"Default": "Information",
|
||||
"Microsoft.AspNetCore": "Warning",
|
||||
"Microsoft.EntityFrameworkCore": "Information"
|
||||
},
|
||||
"Console": {
|
||||
"IncludeScopes": true,
|
||||
"TimestampFormat": "yyyy-MM-ddTHH:mm:ss.fffZ"
|
||||
}
|
||||
},
|
||||
"AllowedHosts": "*",
|
||||
"ConnectionStrings": {
|
||||
"DefaultConnection": ""
|
||||
},
|
||||
"Jwt": {
|
||||
"SecretKey": "",
|
||||
"ExpirationMinutes": 60,
|
||||
"Issuer": "biergarten-api",
|
||||
"Audience": "biergarten-users"
|
||||
}
|
||||
}
|
||||
|
||||
46
src/Core/API/API.Specs/API.Specs.csproj
Normal file
46
src/Core/API/API.Specs/API.Specs.csproj
Normal file
@@ -0,0 +1,46 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<RootNamespace>API.Specs</RootNamespace>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
<PackageReference Include="FluentAssertions" Version="6.9.0" />
|
||||
<PackageReference Include="dbup" Version="5.0.41" />
|
||||
|
||||
<!-- Reqnroll core, xUnit adapter and code-behind generator -->
|
||||
<PackageReference Include="Reqnroll" Version="3.3.3" />
|
||||
<PackageReference Include="Reqnroll.xUnit" Version="3.3.3" />
|
||||
<PackageReference
|
||||
Include="Reqnroll.Tools.MsBuild.Generation"
|
||||
Version="3.3.3"
|
||||
PrivateAssets="all"
|
||||
/>
|
||||
|
||||
<!-- ASP.NET Core integration testing -->
|
||||
<PackageReference
|
||||
Include="Microsoft.AspNetCore.Mvc.Testing"
|
||||
Version="9.0.1"
|
||||
/>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Ensure feature files are included in the project -->
|
||||
<None Include="Features\**\*.feature" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\API.Core\API.Core.csproj" />
|
||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Email\Infrastructure.Email.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
25
src/Core/API/API.Specs/Dockerfile
Normal file
25
src/Core/API/API.Specs/Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
||||
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
COPY ["API/API.Core/API.Core.csproj", "API/API.Core/"]
|
||||
COPY ["API/API.Specs/API.Specs.csproj", "API/API.Specs/"]
|
||||
COPY ["Domain/Domain.Entities/Domain.Entities.csproj", "Domain.Entities/"]
|
||||
COPY ["Domain/Domain.Exceptions/Domain.Exceptions.csproj", "Domain.Exceptions/"]
|
||||
COPY ["Infrastructure/Infrastructure.Repository/Infrastructure.Repository.csproj", "Infrastructure/Infrastructure.Repository/"]
|
||||
COPY ["Infrastructure/Infrastructure.Jwt/Infrastructure.Jwt.csproj", "Infrastructure/Infrastructure.Jwt/"]
|
||||
COPY ["Infrastructure/Infrastructure.PasswordHashing/Infrastructure.PasswordHashing.csproj", "Infrastructure/Infrastructure.PasswordHashing/"]
|
||||
COPY ["Infrastructure/Infrastructure.Email/Infrastructure.Email.csproj", "Infrastructure/Infrastructure.Email/"]
|
||||
COPY ["Service/Service.Auth/Service.Auth.csproj", "Service/Service.Auth/"]
|
||||
COPY ["Service/Service.UserManagement/Service.UserManagement.csproj", "Service/Service.UserManagement/"]
|
||||
RUN dotnet restore "API/API.Specs/API.Specs.csproj"
|
||||
COPY . .
|
||||
WORKDIR "/src/API/API.Specs"
|
||||
RUN dotnet build "./API.Specs.csproj" -c $BUILD_CONFIGURATION -o /app/build
|
||||
|
||||
FROM build AS final
|
||||
ARG BUILD_CONFIGURATION=Release
|
||||
WORKDIR /src
|
||||
RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
|
||||
RUN mkdir -p /app/test-results/api-specs
|
||||
WORKDIR /src/API/API.Specs
|
||||
ENTRYPOINT ["dotnet", "test", "API.Specs.csproj", "-c", "Release", "--logger", "trx;LogFileName=/app/test-results/api-specs/results.trx"]
|
||||
@@ -0,0 +1,51 @@
|
||||
Feature: Protected Endpoint Access Token Validation
|
||||
As a backend developer
|
||||
I want protected endpoints to validate access tokens
|
||||
So that unauthorized requests are rejected
|
||||
|
||||
Scenario: Protected endpoint accepts valid access token
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
And I am logged in
|
||||
When I submit a request to a protected endpoint with a valid access token
|
||||
Then the response has HTTP status 200
|
||||
|
||||
Scenario: Protected endpoint rejects missing access token
|
||||
Given the API is running
|
||||
When I submit a request to a protected endpoint without an access token
|
||||
Then the response has HTTP status 401
|
||||
|
||||
Scenario: Protected endpoint rejects invalid access token
|
||||
Given the API is running
|
||||
When I submit a request to a protected endpoint with an invalid access token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Unauthorized"
|
||||
|
||||
Scenario: Protected endpoint rejects expired access token
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
And I am logged in with an immediately-expiring access token
|
||||
When I submit a request to a protected endpoint with the expired token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Unauthorized"
|
||||
|
||||
Scenario: Protected endpoint rejects token signed with wrong secret
|
||||
Given the API is running
|
||||
And I have an access token signed with the wrong secret
|
||||
When I submit a request to a protected endpoint with the tampered token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Unauthorized"
|
||||
|
||||
Scenario: Protected endpoint rejects refresh token as access token
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
And I am logged in
|
||||
When I submit a request to a protected endpoint with my refresh token instead of access token
|
||||
Then the response has HTTP status 401
|
||||
|
||||
Scenario: Protected endpoint rejects confirmation token as access token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid confirmation token
|
||||
When I submit a request to a protected endpoint with my confirmation token instead of access token
|
||||
Then the response has HTTP status 401
|
||||
76
src/Core/API/API.Specs/Features/Confirmation.feature
Normal file
76
src/Core/API/API.Specs/Features/Confirmation.feature
Normal file
@@ -0,0 +1,76 @@
|
||||
Feature: User Account Confirmation
|
||||
As a newly registered user
|
||||
I want to confirm my email address via a validation token
|
||||
So that my account is fully activated
|
||||
Scenario: Successful confirmation with valid token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid confirmation token for my account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with the valid token
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" containing "is confirmed"
|
||||
|
||||
Scenario: Re-confirming an already verified account remains successful
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid confirmation token for my account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with the valid token
|
||||
And I submit the same confirmation request again
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" containing "is confirmed"
|
||||
|
||||
Scenario: Confirmation fails with invalid token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with an invalid token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid token"
|
||||
|
||||
Scenario: Confirmation fails with expired token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have an expired confirmation token for my account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with the expired token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid token"
|
||||
|
||||
Scenario: Confirmation fails with tampered token (wrong secret)
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a confirmation token signed with the wrong secret
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with the tampered token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid token"
|
||||
|
||||
Scenario: Confirmation fails when token is missing
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with a missing token
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Confirmation endpoint only accepts POST requests
|
||||
Given the API is running
|
||||
And I have a valid confirmation token
|
||||
When I submit a confirmation request using an invalid HTTP method
|
||||
Then the response has HTTP status 404
|
||||
|
||||
Scenario: Confirmation fails with malformed token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid access token for my account
|
||||
When I submit a confirmation request with a malformed token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid token"
|
||||
|
||||
Scenario: Confirmation fails without an access token
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid confirmation token for my account
|
||||
When I submit a confirmation request with the valid token without an access token
|
||||
Then the response has HTTP status 401
|
||||
39
src/Core/API/API.Specs/Features/Login.feature
Normal file
39
src/Core/API/API.Specs/Features/Login.feature
Normal file
@@ -0,0 +1,39 @@
|
||||
Feature: User Login
|
||||
As a registered user
|
||||
I want to log in to my account
|
||||
So that I receive an authentication token to access authenticated routes
|
||||
|
||||
Scenario: Successful login with valid credentials
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
When I submit a login request with a username and password
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" equal "Logged in successfully."
|
||||
And the response JSON should have an access token
|
||||
|
||||
Scenario: Login fails with invalid credentials
|
||||
Given the API is running
|
||||
And I do not have an existing account
|
||||
When I submit a login request with a username and password
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" equal "Invalid username or password."
|
||||
|
||||
Scenario: Login fails when required missing username
|
||||
Given the API is running
|
||||
When I submit a login request with a missing username
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Login fails when required missing password
|
||||
Given the API is running
|
||||
When I submit a login request with a missing password
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Login fails when both username and password are missing
|
||||
Given the API is running
|
||||
When I submit a login request with both username and password missing
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Login endpoint only accepts POST requests
|
||||
Given the API is running
|
||||
When I submit a login request using a GET request
|
||||
Then the response has HTTP status 404
|
||||
10
src/Core/API/API.Specs/Features/NotFound.feature
Normal file
10
src/Core/API/API.Specs/Features/NotFound.feature
Normal file
@@ -0,0 +1,10 @@
|
||||
Feature: NotFound Responses
|
||||
As a client of the API
|
||||
I want consistent 404 responses
|
||||
So that consumers can gracefully handle missing routes
|
||||
|
||||
Scenario: GET request to an invalid route returns 404
|
||||
Given the API is running
|
||||
When I send an HTTP request "GET" to "/invalid-route"
|
||||
Then the response has HTTP status 404
|
||||
And the response JSON should have "message" equal "Route not found."
|
||||
60
src/Core/API/API.Specs/Features/Registration.feature
Normal file
60
src/Core/API/API.Specs/Features/Registration.feature
Normal file
@@ -0,0 +1,60 @@
|
||||
Feature: User Registration
|
||||
As a new user
|
||||
I want to register an account
|
||||
So that I can log in and access authenticated routes
|
||||
|
||||
Scenario: Successful registration with valid details
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| newuser | New | User | newuser@example.com | 1990-01-01 | Password1! |
|
||||
Then the response has HTTP status 201
|
||||
And the response JSON should have "message" equal "User registered successfully."
|
||||
And the response JSON should have an access token
|
||||
|
||||
Scenario: Registration fails with existing username
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| test.user | Test | User | example@example.com | 2001-11-11 | Password1! |
|
||||
Then the response has HTTP status 409
|
||||
|
||||
Scenario: Registration fails with existing email
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| newuser | New | User | test.user@thebiergarten.app | 1990-01-01 | Password1! |
|
||||
Then the response has HTTP status 409
|
||||
|
||||
Scenario: Registration fails with missing required fields
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| | New | User | | | Password1! |
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Registration fails with invalid email format
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| newuser | New | User | invalidemail | 1990-01-01 | Password1! |
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Registration fails with weak password
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| newuser | New | User | newuser@example.com | 1990-01-01 | weakpass |
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Cannot register a user younger than 19 years of age (regulatory requirement)
|
||||
Given the API is running
|
||||
When I submit a registration request with values:
|
||||
| Username | FirstName | LastName | Email | DateOfBirth | Password |
|
||||
| younguser | Young | User | younguser@example.com | {underage_date} | Password1! |
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Registration endpoint only accepts POST requests
|
||||
Given the API is running
|
||||
When I submit a registration request using a GET request
|
||||
Then the response has HTTP status 404
|
||||
36
src/Core/API/API.Specs/Features/ResendConfirmation.feature
Normal file
36
src/Core/API/API.Specs/Features/ResendConfirmation.feature
Normal file
@@ -0,0 +1,36 @@
|
||||
Feature: Resend Confirmation Email
|
||||
As a user who did not receive the confirmation email
|
||||
I want to request a resend of the confirmation email
|
||||
So that I can obtain a working confirmation link while preventing abuse
|
||||
|
||||
Scenario: Legitimate resend for an unconfirmed user
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid access token for my account
|
||||
When I submit a resend confirmation request for my account
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" containing "confirmation email has been resent"
|
||||
|
||||
Scenario: Resend is a no-op for an already confirmed user
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid confirmation token for my account
|
||||
And I have a valid access token for my account
|
||||
And I have confirmed my account
|
||||
When I submit a resend confirmation request for my account
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" containing "confirmation email has been resent"
|
||||
|
||||
Scenario: Resend is a no-op for a non-existent user
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
And I have a valid access token for my account
|
||||
When I submit a resend confirmation request for a non-existent user
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" containing "confirmation email has been resent"
|
||||
|
||||
Scenario: Resend requires authentication
|
||||
Given the API is running
|
||||
And I have registered a new account
|
||||
When I submit a resend confirmation request without an access token
|
||||
Then the response has HTTP status 401
|
||||
39
src/Core/API/API.Specs/Features/TokenRefresh.feature
Normal file
39
src/Core/API/API.Specs/Features/TokenRefresh.feature
Normal file
@@ -0,0 +1,39 @@
|
||||
Feature: Token Refresh
|
||||
As an authenticated user
|
||||
I want to refresh my access token using my refresh token
|
||||
So that I can maintain my session without logging in again
|
||||
|
||||
Scenario: Successful token refresh with valid refresh token
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
And I am logged in
|
||||
When I submit a refresh token request with a valid refresh token
|
||||
Then the response has HTTP status 200
|
||||
And the response JSON should have "message" equal "Token refreshed successfully."
|
||||
And the response JSON should have a new access token
|
||||
And the response JSON should have a new refresh token
|
||||
|
||||
Scenario: Token refresh fails with invalid refresh token
|
||||
Given the API is running
|
||||
When I submit a refresh token request with an invalid refresh token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid"
|
||||
|
||||
Scenario: Token refresh fails with expired refresh token
|
||||
Given the API is running
|
||||
And I have an existing account
|
||||
And I am logged in with an immediately-expiring refresh token
|
||||
When I submit a refresh token request with the expired refresh token
|
||||
Then the response has HTTP status 401
|
||||
And the response JSON should have "message" containing "Invalid token"
|
||||
|
||||
Scenario: Token refresh fails when refresh token is missing
|
||||
Given the API is running
|
||||
When I submit a refresh token request with a missing refresh token
|
||||
Then the response has HTTP status 400
|
||||
|
||||
Scenario: Token refresh endpoint only accepts POST requests
|
||||
Given the API is running
|
||||
And I have a valid refresh token
|
||||
When I submit a refresh token request using a GET request
|
||||
Then the response has HTTP status 404
|
||||
68
src/Core/API/API.Specs/Mocks/MockEmailProvider.cs
Normal file
68
src/Core/API/API.Specs/Mocks/MockEmailProvider.cs
Normal file
@@ -0,0 +1,68 @@
|
||||
using Infrastructure.Email;
|
||||
|
||||
namespace API.Specs.Mocks;
|
||||
|
||||
/// <summary>
/// Test double for <see cref="IEmailProvider"/> that records outgoing emails
/// instead of delivering them, so scenarios can inspect what would have been sent.
/// </summary>
public class MockEmailProvider : IEmailProvider
{
    /// <summary>Every email captured so far, in the order it was received.</summary>
    public List<SentEmail> SentEmails { get; } = new();

    public Task SendAsync(
        string to,
        string subject,
        string body,
        bool isHtml = true
    )
    {
        // A single recipient is just the multi-recipient case with one address.
        return SendAsync([to], subject, body, isHtml);
    }

    public Task SendAsync(
        IEnumerable<string> to,
        string subject,
        string body,
        bool isHtml = true
    )
    {
        var captured = new SentEmail
        {
            To = to.ToList(),
            Subject = subject,
            Body = body,
            IsHtml = isHtml,
            SentAt = DateTime.UtcNow,
        };
        SentEmails.Add(captured);

        // Nothing is actually delivered; the call completes synchronously.
        return Task.CompletedTask;
    }

    /// <summary>Forgets all captured emails (e.g. between scenarios).</summary>
    public void Clear()
    {
        SentEmails.Clear();
    }

    /// <summary>Snapshot of one captured email.</summary>
    public class SentEmail
    {
        public List<string> To { get; init; } = new();
        public string Subject { get; init; } = string.Empty;
        public string Body { get; init; } = string.Empty;
        public bool IsHtml { get; init; }
        public DateTime SentAt { get; init; }
    }
}
|
||||
65
src/Core/API/API.Specs/Mocks/MockEmailService.cs
Normal file
65
src/Core/API/API.Specs/Mocks/MockEmailService.cs
Normal file
@@ -0,0 +1,65 @@
|
||||
using Domain.Entities;
|
||||
using Service.Emails;
|
||||
|
||||
namespace API.Specs.Mocks;
|
||||
|
||||
/// <summary>
/// Test double for <see cref="IEmailService"/> that records the registration and
/// resend-confirmation emails it was asked to send, instead of sending anything.
/// </summary>
public class MockEmailService : IEmailService
{
    /// <summary>Registration emails captured so far, in order.</summary>
    public List<RegistrationEmail> SentRegistrationEmails { get; } = new();

    /// <summary>Resend-confirmation emails captured so far, in order.</summary>
    public List<ResendConfirmationEmail> SentResendConfirmationEmails { get; } = new();

    public Task SendRegistrationEmailAsync(
        UserAccount createdUser,
        string confirmationToken
    )
    {
        var captured = new RegistrationEmail
        {
            UserAccount = createdUser,
            ConfirmationToken = confirmationToken,
            SentAt = DateTime.UtcNow,
        };
        SentRegistrationEmails.Add(captured);

        // No real email leaves the process; complete immediately.
        return Task.CompletedTask;
    }

    public Task SendResendConfirmationEmailAsync(
        UserAccount user,
        string confirmationToken
    )
    {
        var captured = new ResendConfirmationEmail
        {
            UserAccount = user,
            ConfirmationToken = confirmationToken,
            SentAt = DateTime.UtcNow,
        };
        SentResendConfirmationEmails.Add(captured);

        return Task.CompletedTask;
    }

    /// <summary>Forgets everything captured so far (e.g. between scenarios).</summary>
    public void Clear()
    {
        SentRegistrationEmails.Clear();
        SentResendConfirmationEmails.Clear();
    }

    /// <summary>Snapshot of one captured registration email.</summary>
    public class RegistrationEmail
    {
        public UserAccount UserAccount { get; init; } = null!;
        public string ConfirmationToken { get; init; } = string.Empty;
        public DateTime SentAt { get; init; }
    }

    /// <summary>Snapshot of one captured resend-confirmation email.</summary>
    public class ResendConfirmationEmail
    {
        public UserAccount UserAccount { get; init; } = null!;
        public string ConfirmationToken { get; init; } = string.Empty;
        public DateTime SentAt { get; init; }
    }
}
|
||||
209
src/Core/API/API.Specs/Steps/ApiGeneralSteps.cs
Normal file
209
src/Core/API/API.Specs/Steps/ApiGeneralSteps.cs
Normal file
@@ -0,0 +1,209 @@
|
||||
using System.Text.Json;
|
||||
using API.Specs;
|
||||
using FluentAssertions;
|
||||
using Reqnroll;
|
||||
|
||||
namespace API.Specs.Steps;
|
||||
|
||||
[Binding]
public class ApiGeneralSteps(ScenarioContext scenario)
{
    private const string ClientKey = "client";
    private const string FactoryKey = "factory";
    private const string ResponseKey = "response";
    private const string ResponseBodyKey = "responseBody";

    /// <summary>
    /// Returns the scenario-scoped <see cref="HttpClient"/>, creating and caching
    /// the test host factory and client in the scenario context on first use.
    /// </summary>
    private HttpClient GetClient()
    {
        if (scenario.TryGetValue<HttpClient>(ClientKey, out var client))
        {
            return client;
        }

        var factory = scenario.TryGetValue<TestApiFactory>(FactoryKey, out var f)
            ? f
            : new TestApiFactory();
        scenario[FactoryKey] = factory;

        client = factory.CreateClient();
        scenario[ClientKey] = client;
        return client;
    }

    /// <summary>
    /// Sends the given request and stores the response and its body text in the
    /// scenario context for later Then-steps. Shared by both When-steps.
    /// </summary>
    private async Task SendAndStoreAsync(HttpRequestMessage requestMessage)
    {
        var client = GetClient();
        var response = await client.SendAsync(requestMessage);
        var responseBody = await response.Content.ReadAsStringAsync();

        scenario[ResponseKey] = response;
        scenario[ResponseBodyKey] = responseBody;
    }

    /// <summary>
    /// Fetches the stored response, failing the scenario with a clear message
    /// when no request has been sent yet.
    /// </summary>
    private HttpResponseMessage GetResponse()
    {
        scenario
            .TryGetValue<HttpResponseMessage>(ResponseKey, out var response)
            .Should()
            .BeTrue("No response was received from the API");
        return response!;
    }

    /// <summary>
    /// Resolves a string-valued field from the stored response JSON, looking
    /// first at the document root and then inside a nested "payload" object.
    /// Fails the scenario with a descriptive message if the field is absent
    /// or is not a JSON string.
    /// </summary>
    private string? GetStringField(string field)
    {
        // Assert a response exists before inspecting its body.
        GetResponse();
        scenario
            .TryGetValue<string>(ResponseBodyKey, out var responseBody)
            .Should()
            .BeTrue();

        using var doc = JsonDocument.Parse(responseBody!);
        var root = doc.RootElement;

        if (!root.TryGetProperty(field, out var value))
        {
            // Fall back to the conventional envelope: { "payload": { ... } }.
            root.TryGetProperty("payload", out var payloadElem)
                .Should()
                .BeTrue(
                    "Expected field '{0}' to be present either at the root or inside 'payload'",
                    field
                );
            payloadElem
                .ValueKind.Should()
                .Be(JsonValueKind.Object, "payload must be an object");
            payloadElem
                .TryGetProperty(field, out value)
                .Should()
                .BeTrue(
                    "Expected field '{0}' to be present inside 'payload'",
                    field
                );
        }

        value
            .ValueKind.Should()
            .Be(
                JsonValueKind.String,
                "Expected field '{0}' to be a string",
                field
            );
        return value.GetString();
    }

    [Given("the API is running")]
    public void GivenTheApiIsRunning()
    {
        // Materializing the client boots the in-memory test host.
        GetClient();
    }

    [When("I send an HTTP request {string} to {string} with body:")]
    public async Task WhenISendAnHttpRequestStringToStringWithBody(
        string method,
        string url,
        string jsonBody
    )
    {
        var requestMessage = new HttpRequestMessage(new HttpMethod(method), url)
        {
            Content = new StringContent(
                jsonBody,
                System.Text.Encoding.UTF8,
                "application/json"
            ),
        };
        await SendAndStoreAsync(requestMessage);
    }

    [When("I send an HTTP request {string} to {string}")]
    public async Task WhenISendAnHttpRequestStringToString(
        string method,
        string url
    )
    {
        await SendAndStoreAsync(new HttpRequestMessage(new HttpMethod(method), url));
    }

    [Then("the response status code should be {int}")]
    public void ThenTheResponseStatusCodeShouldBeInt(int expected)
    {
        ((int)GetResponse().StatusCode).Should().Be(expected);
    }

    [Then("the response has HTTP status {int}")]
    public void ThenTheResponseHasHttpStatusInt(int expectedCode)
    {
        ((int)GetResponse().StatusCode).Should().Be(expectedCode);
    }

    [Then("the response JSON should have {string} equal {string}")]
    public void ThenTheResponseJsonShouldHaveStringEqualString(
        string field,
        string expected
    )
    {
        GetStringField(field).Should().Be(expected);
    }

    [Then("the response JSON should have {string} containing {string}")]
    public void ThenTheResponseJsonShouldHaveStringContainingString(
        string field,
        string expectedSubstring
    )
    {
        var actualValue = GetStringField(field);
        actualValue
            .Should()
            .Contain(
                expectedSubstring,
                "Expected field '{0}' to contain '{1}' but was '{2}'",
                field,
                expectedSubstring,
                actualValue
            );
    }
}
|
||||
1214
src/Core/API/API.Specs/Steps/AuthSteps.cs
Normal file
1214
src/Core/API/API.Specs/Steps/AuthSteps.cs
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user