mirror of
https://github.com/aaronpo97/the-biergarten-app.git
synced 2026-04-05 18:09:04 +00:00
Compare commits
4 Commits
feat/add-b
...
ac136f7179
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ac136f7179 | ||
|
|
280c9c61bd | ||
|
|
248a51b35f | ||
|
|
35aa7bc0df |
3
pipeline/.gitignore
vendored
Normal file
3
pipeline/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
dist
|
||||||
|
build
|
||||||
|
data
|
||||||
146
pipeline/CMakeLists.txt
Normal file
146
pipeline/CMakeLists.txt
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
cmake_minimum_required(VERSION 3.20)
project(biergarten-pipeline VERSION 0.1.0 LANGUAGES CXX)

# CMP0167 (prefer Boost's own BoostConfig over FindBoost) only exists in
# CMake >= 3.30; setting an unknown policy is a hard configure error on the
# 3.20 minimum declared above, so guard it.
if(POLICY CMP0167)
  cmake_policy(SET CMP0167 NEW)
endif()

# Project-wide C++ standard: strict C++23, no compiler extensions.
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

# System dependencies. The imported targets (CURL::libcurl, etc.) carry
# their include directories and link flags as usage requirements.
find_package(CURL REQUIRED)
find_package(Boost REQUIRED COMPONENTS unit_test_framework)
find_package(SQLite3 REQUIRED)

include(FetchContent)

# RapidJSON (header-only) for true SAX parsing.
# Populate only: RapidJSON's own CMakeLists.txt builds docs/tests/examples
# we do not want, so we never add_subdirectory() it -- we just point an
# include directory at its headers below.
FetchContent_Declare(
  rapidjson
  GIT_REPOSITORY https://github.com/Tencent/rapidjson.git
  GIT_TAG v1.1.0
  SOURCE_SUBDIR "" # Don't use RapidJSON's CMakeLists.txt
)
FetchContent_GetProperties(rapidjson)
if(NOT rapidjson_POPULATED)
  FetchContent_Populate(rapidjson)
endif()

# spdlog (logging). MakeAvailable replaces the manual
# GetProperties/Populate/add_subdirectory dance (available since CMake 3.14).
FetchContent_Declare(
  spdlog
  GIT_REPOSITORY https://github.com/gabime/spdlog.git
  GIT_TAG v1.11.0
)
FetchContent_MakeAvailable(spdlog)

# llama.cpp (on-device inference). Disable everything except the library
# before the fetch so the subproject's own options pick these values up.
set(LLAMA_BUILD_TESTS OFF CACHE BOOL "" FORCE)
set(LLAMA_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE)
set(LLAMA_BUILD_SERVER OFF CACHE BOOL "" FORCE)

FetchContent_Declare(
  llama_cpp
  GIT_REPOSITORY https://github.com/ggerganov/llama.cpp.git
  GIT_TAG b8611
)
FetchContent_MakeAvailable(llama_cpp)

# Workaround: some llama.cpp revisions are missing an <algorithm> include
# that AppleClang requires; force-include it for that compiler only.
if(TARGET llama)
  target_compile_options(llama PRIVATE
    $<$<CXX_COMPILER_ID:AppleClang>:-include algorithm>
  )
endif()

# Pipeline sources. CONFIGURE_DEPENDS re-globs on each build, but an
# explicit source list would still be the more robust choice long-term.
file(GLOB_RECURSE SOURCES CONFIGURE_DEPENDS
  src/*.cpp
)

add_executable(biergarten-pipeline ${SOURCES})

target_include_directories(biergarten-pipeline
  PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}/includes
    ${rapidjson_SOURCE_DIR}/include
    ${llama_cpp_SOURCE_DIR}/include
)

# NOTE(review): Boost.Test linked into the main binary is unusual; confirm
# the pipeline itself uses it, otherwise it belongs only on the test target.
target_link_libraries(biergarten-pipeline
  PRIVATE
    CURL::libcurl
    Boost::unit_test_framework
    SQLite::SQLite3
    spdlog::spdlog
    llama
)

target_compile_options(biergarten-pipeline PRIVATE
  $<$<CXX_COMPILER_ID:GNU,Clang>:
    -Wall
    -Wextra
    -Wpedantic
    -Wshadow
    -Wconversion
    -Wsign-conversion
  >
  $<$<CXX_COMPILER_ID:MSVC>:
    /W4
    /WX
  >
)

# Ensure output/ exists for generated seed SQL files.
# NOTE(review): this writes into the source tree; consider moving the
# output directory under ${CMAKE_CURRENT_BINARY_DIR}.
add_custom_command(TARGET biergarten-pipeline POST_BUILD
  COMMAND ${CMAKE_COMMAND} -E make_directory
          ${CMAKE_CURRENT_SOURCE_DIR}/output
  COMMENT "Creating output/ directory for seed SQL files"
  VERBATIM
)

# Optional "memcheck" convenience target when Valgrind is installed.
find_program(VALGRIND valgrind)
if(VALGRIND)
  add_custom_target(memcheck
    COMMAND ${VALGRIND}
            --leak-check=full
            --error-exitcode=1
            $<TARGET_FILE:biergarten-pipeline> --help
    DEPENDS biergarten-pipeline
    COMMENT "Running Valgrind memcheck"
    VERBATIM
  )
endif()

include(CTest)

if(BUILD_TESTING)
  # Boost.Test was already located at the top of this file; a second
  # find_package() here is redundant.
  file(GLOB_RECURSE TEST_SOURCES CONFIGURE_DEPENDS
    tests/*.cpp
    tests/*.cc
    tests/*.cxx
  )

  if(TEST_SOURCES)
    add_executable(biergarten-pipeline-tests ${TEST_SOURCES})

    # Headers live in includes/ (not include/) -- keep this in sync with
    # the main target above.
    target_include_directories(biergarten-pipeline-tests
      PRIVATE
        ${CMAKE_CURRENT_SOURCE_DIR}/includes
        ${rapidjson_SOURCE_DIR}/include
    )

    # nlohmann_json::nlohmann_json was linked here previously, but that
    # target is never declared or fetched anywhere in this project, which
    # makes configuration fail as soon as a test source exists. The project
    # parses JSON with RapidJSON (header-only; include dir added above).
    target_link_libraries(biergarten-pipeline-tests
      PRIVATE
        Boost::unit_test_framework
        CURL::libcurl
    )

    add_test(
      NAME biergarten-pipeline-tests
      COMMAND biergarten-pipeline-tests
    )
  endif()
endif()
|
||||||
414
pipeline/README.md
Normal file
414
pipeline/README.md
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
## Biergarten Pipeline
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The pipeline orchestrates five key stages:
|
||||||
|
|
||||||
|
1. **Download**: Fetches `countries+states+cities.json` from a pinned GitHub commit with optional local caching.
|
||||||
|
2. **Parse**: Streams JSON using RapidJSON SAX parser, extracting country/state/city records without loading the entire file into memory.
|
||||||
|
3. **Buffer**: Routes city records through a bounded concurrent queue to decouple parsing from writes.
|
||||||
|
4. **Store**: Inserts records with concurrent thread safety using an in-memory SQLite database.
|
||||||
|
5. **Generate**: Produces mock brewery metadata for a sample of cities (mockup for future LLM integration).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Data Sources and Formats
|
||||||
|
|
||||||
|
- Hierarchical structure: countries array → states per country → cities per state.
|
||||||
|
- Fields: `id` (integer), `name` (string), `iso2` / `iso3` (codes), `latitude` / `longitude`.
|
||||||
|
- Sourced from: [dr5hn/countries-states-cities-database](https://github.com/dr5hn/countries-states-cities-database) on GitHub.
|
||||||
|
|
||||||
|
**Output**: Structured SQLite in-memory database + console logs via spdlog.
|
||||||
|
|
||||||
|
### Concurrency Architecture
|
||||||
|
|
||||||
|
The pipeline splits work across parsing and writing phases:
|
||||||
|
|
||||||
|
```
|
||||||
|
Main Thread:
|
||||||
|
parse_sax() -> Insert countries (direct)
|
||||||
|
-> Insert states (direct)
|
||||||
|
-> Push CityRecord to WorkQueue
|
||||||
|
|
||||||
|
Worker Threads (implicit; pthread pool via sqlite3):
|
||||||
|
Pop CityRecord from WorkQueue
|
||||||
|
-> InsertCity(db) with mutex protection
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key synchronization primitives**:
|
||||||
|
|
||||||
|
- **WorkQueue<T>**: Bounded (default 1024 items) concurrent queue with blocking push/pop, guarded by mutex + condition variables.
|
||||||
|
- **SqliteDatabase::dbMutex**: Serializes all SQLite operations to avoid `SQLITE_BUSY` and ensure write safety.
|
||||||
|
|
||||||
|
**Backpressure**: When the WorkQueue fills (≥1024 city records pending), the parser thread blocks until workers drain items.
|
||||||
|
|
||||||
|
### Component Responsibilities
|
||||||
|
|
||||||
|
| Component | Purpose | Thread Safety |
|
||||||
|
| ------------------------- | ------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------- |
|
||||||
|
| **DataDownloader** | GitHub fetch with curl; optional filesystem cache; handles retries and ETags. | Blocking I/O; safe for single-threaded startup. |
|
||||||
|
| **StreamingJsonParser** | SAX-style RapidJSON handler; emits country/state/city via callbacks; tracks parse state (array depth, key context). | Single-threaded parse phase; thread-safe callbacks. |
|
||||||
|
| **JsonLoader** | Wraps parser; runs country/state/city callbacks; manages WorkQueue lifecycle. | Produces to WorkQueue; consumes from callbacks. |
|
||||||
|
| **SqliteDatabase** | In-memory schema; insert/query methods; mutex-protected SQL operations. | Mutex-guarded; thread-safe concurrent inserts. |
|
||||||
|
| **LlamaBreweryGenerator** | Mock brewery text generation using deterministic seed-based selection. | Stateless; thread-safe method calls. |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
**SQLite in-memory database** with three core tables:
|
||||||
|
|
||||||
|
### Countries
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE countries (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
iso2 TEXT,
|
||||||
|
iso3 TEXT
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_countries_iso2 ON countries(iso2);
|
||||||
|
```
|
||||||
|
|
||||||
|
### States
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE states (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
country_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
iso2 TEXT,
|
||||||
|
FOREIGN KEY (country_id) REFERENCES countries(id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_states_country ON states(country_id);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cities
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE cities (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
state_id INTEGER NOT NULL,
|
||||||
|
country_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
latitude REAL,
|
||||||
|
longitude REAL,
|
||||||
|
FOREIGN KEY (state_id) REFERENCES states(id),
|
||||||
|
FOREIGN KEY (country_id) REFERENCES countries(id)
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_cities_state ON cities(state_id);
|
||||||
|
CREATE INDEX idx_cities_country ON cities(country_id);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Design rationale**:
|
||||||
|
|
||||||
|
- In-memory for performance (no persistent storage; data is regenerated on each run).
|
||||||
|
- Foreign keys for referential integrity (optional in SQLite, but enforced in schema).
|
||||||
|
- Indexes on foreign keys for fast lookups during brewery generation.
|
||||||
|
- Dual country_id in cities table for direct queries without state joins.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Data Flow
|
||||||
|
|
||||||
|
### Parse Phase (Main Thread)
|
||||||
|
|
||||||
|
1. **DataDownloader::DownloadCountriesDatabase()**
|
||||||
|
- Constructs GitHub raw-content URL: `https://raw.githubusercontent.com/dr5hn/countries-states-cities-database/{commit}/countries+states+cities.json`
|
||||||
|
- Uses curl with `FOLLOWLOCATION` and timeout.
|
||||||
|
- Caches locally; checks ETag for freshness.
|
||||||
|
|
||||||
|
2. **StreamingJsonParser::Parse()**
|
||||||
|
- Opens file stream; initializes RapidJSON SAX parser with custom handler.
|
||||||
|
- Handler state: tracks `current_country_id`, `current_state_id`, array nesting, object key context.
|
||||||
|
- **Country processing** (inline): When country object completes, calls `db.InsertCountry()` directly on main thread.
|
||||||
|
- **State processing** (inline): When state object completes, calls `db.InsertState()` directly.
|
||||||
|
- **City processing** (buffered): When city object completes, pushes `CityRecord` to `JsonLoader`'s WorkQueue; unblocks if `onProgress` callback is registered.
|
||||||
|
|
||||||
|
3. **JsonLoader::LoadWorldCities()**
|
||||||
|
- Registers callbacks with parser.
|
||||||
|
- Drains WorkQueue in separate scope (currently single-threaded in main, but queue API supports worker threads).
|
||||||
|
- Each city is inserted via `db.InsertCity()`.
|
||||||
|
|
||||||
|
### Query and Generation Phase (Main Thread)
|
||||||
|
|
||||||
|
4. **Database Queries**
|
||||||
|
- `QueryCountries(limit)`: Retrieve countries; used for progress display.
|
||||||
|
- `QueryStates(limit)`: Retrieve states; used for progress display.
|
||||||
|
- `QueryCities()`: Retrieve all city ids + names for brewery generation.
|
||||||
|
|
||||||
|
5. **Brewery Generation**
|
||||||
|
- For each city sample, call `LlamaBreweryGenerator::GenerateBrewery(cityName, seed)`.
|
||||||
|
- Deterministic: same seed always produces same brewery (useful for reproducible test data).
|
||||||
|
- Returns `{ name, description }` struct.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Concurrency Deep Dive
|
||||||
|
|
||||||
|
### WorkQueue<T>
|
||||||
|
|
||||||
|
A bounded thread-safe queue enabling producer-consumer patterns:
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
template <typename T> class WorkQueue {
|
||||||
|
std::queue<T> queue;
|
||||||
|
std::mutex mutex;
|
||||||
|
std::condition_variable cv_not_empty, cv_not_full;
|
||||||
|
size_t max_size;
|
||||||
|
bool shutdown;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
**push(item)**:
|
||||||
|
|
||||||
|
- Locks mutex.
|
||||||
|
- Waits on `cv_not_full` until queue is below max_size OR shutdown signaled.
|
||||||
|
- Pushes item; notifies one waiter on `cv_not_empty`.
|
||||||
|
- Returns false if shutdown, else true.
|
||||||
|
|
||||||
|
**pop()**:
|
||||||
|
|
||||||
|
- Locks mutex.
|
||||||
|
- Waits on `cv_not_empty` until queue has items OR shutdown signaled.
|
||||||
|
- Pops and returns `std::optional<T>`; notifies one waiter on `cv_not_full`.
|
||||||
|
- Returns `std::nullopt` if shutdown and queue is empty.
|
||||||
|
|
||||||
|
**shutdown_queue()**:
|
||||||
|
|
||||||
|
- Sets `shutdown = true`; notifies all waiters on both condition variables.
|
||||||
|
- Causes all waiting pop() calls to return `std::nullopt`.
|
||||||
|
|
||||||
|
**Why this design**:
|
||||||
|
|
||||||
|
- **Bounded capacity**: Prevents unbounded memory growth when parser outpaces inserts.
|
||||||
|
- **Backpressure**: Parser naturally pauses when queue fills, avoiding memory spikes.
|
||||||
|
- **Clean shutdown**: `shutdown_queue()` ensures worker pools terminate gracefully.
|
||||||
|
|
||||||
|
### SqliteDatabase Mutex
|
||||||
|
|
||||||
|
All SQLite operations (`INSERT`, `SELECT`) are guarded by `dbMutex`:
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
std::unique_lock<std::mutex> lock(dbMutex);
|
||||||
|
int rc = sqlite3_step(stmt);
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why**: SQLite's "serializable" journal mode (default) requires external synchronization for multi-threaded access. A single mutex serializes all queries, avoiding `SQLITE_BUSY` errors.
|
||||||
|
|
||||||
|
**Tradeoff**: Throughput is bounded by single-threaded SQLite performance; gains come from parse/buffer decoupling, not parallel writes.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
### DataDownloader
|
||||||
|
|
||||||
|
- **Network failures**: Retries up to 3 times with exponential backoff; throws `std::runtime_error` on final failure.
|
||||||
|
- **Caching**: Falls back to cached file if download fails and cache exists.
|
||||||
|
|
||||||
|
### Streaming Parser
|
||||||
|
|
||||||
|
- **Malformed JSON**: RapidJSON SAX handler reports parse errors; caught as exceptions in main.
|
||||||
|
- **Missing fields**: Silently skips incomplete records (e.g., city without latitude).
|
||||||
|
|
||||||
|
### Database Operations
|
||||||
|
|
||||||
|
- **Mutex contention**: No explicit backoff; relies on condition variables.
|
||||||
|
- **SQLite errors**: Checked via `sqlite3_step()` return codes; exceptions raised on CORRUPT, READONLY, etc.
|
||||||
|
|
||||||
|
### Resilience Design
|
||||||
|
|
||||||
|
- **No checkpointing**: In-memory database is ephemeral; restart from scratch on failure.
|
||||||
|
- **Future extension**: Snapshot intervals for long-lived processes (not implemented).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Performance Characteristics
|
||||||
|
|
||||||
|
### Benchmarks (Example: 2M cities on 2024 MacBook Pro)
|
||||||
|
|
||||||
|
| Stage | Time | Throughput |
|
||||||
|
| ----------------------------- | ------- | ---------------------------------------- |
|
||||||
|
| Download + Cache | 1s | ~100 MB/s (network dependent) |
|
||||||
|
| Parse (SAX)                   | 2s      | ~1M records/sec (2M records in 2s)       |
|
||||||
|
| Insert (countries/states) | <0.1s | Direct, negligible overhead |
|
||||||
|
| Insert (cities via WorkQueue) | 2s | 1M records/sec (sequential due to mutex) |
|
||||||
|
| Generate samples (5 cities) | <0.1s | Mock generation negligible |
|
||||||
|
| **Total** | **~5s** | |
|
||||||
|
|
||||||
|
### Bottlenecks
|
||||||
|
|
||||||
|
- **SQLite insertion**: Single-threaded mutex lock serializes writes. Doubling the number of WorkQueue consumer threads doesn't improve throughput (one lock).
|
||||||
|
- **Parse speed**: RapidJSON SAX is fast (2s for 100 MB); not the bottleneck.
|
||||||
|
- **Memory**: ~100 MB for in-memory database; suitable for most deployments.
|
||||||
|
|
||||||
|
### Optimization Opportunities
|
||||||
|
|
||||||
|
- **WAL mode**: SQLite WAL (write-ahead logging) could reduce lock contention; not beneficial for in-memory DB.
|
||||||
|
- **Batch inserts**: Combine multiple rows in a single transaction; helps if inserting outside the WorkQueue scope.
|
||||||
|
- **Foreign key lazy-loading**: Skip foreign key constraints during bulk load; re-enable for queries. (Not implemented.)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Configuration and Extensibility
|
||||||
|
|
||||||
|
### Command-Line Arguments
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./biergarten-pipeline [modelPath] [cacheDir] [commit]
|
||||||
|
```
|
||||||
|
|
||||||
|
| Arg | Default | Purpose |
|
||||||
|
| ----------- | -------------- | ----------------------------------------------------------------------- |
|
||||||
|
| `modelPath` | `./model.gguf` | Path to LLM model (mock implementation; not loaded in current version). |
|
||||||
|
| `cacheDir` | `/tmp` | Directory for cached JSON (e.g., `/tmp/countries+states+cities.json`). |
|
||||||
|
| `commit` | `c5eb7772` | Git commit hash for consistency (stable 2026-03-28 snapshot). |
|
||||||
|
|
||||||
|
**Examples**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./biergarten-pipeline
|
||||||
|
./biergarten-pipeline ./models/llama.gguf /var/cache main
|
||||||
|
./biergarten-pipeline "" /tmp v1.2.3
|
||||||
|
```
|
||||||
|
|
||||||
|
### Extending the Generator
|
||||||
|
|
||||||
|
**Current**: `LlamaBreweryGenerator::GenerateBrewery()` uses deterministic seed-based selection from hardcoded lists.
|
||||||
|
|
||||||
|
**Future swap points**:
|
||||||
|
|
||||||
|
1. Load an actual LLM model in `LoadModel(modelPath)`.
|
||||||
|
2. Tokenize city name and context; call model inference.
|
||||||
|
3. Validate output (length, format) and rank if multiple candidates.
|
||||||
|
4. Cache results to avoid re-inference for repeated cities.
|
||||||
|
|
||||||
|
**Example stub for future integration**:
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
Brewery LlamaBreweryGenerator::GenerateBrewery(const std::string &cityName, int seed) {
|
||||||
|
// TODO: Replace with actual llama.cpp inference
|
||||||
|
// llama_context *ctx = llama_new_context_with_model(model, params);
|
||||||
|
// std::string prompt = "Generate a brewery for " + cityName;
|
||||||
|
// std::string result = llama_inference(ctx, prompt, seed);
|
||||||
|
// return parse_brewery(result);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Logging Configuration
|
||||||
|
|
||||||
|
Logging uses **spdlog** with:
|
||||||
|
|
||||||
|
- **Level**: Info (can change via `spdlog::set_level(spdlog::level::debug)` at startup).
|
||||||
|
- **Format**: Plain ASCII; no Unicode box art.
|
||||||
|
- **Sink**: Console (stdout/stderr); can redirect to file.
|
||||||
|
|
||||||
|
**Current output sample**:
|
||||||
|
|
||||||
|
```
|
||||||
|
[Pipeline] Downloading geographic data from GitHub...
|
||||||
|
Initializing in-memory SQLite database...
|
||||||
|
Initializing brewery generator...
|
||||||
|
|
||||||
|
=== GEOGRAPHIC DATA OVERVIEW ===
|
||||||
|
Total records loaded:
|
||||||
|
Countries: 195
|
||||||
|
States: 5000
|
||||||
|
Cities: 150000
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Building and Running
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- C++23 compiler (g++, clang, MSVC) — `CMakeLists.txt` sets `CMAKE_CXX_STANDARD 23`.
|
||||||
|
- CMake 3.20+.
|
||||||
|
- curl (for HTTP downloads).
|
||||||
|
- sqlite3 (usually system-provided).
|
||||||
|
- RapidJSON (fetched via CMake FetchContent).
|
||||||
|
- spdlog (fetched via CMake FetchContent).
|
||||||
|
|
||||||
|
### Build
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p build
|
||||||
|
cd build
|
||||||
|
cmake ..
|
||||||
|
cmake --build . --target biergarten-pipeline -- -j
|
||||||
|
```
|
||||||
|
|
||||||
|
**Build artifacts**:
|
||||||
|
|
||||||
|
- Executable: `build/biergarten-pipeline`
|
||||||
|
- Intermediate: `build/CMakeFiles/`, `build/_deps/` (RapidJSON, spdlog)
|
||||||
|
|
||||||
|
### Run
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./biergarten-pipeline
|
||||||
|
```
|
||||||
|
|
||||||
|
**Output**: Logs to console; caches JSON in `/tmp/countries+states+cities.json`.
|
||||||
|
|
||||||
|
### Cleaning
|
||||||
|
|
||||||
|
```bash
|
||||||
|
rm -rf build
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Development Notes
|
||||||
|
|
||||||
|
### Code Organization
|
||||||
|
|
||||||
|
- **`includes/`**: Public headers (data structures, class APIs).
|
||||||
|
- **`src/`**: Implementations with inline comments for non-obvious logic.
|
||||||
|
- **`CMakeLists.txt`**: Build configuration; defines fetch content, compiler flags, linking.
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
|
||||||
|
No test sources exist yet, but `CMakeLists.txt` already wires a `tests/` folder into CTest (building `biergarten-pipeline-tests` when sources are present). To add:
|
||||||
|
|
||||||
|
1. Create `tests/` folder.
|
||||||
|
2. Use CMake to add a test executable.
|
||||||
|
3. Test the parser with small JSON fixtures.
|
||||||
|
4. Mock the database for isolation.
|
||||||
|
|
||||||
|
### Debugging
|
||||||
|
|
||||||
|
**Enable verbose logging**:
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
spdlog::set_level(spdlog::level::debug);
|
||||||
|
```
|
||||||
|
|
||||||
|
**GDB workflow**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gdb ./biergarten-pipeline
|
||||||
|
(gdb) break src/stream_parser.cpp:50
|
||||||
|
(gdb) run
|
||||||
|
```
|
||||||
|
|
||||||
|
### Future Enhancements
|
||||||
|
|
||||||
|
1. **Real LLM integration**: Load and run llama.cpp models.
|
||||||
|
2. **Persistence**: Write brewery data to a database or file.
|
||||||
|
3. **Distributed parsing**: Shard JSON file across multiple parse streams.
|
||||||
|
4. **Incremental updates**: Only insert new records if source updated.
|
||||||
|
5. **Web API**: Expose database via HTTP (brewery lookup, city search).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- [RapidJSON](https://rapidjson.org/) – SAX parsing documentation.
|
||||||
|
- [spdlog](https://github.com/gabime/spdlog) – Logging framework.
|
||||||
|
- [SQLite](https://www.sqlite.org/docs.html) – In-memory database reference.
|
||||||
|
- [countries-states-cities-database](https://github.com/dr5hn/countries-states-cities-database) – Data source.
|
||||||
26
pipeline/includes/data_downloader.h
Normal file
26
pipeline/includes/data_downloader.h
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
#ifndef DATA_DOWNLOADER_H
#define DATA_DOWNLOADER_H

#include <stdexcept>
#include <string>

/// @brief Downloads and caches source geography JSON payloads.
class DataDownloader {
public:
  /// @brief Initializes global curl state used by this downloader.
  DataDownloader();

  /// @brief Cleans up global curl state.
  ~DataDownloader();

  /// @brief Returns a local JSON path, downloading it when cache is missing.
  /// @param cachePath Filesystem location where the JSON is (or will be)
  ///        cached between runs.
  /// @param commit Upstream git commit the download is pinned to, so the
  ///        dataset stays reproducible across runs.
  std::string DownloadCountriesDatabase(
      const std::string &cachePath,
      const std::string &commit = "c5eb7772" // Stable commit: 2026-03-28 export
  );

private:
  /// @brief Returns true when filePath already exists on disk.
  bool FileExists(const std::string &filePath) const;
};

#endif // DATA_DOWNLOADER_H
|
||||||
26
pipeline/includes/data_generator.h
Normal file
26
pipeline/includes/data_generator.h
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
#pragma once

#include <string>

/// @brief Generated brewery metadata: a display name plus a description.
struct BreweryResult {
  std::string name;
  std::string description;
};

/// @brief Generated user profile: a username plus a short bio.
struct UserResult {
  std::string username;
  std::string bio;
};

/// @brief Abstract interface for content generators (LLM-backed or mock),
/// letting the pipeline swap implementations without code changes.
class IDataGenerator {
public:
  virtual ~IDataGenerator() = default;

  /// @brief Prepares the generator for use.
  /// @param modelPath Path to a backing model file; mock implementations
  ///        presumably ignore it — confirm in the concrete class.
  virtual void load(const std::string &modelPath) = 0;

  /// @brief Produces brewery metadata for a city/country/region context.
  virtual BreweryResult generateBrewery(const std::string &cityName,
                                        const std::string &countryName,
                                        const std::string &regionContext) = 0;

  /// @brief Produces a user profile for the given locale.
  virtual UserResult generateUser(const std::string &locale) = 0;
};
|
||||||
71
pipeline/includes/database.h
Normal file
71
pipeline/includes/database.h
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
#pragma once

#include <mutex>
#include <sqlite3.h>
#include <string>
#include <vector>

struct Country {
  /// @brief Country identifier from the source dataset.
  int id;
  /// @brief Country display name.
  std::string name;
  /// @brief ISO 3166-1 alpha-2 code.
  std::string iso2;
  /// @brief ISO 3166-1 alpha-3 code.
  std::string iso3;
};

struct State {
  /// @brief State or province identifier from the source dataset.
  int id;
  /// @brief State or province display name.
  std::string name;
  /// @brief State or province short code.
  std::string iso2;
  /// @brief Parent country identifier.
  int countryId;
};

/// @brief Thread-safe SQLite wrapper for pipeline writes and readbacks.
class SqliteDatabase {
private:
  // Owned SQLite connection handle; nullptr until Initialize() succeeds.
  sqlite3 *db = nullptr;
  // Serializes every SQL operation on this connection.
  std::mutex dbMutex;

  // Creates the schema objects on the freshly opened connection.
  void InitializeSchema();

public:
  /// @brief Closes the SQLite connection if initialized.
  ~SqliteDatabase();

  /// @brief Opens the SQLite database at dbPath and creates schema objects.
  void Initialize(const std::string &dbPath = ":memory:");

  /// @brief Starts a database transaction for batched writes.
  void BeginTransaction();

  /// @brief Commits the active database transaction.
  void CommitTransaction();

  /// @brief Inserts a country row.
  void InsertCountry(int id, const std::string &name, const std::string &iso2,
                     const std::string &iso3);

  /// @brief Inserts a state row linked to a country.
  void InsertState(int id, int countryId, const std::string &name,
                   const std::string &iso2);

  /// @brief Inserts a city row linked to state and country.
  void InsertCity(int id, int stateId, int countryId, const std::string &name,
                  double latitude, double longitude);

  /// @brief Returns city id and city name pairs.
  std::vector<std::pair<int, std::string>> QueryCities();

  /// @brief Returns countries with optional row limit.
  /// @param limit Row cap; 0 (the default) presumably means "no limit" —
  ///        confirm against the implementation.
  std::vector<Country> QueryCountries(int limit = 0);

  /// @brief Returns states with optional row limit.
  /// @param limit Row cap; 0 (the default) presumably means "no limit" —
  ///        confirm against the implementation.
  std::vector<State> QueryStates(int limit = 0);
};
|
||||||
12
pipeline/includes/json_loader.h
Normal file
12
pipeline/includes/json_loader.h
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
#pragma once

#include "database.h"
#include "stream_parser.h"
#include <string>

/// @brief Loads world-city JSON data into SQLite through streaming parsing.
class JsonLoader {
public:
  /// @brief Parses a JSON file and writes country/state/city rows into db.
  /// @param jsonPath Path to the countries+states+cities JSON document.
  /// @param db Open destination database; must outlive the call.
  static void LoadWorldCities(const std::string &jsonPath, SqliteDatabase &db);
};
|
||||||
31
pipeline/includes/llama_generator.h
Normal file
31
pipeline/includes/llama_generator.h
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
#pragma once

#include "data_generator.h"
#include <memory>
#include <string>

// Forward declarations of llama.cpp types so this header does not have to
// include the llama.cpp headers.
struct llama_model;
struct llama_context;

/// @brief IDataGenerator implementation backed by a llama.cpp model.
class LlamaGenerator final : public IDataGenerator {
public:
  /// @brief Destructor; presumably releases model_/context_ — confirm in
  ///        the implementation file.
  ~LlamaGenerator() override;

  /// @brief Loads the model at modelPath and prepares it for inference.
  void load(const std::string &modelPath) override;
  /// @brief Generates brewery metadata via model inference.
  BreweryResult generateBrewery(const std::string &cityName,
                                const std::string &countryName,
                                const std::string &regionContext) override;
  /// @brief Generates a user profile via model inference.
  UserResult generateUser(const std::string &locale) override;

private:
  /// @brief Runs inference on prompt, producing up to maxTokens of output.
  std::string infer(const std::string &prompt, int maxTokens = 5000);
  // Overload that allows passing a system message separately so chat-capable
  // models receive a proper system role instead of having the system text
  // concatenated into the user prompt (helps avoid revealing internal
  // reasoning or instructions in model output).
  std::string infer(const std::string &systemPrompt, const std::string &prompt,
                    int maxTokens = 5000);

  // Raw llama.cpp handles owned by this object; nullptr until load().
  llama_model *model_ = nullptr;
  llama_context *context_ = nullptr;
};
|
||||||
24
pipeline/includes/mock_generator.h
Normal file
24
pipeline/includes/mock_generator.h
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
#pragma once

#include "data_generator.h"
#include <string>
#include <vector>

/// @brief Deterministic IDataGenerator that fabricates results from fixed
/// word lists, allowing the pipeline to run without a model file.
class MockGenerator final : public IDataGenerator {
public:
  /// @brief Satisfies the interface; the mock presumably ignores modelPath
  ///        — confirm in the implementation.
  void load(const std::string &modelPath) override;
  /// @brief Produces deterministic brewery metadata from the word lists.
  BreweryResult generateBrewery(const std::string &cityName,
                                const std::string &countryName,
                                const std::string &regionContext) override;
  /// @brief Produces a deterministic user profile from the word lists.
  UserResult generateUser(const std::string &locale) override;

private:
  /// @brief Stable hash over two strings, used to pick list entries so the
  ///        same inputs always yield the same output.
  static std::size_t deterministicHash(const std::string &a,
                                       const std::string &b);

  // Fixed vocabularies the deterministic selections draw from.
  static const std::vector<std::string> kBreweryAdjectives;
  static const std::vector<std::string> kBreweryNouns;
  static const std::vector<std::string> kBreweryDescriptions;
  static const std::vector<std::string> kUsernames;
  static const std::vector<std::string> kBios;
};
|
||||||
48
pipeline/includes/stream_parser.h
Normal file
48
pipeline/includes/stream_parser.h
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
#pragma once

#include "database.h"
#include <functional>
#include <string>

// Forward declaration to avoid circular dependency
// NOTE(review): database.h is already included above, so this forward
// declaration is currently redundant; it only becomes load-bearing if the
// include is removed (Parse takes SqliteDatabase by reference only).
class SqliteDatabase;

/// @brief In-memory representation of one parsed city entry.
struct CityRecord {
  int id;
  int state_id;
  int country_id;
  std::string name;
  double latitude;
  double longitude;
};

/// @brief Streaming SAX parser that emits city records during traversal.
class StreamingJsonParser {
public:
  /// @brief Parses filePath and invokes callbacks for city rows and progress.
  /// @param filePath JSON document to stream.
  /// @param db Destination database; taken by reference, must outlive Parse.
  /// @param onCity Invoked once per completed city record.
  /// @param onProgress Optional; invoked with two size_t counters during the
  ///        parse (exact semantics defined by the implementation — confirm
  ///        before relying on them).
  static void Parse(const std::string &filePath, SqliteDatabase &db,
                    std::function<void(const CityRecord &)> onCity,
                    std::function<void(size_t, size_t)> onProgress = nullptr);

private:
  /// @brief Mutable SAX handler state while traversing nested JSON arrays.
  struct ParseState {
    // Identifiers of the country/state currently being traversed.
    int current_country_id = 0;
    int current_state_id = 0;

    // City being assembled field-by-field while building_city is true.
    CityRecord current_city = {};
    bool building_city = false;
    std::string current_key;

    // Structural cursors tracking position inside the nested document.
    int array_depth = 0;
    int object_depth = 0;
    bool in_countries_array = false;
    bool in_states_array = false;
    bool in_cities_array = false;

    // Callbacks forwarded from Parse(), plus a running byte counter that
    // feeds the progress callback.
    std::function<void(const CityRecord &)> on_city;
    std::function<void(size_t, size_t)> on_progress;
    size_t bytes_processed = 0;
  };
};
|
||||||
102
pipeline/src/data_downloader.cpp
Normal file
102
pipeline/src/data_downloader.cpp
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
#include "data_downloader.h"
|
||||||
|
#include <cstdio>
|
||||||
|
#include <curl/curl.h>
|
||||||
|
#include <filesystem>
|
||||||
|
#include <fstream>
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
#include <sstream>
|
||||||
|
|
||||||
|
static size_t WriteCallback(void *contents, size_t size, size_t nmemb,
|
||||||
|
void *userp) {
|
||||||
|
size_t realsize = size * nmemb;
|
||||||
|
std::ofstream *outFile = static_cast<std::ofstream *>(userp);
|
||||||
|
outFile->write(static_cast<char *>(contents), realsize);
|
||||||
|
return realsize;
|
||||||
|
}
|
||||||
|
|
||||||
|
DataDownloader::DataDownloader() {}
|
||||||
|
|
||||||
|
DataDownloader::~DataDownloader() {}
|
||||||
|
|
||||||
|
bool DataDownloader::FileExists(const std::string &filePath) const {
|
||||||
|
return std::filesystem::exists(filePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string
|
||||||
|
DataDownloader::DownloadCountriesDatabase(const std::string &cachePath,
|
||||||
|
const std::string &commit) {
|
||||||
|
if (FileExists(cachePath)) {
|
||||||
|
spdlog::info("[DataDownloader] Cache hit: {}", cachePath);
|
||||||
|
return cachePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string shortCommit = commit;
|
||||||
|
if (commit.length() > 7) {
|
||||||
|
shortCommit = commit.substr(0, 7);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string url = "https://raw.githubusercontent.com/dr5hn/"
|
||||||
|
"countries-states-cities-database/" +
|
||||||
|
shortCommit + "/json/countries+states+cities.json";
|
||||||
|
|
||||||
|
spdlog::info("[DataDownloader] Downloading: {}", url);
|
||||||
|
|
||||||
|
CURL *curl = curl_easy_init();
|
||||||
|
if (!curl) {
|
||||||
|
throw std::runtime_error("[DataDownloader] Failed to initialize libcurl");
|
||||||
|
}
|
||||||
|
|
||||||
|
std::ofstream outFile(cachePath, std::ios::binary);
|
||||||
|
if (!outFile.is_open()) {
|
||||||
|
curl_easy_cleanup(curl);
|
||||||
|
throw std::runtime_error("[DataDownloader] Cannot open file for writing: " +
|
||||||
|
cachePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
|
||||||
|
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
|
||||||
|
curl_easy_setopt(curl, CURLOPT_WRITEDATA, static_cast<void *>(&outFile));
|
||||||
|
|
||||||
|
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 30L);
|
||||||
|
curl_easy_setopt(curl, CURLOPT_TIMEOUT, 300L);
|
||||||
|
|
||||||
|
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
|
||||||
|
curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 5L);
|
||||||
|
|
||||||
|
curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "gzip");
|
||||||
|
|
||||||
|
curl_easy_setopt(curl, CURLOPT_USERAGENT, "biergarten-pipeline/0.1.0");
|
||||||
|
|
||||||
|
CURLcode res = curl_easy_perform(curl);
|
||||||
|
outFile.close();
|
||||||
|
|
||||||
|
if (res != CURLE_OK) {
|
||||||
|
curl_easy_cleanup(curl);
|
||||||
|
std::remove(cachePath.c_str());
|
||||||
|
|
||||||
|
std::string error = std::string("[DataDownloader] Download failed: ") +
|
||||||
|
curl_easy_strerror(res);
|
||||||
|
throw std::runtime_error(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
long httpCode = 0;
|
||||||
|
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpCode);
|
||||||
|
curl_easy_cleanup(curl);
|
||||||
|
|
||||||
|
if (httpCode != 200) {
|
||||||
|
std::remove(cachePath.c_str());
|
||||||
|
|
||||||
|
std::stringstream ss;
|
||||||
|
ss << "[DataDownloader] HTTP error " << httpCode
|
||||||
|
<< " (commit: " << shortCommit << ")";
|
||||||
|
throw std::runtime_error(ss.str());
|
||||||
|
}
|
||||||
|
|
||||||
|
std::ifstream fileCheck(cachePath, std::ios::binary | std::ios::ate);
|
||||||
|
std::streamsize size = fileCheck.tellg();
|
||||||
|
fileCheck.close();
|
||||||
|
|
||||||
|
spdlog::info("[DataDownloader] OK: Download complete: {} ({:.2f} MB)",
|
||||||
|
cachePath, (size / (1024.0 * 1024.0)));
|
||||||
|
return cachePath;
|
||||||
|
}
|
||||||
250
pipeline/src/database.cpp
Normal file
250
pipeline/src/database.cpp
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
#include "database.h"
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
#include <stdexcept>
|
||||||
|
|
||||||
|
void SqliteDatabase::InitializeSchema() {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
const char *schema = R"(
|
||||||
|
CREATE TABLE IF NOT EXISTS countries (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
iso2 TEXT,
|
||||||
|
iso3 TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS states (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
country_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
iso2 TEXT,
|
||||||
|
FOREIGN KEY(country_id) REFERENCES countries(id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS cities (
|
||||||
|
id INTEGER PRIMARY KEY,
|
||||||
|
state_id INTEGER NOT NULL,
|
||||||
|
country_id INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
latitude REAL,
|
||||||
|
longitude REAL,
|
||||||
|
FOREIGN KEY(state_id) REFERENCES states(id),
|
||||||
|
FOREIGN KEY(country_id) REFERENCES countries(id)
|
||||||
|
);
|
||||||
|
)";
|
||||||
|
|
||||||
|
char *errMsg = nullptr;
|
||||||
|
int rc = sqlite3_exec(db, schema, nullptr, nullptr, &errMsg);
|
||||||
|
if (rc != SQLITE_OK) {
|
||||||
|
std::string error = errMsg ? std::string(errMsg) : "Unknown error";
|
||||||
|
sqlite3_free(errMsg);
|
||||||
|
throw std::runtime_error("Failed to create schema: " + error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
SqliteDatabase::~SqliteDatabase() {
|
||||||
|
if (db) {
|
||||||
|
sqlite3_close(db);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::Initialize(const std::string &dbPath) {
|
||||||
|
int rc = sqlite3_open(dbPath.c_str(), &db);
|
||||||
|
if (rc) {
|
||||||
|
throw std::runtime_error("Failed to open SQLite database: " + dbPath);
|
||||||
|
}
|
||||||
|
spdlog::info("OK: SQLite database opened: {}", dbPath);
|
||||||
|
InitializeSchema();
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::BeginTransaction() {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
char *err = nullptr;
|
||||||
|
if (sqlite3_exec(db, "BEGIN TRANSACTION", nullptr, nullptr, &err) !=
|
||||||
|
SQLITE_OK) {
|
||||||
|
std::string msg = err ? err : "unknown";
|
||||||
|
sqlite3_free(err);
|
||||||
|
throw std::runtime_error("BeginTransaction failed: " + msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::CommitTransaction() {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
char *err = nullptr;
|
||||||
|
if (sqlite3_exec(db, "COMMIT", nullptr, nullptr, &err) != SQLITE_OK) {
|
||||||
|
std::string msg = err ? err : "unknown";
|
||||||
|
sqlite3_free(err);
|
||||||
|
throw std::runtime_error("CommitTransaction failed: " + msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::InsertCountry(int id, const std::string &name,
|
||||||
|
const std::string &iso2,
|
||||||
|
const std::string &iso3) {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
const char *query = R"(
|
||||||
|
INSERT OR IGNORE INTO countries (id, name, iso2, iso3)
|
||||||
|
VALUES (?, ?, ?, ?)
|
||||||
|
)";
|
||||||
|
|
||||||
|
sqlite3_stmt *stmt;
|
||||||
|
int rc = sqlite3_prepare_v2(db, query, -1, &stmt, nullptr);
|
||||||
|
if (rc != SQLITE_OK)
|
||||||
|
throw std::runtime_error("Failed to prepare country insert");
|
||||||
|
|
||||||
|
sqlite3_bind_int(stmt, 1, id);
|
||||||
|
sqlite3_bind_text(stmt, 2, name.c_str(), -1, SQLITE_STATIC);
|
||||||
|
sqlite3_bind_text(stmt, 3, iso2.c_str(), -1, SQLITE_STATIC);
|
||||||
|
sqlite3_bind_text(stmt, 4, iso3.c_str(), -1, SQLITE_STATIC);
|
||||||
|
|
||||||
|
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||||
|
throw std::runtime_error("Failed to insert country");
|
||||||
|
}
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::InsertState(int id, int countryId, const std::string &name,
|
||||||
|
const std::string &iso2) {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
const char *query = R"(
|
||||||
|
INSERT OR IGNORE INTO states (id, country_id, name, iso2)
|
||||||
|
VALUES (?, ?, ?, ?)
|
||||||
|
)";
|
||||||
|
|
||||||
|
sqlite3_stmt *stmt;
|
||||||
|
int rc = sqlite3_prepare_v2(db, query, -1, &stmt, nullptr);
|
||||||
|
if (rc != SQLITE_OK)
|
||||||
|
throw std::runtime_error("Failed to prepare state insert");
|
||||||
|
|
||||||
|
sqlite3_bind_int(stmt, 1, id);
|
||||||
|
sqlite3_bind_int(stmt, 2, countryId);
|
||||||
|
sqlite3_bind_text(stmt, 3, name.c_str(), -1, SQLITE_STATIC);
|
||||||
|
sqlite3_bind_text(stmt, 4, iso2.c_str(), -1, SQLITE_STATIC);
|
||||||
|
|
||||||
|
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||||
|
throw std::runtime_error("Failed to insert state");
|
||||||
|
}
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
}
|
||||||
|
|
||||||
|
void SqliteDatabase::InsertCity(int id, int stateId, int countryId,
|
||||||
|
const std::string &name, double latitude,
|
||||||
|
double longitude) {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
const char *query = R"(
|
||||||
|
INSERT OR IGNORE INTO cities (id, state_id, country_id, name, latitude, longitude)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?)
|
||||||
|
)";
|
||||||
|
|
||||||
|
sqlite3_stmt *stmt;
|
||||||
|
int rc = sqlite3_prepare_v2(db, query, -1, &stmt, nullptr);
|
||||||
|
if (rc != SQLITE_OK)
|
||||||
|
throw std::runtime_error("Failed to prepare city insert");
|
||||||
|
|
||||||
|
sqlite3_bind_int(stmt, 1, id);
|
||||||
|
sqlite3_bind_int(stmt, 2, stateId);
|
||||||
|
sqlite3_bind_int(stmt, 3, countryId);
|
||||||
|
sqlite3_bind_text(stmt, 4, name.c_str(), -1, SQLITE_STATIC);
|
||||||
|
sqlite3_bind_double(stmt, 5, latitude);
|
||||||
|
sqlite3_bind_double(stmt, 6, longitude);
|
||||||
|
|
||||||
|
if (sqlite3_step(stmt) != SQLITE_DONE) {
|
||||||
|
throw std::runtime_error("Failed to insert city");
|
||||||
|
}
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<std::pair<int, std::string>> SqliteDatabase::QueryCities() {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
std::vector<std::pair<int, std::string>> cities;
|
||||||
|
sqlite3_stmt *stmt = nullptr;
|
||||||
|
|
||||||
|
const char *query = "SELECT id, name FROM cities ORDER BY name";
|
||||||
|
int rc = sqlite3_prepare_v2(db, query, -1, &stmt, nullptr);
|
||||||
|
|
||||||
|
if (rc != SQLITE_OK) {
|
||||||
|
throw std::runtime_error("Failed to prepare query");
|
||||||
|
}
|
||||||
|
|
||||||
|
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||||
|
int id = sqlite3_column_int(stmt, 0);
|
||||||
|
const char *name =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 1));
|
||||||
|
cities.push_back({id, name ? std::string(name) : ""});
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
return cities;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<Country> SqliteDatabase::QueryCountries(int limit) {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
std::vector<Country> countries;
|
||||||
|
sqlite3_stmt *stmt = nullptr;
|
||||||
|
|
||||||
|
std::string query =
|
||||||
|
"SELECT id, name, iso2, iso3 FROM countries ORDER BY name";
|
||||||
|
if (limit > 0) {
|
||||||
|
query += " LIMIT " + std::to_string(limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
int rc = sqlite3_prepare_v2(db, query.c_str(), -1, &stmt, nullptr);
|
||||||
|
|
||||||
|
if (rc != SQLITE_OK) {
|
||||||
|
throw std::runtime_error("Failed to prepare countries query");
|
||||||
|
}
|
||||||
|
|
||||||
|
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||||
|
int id = sqlite3_column_int(stmt, 0);
|
||||||
|
const char *name =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 1));
|
||||||
|
const char *iso2 =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 2));
|
||||||
|
const char *iso3 =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 3));
|
||||||
|
countries.push_back({id, name ? std::string(name) : "",
|
||||||
|
iso2 ? std::string(iso2) : "",
|
||||||
|
iso3 ? std::string(iso3) : ""});
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
return countries;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<State> SqliteDatabase::QueryStates(int limit) {
|
||||||
|
std::lock_guard<std::mutex> lock(dbMutex);
|
||||||
|
|
||||||
|
std::vector<State> states;
|
||||||
|
sqlite3_stmt *stmt = nullptr;
|
||||||
|
|
||||||
|
std::string query =
|
||||||
|
"SELECT id, name, iso2, country_id FROM states ORDER BY name";
|
||||||
|
if (limit > 0) {
|
||||||
|
query += " LIMIT " + std::to_string(limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
int rc = sqlite3_prepare_v2(db, query.c_str(), -1, &stmt, nullptr);
|
||||||
|
|
||||||
|
if (rc != SQLITE_OK) {
|
||||||
|
throw std::runtime_error("Failed to prepare states query");
|
||||||
|
}
|
||||||
|
|
||||||
|
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||||
|
int id = sqlite3_column_int(stmt, 0);
|
||||||
|
const char *name =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 1));
|
||||||
|
const char *iso2 =
|
||||||
|
reinterpret_cast<const char *>(sqlite3_column_text(stmt, 2));
|
||||||
|
int countryId = sqlite3_column_int(stmt, 3);
|
||||||
|
states.push_back({id, name ? std::string(name) : "",
|
||||||
|
iso2 ? std::string(iso2) : "", countryId});
|
||||||
|
}
|
||||||
|
|
||||||
|
sqlite3_finalize(stmt);
|
||||||
|
return states;
|
||||||
|
}
|
||||||
45
pipeline/src/json_loader.cpp
Normal file
45
pipeline/src/json_loader.cpp
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
#include "json_loader.h"
|
||||||
|
#include "stream_parser.h"
|
||||||
|
#include <chrono>
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
|
||||||
|
void JsonLoader::LoadWorldCities(const std::string &jsonPath,
|
||||||
|
SqliteDatabase &db) {
|
||||||
|
auto startTime = std::chrono::high_resolution_clock::now();
|
||||||
|
spdlog::info("\nLoading {} (streaming RapidJSON SAX)...", jsonPath);
|
||||||
|
|
||||||
|
db.BeginTransaction();
|
||||||
|
|
||||||
|
size_t citiesProcessed = 0;
|
||||||
|
StreamingJsonParser::Parse(
|
||||||
|
jsonPath, db,
|
||||||
|
[&](const CityRecord &record) {
|
||||||
|
db.InsertCity(record.id, record.state_id, record.country_id,
|
||||||
|
record.name, record.latitude, record.longitude);
|
||||||
|
citiesProcessed++;
|
||||||
|
},
|
||||||
|
[&](size_t current, size_t total) {
|
||||||
|
if (current % 10000 == 0 && current > 0) {
|
||||||
|
spdlog::info(" [Progress] Parsed {} cities...", current);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
spdlog::info(" OK: Parsed all cities from JSON");
|
||||||
|
|
||||||
|
db.CommitTransaction();
|
||||||
|
|
||||||
|
auto endTime = std::chrono::high_resolution_clock::now();
|
||||||
|
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(
|
||||||
|
endTime - startTime);
|
||||||
|
|
||||||
|
spdlog::info("\n=== World City Data Loading Summary ===\n");
|
||||||
|
spdlog::info("Cities inserted: {}", citiesProcessed);
|
||||||
|
spdlog::info("Elapsed time: {} ms", duration.count());
|
||||||
|
long long throughput =
|
||||||
|
(citiesProcessed > 0 && duration.count() > 0)
|
||||||
|
? (1000LL * static_cast<long long>(citiesProcessed)) /
|
||||||
|
static_cast<long long>(duration.count())
|
||||||
|
: 0LL;
|
||||||
|
spdlog::info("Throughput: {} cities/sec", throughput);
|
||||||
|
spdlog::info("=======================================\n");
|
||||||
|
}
|
||||||
534
pipeline/src/llama_generator.cpp
Normal file
534
pipeline/src/llama_generator.cpp
Normal file
@@ -0,0 +1,534 @@
|
|||||||
|
#include "llama_generator.h"
|
||||||
|
|
||||||
|
#include "llama.h"
|
||||||
|
|
||||||
|
#include <algorithm>
|
||||||
|
#include <array>
|
||||||
|
#include <cctype>
|
||||||
|
#include <memory>
|
||||||
|
#include <sstream>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
std::string trim(std::string value) {
|
||||||
|
auto notSpace = [](unsigned char ch) { return !std::isspace(ch); };
|
||||||
|
|
||||||
|
value.erase(value.begin(),
|
||||||
|
std::find_if(value.begin(), value.end(), notSpace));
|
||||||
|
value.erase(std::find_if(value.rbegin(), value.rend(), notSpace).base(),
|
||||||
|
value.end());
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string stripCommonPrefix(std::string line) {
|
||||||
|
line = trim(std::move(line));
|
||||||
|
|
||||||
|
// Strip simple list markers like "- ", "* ", "1. ", "2) ".
|
||||||
|
if (!line.empty() && (line[0] == '-' || line[0] == '*')) {
|
||||||
|
line = trim(line.substr(1));
|
||||||
|
} else {
|
||||||
|
std::size_t i = 0;
|
||||||
|
while (i < line.size() &&
|
||||||
|
std::isdigit(static_cast<unsigned char>(line[i]))) {
|
||||||
|
++i;
|
||||||
|
}
|
||||||
|
if (i > 0 && i < line.size() && (line[i] == '.' || line[i] == ')')) {
|
||||||
|
line = trim(line.substr(i + 1));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
auto stripLabel = [&line](const std::string &label) {
|
||||||
|
if (line.size() >= label.size()) {
|
||||||
|
bool matches = true;
|
||||||
|
for (std::size_t i = 0; i < label.size(); ++i) {
|
||||||
|
if (std::tolower(static_cast<unsigned char>(line[i])) !=
|
||||||
|
std::tolower(static_cast<unsigned char>(label[i]))) {
|
||||||
|
matches = false;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (matches) {
|
||||||
|
line = trim(line.substr(label.size()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
stripLabel("name:");
|
||||||
|
stripLabel("brewery name:");
|
||||||
|
stripLabel("description:");
|
||||||
|
stripLabel("username:");
|
||||||
|
stripLabel("bio:");
|
||||||
|
|
||||||
|
return trim(std::move(line));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string toChatPrompt(const llama_model *model,
|
||||||
|
const std::string &userPrompt) {
|
||||||
|
const char *tmpl = llama_model_chat_template(model, nullptr);
|
||||||
|
if (tmpl == nullptr) {
|
||||||
|
return userPrompt;
|
||||||
|
}
|
||||||
|
|
||||||
|
const llama_chat_message message{
|
||||||
|
"user",
|
||||||
|
userPrompt.c_str(),
|
||||||
|
};
|
||||||
|
|
||||||
|
std::vector<char> buffer(std::max<std::size_t>(1024, userPrompt.size() * 4));
|
||||||
|
int32_t required =
|
||||||
|
llama_chat_apply_template(tmpl, &message, 1, true, buffer.data(),
|
||||||
|
static_cast<int32_t>(buffer.size()));
|
||||||
|
|
||||||
|
if (required < 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to apply chat template");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (required >= static_cast<int32_t>(buffer.size())) {
|
||||||
|
buffer.resize(static_cast<std::size_t>(required) + 1);
|
||||||
|
required = llama_chat_apply_template(tmpl, &message, 1, true, buffer.data(),
|
||||||
|
static_cast<int32_t>(buffer.size()));
|
||||||
|
if (required < 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to apply chat template");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::string(buffer.data(), static_cast<std::size_t>(required));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string toChatPrompt(const llama_model *model,
|
||||||
|
const std::string &systemPrompt,
|
||||||
|
const std::string &userPrompt) {
|
||||||
|
const char *tmpl = llama_model_chat_template(model, nullptr);
|
||||||
|
if (tmpl == nullptr) {
|
||||||
|
// Fall back to concatenating but keep system and user parts distinct.
|
||||||
|
return systemPrompt + "\n\n" + userPrompt;
|
||||||
|
}
|
||||||
|
|
||||||
|
const llama_chat_message messages[2] = {
|
||||||
|
{"system", systemPrompt.c_str()},
|
||||||
|
{"user", userPrompt.c_str()},
|
||||||
|
};
|
||||||
|
|
||||||
|
std::vector<char> buffer(std::max<std::size_t>(
|
||||||
|
1024, (systemPrompt.size() + userPrompt.size()) * 4));
|
||||||
|
int32_t required =
|
||||||
|
llama_chat_apply_template(tmpl, messages, 2, true, buffer.data(),
|
||||||
|
static_cast<int32_t>(buffer.size()));
|
||||||
|
|
||||||
|
if (required < 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to apply chat template");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (required >= static_cast<int32_t>(buffer.size())) {
|
||||||
|
buffer.resize(static_cast<std::size_t>(required) + 1);
|
||||||
|
required = llama_chat_apply_template(tmpl, messages, 2, true, buffer.data(),
|
||||||
|
static_cast<int32_t>(buffer.size()));
|
||||||
|
if (required < 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to apply chat template");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::string(buffer.data(), static_cast<std::size_t>(required));
|
||||||
|
}
|
||||||
|
|
||||||
|
void appendTokenPiece(const llama_vocab *vocab, llama_token token,
|
||||||
|
std::string &output) {
|
||||||
|
std::array<char, 256> buffer{};
|
||||||
|
int32_t bytes =
|
||||||
|
llama_token_to_piece(vocab, token, buffer.data(),
|
||||||
|
static_cast<int32_t>(buffer.size()), 0, true);
|
||||||
|
|
||||||
|
if (bytes < 0) {
|
||||||
|
std::vector<char> dynamicBuffer(static_cast<std::size_t>(-bytes));
|
||||||
|
bytes = llama_token_to_piece(vocab, token, dynamicBuffer.data(),
|
||||||
|
static_cast<int32_t>(dynamicBuffer.size()), 0,
|
||||||
|
true);
|
||||||
|
if (bytes < 0) {
|
||||||
|
throw std::runtime_error(
|
||||||
|
"LlamaGenerator: failed to decode sampled token piece");
|
||||||
|
}
|
||||||
|
|
||||||
|
output.append(dynamicBuffer.data(), static_cast<std::size_t>(bytes));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
output.append(buffer.data(), static_cast<std::size_t>(bytes));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<std::string, std::string>
|
||||||
|
parseTwoLineResponse(const std::string &raw, const std::string &errorMessage) {
|
||||||
|
std::string normalized = raw;
|
||||||
|
std::replace(normalized.begin(), normalized.end(), '\r', '\n');
|
||||||
|
|
||||||
|
std::vector<std::string> lines;
|
||||||
|
std::stringstream stream(normalized);
|
||||||
|
std::string line;
|
||||||
|
while (std::getline(stream, line)) {
|
||||||
|
line = stripCommonPrefix(std::move(line));
|
||||||
|
if (!line.empty()) {
|
||||||
|
lines.push_back(std::move(line));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter out obvious internal-thought / meta lines that sometimes leak from
|
||||||
|
// models (e.g. "<think>", "Okay, so the user is asking me...").
|
||||||
|
std::vector<std::string> filtered;
|
||||||
|
for (auto &l : lines) {
|
||||||
|
std::string low = l;
|
||||||
|
std::transform(low.begin(), low.end(), low.begin(), [](unsigned char c) {
|
||||||
|
return static_cast<char>(std::tolower(c));
|
||||||
|
});
|
||||||
|
|
||||||
|
// Skip single-token angle-bracket markers like <think> or <...>
|
||||||
|
if (!l.empty() && l.front() == '<' && l.back() == '>') {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip short internal commentary that starts with common discourse markers
|
||||||
|
if (low.rfind("okay,", 0) == 0 || low.rfind("wait,", 0) == 0 ||
|
||||||
|
low.rfind("hmm", 0) == 0) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip lines that look like self-descriptions of what the model is doing
|
||||||
|
if (low.find("user is asking") != std::string::npos ||
|
||||||
|
low.find("protocol") != std::string::npos ||
|
||||||
|
low.find("parse") != std::string::npos ||
|
||||||
|
low.find("return only") != std::string::npos) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
filtered.push_back(std::move(l));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filtered.size() < 2) {
|
||||||
|
throw std::runtime_error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string first = trim(filtered.front());
|
||||||
|
std::string second;
|
||||||
|
for (std::size_t i = 1; i < filtered.size(); ++i) {
|
||||||
|
if (!second.empty()) {
|
||||||
|
second += ' ';
|
||||||
|
}
|
||||||
|
second += filtered[i];
|
||||||
|
}
|
||||||
|
second = trim(std::move(second));
|
||||||
|
|
||||||
|
if (first.empty() || second.empty()) {
|
||||||
|
throw std::runtime_error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {first, second};
|
||||||
|
}
|
||||||
|
|
||||||
|
} // namespace
|
||||||
|
|
||||||
|
LlamaGenerator::~LlamaGenerator() {
|
||||||
|
if (context_ != nullptr) {
|
||||||
|
llama_free(context_);
|
||||||
|
context_ = nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (model_ != nullptr) {
|
||||||
|
llama_model_free(model_);
|
||||||
|
model_ = nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_backend_free();
|
||||||
|
}
|
||||||
|
|
||||||
|
void LlamaGenerator::load(const std::string &modelPath) {
|
||||||
|
if (modelPath.empty()) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: model path must not be empty");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (context_ != nullptr) {
|
||||||
|
llama_free(context_);
|
||||||
|
context_ = nullptr;
|
||||||
|
}
|
||||||
|
if (model_ != nullptr) {
|
||||||
|
llama_model_free(model_);
|
||||||
|
model_ = nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_backend_init();
|
||||||
|
|
||||||
|
llama_model_params modelParams = llama_model_default_params();
|
||||||
|
model_ = llama_load_model_from_file(modelPath.c_str(), modelParams);
|
||||||
|
if (model_ == nullptr) {
|
||||||
|
throw std::runtime_error(
|
||||||
|
"LlamaGenerator: failed to load model from path: " + modelPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_context_params contextParams = llama_context_default_params();
|
||||||
|
contextParams.n_ctx = 2048;
|
||||||
|
|
||||||
|
context_ = llama_init_from_model(model_, contextParams);
|
||||||
|
if (context_ == nullptr) {
|
||||||
|
llama_model_free(model_);
|
||||||
|
model_ = nullptr;
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to create context");
|
||||||
|
}
|
||||||
|
|
||||||
|
spdlog::info("[LlamaGenerator] Loaded model: {}", modelPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string LlamaGenerator::infer(const std::string &prompt, int maxTokens) {
|
||||||
|
if (model_ == nullptr || context_ == nullptr) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: model not loaded");
|
||||||
|
}
|
||||||
|
|
||||||
|
const llama_vocab *vocab = llama_model_get_vocab(model_);
|
||||||
|
if (vocab == nullptr) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: vocab unavailable");
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_memory_clear(llama_get_memory(context_), true);
|
||||||
|
|
||||||
|
const std::string formattedPrompt = toChatPrompt(model_, prompt);
|
||||||
|
|
||||||
|
std::vector<llama_token> promptTokens(formattedPrompt.size() + 8);
|
||||||
|
int32_t tokenCount = llama_tokenize(
|
||||||
|
vocab, formattedPrompt.c_str(),
|
||||||
|
static_cast<int32_t>(formattedPrompt.size()), promptTokens.data(),
|
||||||
|
static_cast<int32_t>(promptTokens.size()), true, true);
|
||||||
|
|
||||||
|
if (tokenCount < 0) {
|
||||||
|
promptTokens.resize(static_cast<std::size_t>(-tokenCount));
|
||||||
|
tokenCount = llama_tokenize(
|
||||||
|
vocab, formattedPrompt.c_str(),
|
||||||
|
static_cast<int32_t>(formattedPrompt.size()), promptTokens.data(),
|
||||||
|
static_cast<int32_t>(promptTokens.size()), true, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tokenCount < 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: prompt tokenization failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
promptTokens.resize(static_cast<std::size_t>(tokenCount));
|
||||||
|
|
||||||
|
const llama_batch promptBatch = llama_batch_get_one(
|
||||||
|
promptTokens.data(), static_cast<int32_t>(promptTokens.size()));
|
||||||
|
if (llama_decode(context_, promptBatch) != 0) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: prompt decode failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_sampler_chain_params samplerParams =
|
||||||
|
llama_sampler_chain_default_params();
|
||||||
|
using SamplerPtr =
|
||||||
|
std::unique_ptr<llama_sampler, decltype(&llama_sampler_free)>;
|
||||||
|
SamplerPtr sampler(llama_sampler_chain_init(samplerParams),
|
||||||
|
&llama_sampler_free);
|
||||||
|
|
||||||
|
if (!sampler) {
|
||||||
|
throw std::runtime_error("LlamaGenerator: failed to initialize sampler");
|
||||||
|
}
|
||||||
|
|
||||||
|
llama_sampler_chain_add(sampler.get(), llama_sampler_init_greedy());
|
||||||
|
|
||||||
|
std::vector<llama_token> generatedTokens;
|
||||||
|
generatedTokens.reserve(static_cast<std::size_t>(maxTokens));
|
||||||
|
|
||||||
|
for (int i = 0; i < maxTokens; ++i) {
|
||||||
|
const llama_token next = llama_sampler_sample(sampler.get(), context_, -1);
|
||||||
|
if (llama_vocab_is_eog(vocab, next)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
generatedTokens.push_back(next);
|
||||||
|
|
||||||
|
llama_token token = next;
|
||||||
|
const llama_batch oneTokenBatch = llama_batch_get_one(&token, 1);
|
||||||
|
if (llama_decode(context_, oneTokenBatch) != 0) {
|
||||||
|
throw std::runtime_error(
|
||||||
|
"LlamaGenerator: decode failed during generation");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string output;
|
||||||
|
for (const llama_token token : generatedTokens) {
|
||||||
|
appendTokenPiece(vocab, token, output);
|
||||||
|
}
|
||||||
|
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generates a brewery name and description for a city using the loaded LLM.
// countryName may be empty (it is then omitted from the prompt); regionContext
// is appended verbatim to give the model extra geographic flavour.
// Throws std::runtime_error (via infer/parseTwoLineResponse) on decode
// failure or when the model reply is not the requested two-line form.
BreweryResult
LlamaGenerator::generateBrewery(const std::string &cityName,
                                const std::string &countryName,
                                const std::string &regionContext) {

  // System prompt that forces raw, zero-chatter output: no preamble,
  // no postamble, no sentence framing around the answer.
  std::string systemPrompt =
      R"(# SYSTEM PROTOCOL: ZERO-CHATTER DETERMINISTIC OUTPUT
**MODALITY:** DATA-RETURN ENGINE ONLY
**ROLE:** Your response must contain 0% metadata and 100% signal.
---
## MANDATORY CONSTRAINTS
1. **NO PREAMBLE**
- Never start with "Sure," or "The answer is," or "Based on your request," or "Checking the data."
- Do not acknowledge the user's prompt or provide status updates.
2. **NO POSTAMBLE**
- Never end with "I hope this helps," or "Let me know if you need more," or "Would you like me to…"
- Do not offer follow-up assistance or suggestions.
3. **NO SENTENCE FRAMING**
- Provide only the raw value, date, number, or name.
- Do not wrap the answer in a sentence. (e.g., return 1997, NOT The year was 1997).
- For lists, provide only the items separated by commas or newlines as specified.
4. **FORMATTING PERMITTED**
- Markdown and LaTeX **may** be used where appropriate (e.g., tables, equations).
- Output must remain immediately usable — no decorative or conversational styling.
5. **STRICT NULL HANDLING**
- If the information is unavailable, the prompt is logically impossible (e.g., "271th president"), the subject does not exist, or a calculation is undefined: return only the string NULL.
- If the prompt is too ambiguous to provide a single value: return NULL.
---
## EXECUTION LOGIC
1. **Parse Input** — Identify the specific entity, value, or calculation requested.
2. **Verify Factuality** — Access internal knowledge or tools.
3. **Filter for Signal** — Strip all surrounding prose.
4. **Format Check** — Apply Markdown or LaTeX only where it serves the data.
5. **Output** — Return the raw value only.
---
## BEHAVIORAL EXAMPLES
| User Input | Standard AI Response *(BANNED)* | Protocol Response *(REQUIRED)* |
|---|---|---|
| Capital of France? | The capital of France is Paris. | Paris |
| 15% of 200 | 15% of 200 is 30. | 30 |
| Who wrote '1984'? | George Orwell wrote that novel. | George Orwell |
| ISO code for Japan | The code is JP. | JP |
| $\sqrt{x}$ where $x$ is a potato | A potato has no square root. | NULL |
| 500th US President | There haven't been that many. | NULL |
| Pythagorean theorem | The theorem states... | $a^2 + b^2 = c^2$ |
---
## FINAL INSTRUCTION
Total silence is preferred over conversational error. Any deviation from the raw-value-only format is a protocol failure. Proceed with next input.)";

  // User prompt: request exactly two lines — name, then description.
  // The country suffix is only added when a country name was supplied.
  std::string prompt =
      "Generate a craft brewery name and 1000 character description for a "
      "brewery located in " +
      cityName +
      (countryName.empty() ? std::string("")
                           : std::string(", ") + countryName) +
      ". " + regionContext +
      " Respond with exactly two lines: first line is the name, second line is "
      "the description. Do not include bullets, numbering, or any extra text.";

  // 512 generated tokens is the budget for name + ~1000-character description.
  const std::string raw = infer(systemPrompt, prompt, 512);
  auto [name, description] =
      parseTwoLineResponse(raw, "LlamaGenerator: malformed brewery response");

  return {name, description};
}
|
||||||
|
|
||||||
|
// Runs one greedy-decoding pass: formats (systemPrompt, prompt) through the
// model's chat template, tokenizes and decodes the prompt, then samples up to
// maxTokens tokens until an end-of-generation token appears.
// Returns the detokenized completion text.
// Throws std::runtime_error if the model is not loaded, tokenization fails,
// or llama_decode reports an error.
std::string LlamaGenerator::infer(const std::string &systemPrompt,
                                  const std::string &prompt, int maxTokens) {
  if (model_ == nullptr || context_ == nullptr) {
    throw std::runtime_error("LlamaGenerator: model not loaded");
  }

  const llama_vocab *vocab = llama_model_get_vocab(model_);
  if (vocab == nullptr) {
    throw std::runtime_error("LlamaGenerator: vocab unavailable");
  }

  // Clear the KV cache so each call starts from a fresh context.
  llama_memory_clear(llama_get_memory(context_), true);

  const std::string formattedPrompt =
      toChatPrompt(model_, systemPrompt, prompt);

  // First tokenize attempt with a generous guess (bytes + 8 slack);
  // llama_tokenize returns a negative value whose magnitude is the
  // required token count when the buffer is too small.
  std::vector<llama_token> promptTokens(formattedPrompt.size() + 8);
  int32_t tokenCount = llama_tokenize(
      vocab, formattedPrompt.c_str(),
      static_cast<int32_t>(formattedPrompt.size()), promptTokens.data(),
      static_cast<int32_t>(promptTokens.size()), true, true);

  if (tokenCount < 0) {
    // Buffer was too small: resize to the exact requirement and retry.
    promptTokens.resize(static_cast<std::size_t>(-tokenCount));
    tokenCount = llama_tokenize(
        vocab, formattedPrompt.c_str(),
        static_cast<int32_t>(formattedPrompt.size()), promptTokens.data(),
        static_cast<int32_t>(promptTokens.size()), true, true);
  }

  if (tokenCount < 0) {
    throw std::runtime_error("LlamaGenerator: prompt tokenization failed");
  }

  // Trim to the number of tokens actually produced.
  promptTokens.resize(static_cast<std::size_t>(tokenCount));

  // Decode the whole prompt in one batch to prime the context.
  const llama_batch promptBatch = llama_batch_get_one(
      promptTokens.data(), static_cast<int32_t>(promptTokens.size()));
  if (llama_decode(context_, promptBatch) != 0) {
    throw std::runtime_error("LlamaGenerator: prompt decode failed");
  }

  // Sampler chain owned via unique_ptr so llama_sampler_free always runs.
  llama_sampler_chain_params samplerParams =
      llama_sampler_chain_default_params();
  using SamplerPtr =
      std::unique_ptr<llama_sampler, decltype(&llama_sampler_free)>;
  SamplerPtr sampler(llama_sampler_chain_init(samplerParams),
                     &llama_sampler_free);

  if (!sampler) {
    throw std::runtime_error("LlamaGenerator: failed to initialize sampler");
  }

  // Greedy sampling: deterministic output for a given prompt and model.
  llama_sampler_chain_add(sampler.get(), llama_sampler_init_greedy());

  std::vector<llama_token> generatedTokens;
  generatedTokens.reserve(static_cast<std::size_t>(maxTokens));

  // Autoregressive loop: sample from the last position (-1), stop at an
  // end-of-generation token, otherwise feed the token back one at a time.
  for (int i = 0; i < maxTokens; ++i) {
    const llama_token next = llama_sampler_sample(sampler.get(), context_, -1);
    if (llama_vocab_is_eog(vocab, next)) {
      break;
    }

    generatedTokens.push_back(next);

    // llama_batch_get_one needs a mutable pointer, hence the local copy.
    llama_token token = next;
    const llama_batch oneTokenBatch = llama_batch_get_one(&token, 1);
    if (llama_decode(context_, oneTokenBatch) != 0) {
      throw std::runtime_error(
          "LlamaGenerator: decode failed during generation");
    }
  }

  // Detokenize the accumulated completion.
  std::string output;
  for (const llama_token token : generatedTokens) {
    appendTokenPiece(vocab, token, output);
  }

  return output;
}
|
||||||
|
|
||||||
|
// Generates a username/bio pair for the given locale string via the LLM.
// The reply must be exactly two lines (username, then bio); any whitespace
// inside the username is stripped. Throws std::runtime_error when the model
// reply is malformed or either field ends up empty.
UserResult LlamaGenerator::generateUser(const std::string &locale) {
  std::string prompt =
      "Generate a plausible craft beer enthusiast username and a one-sentence "
      "bio. Locale: " +
      locale +
      ". Respond with exactly two lines: first line is the username (no "
      "spaces), second line is the bio. Do not include bullets, numbering, "
      "or any extra text.";

  // FIX: infer() is defined with three parameters (systemPrompt, prompt,
  // maxTokens) — see generateBrewery() — but this call site passed only
  // (prompt, 128). Supply an explicit (empty) system prompt so the call
  // matches that signature while changing the request as little as possible.
  const std::string raw = infer(/*systemPrompt=*/"", prompt, 128);
  auto [username, bio] =
      parseTwoLineResponse(raw, "LlamaGenerator: malformed user response");

  // Enforce the "no spaces" rule even if the model ignored it; the cast to
  // unsigned char keeps std::isspace well-defined for all byte values.
  username.erase(
      std::remove_if(username.begin(), username.end(),
                     [](unsigned char ch) { return std::isspace(ch); }),
      username.end());

  if (username.empty() || bio.empty()) {
    throw std::runtime_error("LlamaGenerator: malformed user response");
  }

  return {username, bio};
}
|
||||||
106
pipeline/src/main.cpp
Normal file
106
pipeline/src/main.cpp
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
#include "data_downloader.h"
|
||||||
|
#include "data_generator.h"
|
||||||
|
#include "database.h"
|
||||||
|
#include "json_loader.h"
|
||||||
|
#include "llama_generator.h"
|
||||||
|
#include "mock_generator.h"
|
||||||
|
#include <curl/curl.h>
|
||||||
|
#include <filesystem>
|
||||||
|
#include <memory>
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
// Returns true when something (file or directory) exists at `filePath`.
static bool FileExists(const std::string &filePath) {
  namespace fs = std::filesystem;
  return fs::exists(fs::path(filePath));
}
|
||||||
|
|
||||||
|
int main(int argc, char *argv[]) {
|
||||||
|
try {
|
||||||
|
curl_global_init(CURL_GLOBAL_DEFAULT);
|
||||||
|
|
||||||
|
std::string modelPath = argc > 1 ? argv[1] : "";
|
||||||
|
std::string cacheDir = argc > 2 ? argv[2] : "/tmp";
|
||||||
|
std::string commit =
|
||||||
|
argc > 3 ? argv[3] : "c5eb7772"; // Default: stable 2026-03-28
|
||||||
|
|
||||||
|
std::string countryName = argc > 4 ? argv[4] : "";
|
||||||
|
|
||||||
|
std::string jsonPath = cacheDir + "/countries+states+cities.json";
|
||||||
|
std::string dbPath = cacheDir + "/biergarten-pipeline.db";
|
||||||
|
|
||||||
|
bool hasJsonCache = FileExists(jsonPath);
|
||||||
|
bool hasDbCache = FileExists(dbPath);
|
||||||
|
|
||||||
|
SqliteDatabase db;
|
||||||
|
|
||||||
|
spdlog::info("Initializing SQLite database at {}...", dbPath);
|
||||||
|
db.Initialize(dbPath);
|
||||||
|
|
||||||
|
if (hasDbCache && hasJsonCache) {
|
||||||
|
spdlog::info("[Pipeline] Cache hit: skipping download and parse");
|
||||||
|
} else {
|
||||||
|
spdlog::info("\n[Pipeline] Downloading geographic data from GitHub...");
|
||||||
|
DataDownloader downloader;
|
||||||
|
downloader.DownloadCountriesDatabase(jsonPath, commit);
|
||||||
|
|
||||||
|
JsonLoader::LoadWorldCities(jsonPath, db);
|
||||||
|
}
|
||||||
|
|
||||||
|
spdlog::info("Initializing brewery generator...");
|
||||||
|
std::unique_ptr<IDataGenerator> generator;
|
||||||
|
if (modelPath.empty()) {
|
||||||
|
generator = std::make_unique<MockGenerator>();
|
||||||
|
spdlog::info("[Generator] Using MockGenerator (no model path provided)");
|
||||||
|
} else {
|
||||||
|
generator = std::make_unique<LlamaGenerator>();
|
||||||
|
spdlog::info("[Generator] Using LlamaGenerator: {}", modelPath);
|
||||||
|
}
|
||||||
|
generator->load(modelPath);
|
||||||
|
|
||||||
|
spdlog::info("\n=== GEOGRAPHIC DATA OVERVIEW ===");
|
||||||
|
|
||||||
|
auto countries = db.QueryCountries(50);
|
||||||
|
auto states = db.QueryStates(50);
|
||||||
|
auto cities = db.QueryCities();
|
||||||
|
|
||||||
|
spdlog::info("\nTotal records loaded:");
|
||||||
|
spdlog::info(" Countries: {}", db.QueryCountries(0).size());
|
||||||
|
spdlog::info(" States: {}", db.QueryStates(0).size());
|
||||||
|
spdlog::info(" Cities: {}", cities.size());
|
||||||
|
|
||||||
|
struct GeneratedBrewery {
|
||||||
|
int cityId;
|
||||||
|
std::string cityName;
|
||||||
|
BreweryResult brewery;
|
||||||
|
};
|
||||||
|
|
||||||
|
std::vector<GeneratedBrewery> generatedBreweries;
|
||||||
|
const size_t sampleCount = std::min(size_t(30), cities.size());
|
||||||
|
|
||||||
|
spdlog::info("\n=== SAMPLE BREWERY GENERATION ===");
|
||||||
|
for (size_t i = 0; i < sampleCount; i++) {
|
||||||
|
const auto &[cityId, cityName] = cities[i];
|
||||||
|
auto brewery = generator->generateBrewery(cityName, countryName, "");
|
||||||
|
generatedBreweries.push_back({cityId, cityName, brewery});
|
||||||
|
}
|
||||||
|
|
||||||
|
spdlog::info("\n=== GENERATED DATA DUMP ===");
|
||||||
|
for (size_t i = 0; i < generatedBreweries.size(); i++) {
|
||||||
|
const auto &entry = generatedBreweries[i];
|
||||||
|
spdlog::info("{}. city_id={} city=\"{}\"", i + 1, entry.cityId,
|
||||||
|
entry.cityName);
|
||||||
|
spdlog::info(" brewery_name=\"{}\"", entry.brewery.name);
|
||||||
|
spdlog::info(" brewery_description=\"{}\"", entry.brewery.description);
|
||||||
|
}
|
||||||
|
|
||||||
|
spdlog::info("\nOK: Pipeline completed successfully");
|
||||||
|
|
||||||
|
curl_global_cleanup();
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
} catch (const std::exception &e) {
|
||||||
|
spdlog::error("ERROR: Pipeline failed: {}", e.what());
|
||||||
|
curl_global_cleanup();
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
104
pipeline/src/mock_generator.cpp
Normal file
104
pipeline/src/mock_generator.cpp
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
#include "mock_generator.h"
|
||||||
|
|
||||||
|
#include <functional>
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
|
||||||
|
// Word banks for deterministic mock generation. generateBrewery() combines
// one adjective with one noun for the name and picks one description;
// generateUser() picks one username and one bio. Selection indices are
// derived from a hash, so the same input always yields the same output.

// First word of a generated brewery name.
const std::vector<std::string> MockGenerator::kBreweryAdjectives = {
    "Craft", "Heritage", "Local", "Artisan", "Pioneer", "Golden",
    "Modern", "Classic", "Summit", "Northern", "Riverstone", "Barrel",
    "Hinterland", "Harbor", "Wild", "Granite", "Copper", "Maple"};

// Second word of a generated brewery name.
const std::vector<std::string> MockGenerator::kBreweryNouns = {
    "Brewing Co.", "Brewery", "Bier Haus", "Taproom", "Works",
    "House", "Fermentery", "Ale Co.", "Cellars", "Collective",
    "Project", "Foundry", "Malthouse", "Public House", "Co-op",
    "Lab", "Beer Hall", "Guild"};

// Canned one-line brewery descriptions.
const std::vector<std::string> MockGenerator::kBreweryDescriptions = {
    "Handcrafted pale ales and seasonal IPAs with local ingredients.",
    "Traditional lagers and experimental sours in small batches.",
    "Award-winning stouts and wildly hoppy blonde ales.",
    "Craft brewery specializing in Belgian-style triples and dark porters.",
    "Modern brewery blending tradition with bold experimental flavors.",
    "Neighborhood-focused taproom pouring crisp pilsners and citrusy pale "
    "ales.",
    "Small-batch brewery known for barrel-aged releases and smoky lagers.",
    "Independent brewhouse pairing farmhouse ales with rotating food pop-ups.",
    "Community brewpub making balanced bitters, saisons, and hazy IPAs.",
    "Experimental nanobrewery exploring local yeast and regional grains.",
    "Family-run brewery producing smooth amber ales and robust porters.",
    "Urban brewery crafting clean lagers and bright, fruit-forward sours.",
    "Riverfront brewhouse featuring oak-matured ales and seasonal blends.",
    "Modern taproom focused on sessionable lagers and classic pub styles.",
    "Brewery rooted in tradition with a lineup of malty reds and crisp lagers.",
    "Creative brewery offering rotating collaborations and limited draft-only "
    "pours.",
    "Locally inspired brewery serving approachable ales with bold hop "
    "character.",
    "Destination taproom known for balanced IPAs and cocoa-rich stouts."};

// Canned usernames (already space-free).
const std::vector<std::string> MockGenerator::kUsernames = {
    "hopseeker", "malttrail", "yeastwhisper", "lagerlane",
    "barrelbound", "foamfinder", "taphunter", "graingeist",
    "brewscout", "aleatlas", "caskcompass", "hopsandmaps",
    "mashpilot", "pintnomad", "fermentfriend", "stoutsignal",
    "sessionwander", "kettlekeeper"};

// Canned one-sentence user bios.
const std::vector<std::string> MockGenerator::kBios = {
    "Always chasing balanced IPAs and crisp lagers across local taprooms.",
    "Weekend brewery explorer with a soft spot for dark, roasty stouts.",
    "Documenting tiny brewpubs, fresh pours, and unforgettable beer gardens.",
    "Fan of farmhouse ales, food pairings, and long tasting flights.",
    "Collecting favorite pilsners one city at a time.",
    "Hops-first drinker who still saves room for classic malt-forward styles.",
    "Finding hidden tap lists and sharing the best seasonal releases.",
    "Brewery road-tripper focused on local ingredients and clean fermentation.",
    "Always comparing house lagers and ranking patio pint vibes.",
    "Curious about yeast strains, barrel programs, and cellar experiments.",
    "Believes every neighborhood deserves a great community taproom.",
    "Looking for session beers that taste great from first sip to last.",
    "Belgian ale enthusiast who never skips a new saison.",
    "Hazy IPA critic with deep respect for a perfectly clear pilsner.",
    "Visits breweries for the stories, stays for the flagship pours.",
    "Craft beer fan mapping tasting notes and favorite brew routes.",
    "Always ready to trade recommendations for underrated local breweries.",
    "Keeping a running list of must-try collab releases and tap takeovers."};
|
||||||
|
|
||||||
|
// No-op: the mock backend has no model to load; the path argument is ignored.
void MockGenerator::load(const std::string & /*modelPath*/) {
  spdlog::info("[MockGenerator] No model needed");
}
|
||||||
|
|
||||||
|
// Combines the hashes of two strings into a single deterministic value.
// Mixes with the boost-style hash_combine constant, then rotates left by 13
// bits to spread low-order entropy into the high bits.
std::size_t MockGenerator::deterministicHash(const std::string &a,
                                             const std::string &b) {
  const std::hash<std::string> hasher;
  std::size_t combined = hasher(a);
  combined ^= hasher(b) + 0x9e3779b97f4a7c15ULL + (combined << 6) +
              (combined >> 2);

  constexpr std::size_t kRotate = 13;
  constexpr std::size_t kWordBits = sizeof(std::size_t) * 8;
  return (combined << kRotate) | (combined >> (kWordBits - kRotate));
}
|
||||||
|
|
||||||
|
// Deterministically fabricates a brewery for a location: the same
// (city, country, regionContext) triple always yields the same result.
BreweryResult MockGenerator::generateBrewery(const std::string &cityName,
                                             const std::string &countryName,
                                             const std::string &regionContext) {
  // Lookup key is "city" or "city,country".
  std::string key = cityName;
  if (!countryName.empty()) {
    key += "," + countryName;
  }

  // Region context, when present, perturbs the seed via the custom mixer.
  std::size_t seed = 0;
  if (regionContext.empty()) {
    seed = std::hash<std::string>{}(key);
  } else {
    seed = deterministicHash(key, regionContext);
  }

  // Divide the seed by distinct small primes so the three picks decorrelate.
  const std::string &adjective =
      kBreweryAdjectives[seed % kBreweryAdjectives.size()];
  const std::string &noun = kBreweryNouns[(seed / 7) % kBreweryNouns.size()];

  BreweryResult result;
  result.name = adjective + " " + noun;
  result.description =
      kBreweryDescriptions[(seed / 13) % kBreweryDescriptions.size()];
  return result;
}
|
||||||
|
|
||||||
|
// Deterministically fabricates a user profile: the same locale string
// always maps to the same canned username/bio pair.
UserResult MockGenerator::generateUser(const std::string &locale) {
  const std::size_t seed = std::hash<std::string>{}(locale);

  UserResult user;
  user.username = kUsernames[seed % kUsernames.size()];
  user.bio = kBios[(seed / 11) % kBios.size()];
  return user;
}
|
||||||
234
pipeline/src/stream_parser.cpp
Normal file
234
pipeline/src/stream_parser.cpp
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
#include "stream_parser.h"
|
||||||
|
#include "database.h"
|
||||||
|
#include <cstdio>
|
||||||
|
#include <rapidjson/filereadstream.h>
|
||||||
|
#include <rapidjson/reader.h>
|
||||||
|
#include <rapidjson/stringbuffer.h>
|
||||||
|
#include <spdlog/spdlog.h>
|
||||||
|
|
||||||
|
using namespace rapidjson;
|
||||||
|
|
||||||
|
// RapidJSON SAX handler that walks the nested countries -> states -> cities
// JSON structure without loading it into memory.
// Depth values (counted after the increment in StartArray/StartObject):
//   1: top-level countries array   2: a country object
//   3: the "states" array          4: a state object
//   5: the "cities" array          6: a city object
// Countries and states are inserted into the database as their objects
// close; completed cities are handed to the on_city callback.
class CityRecordHandler : public BaseReaderHandler<UTF8<>, CityRecordHandler> {
public:
  // Shared state between the parse driver and the handler; the counters
  // are read back by the caller after parsing completes.
  struct ParseContext {
    SqliteDatabase *db = nullptr;                     // destination DB (not owned)
    std::function<void(const CityRecord &)> on_city;  // invoked per completed city
    std::function<void(size_t, size_t)> on_progress;  // (cities_emitted, total_file_size)
    size_t cities_emitted = 0;
    size_t total_file_size = 0;
    int countries_inserted = 0;
    int states_inserted = 0;
  };

  CityRecordHandler(ParseContext &ctx) : context(ctx) {}

  // Flag which logical array was just entered, keyed on nesting depth and
  // the most recent object key.
  bool StartArray() {
    depth++;

    if (depth == 1) {
      in_countries_array = true;
    } else if (depth == 3 && current_key == "states") {
      in_states_array = true;
    } else if (depth == 5 && current_key == "cities") {
      in_cities_array = true;
    }
    return true;
  }

  // Clear the matching array flag; depth is checked before the decrement so
  // it mirrors the value used in StartArray.
  bool EndArray(SizeType /*elementCount*/) {
    if (depth == 1) {
      in_countries_array = false;
    } else if (depth == 3) {
      in_states_array = false;
    } else if (depth == 5) {
      in_cities_array = false;
    }
    depth--;
    return true;
  }

  // Reset the scratch fields for whichever entity object begins here.
  bool StartObject() {
    depth++;

    if (depth == 2 && in_countries_array) {
      in_country_object = true;
      current_country_id = 0;
      country_info[0].clear();
      country_info[1].clear();
      country_info[2].clear();
    } else if (depth == 4 && in_states_array) {
      in_state_object = true;
      current_state_id = 0;
      state_info[0].clear();
      state_info[1].clear();
    } else if (depth == 6 && in_cities_array) {
      building_city = true;
      current_city = {};
    }
    return true;
  }

  // Flush the completed entity. Records with missing/non-positive ids are
  // silently skipped; per-record insert/emit failures are logged and do not
  // abort the parse.
  bool EndObject(SizeType /*memberCount*/) {
    if (depth == 6 && building_city) {
      if (current_city.id > 0 && current_state_id > 0 &&
          current_country_id > 0) {
        // Attach the enclosing state/country ids to the city record.
        current_city.state_id = current_state_id;
        current_city.country_id = current_country_id;

        try {
          context.on_city(current_city);
          context.cities_emitted++;

          // Progress callback fires every 10k cities to limit overhead.
          if (context.on_progress && context.cities_emitted % 10000 == 0) {
            context.on_progress(context.cities_emitted,
                                context.total_file_size);
          }
        } catch (const std::exception &e) {
          spdlog::warn(" WARN: Failed to emit city: {}", e.what());
        }
      }
      building_city = false;
    } else if (depth == 4 && in_state_object) {
      if (current_state_id > 0 && current_country_id > 0) {
        try {
          context.db->InsertState(current_state_id, current_country_id,
                                  state_info[0], state_info[1]);
          context.states_inserted++;
        } catch (const std::exception &e) {
          spdlog::warn(" WARN: Failed to insert state: {}", e.what());
        }
      }
      in_state_object = false;
    } else if (depth == 2 && in_country_object) {
      if (current_country_id > 0) {
        try {
          context.db->InsertCountry(current_country_id, country_info[0],
                                    country_info[1], country_info[2]);
          context.countries_inserted++;
        } catch (const std::exception &e) {
          spdlog::warn(" WARN: Failed to insert country: {}", e.what());
        }
      }
      in_country_object = false;
    }

    depth--;
    return true;
  }

  // Remember the most recent key so value callbacks know where to store.
  bool Key(const char *str, SizeType len, bool /*copy*/) {
    current_key.assign(str, len);
    return true;
  }

  // Route string values into the current entity by key.
  bool String(const char *str, SizeType len, bool /*copy*/) {
    if (building_city && current_key == "name") {
      current_city.name.assign(str, len);
    } else if (in_state_object && current_key == "name") {
      state_info[0].assign(str, len);
    } else if (in_state_object && current_key == "iso2") {
      state_info[1].assign(str, len);
    } else if (in_country_object && current_key == "name") {
      country_info[0].assign(str, len);
    } else if (in_country_object && current_key == "iso2") {
      country_info[1].assign(str, len);
    } else if (in_country_object && current_key == "iso3") {
      country_info[2].assign(str, len);
    }
    return true;
  }

  // Route integer "id" values into the current entity.
  bool Int(int i) {
    if (building_city && current_key == "id") {
      current_city.id = i;
    } else if (in_state_object && current_key == "id") {
      current_state_id = i;
    } else if (in_country_object && current_key == "id") {
      current_country_id = i;
    }
    return true;
  }

  // All integer widths funnel through Int(); ids are assumed to fit in an
  // int — values above INT_MAX would wrap and be dropped by the > 0 checks.
  bool Uint(unsigned i) { return Int(static_cast<int>(i)); }

  bool Int64(int64_t i) { return Int(static_cast<int>(i)); }

  bool Uint64(uint64_t i) { return Int(static_cast<int>(i)); }

  // Latitude/longitude are the only doubles of interest, and only on cities.
  bool Double(double d) {
    if (building_city) {
      if (current_key == "latitude") {
        current_city.latitude = d;
      } else if (current_key == "longitude") {
        current_city.longitude = d;
      }
    }
    return true;
  }

  // Booleans and nulls are accepted but ignored.
  bool Bool(bool /*b*/) { return true; }
  bool Null() { return true; }

private:
  ParseContext &context;

  // Structural state: current nesting depth plus which logical container
  // the parser is inside.
  int depth = 0;
  bool in_countries_array = false;
  bool in_country_object = false;
  bool in_states_array = false;
  bool in_state_object = false;
  bool in_cities_array = false;
  bool building_city = false;

  // Scratch fields for the entity currently being assembled.
  int current_country_id = 0;
  int current_state_id = 0;
  CityRecord current_city = {};
  std::string current_key;

  std::string country_info[3];  // [0]=name, [1]=iso2, [2]=iso3
  std::string state_info[2];    // [0]=name, [1]=iso2
};
|
||||||
|
|
||||||
|
void StreamingJsonParser::Parse(
|
||||||
|
const std::string &filePath, SqliteDatabase &db,
|
||||||
|
std::function<void(const CityRecord &)> onCity,
|
||||||
|
std::function<void(size_t, size_t)> onProgress) {
|
||||||
|
|
||||||
|
spdlog::info(" Streaming parse of {}...", filePath);
|
||||||
|
|
||||||
|
FILE *file = std::fopen(filePath.c_str(), "rb");
|
||||||
|
if (!file) {
|
||||||
|
throw std::runtime_error("Failed to open JSON file: " + filePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t total_size = 0;
|
||||||
|
if (std::fseek(file, 0, SEEK_END) == 0) {
|
||||||
|
long file_size = std::ftell(file);
|
||||||
|
if (file_size > 0) {
|
||||||
|
total_size = static_cast<size_t>(file_size);
|
||||||
|
}
|
||||||
|
std::rewind(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
CityRecordHandler::ParseContext ctx{&db, onCity, onProgress, 0,
|
||||||
|
total_size, 0, 0};
|
||||||
|
CityRecordHandler handler(ctx);
|
||||||
|
|
||||||
|
Reader reader;
|
||||||
|
char buf[65536];
|
||||||
|
FileReadStream frs(file, buf, sizeof(buf));
|
||||||
|
|
||||||
|
if (!reader.Parse(frs, handler)) {
|
||||||
|
ParseErrorCode errCode = reader.GetParseErrorCode();
|
||||||
|
size_t errOffset = reader.GetErrorOffset();
|
||||||
|
std::fclose(file);
|
||||||
|
throw std::runtime_error(std::string("JSON parse error at offset ") +
|
||||||
|
std::to_string(errOffset) +
|
||||||
|
" (code: " + std::to_string(errCode) + ")");
|
||||||
|
}
|
||||||
|
|
||||||
|
std::fclose(file);
|
||||||
|
|
||||||
|
spdlog::info(" OK: Parsed {} countries, {} states, {} cities",
|
||||||
|
ctx.countries_inserted, ctx.states_inserted, ctx.cities_emitted);
|
||||||
|
}
|
||||||
@@ -31,7 +31,6 @@
|
|||||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Repository\Infrastructure.Repository.csproj" />
|
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Repository\Infrastructure.Repository.csproj" />
|
||||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Jwt\Infrastructure.Jwt.csproj" />
|
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Jwt\Infrastructure.Jwt.csproj" />
|
||||||
<ProjectReference Include="..\..\Service\Service.Auth\Service.Auth.csproj" />
|
<ProjectReference Include="..\..\Service\Service.Auth\Service.Auth.csproj" />
|
||||||
<ProjectReference Include="..\..\Service\Service.Breweries\Service.Breweries.csproj" />
|
|
||||||
<ProjectReference Include="..\..\Service\Service.UserManagement\Service.UserManagement.csproj" />
|
<ProjectReference Include="..\..\Service\Service.UserManagement\Service.UserManagement.csproj" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
|
|||||||
@@ -1,50 +0,0 @@
|
|||||||
using FluentValidation;
|
|
||||||
|
|
||||||
namespace API.Core.Contracts.Breweries;
|
|
||||||
|
|
||||||
/// <summary>
/// FluentValidation rules for <see cref="BreweryCreateDto"/>: requires a
/// poster id, bounded name/description lengths, and a non-null Location.
/// Location field rules are guarded with <c>When(x => x.Location is not null)</c>
/// so they do not dereference a missing Location.
/// </summary>
public class BreweryCreateDtoValidator : AbstractValidator<BreweryCreateDto>
{
    public BreweryCreateDtoValidator()
    {
        RuleFor(x => x.PostedById)
            .NotEmpty()
            .WithMessage("PostedById is required.");

        RuleFor(x => x.BreweryName)
            .NotEmpty()
            .WithMessage("Brewery name is required.")
            .MaximumLength(256)
            .WithMessage("Brewery name cannot exceed 256 characters.");

        RuleFor(x => x.Description)
            .NotEmpty()
            .WithMessage("Description is required.")
            .MaximumLength(512)
            .WithMessage("Description cannot exceed 512 characters.");

        RuleFor(x => x.Location)
            .NotNull()
            .WithMessage("Location is required.");

        // Nested Location rules: each only evaluated when Location exists.
        RuleFor(x => x.Location.CityId)
            .NotEmpty()
            .When(x => x.Location is not null)
            .WithMessage("CityId is required.");

        RuleFor(x => x.Location.AddressLine1)
            .NotEmpty()
            .When(x => x.Location is not null)
            .WithMessage("Address line 1 is required.")
            .MaximumLength(256)
            .When(x => x.Location is not null)
            .WithMessage("Address line 1 cannot exceed 256 characters.");

        RuleFor(x => x.Location.PostalCode)
            .NotEmpty()
            .When(x => x.Location is not null)
            .WithMessage("Postal code is required.")
            .MaximumLength(20)
            .When(x => x.Location is not null)
            .WithMessage("Postal code cannot exceed 20 characters.");
    }
}
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
namespace API.Core.Contracts.Breweries;
|
|
||||||
|
|
||||||
/// <summary>
/// Location payload supplied when creating a brewery post
/// (see <see cref="BreweryCreateDto.Location"/>).
/// </summary>
public class BreweryLocationCreateDto
{
    public Guid CityId { get; set; }
    public string AddressLine1 { get; set; } = string.Empty;
    public string? AddressLine2 { get; set; }
    public string PostalCode { get; set; } = string.Empty;
    // Raw coordinate payload; byte layout not defined here — presumably a
    // spatial/geography blob, confirm against the persistence layer.
    public byte[]? Coordinates { get; set; }
}
|
|
||||||
|
|
||||||
/// <summary>
/// Location of a persisted brewery post as returned to API clients,
/// including its own identifier and the owning post id.
/// </summary>
public class BreweryLocationDto
{
    public Guid BreweryPostLocationId { get; set; }
    public Guid BreweryPostId { get; set; }
    public Guid CityId { get; set; }
    public string AddressLine1 { get; set; } = string.Empty;
    public string? AddressLine2 { get; set; }
    public string PostalCode { get; set; } = string.Empty;
    // Raw coordinate payload; byte layout not defined here — presumably a
    // spatial/geography blob, confirm against the persistence layer.
    public byte[]? Coordinates { get; set; }
}
|
|
||||||
|
|
||||||
/// <summary>
/// Request payload for creating a brewery post; validated by
/// <c>BreweryCreateDtoValidator</c> (name ≤ 256 chars, description ≤ 512,
/// Location required).
/// </summary>
public class BreweryCreateDto
{
    public Guid PostedById { get; set; }
    public string BreweryName { get; set; } = string.Empty;
    public string Description { get; set; } = string.Empty;
    // null-forgiving default: validation rejects a missing Location before use.
    public BreweryLocationCreateDto Location { get; set; } = null!;
}
|
|
||||||
|
|
||||||
/// <summary>
/// Brewery post as returned to API clients, with timestamps and an
/// optional embedded location.
/// </summary>
public class BreweryDto
{
    public Guid BreweryPostId { get; set; }
    public Guid PostedById { get; set; }
    public string BreweryName { get; set; } = string.Empty;
    public string Description { get; set; } = string.Empty;
    public DateTime CreatedAt { get; set; }
    public DateTime? UpdatedAt { get; set; }
    // NOTE(review): opaque byte payload — name suggests a SQL rowversion /
    // concurrency token; confirm against the entity mapping.
    public byte[]? Timer { get; set; }
    public BreweryLocationDto? Location { get; set; }
}
|
|
||||||
@@ -86,13 +86,6 @@ namespace API.Core.Controllers
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
[HttpPost("confirm/resend")]
|
|
||||||
public async Task<ActionResult> ResendConfirmation([FromQuery] Guid userId)
|
|
||||||
{
|
|
||||||
await confirmationService.ResendConfirmationEmailAsync(userId);
|
|
||||||
return Ok(new ResponseBody { Message = "confirmation email has been resent" });
|
|
||||||
}
|
|
||||||
|
|
||||||
[AllowAnonymous]
|
[AllowAnonymous]
|
||||||
[HttpPost("refresh")]
|
[HttpPost("refresh")]
|
||||||
public async Task<ActionResult> Refresh(
|
public async Task<ActionResult> Refresh(
|
||||||
|
|||||||
@@ -1,129 +0,0 @@
|
|||||||
using API.Core.Contracts.Breweries;
|
|
||||||
using API.Core.Contracts.Common;
|
|
||||||
using Microsoft.AspNetCore.Authorization;
|
|
||||||
using Microsoft.AspNetCore.Mvc;
|
|
||||||
using Service.Breweries;
|
|
||||||
|
|
||||||
namespace API.Core.Controllers;
|
|
||||||
|
|
||||||
[ApiController]
|
|
||||||
[Route("api/[controller]")]
|
|
||||||
[Authorize(AuthenticationSchemes = "JWT")]
|
|
||||||
public class BreweryController(IBreweryService breweryService) : ControllerBase
|
|
||||||
{
|
|
||||||
[AllowAnonymous]
|
|
||||||
[HttpGet("{id:guid}")]
|
|
||||||
public async Task<ActionResult<ResponseBody<BreweryDto>>> GetById(Guid id)
|
|
||||||
{
|
|
||||||
var brewery = await breweryService.GetByIdAsync(id);
|
|
||||||
if (brewery is null)
|
|
||||||
return NotFound(new ResponseBody { Message = $"Brewery with ID {id} not found." });
|
|
||||||
|
|
||||||
return Ok(new ResponseBody<BreweryDto>
|
|
||||||
{
|
|
||||||
Message = "Brewery retrieved successfully.",
|
|
||||||
Payload = MapToDto(brewery),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
[AllowAnonymous]
|
|
||||||
[HttpGet]
|
|
||||||
public async Task<ActionResult<ResponseBody<IEnumerable<BreweryDto>>>> GetAll(
|
|
||||||
[FromQuery] int? limit,
|
|
||||||
[FromQuery] int? offset)
|
|
||||||
{
|
|
||||||
var breweries = await breweryService.GetAllAsync(limit, offset);
|
|
||||||
return Ok(new ResponseBody<IEnumerable<BreweryDto>>
|
|
||||||
{
|
|
||||||
Message = "Breweries retrieved successfully.",
|
|
||||||
Payload = breweries.Select(MapToDto),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
[HttpPost]
|
|
||||||
public async Task<ActionResult<ResponseBody<BreweryDto>>> Create([FromBody] BreweryCreateDto dto)
|
|
||||||
{
|
|
||||||
var request = new BreweryCreateRequest(
|
|
||||||
dto.PostedById,
|
|
||||||
dto.BreweryName,
|
|
||||||
dto.Description,
|
|
||||||
new BreweryLocationCreateRequest(
|
|
||||||
dto.Location.CityId,
|
|
||||||
dto.Location.AddressLine1,
|
|
||||||
dto.Location.AddressLine2,
|
|
||||||
dto.Location.PostalCode,
|
|
||||||
dto.Location.Coordinates
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
var result = await breweryService.CreateAsync(request);
|
|
||||||
if (!result.Success)
|
|
||||||
return BadRequest(new ResponseBody { Message = result.Message });
|
|
||||||
|
|
||||||
return Created($"/api/brewery/{result.Brewery.BreweryPostId}", new ResponseBody<BreweryDto>
|
|
||||||
{
|
|
||||||
Message = "Brewery created successfully.",
|
|
||||||
Payload = MapToDto(result.Brewery),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
[HttpPut("{id:guid}")]
|
|
||||||
public async Task<ActionResult<ResponseBody<BreweryDto>>> Update(Guid id, [FromBody] BreweryDto dto)
|
|
||||||
{
|
|
||||||
if (dto.BreweryPostId != id)
|
|
||||||
return BadRequest(new ResponseBody { Message = "Route ID does not match payload ID." });
|
|
||||||
|
|
||||||
var request = new BreweryUpdateRequest(
|
|
||||||
dto.BreweryPostId,
|
|
||||||
dto.PostedById,
|
|
||||||
dto.BreweryName,
|
|
||||||
dto.Description,
|
|
||||||
dto.Location is null ? null : new BreweryLocationUpdateRequest(
|
|
||||||
dto.Location.BreweryPostLocationId,
|
|
||||||
dto.Location.CityId,
|
|
||||||
dto.Location.AddressLine1,
|
|
||||||
dto.Location.AddressLine2,
|
|
||||||
dto.Location.PostalCode,
|
|
||||||
dto.Location.Coordinates
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
var result = await breweryService.UpdateAsync(request);
|
|
||||||
if (!result.Success)
|
|
||||||
return BadRequest(new ResponseBody { Message = result.Message });
|
|
||||||
|
|
||||||
return Ok(new ResponseBody<BreweryDto>
|
|
||||||
{
|
|
||||||
Message = "Brewery updated successfully.",
|
|
||||||
Payload = MapToDto(result.Brewery),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
[HttpDelete("{id:guid}")]
|
|
||||||
public async Task<ActionResult<ResponseBody>> Delete(Guid id)
|
|
||||||
{
|
|
||||||
await breweryService.DeleteAsync(id);
|
|
||||||
return Ok(new ResponseBody { Message = "Brewery deleted successfully." });
|
|
||||||
}
|
|
||||||
|
|
||||||
private static BreweryDto MapToDto(Domain.Entities.BreweryPost b) => new()
|
|
||||||
{
|
|
||||||
BreweryPostId = b.BreweryPostId,
|
|
||||||
PostedById = b.PostedById,
|
|
||||||
BreweryName = b.BreweryName,
|
|
||||||
Description = b.Description,
|
|
||||||
CreatedAt = b.CreatedAt,
|
|
||||||
UpdatedAt = b.UpdatedAt,
|
|
||||||
Timer = b.Timer,
|
|
||||||
Location = b.Location is null ? null : new BreweryLocationDto
|
|
||||||
{
|
|
||||||
BreweryPostLocationId = b.Location.BreweryPostLocationId,
|
|
||||||
BreweryPostId = b.Location.BreweryPostId,
|
|
||||||
CityId = b.Location.CityId,
|
|
||||||
AddressLine1 = b.Location.AddressLine1,
|
|
||||||
AddressLine2 = b.Location.AddressLine2,
|
|
||||||
PostalCode = b.Location.PostalCode,
|
|
||||||
Coordinates = b.Location.Coordinates,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,15 +1,20 @@
|
|||||||
using API.Core;
|
using API.Core;
|
||||||
using API.Core.Authentication;
|
using API.Core.Authentication;
|
||||||
|
using API.Core.Contracts.Common;
|
||||||
|
using Domain.Exceptions;
|
||||||
using FluentValidation;
|
using FluentValidation;
|
||||||
using FluentValidation.AspNetCore;
|
using FluentValidation.AspNetCore;
|
||||||
using Infrastructure.Email;
|
using Infrastructure.Email;
|
||||||
|
using Infrastructure.Email.Templates;
|
||||||
using Infrastructure.Email.Templates.Rendering;
|
using Infrastructure.Email.Templates.Rendering;
|
||||||
using Infrastructure.Jwt;
|
using Infrastructure.Jwt;
|
||||||
using Infrastructure.PasswordHashing;
|
using Infrastructure.PasswordHashing;
|
||||||
using Infrastructure.Repository.Auth;
|
using Infrastructure.Repository.Auth;
|
||||||
using Infrastructure.Repository.Sql;
|
using Infrastructure.Repository.Sql;
|
||||||
using Infrastructure.Repository.UserAccount;
|
using Infrastructure.Repository.UserAccount;
|
||||||
using Infrastructure.Repository.Breweries;
|
using Microsoft.AspNetCore.Authentication;
|
||||||
|
using Microsoft.AspNetCore.Mvc;
|
||||||
|
using Microsoft.AspNetCore.Mvc.Filters;
|
||||||
using Service.Auth;
|
using Service.Auth;
|
||||||
using Service.Emails;
|
using Service.Emails;
|
||||||
using Service.UserManagement.User;
|
using Service.UserManagement.User;
|
||||||
@@ -50,7 +55,6 @@ builder.Services.AddSingleton<
|
|||||||
|
|
||||||
builder.Services.AddScoped<IUserAccountRepository, UserAccountRepository>();
|
builder.Services.AddScoped<IUserAccountRepository, UserAccountRepository>();
|
||||||
builder.Services.AddScoped<IAuthRepository, AuthRepository>();
|
builder.Services.AddScoped<IAuthRepository, AuthRepository>();
|
||||||
builder.Services.AddScoped<IBreweryRepository, BreweryRepository>();
|
|
||||||
|
|
||||||
builder.Services.AddScoped<IUserService, UserService>();
|
builder.Services.AddScoped<IUserService, UserService>();
|
||||||
builder.Services.AddScoped<ILoginService, LoginService>();
|
builder.Services.AddScoped<ILoginService, LoginService>();
|
||||||
|
|||||||
@@ -26,7 +26,6 @@
|
|||||||
<Project Path="Service/Service.Auth.Tests/Service.Auth.Tests.csproj" />
|
<Project Path="Service/Service.Auth.Tests/Service.Auth.Tests.csproj" />
|
||||||
<Project Path="Service/Service.Emails/Service.Emails.csproj" />
|
<Project Path="Service/Service.Emails/Service.Emails.csproj" />
|
||||||
<Project Path="Service/Service.UserManagement/Service.UserManagement.csproj" />
|
<Project Path="Service/Service.UserManagement/Service.UserManagement.csproj" />
|
||||||
<Project Path="Service/Service.Auth/Service.Auth.csproj" />
|
<Project Path="Service\Service.Auth\Service.Auth.csproj" />
|
||||||
<Project Path="Service/Service.Breweries/Service.Breweries.csproj" />
|
|
||||||
</Folder>
|
</Folder>
|
||||||
</Solution>
|
</Solution>
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ USE Biergarten;
|
|||||||
CREATE TABLE dbo.UserAccount
|
CREATE TABLE dbo.UserAccount
|
||||||
(
|
(
|
||||||
UserAccountID UNIQUEIDENTIFIER
|
UserAccountID UNIQUEIDENTIFIER
|
||||||
CONSTRAINT DF_UserAccountID DEFAULT NEWID(),
|
CONSTRAINT DF_UserAccountID DEFAULT NEWID(),
|
||||||
|
|
||||||
Username VARCHAR(64) NOT NULL,
|
Username VARCHAR(64) NOT NULL,
|
||||||
|
|
||||||
@@ -37,7 +37,7 @@ CREATE TABLE dbo.UserAccount
|
|||||||
|
|
||||||
UpdatedAt DATETIME,
|
UpdatedAt DATETIME,
|
||||||
|
|
||||||
DateOfBirth DATE NOT NULL,
|
DateOfBirth DATETIME NOT NULL,
|
||||||
|
|
||||||
Timer ROWVERSION,
|
Timer ROWVERSION,
|
||||||
|
|
||||||
@@ -49,6 +49,7 @@ CREATE TABLE dbo.UserAccount
|
|||||||
|
|
||||||
CONSTRAINT AK_Email
|
CONSTRAINT AK_Email
|
||||||
UNIQUE (Email)
|
UNIQUE (Email)
|
||||||
|
|
||||||
);
|
);
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
@@ -108,7 +109,7 @@ CREATE TABLE UserAvatar -- delete avatar photo when user account is deleted
|
|||||||
|
|
||||||
CONSTRAINT AK_UserAvatar_UserAccountID
|
CONSTRAINT AK_UserAvatar_UserAccountID
|
||||||
UNIQUE (UserAccountID)
|
UNIQUE (UserAccountID)
|
||||||
);
|
)
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_UserAvatar_UserAccount
|
CREATE NONCLUSTERED INDEX IX_UserAvatar_UserAccount
|
||||||
ON UserAvatar(UserAccountID);
|
ON UserAvatar(UserAccountID);
|
||||||
@@ -124,7 +125,8 @@ CREATE TABLE UserVerification -- delete verification data when user account is d
|
|||||||
UserAccountID UNIQUEIDENTIFIER NOT NULL,
|
UserAccountID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
VerificationDateTime DATETIME NOT NULL
|
VerificationDateTime DATETIME NOT NULL
|
||||||
CONSTRAINT DF_VerificationDateTime DEFAULT GETDATE(),
|
CONSTRAINT DF_VerificationDateTime
|
||||||
|
DEFAULT GETDATE(),
|
||||||
|
|
||||||
Timer ROWVERSION,
|
Timer ROWVERSION,
|
||||||
|
|
||||||
@@ -153,13 +155,13 @@ CREATE TABLE UserCredential -- delete credentials when user account is deleted
|
|||||||
|
|
||||||
UserAccountID UNIQUEIDENTIFIER NOT NULL,
|
UserAccountID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
CreatedAt DATETIME NOT NULL
|
CreatedAt DATETIME
|
||||||
CONSTRAINT DF_UserCredential_CreatedAt DEFAULT GETDATE(),
|
CONSTRAINT DF_UserCredential_CreatedAt DEFAULT GETDATE() NOT NULL,
|
||||||
|
|
||||||
Expiry DATETIME NOT NULL
|
Expiry DATETIME
|
||||||
CONSTRAINT DF_UserCredential_Expiry DEFAULT DATEADD(DAY, 90, GETDATE()),
|
CONSTRAINT DF_UserCredential_Expiry DEFAULT DATEADD(DAY, 90, GETDATE()) NOT NULL,
|
||||||
|
|
||||||
Hash NVARCHAR(256) NOT NULL,
|
Hash NVARCHAR(MAX) NOT NULL,
|
||||||
-- uses argon2
|
-- uses argon2
|
||||||
|
|
||||||
IsRevoked BIT NOT NULL
|
IsRevoked BIT NOT NULL
|
||||||
@@ -175,16 +177,12 @@ CREATE TABLE UserCredential -- delete credentials when user account is deleted
|
|||||||
CONSTRAINT FK_UserCredential_UserAccount
|
CONSTRAINT FK_UserCredential_UserAccount
|
||||||
FOREIGN KEY (UserAccountID)
|
FOREIGN KEY (UserAccountID)
|
||||||
REFERENCES UserAccount(UserAccountID)
|
REFERENCES UserAccount(UserAccountID)
|
||||||
ON DELETE CASCADE
|
ON DELETE CASCADE,
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_UserCredential_UserAccount
|
CREATE NONCLUSTERED INDEX IX_UserCredential_UserAccount
|
||||||
ON UserCredential(UserAccountID);
|
ON UserCredential(UserAccountID);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_UserCredential_Account_Active
|
|
||||||
ON UserCredential(UserAccountID, IsRevoked, Expiry)
|
|
||||||
INCLUDE (Hash);
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -197,8 +195,8 @@ CREATE TABLE UserFollow
|
|||||||
|
|
||||||
FollowingID UNIQUEIDENTIFIER NOT NULL,
|
FollowingID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
CreatedAt DATETIME NOT NULL
|
CreatedAt DATETIME
|
||||||
CONSTRAINT DF_UserFollow_CreatedAt DEFAULT GETDATE(),
|
CONSTRAINT DF_UserFollow_CreatedAt DEFAULT GETDATE() NOT NULL,
|
||||||
|
|
||||||
Timer ROWVERSION,
|
Timer ROWVERSION,
|
||||||
|
|
||||||
@@ -207,13 +205,11 @@ CREATE TABLE UserFollow
|
|||||||
|
|
||||||
CONSTRAINT FK_UserFollow_UserAccount
|
CONSTRAINT FK_UserFollow_UserAccount
|
||||||
FOREIGN KEY (UserAccountID)
|
FOREIGN KEY (UserAccountID)
|
||||||
REFERENCES UserAccount(UserAccountID)
|
REFERENCES UserAccount(UserAccountID),
|
||||||
ON DELETE NO ACTION,
|
|
||||||
|
|
||||||
CONSTRAINT FK_UserFollow_UserAccountFollowing
|
CONSTRAINT FK_UserFollow_UserAccountFollowing
|
||||||
FOREIGN KEY (FollowingID)
|
FOREIGN KEY (FollowingID)
|
||||||
REFERENCES UserAccount(UserAccountID)
|
REFERENCES UserAccount(UserAccountID),
|
||||||
ON DELETE NO ACTION,
|
|
||||||
|
|
||||||
CONSTRAINT CK_CannotFollowOwnAccount
|
CONSTRAINT CK_CannotFollowOwnAccount
|
||||||
CHECK (UserAccountID != FollowingID)
|
CHECK (UserAccountID != FollowingID)
|
||||||
@@ -225,6 +221,7 @@ CREATE NONCLUSTERED INDEX IX_UserFollow_UserAccount_FollowingID
|
|||||||
CREATE NONCLUSTERED INDEX IX_UserFollow_FollowingID_UserAccount
|
CREATE NONCLUSTERED INDEX IX_UserFollow_FollowingID_UserAccount
|
||||||
ON UserFollow(FollowingID, UserAccountID);
|
ON UserFollow(FollowingID, UserAccountID);
|
||||||
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -243,7 +240,7 @@ CREATE TABLE Country
|
|||||||
PRIMARY KEY (CountryID),
|
PRIMARY KEY (CountryID),
|
||||||
|
|
||||||
CONSTRAINT AK_Country_ISO3166_1
|
CONSTRAINT AK_Country_ISO3166_1
|
||||||
UNIQUE (ISO3166_1)
|
UNIQUE (ISO3166_1)
|
||||||
);
|
);
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
@@ -302,6 +299,7 @@ CREATE TABLE City
|
|||||||
CREATE NONCLUSTERED INDEX IX_City_StateProvince
|
CREATE NONCLUSTERED INDEX IX_City_StateProvince
|
||||||
ON City(StateProvinceID);
|
ON City(StateProvinceID);
|
||||||
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -310,8 +308,6 @@ CREATE TABLE BreweryPost -- A user cannot be deleted if they have a post
|
|||||||
BreweryPostID UNIQUEIDENTIFIER
|
BreweryPostID UNIQUEIDENTIFIER
|
||||||
CONSTRAINT DF_BreweryPostID DEFAULT NEWID(),
|
CONSTRAINT DF_BreweryPostID DEFAULT NEWID(),
|
||||||
|
|
||||||
BreweryName NVARCHAR(256) NOT NULL,
|
|
||||||
|
|
||||||
PostedByID UNIQUEIDENTIFIER NOT NULL,
|
PostedByID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
Description NVARCHAR(512) NOT NULL,
|
Description NVARCHAR(512) NOT NULL,
|
||||||
@@ -329,15 +325,15 @@ CREATE TABLE BreweryPost -- A user cannot be deleted if they have a post
|
|||||||
CONSTRAINT FK_BreweryPost_UserAccount
|
CONSTRAINT FK_BreweryPost_UserAccount
|
||||||
FOREIGN KEY (PostedByID)
|
FOREIGN KEY (PostedByID)
|
||||||
REFERENCES UserAccount(UserAccountID)
|
REFERENCES UserAccount(UserAccountID)
|
||||||
ON DELETE NO ACTION
|
ON DELETE NO ACTION,
|
||||||
);
|
|
||||||
|
)
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BreweryPost_PostedByID
|
CREATE NONCLUSTERED INDEX IX_BreweryPost_PostedByID
|
||||||
ON BreweryPost(PostedByID);
|
ON BreweryPost(PostedByID);
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
CREATE TABLE BreweryPostLocation
|
CREATE TABLE BreweryPostLocation
|
||||||
(
|
(
|
||||||
BreweryPostLocationID UNIQUEIDENTIFIER
|
BreweryPostLocationID UNIQUEIDENTIFIER
|
||||||
@@ -353,7 +349,7 @@ CREATE TABLE BreweryPostLocation
|
|||||||
|
|
||||||
CityID UNIQUEIDENTIFIER NOT NULL,
|
CityID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
Coordinates GEOGRAPHY NULL,
|
Coordinates GEOGRAPHY NOT NULL,
|
||||||
|
|
||||||
Timer ROWVERSION,
|
Timer ROWVERSION,
|
||||||
|
|
||||||
@@ -366,11 +362,7 @@ CREATE TABLE BreweryPostLocation
|
|||||||
CONSTRAINT FK_BreweryPostLocation_BreweryPost
|
CONSTRAINT FK_BreweryPostLocation_BreweryPost
|
||||||
FOREIGN KEY (BreweryPostID)
|
FOREIGN KEY (BreweryPostID)
|
||||||
REFERENCES BreweryPost(BreweryPostID)
|
REFERENCES BreweryPost(BreweryPostID)
|
||||||
ON DELETE CASCADE,
|
ON DELETE CASCADE
|
||||||
|
|
||||||
CONSTRAINT FK_BreweryPostLocation_City
|
|
||||||
FOREIGN KEY (CityID)
|
|
||||||
REFERENCES City(CityID)
|
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BreweryPostLocation_BreweryPost
|
CREATE NONCLUSTERED INDEX IX_BreweryPostLocation_BreweryPost
|
||||||
@@ -379,18 +371,6 @@ CREATE NONCLUSTERED INDEX IX_BreweryPostLocation_BreweryPost
|
|||||||
CREATE NONCLUSTERED INDEX IX_BreweryPostLocation_City
|
CREATE NONCLUSTERED INDEX IX_BreweryPostLocation_City
|
||||||
ON BreweryPostLocation(CityID);
|
ON BreweryPostLocation(CityID);
|
||||||
|
|
||||||
-- To assess when the time comes:
|
|
||||||
|
|
||||||
-- This would allow for efficient spatial queries to find breweries within a certain distance of a location, but it adds overhead to insert/update operations.
|
|
||||||
|
|
||||||
-- CREATE SPATIAL INDEX SIDX_BreweryPostLocation_Coordinates
|
|
||||||
-- ON BreweryPostLocation(Coordinates)
|
|
||||||
-- USING GEOGRAPHY_GRID
|
|
||||||
-- WITH (
|
|
||||||
-- GRIDS = (LEVEL_1 = MEDIUM, LEVEL_2 = MEDIUM, LEVEL_3 = MEDIUM, LEVEL_4 = MEDIUM),
|
|
||||||
-- CELLS_PER_OBJECT = 16
|
|
||||||
-- );
|
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -423,14 +403,13 @@ CREATE TABLE BreweryPostPhoto -- All photos linked to a post are deleted if the
|
|||||||
);
|
);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BreweryPostPhoto_Photo_BreweryPost
|
CREATE NONCLUSTERED INDEX IX_BreweryPostPhoto_Photo_BreweryPost
|
||||||
ON BreweryPostPhoto(PhotoID, BreweryPostID);
|
ON BreweryPostPhoto(PhotoID, BreweryPostID);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BreweryPostPhoto_BreweryPost_Photo
|
CREATE NONCLUSTERED INDEX IX_BreweryPostPhoto_BreweryPost_Photo
|
||||||
ON BreweryPostPhoto(BreweryPostID, PhotoID);
|
ON BreweryPostPhoto(BreweryPostID, PhotoID);
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
|
|
||||||
CREATE TABLE BeerStyle
|
CREATE TABLE BeerStyle
|
||||||
(
|
(
|
||||||
BeerStyleID UNIQUEIDENTIFIER
|
BeerStyleID UNIQUEIDENTIFIER
|
||||||
@@ -465,7 +444,7 @@ CREATE TABLE BeerPost
|
|||||||
-- Alcohol By Volume (typically 0-67%)
|
-- Alcohol By Volume (typically 0-67%)
|
||||||
|
|
||||||
IBU INT NOT NULL,
|
IBU INT NOT NULL,
|
||||||
-- International Bitterness Units (typically 0-120)
|
-- International Bitterness Units (typically 0-100)
|
||||||
|
|
||||||
PostedByID UNIQUEIDENTIFIER NOT NULL,
|
PostedByID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
@@ -485,8 +464,7 @@ CREATE TABLE BeerPost
|
|||||||
|
|
||||||
CONSTRAINT FK_BeerPost_PostedBy
|
CONSTRAINT FK_BeerPost_PostedBy
|
||||||
FOREIGN KEY (PostedByID)
|
FOREIGN KEY (PostedByID)
|
||||||
REFERENCES UserAccount(UserAccountID)
|
REFERENCES UserAccount(UserAccountID),
|
||||||
ON DELETE NO ACTION,
|
|
||||||
|
|
||||||
CONSTRAINT FK_BeerPost_BeerStyle
|
CONSTRAINT FK_BeerPost_BeerStyle
|
||||||
FOREIGN KEY (BeerStyleID)
|
FOREIGN KEY (BeerStyleID)
|
||||||
@@ -544,10 +522,10 @@ CREATE TABLE BeerPostPhoto -- All photos linked to a beer post are deleted if th
|
|||||||
);
|
);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BeerPostPhoto_Photo_BeerPost
|
CREATE NONCLUSTERED INDEX IX_BeerPostPhoto_Photo_BeerPost
|
||||||
ON BeerPostPhoto(PhotoID, BeerPostID);
|
ON BeerPostPhoto(PhotoID, BeerPostID);
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BeerPostPhoto_BeerPost_Photo
|
CREATE NONCLUSTERED INDEX IX_BeerPostPhoto_BeerPost_Photo
|
||||||
ON BeerPostPhoto(BeerPostID, PhotoID);
|
ON BeerPostPhoto(BeerPostID, PhotoID);
|
||||||
|
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
||||||
@@ -561,35 +539,17 @@ CREATE TABLE BeerPostComment
|
|||||||
|
|
||||||
BeerPostID UNIQUEIDENTIFIER NOT NULL,
|
BeerPostID UNIQUEIDENTIFIER NOT NULL,
|
||||||
|
|
||||||
CommentedByID UNIQUEIDENTIFIER NOT NULL,
|
|
||||||
|
|
||||||
Rating INT NOT NULL,
|
Rating INT NOT NULL,
|
||||||
|
|
||||||
CreatedAt DATETIME NOT NULL
|
|
||||||
CONSTRAINT DF_BeerPostComment_CreatedAt DEFAULT GETDATE(),
|
|
||||||
|
|
||||||
UpdatedAt DATETIME NULL,
|
|
||||||
|
|
||||||
Timer ROWVERSION,
|
Timer ROWVERSION,
|
||||||
|
|
||||||
CONSTRAINT PK_BeerPostComment
|
CONSTRAINT PK_BeerPostComment
|
||||||
PRIMARY KEY (BeerPostCommentID),
|
PRIMARY KEY (BeerPostCommentID),
|
||||||
|
|
||||||
CONSTRAINT FK_BeerPostComment_BeerPost
|
CONSTRAINT FK_BeerPostComment_BeerPost
|
||||||
FOREIGN KEY (BeerPostID)
|
FOREIGN KEY (BeerPostID) REFERENCES BeerPost(BeerPostID)
|
||||||
REFERENCES BeerPost(BeerPostID),
|
)
|
||||||
|
|
||||||
CONSTRAINT FK_BeerPostComment_UserAccount
|
|
||||||
FOREIGN KEY (CommentedByID)
|
|
||||||
REFERENCES UserAccount(UserAccountID)
|
|
||||||
ON DELETE NO ACTION,
|
|
||||||
|
|
||||||
CONSTRAINT CHK_BeerPostComment_Rating
|
|
||||||
CHECK (Rating BETWEEN 1 AND 5)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BeerPostComment_BeerPost
|
CREATE NONCLUSTERED INDEX IX_BeerPostComment_BeerPost
|
||||||
ON BeerPostComment(BeerPostID);
|
ON BeerPostComment(BeerPostID)
|
||||||
|
|
||||||
CREATE NONCLUSTERED INDEX IX_BeerPostComment_CommentedBy
|
|
||||||
ON BeerPostComment(CommentedByID);
|
|
||||||
|
|||||||
@@ -1,50 +0,0 @@
|
|||||||
CREATE OR ALTER PROCEDURE dbo.USP_CreateBrewery(
|
|
||||||
@BreweryName NVARCHAR(256),
|
|
||||||
@Description NVARCHAR(512),
|
|
||||||
@PostedByID UNIQUEIDENTIFIER,
|
|
||||||
@CityID UNIQUEIDENTIFIER,
|
|
||||||
@AddressLine1 NVARCHAR(256),
|
|
||||||
@AddressLine2 NVARCHAR(256) = NULL,
|
|
||||||
@PostalCode NVARCHAR(20),
|
|
||||||
@Coordinates GEOGRAPHY = NULL
|
|
||||||
)
|
|
||||||
AS
|
|
||||||
BEGIN
|
|
||||||
SET NOCOUNT ON;
|
|
||||||
SET XACT_ABORT ON;
|
|
||||||
|
|
||||||
IF @BreweryName IS NULL
|
|
||||||
THROW 50001, 'Brewery name cannot be null.', 1;
|
|
||||||
|
|
||||||
IF @Description IS NULL
|
|
||||||
THROW 50002, 'Brewery description cannot be null.', 1;
|
|
||||||
|
|
||||||
IF NOT EXISTS (SELECT 1
|
|
||||||
FROM dbo.UserAccount
|
|
||||||
WHERE UserAccountID = @PostedByID)
|
|
||||||
THROW 50404, 'User not found.', 1;
|
|
||||||
|
|
||||||
IF NOT EXISTS (SELECT 1
|
|
||||||
FROM dbo.City
|
|
||||||
WHERE CityID = @CityID)
|
|
||||||
THROW 50404, 'City not found.', 1;
|
|
||||||
|
|
||||||
DECLARE @NewBreweryID UNIQUEIDENTIFIER = NEWID();
|
|
||||||
DECLARE @NewBrewerLocationID UNIQUEIDENTIFIER = NEWID();
|
|
||||||
|
|
||||||
BEGIN TRANSACTION;
|
|
||||||
|
|
||||||
INSERT INTO dbo.BreweryPost
|
|
||||||
(BreweryPostID, BreweryName, Description, PostedByID)
|
|
||||||
VALUES (@NewBreweryID, @BreweryName, @Description, @PostedByID);
|
|
||||||
|
|
||||||
INSERT INTO dbo.BreweryPostLocation
|
|
||||||
(BreweryPostLocationID, BreweryPostID, CityID, AddressLine1, AddressLine2, PostalCode, Coordinates)
|
|
||||||
VALUES (@NewBrewerLocationID, @NewBreweryID, @CityID, @AddressLine1, @AddressLine2, @PostalCode, @Coordinates);
|
|
||||||
|
|
||||||
COMMIT TRANSACTION;
|
|
||||||
|
|
||||||
SELECT @NewBreweryID AS BreweryPostID,
|
|
||||||
@NewBrewerLocationID AS BreweryPostLocationID;
|
|
||||||
|
|
||||||
END
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
CREATE OR ALTER PROCEDURE dbo.USP_GetBreweryById @BreweryPostID UNIQUEIDENTIFIER
|
|
||||||
AS
|
|
||||||
BEGIN
|
|
||||||
SELECT *
|
|
||||||
FROM BreweryPost bp
|
|
||||||
INNER JOIN BreweryPostLocation bpl
|
|
||||||
ON bp.BreweryPostID = bpl.BreweryPostID
|
|
||||||
WHERE bp.BreweryPostID = @BreweryPostID;
|
|
||||||
END
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
namespace Domain.Entities;
|
|
||||||
|
|
||||||
public class BreweryPost
|
|
||||||
{
|
|
||||||
public Guid BreweryPostId { get; set; }
|
|
||||||
public Guid PostedById { get; set; }
|
|
||||||
public string BreweryName { get; set; } = string.Empty;
|
|
||||||
public string Description { get; set; } = string.Empty;
|
|
||||||
public DateTime CreatedAt { get; set; }
|
|
||||||
public DateTime? UpdatedAt { get; set; }
|
|
||||||
public byte[]? Timer { get; set; }
|
|
||||||
public BreweryPostLocation? Location { get; set; }
|
|
||||||
}
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
namespace Domain.Entities;
|
|
||||||
|
|
||||||
public class BreweryPostLocation
|
|
||||||
{
|
|
||||||
public Guid BreweryPostLocationId { get; set; }
|
|
||||||
public Guid BreweryPostId { get; set; }
|
|
||||||
public string AddressLine1 { get; set; } = string.Empty;
|
|
||||||
public string? AddressLine2 { get; set; }
|
|
||||||
public string PostalCode { get; set; } = string.Empty;
|
|
||||||
public Guid CityId { get; set; }
|
|
||||||
public byte[]? Coordinates { get; set; }
|
|
||||||
public byte[]? Timer { get; set; }
|
|
||||||
}
|
|
||||||
@@ -7,6 +7,6 @@
|
|||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="MailKit" Version="4.15.1" />
|
<PackageReference Include="MailKit" Version="4.9.0" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
</Project>
|
</Project>
|
||||||
|
|||||||
@@ -17,34 +17,10 @@ public class AuthRepositoryTest
|
|||||||
var conn = new MockDbConnection();
|
var conn = new MockDbConnection();
|
||||||
|
|
||||||
conn.Mocks.When(cmd => cmd.CommandText == "USP_RegisterUser")
|
conn.Mocks.When(cmd => cmd.CommandText == "USP_RegisterUser")
|
||||||
.ReturnsScalar(expectedUserId);
|
|
||||||
|
|
||||||
// Mock the subsequent read for the newly created user by id
|
|
||||||
conn.Mocks.When(cmd => cmd.CommandText == "usp_GetUserAccountById")
|
|
||||||
.ReturnsTable(
|
.ReturnsTable(
|
||||||
MockTable
|
MockTable
|
||||||
.WithColumns(
|
.WithColumns(("UserAccountId", typeof(Guid)))
|
||||||
("UserAccountId", typeof(Guid)),
|
.AddRow(expectedUserId)
|
||||||
("Username", typeof(string)),
|
|
||||||
("FirstName", typeof(string)),
|
|
||||||
("LastName", typeof(string)),
|
|
||||||
("Email", typeof(string)),
|
|
||||||
("CreatedAt", typeof(DateTime)),
|
|
||||||
("UpdatedAt", typeof(DateTime?)),
|
|
||||||
("DateOfBirth", typeof(DateTime)),
|
|
||||||
("Timer", typeof(byte[]))
|
|
||||||
)
|
|
||||||
.AddRow(
|
|
||||||
expectedUserId,
|
|
||||||
"testuser",
|
|
||||||
"Test",
|
|
||||||
"User",
|
|
||||||
"test@example.com",
|
|
||||||
DateTime.UtcNow,
|
|
||||||
null,
|
|
||||||
new DateTime(1990, 1, 1),
|
|
||||||
null
|
|
||||||
)
|
|
||||||
);
|
);
|
||||||
|
|
||||||
var repo = CreateRepo(conn);
|
var repo = CreateRepo(conn);
|
||||||
|
|||||||
@@ -1,108 +0,0 @@
|
|||||||
using Apps72.Dev.Data.DbMocker;
|
|
||||||
using FluentAssertions;
|
|
||||||
using Infrastructure.Repository.Breweries;
|
|
||||||
using Infrastructure.Repository.Tests.Database;
|
|
||||||
using Domain.Entities;
|
|
||||||
|
|
||||||
namespace Infrastructure.Repository.Tests.Breweries;
|
|
||||||
|
|
||||||
public class BreweryRepositoryTest
|
|
||||||
{
|
|
||||||
private static BreweryRepository CreateRepo(MockDbConnection conn) =>
|
|
||||||
new(new TestConnectionFactory(conn));
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task GetByIdAsync_ReturnsBrewery_WhenExists()
|
|
||||||
{
|
|
||||||
var breweryId = Guid.NewGuid();
|
|
||||||
var conn = new MockDbConnection();
|
|
||||||
|
|
||||||
// Repository calls the stored procedure
|
|
||||||
const string getByIdSql = "USP_GetBreweryById";
|
|
||||||
|
|
||||||
var locationId = Guid.NewGuid();
|
|
||||||
|
|
||||||
conn.Mocks.When(cmd => cmd.CommandText == getByIdSql)
|
|
||||||
.ReturnsTable(
|
|
||||||
MockTable
|
|
||||||
.WithColumns(
|
|
||||||
("BreweryPostId", typeof(Guid)),
|
|
||||||
("PostedById", typeof(Guid)),
|
|
||||||
("BreweryName", typeof(string)),
|
|
||||||
("Description", typeof(string)),
|
|
||||||
("CreatedAt", typeof(DateTime)),
|
|
||||||
("UpdatedAt", typeof(DateTime?)),
|
|
||||||
("Timer", typeof(byte[])),
|
|
||||||
("BreweryPostLocationId", typeof(Guid)),
|
|
||||||
("CityId", typeof(Guid)),
|
|
||||||
("AddressLine1", typeof(string)),
|
|
||||||
("AddressLine2", typeof(string)),
|
|
||||||
("PostalCode", typeof(string)),
|
|
||||||
("Coordinates", typeof(byte[]))
|
|
||||||
)
|
|
||||||
.AddRow(
|
|
||||||
breweryId,
|
|
||||||
Guid.NewGuid(),
|
|
||||||
"Test Brewery",
|
|
||||||
"A test brewery description",
|
|
||||||
DateTime.UtcNow,
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
locationId,
|
|
||||||
Guid.NewGuid(),
|
|
||||||
"123 Main St",
|
|
||||||
null,
|
|
||||||
"12345",
|
|
||||||
null
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
var repo = CreateRepo(conn);
|
|
||||||
var result = await repo.GetByIdAsync(breweryId);
|
|
||||||
result.Should().NotBeNull();
|
|
||||||
result!.BreweryPostId.Should().Be(breweryId);
|
|
||||||
result.Location.Should().NotBeNull();
|
|
||||||
result.Location!.BreweryPostLocationId.Should().Be(locationId);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task GetByIdAsync_ReturnsNull_WhenNotExists()
|
|
||||||
{
|
|
||||||
var conn = new MockDbConnection();
|
|
||||||
conn.Mocks.When(cmd => cmd.CommandText == "USP_GetBreweryById")
|
|
||||||
.ReturnsTable(MockTable.Empty());
|
|
||||||
var repo = CreateRepo(conn);
|
|
||||||
var result = await repo.GetByIdAsync(Guid.NewGuid());
|
|
||||||
result.Should().BeNull();
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task CreateAsync_ExecutesSuccessfully()
|
|
||||||
{
|
|
||||||
var conn = new MockDbConnection();
|
|
||||||
conn.Mocks.When(cmd => cmd.CommandText == "USP_CreateBrewery")
|
|
||||||
.ReturnsScalar(1);
|
|
||||||
var repo = CreateRepo(conn);
|
|
||||||
var brewery = new BreweryPost
|
|
||||||
{
|
|
||||||
BreweryPostId = Guid.NewGuid(),
|
|
||||||
PostedById = Guid.NewGuid(),
|
|
||||||
BreweryName = "Test Brewery",
|
|
||||||
Description = "A test brewery description",
|
|
||||||
CreatedAt = DateTime.UtcNow,
|
|
||||||
Location = new BreweryPostLocation
|
|
||||||
{
|
|
||||||
BreweryPostLocationId = Guid.NewGuid(),
|
|
||||||
CityId = Guid.NewGuid(),
|
|
||||||
AddressLine1 = "123 Main St",
|
|
||||||
PostalCode = "12345",
|
|
||||||
Coordinates = [0x00, 0x01]
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Should not throw
|
|
||||||
var act = async () => await repo.CreateAsync(brewery);
|
|
||||||
await act.Should().NotThrowAsync();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -33,39 +33,18 @@ public class AuthRepository(ISqlConnectionFactory connectionFactory)
|
|||||||
AddParameter(command, "@Hash", passwordHash);
|
AddParameter(command, "@Hash", passwordHash);
|
||||||
|
|
||||||
var result = await command.ExecuteScalarAsync();
|
var result = await command.ExecuteScalarAsync();
|
||||||
|
var userAccountId = result != null ? (Guid)result : Guid.Empty;
|
||||||
|
|
||||||
Guid userAccountId = Guid.Empty;
|
return new Domain.Entities.UserAccount
|
||||||
if (result != null && result != DBNull.Value)
|
|
||||||
{
|
{
|
||||||
if (result is Guid g)
|
UserAccountId = userAccountId,
|
||||||
{
|
Username = username,
|
||||||
userAccountId = g;
|
FirstName = firstName,
|
||||||
}
|
LastName = lastName,
|
||||||
else if (result is string s && Guid.TryParse(s, out var parsed))
|
Email = email,
|
||||||
{
|
DateOfBirth = dateOfBirth,
|
||||||
userAccountId = parsed;
|
CreatedAt = DateTime.UtcNow,
|
||||||
}
|
};
|
||||||
else if (result is byte[] bytes && bytes.Length == 16)
|
|
||||||
{
|
|
||||||
userAccountId = new Guid(bytes);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
// Fallback: try to convert and parse string representation
|
|
||||||
try
|
|
||||||
{
|
|
||||||
var str = result.ToString();
|
|
||||||
if (!string.IsNullOrEmpty(str) && Guid.TryParse(str, out var p))
|
|
||||||
userAccountId = p;
|
|
||||||
}
|
|
||||||
catch
|
|
||||||
{
|
|
||||||
userAccountId = Guid.Empty;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return await GetUserByIdAsync(userAccountId) ?? throw new Exception("Failed to retrieve newly registered user.");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<Domain.Entities.UserAccount?> GetUserByEmailAsync(
|
public async Task<Domain.Entities.UserAccount?> GetUserByEmailAsync(
|
||||||
|
|||||||
@@ -1,147 +0,0 @@
|
|||||||
using System.Data.Common;
|
|
||||||
using Domain.Entities;
|
|
||||||
using Infrastructure.Repository.Sql;
|
|
||||||
|
|
||||||
namespace Infrastructure.Repository.Breweries;
|
|
||||||
|
|
||||||
public class BreweryRepository(ISqlConnectionFactory connectionFactory)
|
|
||||||
: Repository<BreweryPost>(connectionFactory), IBreweryRepository
|
|
||||||
{
|
|
||||||
private readonly ISqlConnectionFactory _connectionFactory = connectionFactory;
|
|
||||||
|
|
||||||
public async Task<BreweryPost?> GetByIdAsync(Guid id)
|
|
||||||
{
|
|
||||||
await using var connection = await CreateConnection();
|
|
||||||
await using var command = connection.CreateCommand();
|
|
||||||
command.CommandType = System.Data.CommandType.StoredProcedure;
|
|
||||||
|
|
||||||
command.CommandText = "USP_GetBreweryById";
|
|
||||||
AddParameter(command, "@BreweryPostID", id);
|
|
||||||
|
|
||||||
await using var reader = await command.ExecuteReaderAsync();
|
|
||||||
if (await reader.ReadAsync())
|
|
||||||
{
|
|
||||||
return MapToEntity(reader);
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Task<IEnumerable<BreweryPost>> GetAllAsync(int? limit, int? offset)
|
|
||||||
{
|
|
||||||
throw new NotImplementedException();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Task UpdateAsync(BreweryPost brewery)
|
|
||||||
{
|
|
||||||
throw new NotImplementedException();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Task DeleteAsync(Guid id)
|
|
||||||
{
|
|
||||||
throw new NotImplementedException();
|
|
||||||
}
|
|
||||||
|
|
||||||
public async Task CreateAsync(BreweryPost brewery)
|
|
||||||
{
|
|
||||||
await using var connection = await CreateConnection();
|
|
||||||
await using var command = connection.CreateCommand();
|
|
||||||
|
|
||||||
command.CommandText = "USP_CreateBrewery";
|
|
||||||
command.CommandType = System.Data.CommandType.StoredProcedure;
|
|
||||||
|
|
||||||
if (brewery.Location is null)
|
|
||||||
{
|
|
||||||
throw new ArgumentException("Location must be provided when creating a brewery.");
|
|
||||||
}
|
|
||||||
|
|
||||||
AddParameter(command, "@BreweryName", brewery.BreweryName);
|
|
||||||
AddParameter(command, "@Description", brewery.Description);
|
|
||||||
AddParameter(command, "@PostedByID", brewery.PostedById);
|
|
||||||
AddParameter(command, "@CityID", brewery.Location?.CityId);
|
|
||||||
AddParameter(command, "@AddressLine1", brewery.Location?.AddressLine1);
|
|
||||||
AddParameter(command, "@AddressLine2", brewery.Location?.AddressLine2);
|
|
||||||
AddParameter(command, "@PostalCode", brewery.Location?.PostalCode);
|
|
||||||
AddParameter(command, "@Coordinates", brewery.Location?.Coordinates);
|
|
||||||
await command.ExecuteNonQueryAsync();
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override BreweryPost MapToEntity(DbDataReader reader)
|
|
||||||
{
|
|
||||||
var brewery = new BreweryPost();
|
|
||||||
|
|
||||||
var ordBreweryPostId = reader.GetOrdinal("BreweryPostId");
|
|
||||||
var ordPostedById = reader.GetOrdinal("PostedById");
|
|
||||||
var ordBreweryName = reader.GetOrdinal("BreweryName");
|
|
||||||
var ordDescription = reader.GetOrdinal("Description");
|
|
||||||
var ordCreatedAt = reader.GetOrdinal("CreatedAt");
|
|
||||||
var ordUpdatedAt = reader.GetOrdinal("UpdatedAt");
|
|
||||||
var ordTimer = reader.GetOrdinal("Timer");
|
|
||||||
|
|
||||||
brewery.BreweryPostId = reader.GetGuid(ordBreweryPostId);
|
|
||||||
brewery.PostedById = reader.GetGuid(ordPostedById);
|
|
||||||
brewery.BreweryName = reader.GetString(ordBreweryName);
|
|
||||||
brewery.Description = reader.GetString(ordDescription);
|
|
||||||
brewery.CreatedAt = reader.GetDateTime(ordCreatedAt);
|
|
||||||
|
|
||||||
brewery.UpdatedAt = reader.IsDBNull(ordUpdatedAt) ? null : reader.GetDateTime(ordUpdatedAt);
|
|
||||||
|
|
||||||
// Read timer (varbinary/rowversion) robustly
|
|
||||||
if (reader.IsDBNull(ordTimer))
|
|
||||||
{
|
|
||||||
brewery.Timer = null;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
try
|
|
||||||
{
|
|
||||||
brewery.Timer = reader.GetFieldValue<byte[]>(ordTimer);
|
|
||||||
}
|
|
||||||
catch
|
|
||||||
{
|
|
||||||
var length = reader.GetBytes(ordTimer, 0, null, 0, 0);
|
|
||||||
var buffer = new byte[length];
|
|
||||||
reader.GetBytes(ordTimer, 0, buffer, 0, (int)length);
|
|
||||||
brewery.Timer = buffer;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map BreweryPostLocation if columns are present
|
|
||||||
try
|
|
||||||
{
|
|
||||||
var ordLocationId = reader.GetOrdinal("BreweryPostLocationId");
|
|
||||||
if (!reader.IsDBNull(ordLocationId))
|
|
||||||
{
|
|
||||||
var location = new BreweryPostLocation
|
|
||||||
{
|
|
||||||
BreweryPostLocationId = reader.GetGuid(ordLocationId),
|
|
||||||
BreweryPostId = reader.GetGuid(reader.GetOrdinal("BreweryPostId")),
|
|
||||||
CityId = reader.GetGuid(reader.GetOrdinal("CityId")),
|
|
||||||
AddressLine1 = reader.GetString(reader.GetOrdinal("AddressLine1")),
|
|
||||||
AddressLine2 = reader.IsDBNull(reader.GetOrdinal("AddressLine2")) ? null : reader.GetString(reader.GetOrdinal("AddressLine2")),
|
|
||||||
PostalCode = reader.GetString(reader.GetOrdinal("PostalCode")),
|
|
||||||
Coordinates = reader.IsDBNull(reader.GetOrdinal("Coordinates")) ? null : reader.GetFieldValue<byte[]>(reader.GetOrdinal("Coordinates"))
|
|
||||||
};
|
|
||||||
brewery.Location = location;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (IndexOutOfRangeException)
|
|
||||||
{
|
|
||||||
// Location columns not present, skip mapping location
|
|
||||||
}
|
|
||||||
|
|
||||||
return brewery;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void AddParameter(
|
|
||||||
DbCommand command,
|
|
||||||
string name,
|
|
||||||
object? value
|
|
||||||
)
|
|
||||||
{
|
|
||||||
var p = command.CreateParameter();
|
|
||||||
p.ParameterName = name;
|
|
||||||
p.Value = value ?? DBNull.Value;
|
|
||||||
command.Parameters.Add(p);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
using Domain.Entities;
|
|
||||||
|
|
||||||
namespace Infrastructure.Repository.Breweries;
|
|
||||||
|
|
||||||
public interface IBreweryRepository
|
|
||||||
{
|
|
||||||
Task<BreweryPost?> GetByIdAsync(Guid id);
|
|
||||||
Task<IEnumerable<BreweryPost>> GetAllAsync(int? limit, int? offset);
|
|
||||||
Task UpdateAsync(BreweryPost brewery);
|
|
||||||
Task DeleteAsync(Guid id);
|
|
||||||
Task CreateAsync(BreweryPost brewery);
|
|
||||||
}
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
using FluentAssertions;
|
|
||||||
using Xunit;
|
|
||||||
using Service.Breweries;
|
|
||||||
using API.Core.Contracts.Breweries;
|
|
||||||
using Domain.Entities;
|
|
||||||
|
|
||||||
namespace Service.Breweries.Tests;
|
|
||||||
|
|
||||||
public class BreweryServiceTests
|
|
||||||
{
|
|
||||||
private class FakeRepo : IBreweryRepository
|
|
||||||
{
|
|
||||||
public BreweryPost? Created;
|
|
||||||
|
|
||||||
public Task<BreweryPost?> GetByIdAsync(Guid id) => Task.FromResult<BreweryPost?>(null);
|
|
||||||
public Task<IEnumerable<BreweryPost>> GetAllAsync(int? limit, int? offset) => Task.FromResult<IEnumerable<BreweryPost>>(Array.Empty<BreweryPost>());
|
|
||||||
public Task UpdateAsync(BreweryPost brewery) { Created = brewery; return Task.CompletedTask; }
|
|
||||||
public Task DeleteAsync(Guid id) => Task.CompletedTask;
|
|
||||||
public Task CreateAsync(BreweryPost brewery) { Created = brewery; return Task.CompletedTask; }
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task CreateAsync_ReturnsFailure_WhenLocationMissing()
|
|
||||||
{
|
|
||||||
var repo = new FakeRepo();
|
|
||||||
var svc = new BreweryService(repo);
|
|
||||||
|
|
||||||
var dto = new BreweryCreateDto
|
|
||||||
{
|
|
||||||
PostedById = Guid.NewGuid(),
|
|
||||||
BreweryName = "X",
|
|
||||||
Description = "Y",
|
|
||||||
Location = null!
|
|
||||||
};
|
|
||||||
|
|
||||||
var result = await svc.CreateAsync(dto);
|
|
||||||
result.Success.Should().BeFalse();
|
|
||||||
result.Message.Should().Contain("Location");
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task CreateAsync_ReturnsSuccess_AndPersistsEntity()
|
|
||||||
{
|
|
||||||
var repo = new FakeRepo();
|
|
||||||
var svc = new BreweryService(repo);
|
|
||||||
|
|
||||||
var loc = new BreweryLocationCreateDto
|
|
||||||
{
|
|
||||||
CityId = Guid.NewGuid(),
|
|
||||||
AddressLine1 = "123 Main",
|
|
||||||
PostalCode = "12345"
|
|
||||||
};
|
|
||||||
|
|
||||||
var dto = new BreweryCreateDto
|
|
||||||
{
|
|
||||||
PostedById = Guid.NewGuid(),
|
|
||||||
BreweryName = "MyBrew",
|
|
||||||
Description = "Desc",
|
|
||||||
Location = loc
|
|
||||||
};
|
|
||||||
|
|
||||||
var result = await svc.CreateAsync(dto);
|
|
||||||
|
|
||||||
result.Success.Should().BeTrue();
|
|
||||||
repo.Created.Should().NotBeNull();
|
|
||||||
repo.Created!.BreweryName.Should().Be("MyBrew");
|
|
||||||
result.Brewery.BreweryName.Should().Be("MyBrew");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk">
|
|
||||||
<PropertyGroup>
|
|
||||||
<TargetFramework>net10.0</TargetFramework>
|
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
|
||||||
<Nullable>enable</Nullable>
|
|
||||||
<IsPackable>false</IsPackable>
|
|
||||||
<RootNamespace>Service.Breweries.Tests</RootNamespace>
|
|
||||||
</PropertyGroup>
|
|
||||||
|
|
||||||
<ItemGroup>
|
|
||||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
|
|
||||||
<PackageReference Include="xunit" Version="2.9.2" />
|
|
||||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
|
||||||
<PackageReference Include="FluentAssertions" Version="6.9.0" />
|
|
||||||
</ItemGroup>
|
|
||||||
|
|
||||||
<ItemGroup>
|
|
||||||
<Using Include="Xunit" />
|
|
||||||
</ItemGroup>
|
|
||||||
|
|
||||||
<ItemGroup>
|
|
||||||
<ProjectReference Include="..\Service.Breweries\Service.Breweries.csproj" />
|
|
||||||
<ProjectReference Include="..\Service.Auth\Service.Auth.csproj" />
|
|
||||||
<ProjectReference
|
|
||||||
Include="..\..\Infrastructure\Infrastructure.Repository\Infrastructure.Repository.csproj" />
|
|
||||||
<ProjectReference Include="..\..\API\API.Core\API.Core.csproj" />
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
using Domain.Entities;
|
|
||||||
using Infrastructure.Repository.Breweries;
|
|
||||||
|
|
||||||
namespace Service.Breweries;
|
|
||||||
|
|
||||||
public class BreweryService(IBreweryRepository repository) : IBreweryService
|
|
||||||
{
|
|
||||||
public Task<BreweryPost?> GetByIdAsync(Guid id) =>
|
|
||||||
repository.GetByIdAsync(id);
|
|
||||||
|
|
||||||
public Task<IEnumerable<BreweryPost>> GetAllAsync(int? limit = null, int? offset = null) =>
|
|
||||||
repository.GetAllAsync(limit, offset);
|
|
||||||
|
|
||||||
public async Task<BreweryServiceReturn> CreateAsync(BreweryCreateRequest request)
|
|
||||||
{
|
|
||||||
var entity = new BreweryPost
|
|
||||||
{
|
|
||||||
BreweryPostId = Guid.NewGuid(),
|
|
||||||
PostedById = request.PostedById,
|
|
||||||
BreweryName = request.BreweryName,
|
|
||||||
Description = request.Description,
|
|
||||||
CreatedAt = DateTime.UtcNow,
|
|
||||||
Location = new BreweryPostLocation
|
|
||||||
{
|
|
||||||
BreweryPostLocationId = Guid.NewGuid(),
|
|
||||||
CityId = request.Location.CityId,
|
|
||||||
AddressLine1 = request.Location.AddressLine1,
|
|
||||||
AddressLine2 = request.Location.AddressLine2,
|
|
||||||
PostalCode = request.Location.PostalCode,
|
|
||||||
Coordinates = request.Location.Coordinates,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
await repository.CreateAsync(entity);
|
|
||||||
return new BreweryServiceReturn(entity);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async Task<BreweryServiceReturn> UpdateAsync(BreweryUpdateRequest request)
|
|
||||||
{
|
|
||||||
var entity = new BreweryPost
|
|
||||||
{
|
|
||||||
BreweryPostId = request.BreweryPostId,
|
|
||||||
PostedById = request.PostedById,
|
|
||||||
BreweryName = request.BreweryName,
|
|
||||||
Description = request.Description,
|
|
||||||
UpdatedAt = DateTime.UtcNow,
|
|
||||||
Location = request.Location is null ? null : new BreweryPostLocation
|
|
||||||
{
|
|
||||||
BreweryPostLocationId = request.Location.BreweryPostLocationId,
|
|
||||||
BreweryPostId = request.BreweryPostId,
|
|
||||||
CityId = request.Location.CityId,
|
|
||||||
AddressLine1 = request.Location.AddressLine1,
|
|
||||||
AddressLine2 = request.Location.AddressLine2,
|
|
||||||
PostalCode = request.Location.PostalCode,
|
|
||||||
Coordinates = request.Location.Coordinates,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
await repository.UpdateAsync(entity);
|
|
||||||
return new BreweryServiceReturn(entity);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Task DeleteAsync(Guid id) =>
|
|
||||||
repository.DeleteAsync(id);
|
|
||||||
}
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
using Domain.Entities;
|
|
||||||
|
|
||||||
namespace Service.Breweries;
|
|
||||||
|
|
||||||
public record BreweryCreateRequest(
|
|
||||||
Guid PostedById,
|
|
||||||
string BreweryName,
|
|
||||||
string Description,
|
|
||||||
BreweryLocationCreateRequest Location
|
|
||||||
);
|
|
||||||
|
|
||||||
public record BreweryLocationCreateRequest(
|
|
||||||
Guid CityId,
|
|
||||||
string AddressLine1,
|
|
||||||
string? AddressLine2,
|
|
||||||
string PostalCode,
|
|
||||||
byte[]? Coordinates
|
|
||||||
);
|
|
||||||
|
|
||||||
public record BreweryUpdateRequest(
|
|
||||||
Guid BreweryPostId,
|
|
||||||
Guid PostedById,
|
|
||||||
string BreweryName,
|
|
||||||
string Description,
|
|
||||||
BreweryLocationUpdateRequest? Location
|
|
||||||
);
|
|
||||||
|
|
||||||
public record BreweryLocationUpdateRequest(
|
|
||||||
Guid BreweryPostLocationId,
|
|
||||||
Guid CityId,
|
|
||||||
string AddressLine1,
|
|
||||||
string? AddressLine2,
|
|
||||||
string PostalCode,
|
|
||||||
byte[]? Coordinates
|
|
||||||
);
|
|
||||||
|
|
||||||
public record BreweryServiceReturn
|
|
||||||
{
|
|
||||||
public bool Success { get; init; }
|
|
||||||
public BreweryPost Brewery { get; init; }
|
|
||||||
public string Message { get; init; } = string.Empty;
|
|
||||||
|
|
||||||
public BreweryServiceReturn(BreweryPost brewery)
|
|
||||||
{
|
|
||||||
Success = true;
|
|
||||||
Brewery = brewery;
|
|
||||||
}
|
|
||||||
|
|
||||||
public BreweryServiceReturn(string message)
|
|
||||||
{
|
|
||||||
Success = false;
|
|
||||||
Brewery = default!;
|
|
||||||
Message = message;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public interface IBreweryService
|
|
||||||
{
|
|
||||||
Task<BreweryPost?> GetByIdAsync(Guid id);
|
|
||||||
Task<IEnumerable<BreweryPost>> GetAllAsync(int? limit = null, int? offset = null);
|
|
||||||
Task<BreweryServiceReturn> CreateAsync(BreweryCreateRequest request);
|
|
||||||
Task<BreweryServiceReturn> UpdateAsync(BreweryUpdateRequest request);
|
|
||||||
Task DeleteAsync(Guid id);
|
|
||||||
}
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk">
|
|
||||||
<PropertyGroup>
|
|
||||||
<TargetFramework>net10.0</TargetFramework>
|
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
|
||||||
<Nullable>enable</Nullable>
|
|
||||||
</PropertyGroup>
|
|
||||||
|
|
||||||
<ItemGroup>
|
|
||||||
<ProjectReference Include="..\..\Domain\Domain.Entities\Domain.Entities.csproj" />
|
|
||||||
<ProjectReference Include="..\..\Infrastructure\Infrastructure.Repository\Infrastructure.Repository.csproj" />
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
Reference in New Issue
Block a user