mirror of
https://github.com/aaronpo97/the-biergarten-app.git
synced 2026-04-05 18:09:04 +00:00
Split the llama generator class source file into one file per method
This commit is contained in:
33
pipeline/includes/data_generation/llama_generator_helpers.h
Normal file
33
pipeline/includes/data_generation/llama_generator_helpers.h
Normal file
@@ -0,0 +1,33 @@
#ifndef BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_
#define BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_

// Helper functions used by the LlamaGenerator methods, declared here so they
// can be shared across the per-method source files and unit-tested directly.

#include <cstddef>      // std::size_t (was missing; used in the default arg below)
#include <string>
#include <string_view>  // std::string_view (was missing; used below)
#include <utility>

// Forward declarations of llama.cpp types so including this header does not
// force every translation unit to pull in llama.h.
struct llama_model;
struct llama_vocab;
// NOTE(review): llama.cpp declares llama_token as int32_t; this alias must
// stay in sync with that definition -- confirm against the vendored llama.h.
using llama_token = int;

/// Prepares (presumably trims/truncates) a region-context string for use in a
/// prompt, limiting it to at most `max_chars` characters -- TODO confirm the
/// exact normalization performed by the implementation.
std::string PrepareRegionContextPublic(std::string_view region_context,
                                       std::size_t max_chars = 700);

/// Splits a raw model response into two parts (presumably first line /
/// second line). `error_message` is used when the response does not have the
/// expected shape -- TODO confirm whether it is thrown or returned.
std::pair<std::string, std::string>
ParseTwoLineResponsePublic(const std::string& raw,
                           const std::string& error_message);

/// Renders `user_prompt` into the chat template associated with `model`.
std::string ToChatPromptPublic(const llama_model *model,
                               const std::string& user_prompt);

/// Overload that also injects a system prompt into the chat template.
std::string ToChatPromptPublic(const llama_model *model,
                               const std::string& system_prompt,
                               const std::string& user_prompt);

/// Converts `token` to its text piece via `vocab` and appends it to `output`.
void AppendTokenPiecePublic(const llama_vocab *vocab, llama_token token,
                            std::string& output);

/// Validates a raw JSON brewery payload, extracting the name and description
/// into the out-parameters; the returned string is presumably the validated/
/// normalized JSON or an error -- TODO confirm against the implementation.
std::string ValidateBreweryJsonPublic(const std::string& raw,
                                      std::string& name_out,
                                      std::string& description_out);

#endif  // BIERGARTEN_PIPELINE_DATA_GENERATION_LLAMA_GENERATOR_HELPERS_H_
Reference in New Issue
Block a user