// mirror of https://gitlab.com/niansa/llama_any.git
// synced 2025-03-06 20:48:27 +01:00
#include "Runtime.hpp"
#include "AsyncManager.hpp"
#include "Client.hpp"
#include "basic-coro/AwaitableTask.hpp"

#include <iostream>
#include <string>
#include <string_view>

void on_progress(float progress) {
|
|
std::cout << unsigned(progress) << '\r' << std::flush;
|
|
}
basiccoro::AwaitableTask<void> async_main(Runtime& rt, AsyncManager &aMan) {
|
|
// Ask for server address
|
|
const std::string addr = rt.readInput("Server address");
|
|
|
|
// Create client
|
|
Client client(addr, 99181, aMan);
|
|
|
|
// Connection loop
|
|
for (;; rt.cooperate()) {
|
|
// Read prompt
|
|
const auto prompt = rt.readInput("Prompt");
|
|
|
|
// Clear screen
|
|
rt.clearScreen();
|
|
|
|
// Display prompt
|
|
std::cout << "Prompt: " << prompt << std::endl;
|
|
|
|
// Run inference
|
|
co_await client.ask(prompt, [&rt] (float progress) -> basiccoro::AwaitableTask<void> {
|
|
std::cout << unsigned(progress) << "%\r" << std::flush;
|
|
co_return;
|
|
}, [&rt] (std::string_view token) -> basiccoro::AwaitableTask<void> {
|
|
std::cout << token << std::flush;
|
|
co_return;
|
|
});
|
|
std::cout << "\n";
|
|
}
|
|
}
int main()
|
|
{
|
|
Runtime rt;
|
|
AsyncManager aMan(rt);
|
|
|
|
// Start async main()
|
|
async_main(rt, aMan);
|
|
|
|
// Print header
|
|
std::cout << "llama.any running on " PLATFORM ".\n"
|
|
"\n";
|
|
|
|
// Start async manager
|
|
aMan.run();
|
|
|
|
return 0;
|
|
}