feat: move main API to python

This commit is contained in:
2025-09-20 22:14:25 +02:00
parent 0f5bea98b1
commit 1cff0fac2b
29 changed files with 4028 additions and 725 deletions

484
src/backend/.gitignore vendored
View File

@@ -1,484 +0,0 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from `dotnet new gitignore`
# dotenv files
.env
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET
project.lock.json
project.fragment.lock.json
artifacts/
# Tye
.tye/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
.idea/
##
## Visual studio for Mac
##
# globs
Makefile.in
*.userprefs
*.usertasks
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.tar.gz
tarballs/
test-results/
# Mac bundle stuff
*.dmg
*.app
# content below from: https://github.com/github/gitignore/blob/main/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# content below from: https://github.com/github/gitignore/blob/main/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# Vim temporary swap files
*.swp

View File

@@ -1,34 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KwisatzHaderach", "KwisatzHaderach\KwisatzHaderach.csproj", "{231D4FDD-27E9-4D73-B94B-48B9F9F97860}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|Any CPU.Build.0 = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|x64.ActiveCfg = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|x64.Build.0 = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|x86.ActiveCfg = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Debug|x86.Build.0 = Debug|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|Any CPU.ActiveCfg = Release|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|Any CPU.Build.0 = Release|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|x64.ActiveCfg = Release|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|x64.Build.0 = Release|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|x86.ActiveCfg = Release|Any CPU
{231D4FDD-27E9-4D73-B94B-48B9F9F97860}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal

View File

@@ -1,6 +0,0 @@
namespace KwisatzHaderach.Configuration;
/// <summary>
/// Settings bound from the "AI" configuration section.
/// </summary>
public record AIOptions
{
    /// <summary>Base URL of the LLM backend service (e.g. "http://127.0.0.1:8000" per appsettings).</summary>
    public required string Host { get; init; }
}

View File

@@ -1,71 +0,0 @@
using KwisatzHaderach.Infrastructure.Api;
using KwisatzHaderach.Models;
using Microsoft.AspNetCore.Mvc;
using System.Runtime.CompilerServices;
using System.Text.Json;
namespace KwisatzHaderach.Controllers;
[ApiController]
[Route("[controller]")]
public class PromptController : ControllerBase
{
    /// <summary>One chunk of streamed LLM output relayed to the client.</summary>
    public record StreamChunk(string? Content);

    /// <summary>
    /// Forwards the prompt to the LLM backend and streams its raw text response
    /// back to the caller as a sequence of <see cref="StreamChunk"/> items.
    /// On failure, a single error chunk is yielded; on cancellation the stream
    /// simply ends.
    /// </summary>
    [HttpPost("ask")]
    public async IAsyncEnumerable<StreamChunk> GetPromptResponseAsync([FromBody] PromptRequest req,
        ILLMClient llmClient,
        [EnumeratorCancellation] CancellationToken cancellationToken)
    {
        HttpResponseMessage? llmResponse = null;
        StreamChunk? errorChunk = null;
        Exception? capturedException = null;
        try
        {
            llmResponse = await llmClient.GetPromptResponseAsync(req, cancellationToken);
            llmResponse.EnsureSuccessStatusCode();
        }
        catch (OperationCanceledException opEx)
        {
            // Caller cancelled (or client disconnected): end the stream quietly.
            capturedException = opEx;
        }
        catch (Exception ex)
        {
            // Surface the failure to the client as a single error chunk.
            errorChunk = new($"[ERROR] Error reading LLM response: {ex.Message}");
            capturedException = ex;
        }

        if (capturedException != null || llmResponse == null)
        {
            if (errorChunk != null)
            {
                yield return errorChunk;
            }
            yield break;
        }

        using (llmResponse)
        {
            await using var responseStream = await llmResponse.Content.ReadAsStreamAsync(cancellationToken);
            using var reader = new StreamReader(responseStream);

            // Read the stream chunk by chunk until the server closes the connection.
            char[] buffer = new char[1024]; // up to 1024 chars per read
            int charsRead; // StreamReader.ReadAsync returns a character count, not bytes
            while ((charsRead = await reader.ReadAsync(buffer, 0, buffer.Length)) > 0)
            {
                // Wrap the raw text into the StreamChunk shape the React client expects.
                string textChunk = new string(buffer, 0, charsRead);
                yield return new StreamChunk(textChunk);
            }
        }
        yield break;
    }
}

View File

@@ -1,10 +0,0 @@
using KwisatzHaderach.Models;
using Refit;
namespace KwisatzHaderach.Infrastructure.Api;
/// <summary>
/// Refit-typed HTTP client for the LLM backend; its base address is configured
/// at registration time from <c>AIOptions.Host</c>.
/// </summary>
public interface ILLMClient
{
    /// <summary>
    /// POSTs the prompt to the backend's <c>/ask-stream</c> endpoint and returns
    /// the raw <see cref="HttpResponseMessage"/> so callers can read the body incrementally.
    /// </summary>
    [Post("/ask-stream")]
    Task<HttpResponseMessage> GetPromptResponseAsync([Body]PromptRequest req, CancellationToken cancellationToken);
}

View File

@@ -1,14 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.9" />
<PackageReference Include="Refit.HttpClientFactory" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -1,7 +0,0 @@
using System.Text.Json.Serialization;
namespace KwisatzHaderach.Models;
public record PromptRequest(
[property: JsonPropertyName("question")]string Question
);

View File

@@ -1,38 +0,0 @@
using KwisatzHaderach.Configuration;
using KwisatzHaderach.Infrastructure.Api;
using Microsoft.Extensions.Options;
using Refit;
// ASP.NET Core minimal-hosting entry point: wires controllers, OpenAPI,
// AI options, and the Refit client for the LLM backend.
var builder = WebApplication.CreateBuilder(args);

// Add services to the container.
builder.Services.AddControllers();
builder.Services.AddOpenApi();

// Bind the "AI" configuration section (LLM backend host) to AIOptions.
builder.Services.Configure<AIOptions>(
    builder.Configuration.GetSection("AI")
);

// Typed Refit client; base address is resolved from AIOptions at client creation.
builder.Services.AddRefitClient<ILLMClient>()
    .ConfigureHttpClient((provider, client) =>
    {
        IOptions<AIOptions> aiOptions = provider.GetRequiredService<IOptions<AIOptions>>();
        client.BaseAddress = new(aiOptions.Value.Host);
    });

var app = builder.Build();

// Configure the HTTP request pipeline.
if (app.Environment.IsDevelopment())
{
    // Expose the OpenAPI document in development only.
    app.MapOpenApi();
}

app.UseHttpsRedirection();
app.UseAuthorization();
app.MapControllers();
app.Run();

View File

@@ -1,23 +0,0 @@
{
"$schema": "https://json.schemastore.org/launchsettings.json",
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": false,
"applicationUrl": "http://localhost:5187",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": false,
"applicationUrl": "https://localhost:7056;http://localhost:5187",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

View File

@@ -1,11 +0,0 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AI": {
"Host": "http://127.0.0.1:8000"
}
}

View File

@@ -1,9 +0,0 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

24
src/frontend/.gitignore vendored Normal file
View File

@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

69
src/frontend/README.md Normal file
View File

@@ -0,0 +1,69 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

View File

@@ -0,0 +1,23 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
// Flat ESLint config: recommended JS/TS rules plus React hooks and
// react-refresh checks for all TypeScript sources.
export default defineConfig([
  // Never lint build output.
  globalIgnores(['dist']),
  {
    files: ['**/*.{ts,tsx}'],
    extends: [
      js.configs.recommended,
      tseslint.configs.recommended,
      reactHooks.configs['recommended-latest'],
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
  },
])

13
src/frontend/index.html Normal file
View File

@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Vite + React + TS</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

3405
src/frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

29
src/frontend/package.json Normal file
View File

@@ -0,0 +1,29 @@
{
"name": "kwisatz-haderach",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"react": "^19.1.1",
"react-dom": "^19.1.1"
},
"devDependencies": {
"@eslint/js": "^9.35.0",
"@types/react": "^19.1.13",
"@types/react-dom": "^19.1.9",
"@vitejs/plugin-react": "^5.0.2",
"eslint": "^9.35.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.4.0",
"typescript": "~5.8.3",
"typescript-eslint": "^8.43.0",
"vite": "^7.1.6"
}
}

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

42
src/frontend/src/App.css Normal file
View File

@@ -0,0 +1,42 @@
#root {
max-width: 1280px;
margin: 0 auto;
padding: 2rem;
text-align: center;
}
.logo {
height: 6em;
padding: 1.5em;
will-change: filter;
transition: filter 300ms;
}
.logo:hover {
filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
filter: drop-shadow(0 0 2em #61dafbaa);
}
@keyframes logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
@media (prefers-reduced-motion: no-preference) {
a:nth-of-type(2) .logo {
animation: logo-spin infinite 20s linear;
}
}
.card {
padding: 2em;
}
.read-the-docs {
color: #888;
}

35
src/frontend/src/App.tsx Normal file
View File

@@ -0,0 +1,35 @@
import { useState } from 'react'
import reactLogo from './assets/react.svg'
import viteLogo from '/vite.svg'
import './App.css'
/**
 * Template landing component (Vite + React + TS starter): renders the two
 * framework logos and a button that counts clicks.
 */
function App() {
  const [clicks, setClicks] = useState(0)

  // Functional updater keeps the increment correct across batched renders.
  const handleIncrement = () => setClicks((previous) => previous + 1)

  return (
    <>
      <div>
        <a href="https://vite.dev" target="_blank">
          <img src={viteLogo} className="logo" alt="Vite logo" />
        </a>
        <a href="https://react.dev" target="_blank">
          <img src={reactLogo} className="logo react" alt="React logo" />
        </a>
      </div>
      <h1>Vite + React</h1>
      <div className="card">
        <button onClick={handleIncrement}>
          count is {clicks}
        </button>
        <p>
          Edit <code>src/App.tsx</code> and save to test HMR
        </p>
      </div>
      <p className="read-the-docs">
        Click on the Vite and React logos to learn more
      </p>
    </>
  )
}

export default App

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

After

Width:  |  Height:  |  Size: 4.0 KiB

View File

@@ -0,0 +1,68 @@
:root {
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
line-height: 1.5;
font-weight: 400;
color-scheme: light dark;
color: rgba(255, 255, 255, 0.87);
background-color: #242424;
font-synthesis: none;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
a {
font-weight: 500;
color: #646cff;
text-decoration: inherit;
}
a:hover {
color: #535bf2;
}
body {
margin: 0;
display: flex;
place-items: center;
min-width: 320px;
min-height: 100vh;
}
h1 {
font-size: 3.2em;
line-height: 1.1;
}
button {
border-radius: 8px;
border: 1px solid transparent;
padding: 0.6em 1.2em;
font-size: 1em;
font-weight: 500;
font-family: inherit;
background-color: #1a1a1a;
cursor: pointer;
transition: border-color 0.25s;
}
button:hover {
border-color: #646cff;
}
button:focus,
button:focus-visible {
outline: 4px auto -webkit-focus-ring-color;
}
@media (prefers-color-scheme: light) {
:root {
color: #213547;
background-color: #ffffff;
}
a:hover {
color: #747bff;
}
button {
background-color: #f9f9f9;
}
}

10
src/frontend/src/main.tsx Normal file
View File

@@ -0,0 +1,10 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'
// Mount the application at #root; StrictMode enables extra dev-time checks.
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <App />
  </StrictMode>,
)

1
src/frontend/src/vite-env.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@@ -0,0 +1,27 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
"target": "ES2022",
"useDefineForClassFields": true,
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["src"]
}

View File

@@ -0,0 +1,7 @@
{
"files": [],
"references": [
{ "path": "./tsconfig.app.json" },
{ "path": "./tsconfig.node.json" }
]
}

View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
"target": "ES2023",
"lib": ["ES2023"],
"module": "ESNext",
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["vite.config.ts"]
}

View File

@@ -0,0 +1,7 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// https://vite.dev/config/
// Minimal Vite config: only the official React plugin is enabled.
export default defineConfig({
  plugins: [react()],
})

View File

@@ -3,6 +3,10 @@ from fastapi.responses import StreamingResponse
from pydantic import BaseModel
import requests
import json
import logging
from datetime import datetime
import uuid
from typing import Optional
from langchain_community.vectorstores import Chroma
from langchain_community.embeddings import HuggingFaceEmbeddings
@@ -19,6 +23,7 @@ If you don't know the answer from the context provided, just say that you don't
Combine all the relevant information from the context below into a single, cohesive, and comprehensive answer.
Do not break the answer into sections based on the source texts. Synthesize them.
Do not start with "based on the context provided".
The answer should be thorough and well-explained.
CONTEXT:
@@ -30,25 +35,95 @@ QUESTION:
ANSWER:
"""
# --- Pydantic Models (Same as before) ---
# --- Logging setup to match uvicorn format ---
logging.basicConfig(
level=logging.INFO,
format='%(levelname)s: %(name)s: %(message)s'
)
logger = logging.getLogger("dune_api")
# --- Conversation storage (in-memory for simplicity) ---
conversations = {} # session_id -> list of {question, answer} pairs
# --- Pydantic Models ---
class AskRequest(BaseModel):
    """Request body for /ask-stream: a single plain-text question."""

    question: str
class ConversationRequest(BaseModel):
    """Question plus an optional conversation session id.

    NOTE(review): no endpoint in this view consumes this model; presumably
    session_id=None means "no existing session" -- confirm against callers.
    """

    question: str
    session_id: Optional[str] = None
# --- Initialize FastAPI and load resources (Same as before) ---
app = FastAPI()
embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME, model_kwargs={'trust_remote_code': True})
vector_store = Chroma(persist_directory=DB_PATH, embedding_function=embeddings)
retriever = vector_store.as_retriever(search_kwargs={"k": 8})
app = FastAPI(
title="Dune Expert API",
description="Ask questions about the Dune universe and get expert answers",
version="1.0.0"
)
logger.info("Initializing Dune Expert API...")
try:
embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME, model_kwargs={'trust_remote_code': True})
vector_store = Chroma(persist_directory=DB_PATH, embedding_function=embeddings)
retriever = vector_store.as_retriever(search_kwargs={"k": 8})
logger.info("Successfully loaded embeddings and vector store")
except Exception as e:
logger.error(f"Failed to initialize vector store: {e}")
raise RuntimeError(f"Could not initialize vector store: {e}")
# --- Health check endpoint ---
@app.get("/health")
async def health_check() -> dict:
    """Liveness probe: report service identity and the current timestamp."""
    return {"status": "healthy", "service": "dune-expert-api", "timestamp": datetime.now().isoformat()}
# --- Conversation management endpoints ---
@app.post("/conversation/start")
async def start_conversation() -> dict:
    """Start a new conversation and return a session ID"""
    # A fresh UUID keys an empty history list in the in-memory store.
    session_id = str(uuid.uuid4())
    conversations[session_id] = []
    logger.info(f"Started new conversation: {session_id}")
    return {"session_id": session_id}
@app.get("/conversation/{session_id}/history")
async def get_conversation_history(session_id: str) -> dict:
    """Get the history of a conversation"""
    # 404 for unknown ids rather than silently returning an empty history.
    if session_id not in conversations:
        raise HTTPException(status_code=404, detail="Conversation not found")
    return {"session_id": session_id, "history": conversations[session_id]}
@app.delete("/conversation/{session_id}")
async def clear_conversation(session_id: str) -> dict:
    """Clear a conversation's history"""
    if session_id not in conversations:
        raise HTTPException(status_code=404, detail="Conversation not found")
    # Keep the session registered but drop its accumulated turns.
    conversations[session_id] = []
    logger.info(f"Cleared conversation: {session_id}")
    return {"message": "Conversation cleared"}
# --- NEW: The Streaming Endpoint ---
@app.post("/ask-stream")
async def ask_question_stream(request: AskRequest):
    """Stream an answer to a single, stateless question.

    Validates the question, retrieves context from the vector store, formats
    the prompt, and streams Ollama's completion back as plain text. Errors
    inside the stream are yielded as "Error: ..." text since headers are
    already sent.

    Raises:
        HTTPException: 400 for empty/over-long questions, 500 if retrieval fails.
    """
    # Basic input validation
    if not request.question.strip():
        logger.warning("Empty question received")
        raise HTTPException(status_code=400, detail="Question cannot be empty")
    if len(request.question) > 1000:
        logger.warning(f"Question too long: {len(request.question)} characters")
        raise HTTPException(status_code=400, detail="Question too long (max 1000 chars)")

    logger.info(f"🔍 Streaming request for: {request.question[:50]}{'...' if len(request.question) > 50 else ''}")

    # 1. Retrieve context (this part is still blocking)
    try:
        retrieved_docs = retriever.invoke(request.question)
        context = "\n\n---\n\n".join([doc.page_content for doc in retrieved_docs])
        prompt = PROMPT_TEMPLATE.format(context=context, question=request.question)
        logger.info(f"Retrieved {len(retrieved_docs)} documents for context")
    except Exception as e:
        logger.error(f"Failed to retrieve context: {e}")
        raise HTTPException(status_code=500, detail="Failed to retrieve context from knowledge base")

    # 2. Define the generator for the streaming response
    async def stream_generator():
        try:
            ollama_payload = {
                "model": OLLAMA_MODEL,
                "prompt": prompt,
                "stream": True  # <-- The key change to enable streaming from Ollama
            }
            logger.info(f"Sending request to Ollama with model: {OLLAMA_MODEL}")
            # Use stream=True to get a streaming response from requests
            with requests.post(OLLAMA_API_URL, json=ollama_payload, stream=True, timeout=30) as response:
                response.raise_for_status()
                # Ollama streams JSON objects separated by newlines
                for line in response.iter_lines():
                    if line:
                        try:
                            chunk = json.loads(line)
                            # Yield the actual text part of the token
                            llm_response = chunk.get("response", "")
                            if llm_response:  # Only log non-empty responses
                                logger.debug(f"LLM response chunk: {llm_response[:20]}...")
                            yield llm_response
                        except json.JSONDecodeError:
                            logger.warning(f"Failed to parse JSON chunk: {line}")
                            continue
        except requests.exceptions.Timeout:
            error_msg = "Request to language model timed out"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except requests.exceptions.ConnectionError:
            error_msg = "Could not connect to the language model. Is Ollama running?"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except requests.RequestException as e:
            error_msg = f"Error communicating with Ollama: {e}"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except Exception as e:
            error_msg = f"An unexpected error occurred: {e}"
            logger.error(error_msg)
            yield f"Error: {error_msg}"

    # 3. Return the generator wrapped in a StreamingResponse
    return StreamingResponse(stream_generator(), media_type="text/plain")
# --- Conversation-enabled streaming endpoint ---
@app.post("/ask-conversation")
async def ask_question_with_conversation(request: ConversationRequest):
    """Stream an answer that is aware of earlier exchanges in a session.

    If ``request.session_id`` is absent, a new session is created; if present
    but unknown, a 404 is raised. The last 3 exchanges are folded into the
    prompt, the completed answer is appended to the session history (capped at
    10 exchanges), and the session ID is returned in the ``X-Session-ID``
    response header.

    Raises:
        HTTPException: 400 for empty/over-long questions, 404 for unknown
            sessions, 500 if context retrieval fails.
    """
    # Basic input validation
    if not request.question.strip():
        logger.warning("Empty question received")
        raise HTTPException(status_code=400, detail="Question cannot be empty")
    if len(request.question) > 1000:
        logger.warning(f"Question too long: {len(request.question)} characters")
        raise HTTPException(status_code=400, detail="Question too long (max 1000 chars)")
    # Handle session
    session_id = request.session_id
    if session_id and session_id not in conversations:
        logger.warning(f"Unknown session ID: {session_id}")
        raise HTTPException(status_code=404, detail="Conversation session not found")
    elif not session_id:
        # Create new session if none provided
        session_id = str(uuid.uuid4())
        conversations[session_id] = []
        logger.info(f"Created new conversation session: {session_id}")
    logger.info(f"🔍 Conversation request [{session_id[:8]}...]: {request.question[:50]}{'...' if len(request.question) > 50 else ''}")
    # 1. Retrieve context from vector store
    try:
        retrieved_docs = retriever.invoke(request.question)
        context = "\n\n---\n\n".join([doc.page_content for doc in retrieved_docs])
        logger.info(f"Retrieved {len(retrieved_docs)} documents for context")
    except Exception as e:
        logger.error(f"Failed to retrieve context: {e}")
        raise HTTPException(status_code=500, detail="Failed to retrieve context from knowledge base")
    # 2. Build conversation context from history
    conversation_history = conversations[session_id]
    conversation_context = ""
    if conversation_history:
        conversation_context = "\n\nPREVIOUS CONVERSATION:\n"
        # Include last 3 exchanges to keep context manageable
        # (answers are truncated to 200 chars to bound prompt size)
        for exchange in conversation_history[-3:]:
            conversation_context += f"Human: {exchange['question']}\nAssistant: {exchange['answer'][:200]}{'...' if len(exchange['answer']) > 200 else ''}\n\n"
        conversation_context += "CURRENT QUESTION:\n"
    # 3. Create enhanced prompt with conversation context
    enhanced_prompt = f"""
You are an expert lore master for the Dune universe.
Your task is to answer the user's question with as much detail and context as possible, based *only* on the provided text excerpts.
If you don't know the answer from the context provided, just say that you don't know, don't try to make up an answer.
Pay attention to the conversation history if provided - the user might be asking follow-up questions or referring to previous topics.
Combine all the relevant information from the context below into a single, cohesive, and comprehensive answer.
Do not break the answer into sections based on the source texts. Synthesize them.
Do not start with "based on the context provided".
The answer should be thorough and well-explained.
CONTEXT FROM DUNE BOOKS:
{context}
{conversation_context}
QUESTION:
{request.question}
ANSWER:
"""
    # 4. Collect the full response for conversation storage
    full_response = ""
    # 5. Define the generator for the streaming response
    async def stream_generator():
        # Accumulate the streamed answer so it can be saved to history below.
        nonlocal full_response
        try:
            ollama_payload = {
                "model": OLLAMA_MODEL,
                "prompt": enhanced_prompt,
                "stream": True
            }
            logger.info(f"Sending request to Ollama with model: {OLLAMA_MODEL}")
            with requests.post(OLLAMA_API_URL, json=ollama_payload, stream=True, timeout=30) as response:
                response.raise_for_status()
                for line in response.iter_lines():
                    if line:
                        try:
                            chunk = json.loads(line)
                            llm_response = chunk.get("response", "")
                            if llm_response:
                                full_response += llm_response
                                yield llm_response
                        except json.JSONDecodeError:
                            logger.warning(f"Failed to parse JSON chunk: {line}")
                            continue
            # Store the complete exchange in conversation history
            # NOTE(review): this runs only if the client consumes the whole
            # stream — an aborted download leaves the exchange unsaved; confirm
            # that is the intended behavior.
            conversations[session_id].append({
                "question": request.question,
                "answer": full_response
            })
            logger.info(f"Stored exchange in conversation {session_id[:8]}... (total: {len(conversations[session_id])} exchanges)")
            # Keep conversation history manageable (max 10 exchanges)
            if len(conversations[session_id]) > 10:
                conversations[session_id] = conversations[session_id][-10:]
        except requests.exceptions.Timeout:
            error_msg = "Request to language model timed out"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except requests.exceptions.ConnectionError:
            error_msg = "Could not connect to the language model. Is Ollama running?"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except requests.RequestException as e:
            error_msg = f"Error communicating with Ollama: {e}"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
        except Exception as e:
            error_msg = f"An unexpected error occurred: {e}"
            logger.error(error_msg)
            yield f"Error: {error_msg}"
    # 6. Return the response with session info in headers
    response = StreamingResponse(stream_generator(), media_type="text/plain")
    response.headers["X-Session-ID"] = session_id
    return response