Skip to content

Commit 243f749

Browse files
committed
feat: [wip] native AI chat interface
1 parent 50174d2 commit 243f749

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

43 files changed

+3743
-1166
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ Project N.O.M.A.D. is now installed on your device! Open a browser and navigate
3030

3131
## How It Works
3232
From a technical standpoint, N.O.M.A.D. is primarily a management UI ("Command Center") and API that orchestrates a goodie basket of containerized offline archive tools and resources such as
33-
[Kiwix](https://kiwix.org/), [OpenStreetMap](https://www.openstreetmap.org/), [Ollama](https://ollama.com/), [OpenWebUI](https://openwebui.com/), and more.
33+
[Kiwix](https://kiwix.org/), [ProtoMaps](https://protomaps.com), [Ollama](https://ollama.com/), and more.
3434

3535
By abstracting the installation of each of these awesome tools, N.O.M.A.D. makes getting your offline survival computer up and running a breeze! N.O.M.A.D. also includes some additional built-in handy tools, such as a ZIM library management interface, calculators, and more.
3636

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
import { inject } from '@adonisjs/core'
2+
import type { HttpContext } from '@adonisjs/core/http'
3+
import { ChatService } from '#services/chat_service'
4+
import { createSessionSchema, updateSessionSchema, addMessageSchema } from '#validators/chat'
5+
6+
@inject()
7+
export default class ChatsController {
8+
constructor(private chatService: ChatService) {}
9+
10+
async index({}: HttpContext) {
11+
return await this.chatService.getAllSessions()
12+
}
13+
14+
async show({ params, response }: HttpContext) {
15+
const sessionId = parseInt(params.id)
16+
const session = await this.chatService.getSession(sessionId)
17+
18+
if (!session) {
19+
return response.status(404).json({ error: 'Session not found' })
20+
}
21+
22+
return session
23+
}
24+
25+
async store({ request, response }: HttpContext) {
26+
try {
27+
const data = await request.validateUsing(createSessionSchema)
28+
const session = await this.chatService.createSession(data.title, data.model)
29+
return response.status(201).json(session)
30+
} catch (error) {
31+
return response.status(500).json({
32+
error: error instanceof Error ? error.message : 'Failed to create session',
33+
})
34+
}
35+
}
36+
37+
async update({ params, request, response }: HttpContext) {
38+
try {
39+
const sessionId = parseInt(params.id)
40+
const data = await request.validateUsing(updateSessionSchema)
41+
const session = await this.chatService.updateSession(sessionId, data)
42+
return session
43+
} catch (error) {
44+
return response.status(500).json({
45+
error: error instanceof Error ? error.message : 'Failed to update session',
46+
})
47+
}
48+
}
49+
50+
async destroy({ params, response }: HttpContext) {
51+
try {
52+
const sessionId = parseInt(params.id)
53+
await this.chatService.deleteSession(sessionId)
54+
return response.status(204)
55+
} catch (error) {
56+
return response.status(500).json({
57+
error: error instanceof Error ? error.message : 'Failed to delete session',
58+
})
59+
}
60+
}
61+
62+
async addMessage({ params, request, response }: HttpContext) {
63+
try {
64+
const sessionId = parseInt(params.id)
65+
const data = await request.validateUsing(addMessageSchema)
66+
const message = await this.chatService.addMessage(sessionId, data.role, data.content)
67+
return response.status(201).json(message)
68+
} catch (error) {
69+
return response.status(500).json({
70+
error: error instanceof Error ? error.message : 'Failed to add message',
71+
})
72+
}
73+
}
74+
75+
async destroyAll({ response }: HttpContext) {
76+
try {
77+
const result = await this.chatService.deleteAllSessions()
78+
return response.status(200).json(result)
79+
} catch (error) {
80+
return response.status(500).json({
81+
error: error instanceof Error ? error.message : 'Failed to delete all sessions',
82+
})
83+
}
84+
}
85+
}
Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
import { OllamaService } from '#services/ollama_service'
2+
import { RagService } from '#services/rag_service'
3+
import { modelNameSchema } from '#validators/download'
4+
import { chatSchema, getAvailableModelsSchema } from '#validators/ollama'
5+
import { inject } from '@adonisjs/core'
6+
import type { HttpContext } from '@adonisjs/core/http'
7+
import { SYSTEM_PROMPTS } from '../../constants/ollama.js'
8+
9+
@inject()
10+
export default class OllamaController {
11+
constructor(
12+
private ollamaService: OllamaService,
13+
private ragService: RagService
14+
) {}
15+
16+
async availableModels({ request }: HttpContext) {
17+
const reqData = await request.validateUsing(getAvailableModelsSchema)
18+
return await this.ollamaService.getAvailableModels({
19+
sort: reqData.sort,
20+
recommendedOnly: reqData.recommendedOnly,
21+
})
22+
}
23+
24+
async chat({ request }: HttpContext) {
25+
const reqData = await request.validateUsing(chatSchema)
26+
27+
/**If there are no system messages in the chat
28+
*(i.e. first message from the user)inject system prompts
29+
**/
30+
const hasSystemMessage = reqData.messages.some((msg) => msg.role === 'system')
31+
if (!hasSystemMessage) {
32+
const systemPrompt = {
33+
role: 'system' as const,
34+
content: SYSTEM_PROMPTS.default,
35+
}
36+
reqData.messages.unshift(systemPrompt)
37+
}
38+
39+
// Get the last user message to use for RAG context retrieval
40+
const lastUserMessage = [...reqData.messages].reverse().find((msg) => msg.role === 'user')
41+
42+
if (lastUserMessage) {
43+
// Search for relevant context in the knowledge base
44+
const relevantDocs = await this.ragService.searchSimilarDocuments(
45+
lastUserMessage.content,
46+
5, // Retrieve top 5 most relevant chunks
47+
0.7 // Minimum similarity score of 0.7
48+
)
49+
50+
// If relevant context is found, inject as a system message
51+
if (relevantDocs.length > 0) {
52+
const contextText = relevantDocs
53+
.map((doc, idx) => `[Context ${idx + 1}]\n${doc.text}`)
54+
.join('\n\n')
55+
56+
const systemMessage = {
57+
role: 'system' as const,
58+
content: SYSTEM_PROMPTS.rag_context(contextText),
59+
}
60+
61+
// Insert system message at the beginning (after any existing system messages)
62+
const firstNonSystemIndex = reqData.messages.findIndex((msg) => msg.role !== 'system')
63+
const insertIndex = firstNonSystemIndex === -1 ? 0 : firstNonSystemIndex
64+
reqData.messages.splice(insertIndex, 0, systemMessage)
65+
}
66+
}
67+
68+
return await this.ollamaService.chat(reqData)
69+
}
70+
71+
async deleteModel({ request }: HttpContext) {
72+
const reqData = await request.validateUsing(modelNameSchema)
73+
await this.ollamaService.deleteModel(reqData.model)
74+
return {
75+
success: true,
76+
message: `Model deleted: ${reqData.model}`,
77+
}
78+
}
79+
80+
async dispatchModelDownload({ request }: HttpContext) {
81+
const reqData = await request.validateUsing(modelNameSchema)
82+
await this.ollamaService.dispatchModelDownload(reqData.model)
83+
return {
84+
success: true,
85+
message: `Download job dispatched for model: ${reqData.model}`,
86+
}
87+
}
88+
89+
async installedModels({}: HttpContext) {
90+
return await this.ollamaService.getModels()
91+
}
92+
}

admin/app/controllers/openwebui_controller.ts

Lines changed: 0 additions & 40 deletions
This file was deleted.
Lines changed: 26 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,41 @@
1-
import { cuid } from '@adonisjs/core/helpers'
1+
import { RagService } from '#services/rag_service'
2+
import { inject } from '@adonisjs/core'
23
import type { HttpContext } from '@adonisjs/core/http'
34
import app from '@adonisjs/core/services/app'
5+
import { randomBytes } from 'node:crypto'
6+
import { sanitizeFilename } from '../utils/fs.js'
7+
8+
@inject()
9+
export default class RagController {
10+
constructor(private ragService: RagService) {}
411

5-
export default class RagsController {
612
public async upload({ request, response }: HttpContext) {
713
const uploadedFile = request.file('file')
814
if (!uploadedFile) {
915
return response.status(400).json({ error: 'No file uploaded' })
1016
}
1117

12-
const fileName = `${cuid()}.${uploadedFile.extname}`
18+
const randomSuffix = randomBytes(6).toString('hex')
19+
const sanitizedName = sanitizeFilename(uploadedFile.clientName)
20+
21+
const fileName = `${sanitizedName}-${randomSuffix}.${uploadedFile.extname || 'txt'}`
22+
const fullPath = app.makePath('storage/uploads', fileName)
1323

1424
await uploadedFile.move(app.makePath('storage/uploads'), {
1525
name: fileName,
1626
})
27+
28+
// Don't await this - process in background
29+
this.ragService.processAndEmbedFile(fullPath)
30+
31+
return response.status(200).json({
32+
message: 'File has been uploaded and queued for processing.',
33+
file_path: `/uploads/${fileName}`,
34+
})
35+
}
36+
37+
public async getStoredFiles({ response }: HttpContext) {
38+
const files = await this.ragService.getStoredFiles()
39+
return response.status(200).json({ files })
1740
}
1841
}

admin/app/controllers/settings_controller.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { BenchmarkService } from '#services/benchmark_service';
22
import { MapService } from '#services/map_service';
3-
import { OpenWebUIService } from '#services/openwebui_service';
3+
import { OllamaService } from '#services/ollama_service';
44
import { SystemService } from '#services/system_service';
55
import { inject } from '@adonisjs/core';
66
import type { HttpContext } from '@adonisjs/core/http'
@@ -10,8 +10,8 @@ export default class SettingsController {
1010
constructor(
1111
private systemService: SystemService,
1212
private mapService: MapService,
13-
private openWebUIService: OpenWebUIService,
14-
private benchmarkService: BenchmarkService
13+
private benchmarkService: BenchmarkService,
14+
private ollamaService: OllamaService
1515
) { }
1616

1717
async system({ inertia }: HttpContext) {
@@ -48,8 +48,8 @@ export default class SettingsController {
4848
}
4949

5050
async models({ inertia }: HttpContext) {
51-
const availableModels = await this.openWebUIService.getAvailableModels();
52-
const installedModels = await this.openWebUIService.getInstalledModels();
51+
const availableModels = await this.ollamaService.getAvailableModels({ sort: 'pulls', recommendedOnly: false });
52+
const installedModels = await this.ollamaService.getModels();
5353
return inertia.render('settings/models', {
5454
models: {
5555
availableModels: availableModels || [],

admin/app/jobs/download_model_job.ts

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,8 @@
11
import { Job } from 'bullmq'
22
import { QueueService } from '#services/queue_service'
3-
import { OpenWebUIService } from '#services/openwebui_service'
43
import { createHash } from 'crypto'
54
import logger from '@adonisjs/core/services/logger'
6-
import { DockerService } from '#services/docker_service'
5+
import { OllamaService } from '#services/ollama_service'
76

87
export interface DownloadModelJobParams {
98
modelName: string
@@ -27,26 +26,23 @@ export class DownloadModelJob {
2726

2827
logger.info(`[DownloadModelJob] Attempting to download model: ${modelName}`)
2928

30-
// Check if OpenWebUI/Ollama services are ready
31-
const dockerService = new DockerService()
32-
const openWebUIService = new OpenWebUIService(dockerService)
29+
const ollamaService = new OllamaService()
3330

34-
// Use getInstalledModels to check if the service is ready
3531
// Even if no models are installed, this should return an empty array if ready
36-
const existingModels = await openWebUIService.getInstalledModels()
32+
const existingModels = await ollamaService.getModels()
3733
if (!existingModels) {
3834
logger.warn(
39-
`[DownloadModelJob] OpenWebUI service not ready yet for model ${modelName}. Will retry...`
35+
`[DownloadModelJob] Ollama service not ready yet for model ${modelName}. Will retry...`
4036
)
41-
throw new Error('OpenWebUI service not ready yet')
37+
throw new Error('Ollama service not ready yet')
4238
}
4339

4440
logger.info(
45-
`[DownloadModelJob] OpenWebUI service is ready. Initiating download for ${modelName}`
41+
`[DownloadModelJob] Ollama service is ready. Initiating download for ${modelName}`
4642
)
4743

4844
// Services are ready, initiate the download with progress tracking
49-
const result = await openWebUIService._downloadModel(modelName, (progress) => {
45+
const result = await ollamaService._downloadModel(modelName, (progress) => {
5046
// Update job progress in BullMQ
5147
const progressData = {
5248
status: progress.status,

admin/app/models/chat_message.ts

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import { DateTime } from 'luxon'
2+
import { BaseModel, column, belongsTo, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
3+
import type { BelongsTo } from '@adonisjs/lucid/types/relations'
4+
import ChatSession from './chat_session.js'
5+
6+
export default class ChatMessage extends BaseModel {
7+
static namingStrategy = new SnakeCaseNamingStrategy()
8+
9+
@column({ isPrimary: true })
10+
declare id: number
11+
12+
@column()
13+
declare session_id: number
14+
15+
@column()
16+
declare role: 'system' | 'user' | 'assistant'
17+
18+
@column()
19+
declare content: string
20+
21+
@belongsTo(() => ChatSession, { foreignKey: 'id', localKey: 'session_id' })
22+
declare session: BelongsTo<typeof ChatSession>
23+
24+
@column.dateTime({ autoCreate: true })
25+
declare created_at: DateTime
26+
27+
@column.dateTime({ autoCreate: true, autoUpdate: true })
28+
declare updated_at: DateTime
29+
}

0 commit comments

Comments
 (0)