AI.js
    import { Ollama } from 'ollama';
    import Chat from "../models/Chat.js";
    import { aiDeleteModel, aiGetModels, aiGetModel, aiInstallModel, aiIsRunning, summarizeText } from "../utils/handleAI.js";
    import { mapStoredMessagesToChatMessages } from "@langchain/core/messages";
    import { createRecord, findRecordByID, findRecords } from '../utils/handleDB.js';
    import { prefillDocumentObject } from '../utils/handleSchemes.js';
    import { performance } from "node:perf_hooks";
    
    
    
    
    // PROVIDE OLLAMA CONNECTION TO ALL ROUTES
    export const ollama = new Ollama({ host: process.env.AI_API_URL });
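    // Note (assumption): AI_API_URL presumably points at an Ollama server endpoint,
    // e.g. http://localhost:11434 (Ollama's default port); the exact value comes from the environment.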
    
    /** *******************************************************
     * CHECK RUNNING
     */
    export const getStatus = async (req, res, next) => {
      try {
        // check if ollama is reachable
        const running = await aiIsRunning();
        // NOT reachable: report the AI backend as unavailable
        if (!running) return res.status(503).json({ running });
        // reachable
        return res.json({ running });
      } catch (error) {
        next(error);
      }
    };
    
    
    /** *******************************************************
     * GET MODELS
     */
    export const getModels = async (req, res, next) => {
      try {
        const foundModels = await aiFilterModelsByName(req.body.filter);
        return res.json(foundModels);
      } catch (error) {
        next(error);
      }
    };
    
    
    /** *******************************************************
     * GET MODEL
     */
    export const getModel = async (req, res, next) => {
      try {
        const model = await aiGetModel(req.body.model);
        return res.json(model);
      } catch (error) {
        next(error);
      }
    };
    
    
    /** *******************************************************
     * INSTALL SINGLE
     */
    export const installModel = async (req, res, next) => {
      try {
        const response = await aiInstallModel(req.body.model);
        const model = await aiFilterModelsByName(req.body.model);
    
        return res.json({ message: response.message, model });
      } catch (error) {
        next(error);
      }
    };
    
    
    /** *******************************************************
     * DELETE SINGLE
     */
    export const deleteModel = async (req, res, next) => {
      try {
        const response = await aiDeleteModel(req.body.model);
        return res.json(response);
      } catch (error) {
        next(error);
      }
    };
    
    
    
    /** *******************************************************
     * FETCH CHAT (creates a new chat if no chatId is given)
     */
    export const getChat = async (req, res, next) => {
      // IF NO CHATID GIVEN
      if (!req.body.chatId) {
        try {
          // create chat and remember ID
          req.body.chatId = await createChat(req.body.model, req.body.input);
    
          // return
          return next();
        } catch (error) {
          return next(error);
        }
      }
    
      // IF CHATID GIVEN
      try {
        // fetch chat record
        const record = await findRecordByID(Chat, req.body.chatId);
        if (!record) {
          return res.status(404).json({ message: `No chat history with ID ${req.body.chatId} found.` });
        }
        // remember chat history
        // cite: https://js.langchain.com/v0.1/docs/modules/memory/chat_messages/custom/
        performance.mark('mapStoredMessagesToChatMessages:start');
        req.body.chatHistory = mapStoredMessagesToChatMessages(record.chatHistory);
        performance.mark('mapStoredMessagesToChatMessages:end');
        // go on
        next();
      } catch (error) {
        next(error);
      }
    };
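    
    // Note (assumption about the stored format): mapStoredMessagesToChatMessages
    // expects LangChain's StoredMessage shape, roughly
    //   [{ type: 'human', data: { content: 'Hi' } },
    //    { type: 'ai',    data: { content: 'Hello!' } }]
    // so record.chatHistory is presumably persisted in that structure.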
    
    /** *******************************************************
     * GET CHATS
     */
    export const getChats = async (req, res, next) => {
      try {
        // TODO sort chats by createdAt
        const chats = await findRecords(Chat, { createdBy: global.currentUserId });
        // const chats = await Chat.find({ createdBy: global.currentUserId }).sort({ 'createdAt': 1 });
        return res.json({ chats });
      } catch (error) {
        next(error);
      }
    };
    
    /** *******************************************************
     ####################### FUNCTIONS #######################
     ******************************************************* */
    
    /** *******************************************************
     * CREATE CHAT
     * creates a Chat record with a summarized title and returns its id
     */
    export const createChat = async (model, input) => {
      performance.mark('create_chat:start');
      // create chat title from the first user input
      const title = await summarizeText(model, input);
      // create record
      const record = await createRecord(Chat, prefillDocumentObject(Chat, { title }));
      performance.mark('create_chat:end');
      // return record id
      return record.id;
    };
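    
    // Sketch (not part of the original flow): the create_chat marks above are never
    // consumed in this file; node:perf_hooks can turn a mark pair into a measure, e.g.:
    //
    //   performance.measure('create_chat', 'create_chat:start', 'create_chat:end');
    //   const [entry] = performance.getEntriesByName('create_chat');
    //   console.log(`create_chat took ${entry.duration} ms`);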
    
    
    /** *******************************************************
     * CHECK IF REQUESTED MODEL IS AVAILABLE
     */
    export const checkRequestedModel = async (req, res, next) => {
      performance.mark('checkRequestedModel:start');
      // a missing model name is a client error
      if (!req.body.model) {
        return res.status(400).json({ error: `No chat model requested.` });
      }
      try {
        const models = await aiFilterModelsByName(req.body.model);
        // return if the requested model is not installed
        if (!models.length) {
          return res.status(404).json({ error: `Chat model ${req.body.model} not found.` });
        }
        performance.mark('checkRequestedModel:end');
        next();
      } catch (error) {
        next(error);
      }
    };
    
    /** *******************************************************
     * FILTER AVAILABLE MODELS BY NAME
     */
    export const aiFilterModelsByName = async (strFilter = '') => {
      performance.mark('aiFilterModelsByName:start');
      // fetch all available models
      const avail = await aiGetModels();
    
      // return all models if no filter query provided
      if (strFilter === '') {
        performance.mark('aiFilterModelsByName:end');
        return avail.models;
      }
      // set case-insensitive regex query
      const regex = new RegExp(strFilter, 'i');
      // filter models by regex query
      const filtered = avail.models.filter((model) => regex.test(model.name));
      performance.mark('aiFilterModelsByName:end');
      return filtered;
    };
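    
    
    /** *******************************************************
     * USAGE SKETCH (assumption, not part of this module)
     * These handlers are presumably mounted by an Express router elsewhere;
     * the paths and the final chatHandler below are hypothetical, e.g.:
     *
     *   import { Router } from 'express';
     *   import {
     *     getStatus, getModels, getModel, installModel, deleteModel,
     *     getChats, getChat, checkRequestedModel,
     *   } from '../controllers/AI.js';
     *
     *   const router = Router();
     *   router.get('/ai/status', getStatus);
     *   router.post('/ai/models', getModels);
     *   router.post('/ai/model', getModel);
     *   router.post('/ai/model/install', installModel);
     *   router.delete('/ai/model', deleteModel);
     *   router.get('/ai/chats', getChats);
     *   // checkRequestedModel and getChat run before a chat handler not shown in this file
     *   router.post('/ai/chat', checkRequestedModel, getChat, chatHandler);
     *
     *   export default router;
     */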