mirror of https://github.com/rzmk/learnhouse.git
synced 2025-12-19 04:19:25 +00:00

feat: org wide ai features check

parent de93d56945
commit 077c26ce15

24 changed files with 573 additions and 163 deletions
@@ -7,13 +7,19 @@ RUN pip install poetry
#
WORKDIR /usr/learnhouse/apps/api

#
COPY ./requirements.txt /usr/learnhouse/requirements.txt
# Copy poetry.lock* in case it doesn't exist in the repo
COPY ./poetry.lock* /usr/learnhouse/

#
RUN poetry config virtualenvs.create false \
    && pip install --upgrade pip \
    && pip install -r /usr/learnhouse/requirements.txt
# Copy project requirement files here to ensure they will be cached.
COPY pyproject.toml /usr/learnhouse/

# Install poetry
RUN pip install --upgrade pip \
    && pip install poetry \
    && poetry config virtualenvs.create false

# Install project dependencies.
RUN poetry install --no-interaction --no-ansi

#
COPY ./ /usr/learnhouse
apps/api/poetry.lock (generated, 51 changed lines)
@@ -226,17 +226,17 @@ typecheck = ["mypy"]

[[package]]
name = "boto3"
version = "1.34.17"
version = "1.34.18"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.8"
files = [
    {file = "boto3-1.34.17-py3-none-any.whl", hash = "sha256:1efc02be786884034d503d59c018cf7650d0cff9fcb37cd2eb49b802a6fe6111"},
    {file = "boto3-1.34.17.tar.gz", hash = "sha256:8ca248cc84e7e859e4e276eb9c4309fa01a3e58473bf48d6c33448be870c2bb8"},
    {file = "boto3-1.34.18-py3-none-any.whl", hash = "sha256:ae7cfdf45f4dfd33bd3e84e36afcfbf0517e64a32e647989a068f34d053572b8"},
    {file = "boto3-1.34.18.tar.gz", hash = "sha256:5e38ca63007e903a7efe0a1751a0374d287b50d7bc148b9d3d495cdf74a0b712"},
]

[package.dependencies]
botocore = ">=1.34.17,<1.35.0"
botocore = ">=1.34.18,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -245,13 +245,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]

[[package]]
name = "botocore"
version = "1.34.17"
version = "1.34.18"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.8"
files = [
    {file = "botocore-1.34.17-py3-none-any.whl", hash = "sha256:7272c39032c6f1d62781e4c8445d9a1d9140c2bf52ba7ee66bf6db559c4b2427"},
    {file = "botocore-1.34.17.tar.gz", hash = "sha256:e48a662f3a6919219276b55085e8f73c3347966675f55e9d448be30cf79678ee"},
    {file = "botocore-1.34.18-py3-none-any.whl", hash = "sha256:2067d8385c11b7cf2d336227d8fa5aea632fe61afbadb3168dc169dcc13d8c3e"},
    {file = "botocore-1.34.18.tar.gz", hash = "sha256:85a77e72560a45b0dfdad94f92f5e114c82be07a51bb2d19dd310dab8be158cf"},
]

[package.dependencies]
@@ -1362,13 +1362,13 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.

[[package]]
name = "langchain-core"
version = "0.1.9"
version = "0.1.10"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
    {file = "langchain_core-0.1.9-py3-none-any.whl", hash = "sha256:1dd45aec185ce3afb1c19fb2e88cdbc19fafa7ae929d8107799a7c82ef69ea9f"},
    {file = "langchain_core-0.1.9.tar.gz", hash = "sha256:4b51fdbdbc06027c26ea89a6da809cae2e404c9daa95dc6c10e3eae383d8ea6a"},
    {file = "langchain_core-0.1.10-py3-none-any.whl", hash = "sha256:d89952f6d0766cfc88d9f1e25b84d56f8d7bd63a45ad8ec1a9a038c9b49df16d"},
    {file = "langchain_core-0.1.10.tar.gz", hash = "sha256:3c9e1383264c102fcc6f865700dbb9416c4931a25d0ac2195f6311c6b867aa17"},
]

[package.dependencies]
@@ -1384,15 +1384,32 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
extended-testing = ["jinja2 (>=3,<4)"]

[[package]]
name = "langchain-openai"
version = "0.0.2.post1"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
    {file = "langchain_openai-0.0.2.post1-py3-none-any.whl", hash = "sha256:ba468b94c23da9d8ccefe5d5a3c1c65b4b9702292523e53acc689a9110022e26"},
    {file = "langchain_openai-0.0.2.post1.tar.gz", hash = "sha256:f8e78db4a663feeac71d9f036b9422406c199ea3ef4c97d99ff392c93530e073"},
]

[package.dependencies]
langchain-core = ">=0.1.7,<0.2"
numpy = ">=1,<2"
openai = ">=1.6.1,<2.0.0"
tiktoken = ">=0.5.2,<0.6.0"

[[package]]
name = "langsmith"
version = "0.0.79"
version = "0.0.80"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
    {file = "langsmith-0.0.79-py3-none-any.whl", hash = "sha256:be0374e913c36d9f6a13dd6b6e20a506066d5a0f3abfd476f9cf9e0b086ed744"},
    {file = "langsmith-0.0.79.tar.gz", hash = "sha256:d32639ccd18a92533b302f6f482255619afc8eb007fff91e37ee699d947c5e29"},
    {file = "langsmith-0.0.80-py3-none-any.whl", hash = "sha256:dee1c6ef9e8241b82a8851926624269954d0ff8e22d82e32e73455f387f4e245"},
    {file = "langsmith-0.0.80.tar.gz", hash = "sha256:6d22ee07eb41c65b3f5166b20041a026714952497d9e80d5be6879d3a5c14d84"},
]

[package.dependencies]
@@ -2013,13 +2030,13 @@ sympy = "*"

[[package]]
name = "openai"
version = "1.7.1"
version = "1.7.2"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
    {file = "openai-1.7.1-py3-none-any.whl", hash = "sha256:e52ad7ea015331edc584e6e9c98741c819d7ffbbd2ecc50bf1f55c33f9cb3f77"},
    {file = "openai-1.7.1.tar.gz", hash = "sha256:7556e6aa30e20254b1ad68de49bb5ef4d8106bfac5e8a78abdc1daa911fbb1fb"},
    {file = "openai-1.7.2-py3-none-any.whl", hash = "sha256:8f41b90a762f5fd9d182b45851041386fed94c8ad240a70abefee61a68e0ef53"},
    {file = "openai-1.7.2.tar.gz", hash = "sha256:c73c78878258b07f1b468b0602c6591f25a1478f49ecb90b9bd44b7cc80bce73"},
]

[package.dependencies]
@@ -4424,4 +4441,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "05aa4db63c592f8c68e48c3f9aa71e7d376852faaaef9a95c0d0ae74d848bae0"
content-hash = "76237f0e04218f9ca9a2593ccf952452bd6d45657066feec87373279fb7fe6a2"
@@ -38,6 +38,8 @@ sentence-transformers = "^2.2.2"
python-dotenv = "^1.0.0"
redis = "^5.0.1"
langchain-community = "^0.0.11"
langchain-openai = "^0.0.2.post1"


[build-system]
requires = ["poetry-core"]
@@ -20,6 +20,7 @@ sentry-sdk[fastapi]
pydantic[email]>=1.8.0,<2.0.0
langchain==0.1.0
langchain-community
langchain-openai
tiktoken
openai
chromadb
@@ -1,7 +1,4 @@
from json import JSONEncoder
import json
from typing import Literal, Optional
from click import Option
from pydantic import BaseModel
from sqlalchemy import JSON, BigInteger, Column, ForeignKey
from sqlmodel import Field, SQLModel
@@ -21,6 +18,7 @@ class AIEnabledFeatures(BaseModel):


class AIConfig(BaseModel):
    enabled : bool = True
    limits: AILimitsSettings = AILimitsSettings()
    embeddings: Literal[
        "text-embedding-ada-002", "all-MiniLM-L6-v2"
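For reference, the org-wide checks added elsewhere in this commit read the stored config through keys such as config["AIConfig"]["enabled"], ["limits"]["limits_enabled"], ["limits"]["max_asks"], ["embeddings"], and ["ai_model"]. A rough sketch of that slice of an organization's config, with purely illustrative values (the field names come from this diff; everything else is an assumption):

# Illustrative sketch only: the AIConfig slice of an organization's stored config.
# Field names are taken from this commit; the values are made up.
example_ai_config = {
    "AIConfig": {
        "enabled": True,
        "limits": {
            "limits_enabled": True,
            "max_asks": 100,  # example cap; counted per org over a rolling 30 days
        },
        "embeddings": "all-MiniLM-L6-v2",  # or "text-embedding-ada-002"
        "ai_model": "gpt-3.5-turbo",  # example model name
    }
}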
@@ -29,6 +29,6 @@ class OrganizationCreate(OrganizationBase):
class OrganizationRead(OrganizationBase):
    id: int
    org_uuid: str
    config: OrganizationConfig | dict
    config: Optional[OrganizationConfig | dict]
    creation_date: str
    update_date: str
@@ -1,6 +1,7 @@
from typing import List
from fastapi import APIRouter, Depends, Request, UploadFile
from sqlmodel import Session
from src.db.organization_config import OrganizationConfigBase
from src.db.users import PublicUser
from src.db.organizations import (
    Organization,
@@ -12,6 +13,7 @@ from src.core.events.database import get_db_session
from src.security.auth import get_current_user
from src.services.orgs.orgs import (
    create_org,
    create_org_with_config,
    delete_org,
    get_organization,
    get_organization_by_slug,
@@ -37,6 +39,23 @@ async def api_create_org(
    return await create_org(request, org_object, current_user, db_session)


# Temporary pre-alpha code
@router.post("/withconfig/")
async def api_create_org_withconfig(
    request: Request,
    org_object: OrganizationCreate,
    config_object: OrganizationConfigBase,
    current_user: PublicUser = Depends(get_current_user),
    db_session: Session = Depends(get_db_session),
) -> OrganizationRead:
    """
    Create new organization
    """
    return await create_org_with_config(
        request, org_object, current_user, db_session, config_object
    )


@router.get("/{org_id}")
async def api_get_org(
    request: Request,
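A rough idea of how a client could call the new route, assuming the router is mounted under an orgs prefix (neither the prefix nor the host appears in this diff) and relying on FastAPI's behaviour of embedding multiple body models under their parameter names; all field values are illustrative:

# Hypothetical client call for the new /withconfig/ route. The URL, auth header,
# and field values are assumptions; only the parameter names (org_object,
# config_object) come from the endpoint signature above.
import httpx

payload = {
    "org_object": {"name": "Example Org", "slug": "example-org"},
    "config_object": {
        "AIConfig": {
            "enabled": True,
            "limits": {"limits_enabled": True, "max_asks": 50},
            "embeddings": "all-MiniLM-L6-v2",
            "ai_model": "gpt-3.5-turbo",
        }
    },
}

response = httpx.post(
    "http://localhost:8000/orgs/withconfig/",  # assumed mount point
    json=payload,
    headers={"Authorization": "Bearer <access token>"},
)
print(response.status_code, response.json())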
@@ -110,7 +129,7 @@ async def api_update_org(
    """
    Update Org by ID
    """
    return await update_org(request, org_object,org_id, current_user, db_session)
    return await update_org(request, org_object, org_id, current_user, db_session)


@router.delete("/{org_id}")
@@ -2,6 +2,9 @@ from uuid import uuid4
from fastapi import Depends, HTTPException, Request
from requests import session
from sqlmodel import Session, select
from src.db.organization_config import OrganizationConfig
from src.db.organizations import Organization
from src.services.ai.utils import check_limits_and_config, count_ai_ask
from src.db.courses import Course, CourseRead
from src.core.events.database import get_db_session
from src.db.users import PublicUser
@@ -29,6 +32,7 @@ def ai_start_activity_chat_session(
    """
    Start a new AI Chat session with a Course Activity
    """

    # Get the Activity
    statement = select(Activity).where(
        Activity.activity_uuid == chat_session_object.activity_uuid
@@ -46,6 +50,14 @@
    course = db_session.exec(statement).first()
    course = CourseRead.from_orm(course)

    # Get the Organization
    statement = select(Organization).where(Organization.id == course.org_id)
    org = db_session.exec(statement).first()

    # Check limits and usage
    check_limits_and_config(db_session, org) # type: ignore
    count_ai_ask(org, "increment") # type: ignore

    if not activity:
        raise HTTPException(
            status_code=404,
@@ -61,28 +73,48 @@
        structured, course, activity
    )

    # Get Activity Organization
    statement = select(Organization).where(Organization.id == course.org_id)
    org = db_session.exec(statement).first()

    # Get Organization Config
    statement = select(OrganizationConfig).where(
        OrganizationConfig.org_id == org.id # type: ignore
    )
    result = db_session.exec(statement)
    org_config = result.first()

    org_config = OrganizationConfig.from_orm(org_config)
    embeddings = org_config.config["AIConfig"]["embeddings"]
    ai_model = org_config.config["AIConfig"]["ai_model"]

    chat_session = get_chat_session_history()

    message = "You are a helpful Education Assistant, and you are helping a student with the associated Course. "
    message += "Use the available tools to get context about this question even if the question is not specific enough."
    message += "For context, this is the Course name :"
    message += course.name
    message += " and this is the Lecture name :"
    message += activity.name
    message += "."
    message += "Use your knowledge to help the student if the context is not enough."

    response = ask_ai(
        chat_session_object.message,
        chat_session['message_history'],
        chat_session["message_history"],
        ai_friendly_text,
        "You are a helpful Education Assistant, and you are helping a student with the associated Course. "
        "Use the available tools to get context about this question even if the question is not specific enough."
        "For context, this is the Course name :"
        + course.name
        + " and this is the Lecture name :"
        + activity.name
        + "."
        "Use your knowledge to help the student.",
        message,
        embeddings,
        ai_model,
    )

    return ActivityAIChatSessionResponse(
        aichat_uuid=chat_session['aichat_uuid'],
        aichat_uuid=chat_session["aichat_uuid"],
        activity_uuid=activity.activity_uuid,
        message=response["output"],
    )


def ai_send_activity_chat_message(
    request: Request,
    chat_session_object: SendActivityAIChatMessage,
@@ -109,6 +141,14 @@
    course = db_session.exec(statement).first()
    course = CourseRead.from_orm(course)

    # Get the Organization
    statement = select(Organization).where(Organization.id == course.org_id)
    org = db_session.exec(statement).first()

    # Check limits and usage
    check_limits_and_config(db_session, org) # type: ignore
    count_ai_ask(org, "increment") # type: ignore

    if not activity:
        raise HTTPException(
            status_code=404,
@@ -116,7 +156,7 @@
    )

    # Get Activity Content Blocks
    content = activity.content
    content = activity.content

    # Serialize Activity Content Blocks to a text comprehensible by the AI
    structured = structure_activity_content_by_type(content)
@@ -124,24 +164,43 @@
        structured, course, activity
    )

    # Get Activity Organization
    statement = select(Organization).where(Organization.id == course.org_id)
    org = db_session.exec(statement).first()

    # Get Organization Config
    statement = select(OrganizationConfig).where(
        OrganizationConfig.org_id == org.id # type: ignore
    )
    result = db_session.exec(statement)
    org_config = result.first()

    org_config = OrganizationConfig.from_orm(org_config)
    embeddings = org_config.config["AIConfig"]["embeddings"]
    ai_model = org_config.config["AIConfig"]["ai_model"]

    chat_session = get_chat_session_history(chat_session_object.aichat_uuid)

    message = "You are a helpful Education Assistant, and you are helping a student with the associated Course. "
    message += "Use the available tools to get context about this question even if the question is not specific enough."
    message += "For context, this is the Course name :"
    message += course.name
    message += " and this is the Lecture name :"
    message += activity.name
    message += "."
    message += "Use your knowledge to help the student if the context is not enough."

    response = ask_ai(
        chat_session_object.message,
        chat_session['message_history'],
        chat_session["message_history"],
        ai_friendly_text,
        "You are a helpful Education Assistant, and you are helping a student with the associated Course. "
        "Use the available tools to get context about this question even if the question is not specific enough."
        "For context, this is the Course name :"
        + course.name
        + " and this is the Lecture name :"
        + activity.name
        + "."
        "Use your knowledge to help the student if the context is not enough.",
        message,
        embeddings,
        ai_model,
    )

    return ActivityAIChatSessionResponse(
        aichat_uuid=chat_session['aichat_uuid'],
        aichat_uuid=chat_session["aichat_uuid"],
        activity_uuid=activity.activity_uuid,
        message=response["output"],
    )
@@ -11,6 +11,7 @@ from langchain_core.messages import SystemMessage
from langchain.agents.openai_functions_agent.agent_token_buffer_memory import (
    AgentTokenBufferMemory,
)
from langchain_openai import OpenAIEmbeddings
from langchain_community.chat_models import ChatOpenAI
from langchain.agents.agent_toolkits import (
    create_retriever_tool,
@@ -31,6 +32,8 @@ def ask_ai(
    message_history,
    text_reference: str,
    message_for_the_prompt: str,
    embedding_model_name: str,
    openai_model_name: str,
):
    # Get API Keys
    LH_CONFIG = get_learnhouse_config()
@@ -41,8 +44,20 @@
    documents = text_splitter.create_documents([text_reference])
    texts = text_splitter.split_documents(documents)

    # create the open-source embedding function
    embedding_function = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
    embedding_models = {
        "all-MiniLM-L6-v2": SentenceTransformerEmbeddings,
        "text-embedding-ada-002": OpenAIEmbeddings,
    }

    embedding_function = None

    if embedding_model_name in embedding_models:
        if embedding_model_name == "text-embedding-ada-002":
            embedding_function = embedding_models[embedding_model_name](model=embedding_model_name, api_key=openai_api_key)
        if embedding_model_name == "all-MiniLM-L6-v2":
            embedding_function = embedding_models[embedding_model_name](model_name=embedding_model_name)
    else:
        embedding_function = embedding_models[embedding_model_name](model_name=embedding_model_name)

    # load it into Chroma and use it as a retriever
    db = Chroma.from_documents(texts, embedding_function)
@@ -53,12 +68,14 @@
    )
    tools = [tool]

    llm = ChatOpenAI(temperature=0, api_key=openai_api_key, model_name="gpt-3.5-turbo")
    llm = ChatOpenAI(
        temperature=0, api_key=openai_api_key, model_name=openai_model_name
    )

    memory_key = "history"

    memory = AgentTokenBufferMemory(
        memory_key=memory_key, llm=llm, chat_memory=message_history, max_tokens=1000
        memory_key=memory_key, llm=llm, chat_memory=message_history, max_token_limit=1000
    )

    system_message = SystemMessage(content=(message_for_the_prompt))
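With the widened signature, callers now thread the organization's embedding and model choices through to ask_ai. A condensed sketch of the call shape as wired up in the ai.py hunks above; the literal user message is the only invented value, the remaining arguments are the variables built in that service code:

# Condensed sketch of the new ask_ai call shape (mirrors the ai.py hunks above).
response = ask_ai(
    "What does this lecture cover?",   # user message (illustrative)
    chat_session["message_history"],   # chat memory for this session
    ai_friendly_text,                  # serialized activity content
    message,                           # system prompt assembled above
    embeddings,                        # e.g. "all-MiniLM-L6-v2" from AIConfig
    ai_model,                          # e.g. "gpt-3.5-turbo" from AIConfig
)
print(response["output"])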
apps/api/src/services/ai/utils.py (new file, 114 lines)
@@ -0,0 +1,114 @@
from typing import Literal
import redis
from fastapi import HTTPException
from sqlmodel import Session, select
from config.config import get_learnhouse_config
from src.db.organization_config import OrganizationConfig
from src.db.organizations import Organization


def count_ai_ask(
    organization: Organization,
    operation: Literal["increment", "decrement"],
):
    """
    Count the number of AI asks
    """

    LH_CONFIG = get_learnhouse_config()
    redis_conn_string = LH_CONFIG.redis_config.redis_connection_string

    if not redis_conn_string:
        raise HTTPException(
            status_code=500,
            detail="Redis connection string not found",
        )

    # Connect to Redis
    r = redis.Redis.from_url(redis_conn_string)

    if not r:
        raise HTTPException(
            status_code=500,
            detail="Could not connect to Redis",
        )

    # Get the number of AI asks
    ai_asks = r.get(f"ai_asks:{organization.org_uuid}")

    if ai_asks is None:
        ai_asks = 0

    # Increment or decrement the number of AI asks
    if operation == "increment":
        ai_asks = int(ai_asks) + 1
    elif operation == "decrement":
        ai_asks = int(ai_asks) - 1

    # Update the number of AI asks
    r.set(f"ai_asks:{organization.org_uuid}", ai_asks)

    # Set the expiration time to 30 days
    r.expire(f"ai_asks:{organization.org_uuid}", 2592000)


def check_limits_and_config(db_session: Session, organization: Organization):
    """
    Check the limits and config of an Organization
    """

    # Get the Organization Config
    statement = select(OrganizationConfig).where(
        OrganizationConfig.org_id == organization.id
    )
    result = db_session.exec(statement)
    org_config = result.first()

    if org_config is None:
        raise HTTPException(
            status_code=404,
            detail="Organization has no config",
        )

    # Check if the Organizations has AI enabled
    if org_config.config["AIConfig"]["enabled"] == False:
        raise HTTPException(
            status_code=403,
            detail="Organization has AI disabled",
        )

    # Check if the Organization has Limits enabled and if the max_asks limit has been reached
    if org_config.config["AIConfig"]["limits"]["limits_enabled"] == True:
        LH_CONFIG = get_learnhouse_config()
        redis_conn_string = LH_CONFIG.redis_config.redis_connection_string

        if not redis_conn_string:
            raise HTTPException(
                status_code=500,
                detail="Redis connection string not found",
            )

        # Connect to Redis
        r = redis.Redis.from_url(redis_conn_string)

        if not r:
            raise HTTPException(
                status_code=500,
                detail="Could not connect to Redis",
            )

        # Get the number of AI asks
        ai_asks = r.get(f"ai_asks:{organization.org_uuid}")

        # Get a number of AI asks
        if ai_asks is None:
            ai_asks = 0
        else:
            ai_asks = int(ai_asks)

        # Check if the Number of asks is less than the max_asks limit
        if org_config.config["AIConfig"]["limits"]["max_asks"] <= ai_asks:
            raise HTTPException(
                status_code=403,
                detail="Organization has reached the max number of AI asks",
            )
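Together these two helpers implement the org-wide feature check named in the commit title: callers first validate the organization's AIConfig and usage cap, then bump the rolling counter. A minimal sketch of the intended call order, mirroring the ai.py changes above (db_session and org are assumed to be resolved by the calling service code):

# Minimal usage sketch, mirroring how the ai.py service calls these helpers.
check_limits_and_config(db_session, org)  # raises 403/404/500 when AI is off, config is missing, or the cap is hit
count_ai_ask(org, "increment")  # bump the per-org counter kept in Redis (expires after 30 days)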
@@ -44,12 +44,12 @@ async def create_image_block(
        image_file,
        activity_uuid,
        block_uuid,
        ["jpg", "jpeg", "png", "gif"],
        ["jpg", "jpeg", "png", "gif", "webp"],
        block_type,
        org.org_uuid,
        str(course.course_uuid),
    )


    # create block
    block = Block(
        activity_id=activity.id if activity.id else 0,
@@ -1,30 +1,25 @@
from src.db.activities import ActivityRead
from src.db.courses import CourseRead


def structure_activity_content_by_type(activity):
    ### Get Headings, Texts, Callouts, Answers and Paragraphs from the activity as a big list of strings (text only) and return it
    content = activity["content"]

    # Get Headings
    headings = []
    for item in activity["content"]:
        if item["type"] == "heading":
            headings.append(item["content"][0]["text"])

    # Get Callouts
    callouts = []
    for item in activity["content"]:
        if item["type"] == "calloutInfo":
            # Get every type of text in the callout
            text = ""
            for text_item in item["content"]:
                text += text_item["text"]
            callouts.append(text)

    # Get Paragraphs
    paragraphs = []
    for item in activity["content"]:
        if item["type"] == "paragraph":
            paragraphs.append(item["content"][0]["text"])

    for item in content:
        if 'content' in item:
            if item["type"] == "heading" and "text" in item["content"][0]:
                headings.append(item["content"][0]["text"])
            elif item["type"] in ["calloutInfo", "calloutWarning"] and all("text" in text_item for text_item in item["content"]):
                callouts.append(
                    "".join([text_item["text"] for text_item in item["content"]])
                )
            elif item["type"] == "paragraph" and "text" in item["content"][0]:
                paragraphs.append(item["content"][0]["text"])

    # TODO: Get Questions and Answers (if any)
@@ -39,10 +34,14 @@ def structure_activity_content_by_type(activity):
    # Add Paragraphs
    data_array.append({"Paragraphs": paragraphs})

    print(data_array)

    return data_array


def serialize_activity_text_to_ai_comprehensible_text(data_array, course: CourseRead, activity: ActivityRead):
def serialize_activity_text_to_ai_comprehensible_text(
    data_array, course: CourseRead, activity: ActivityRead
):
    ### Serialize the text to a format that is comprehensible by the AI

    # Serialize Headings
@@ -63,9 +62,13 @@ def serialize_activity_text_to_ai_comprehensible_text(data_array, course: Course

    # Get a text that is comprehensible by the AI
    text = (
        'Use this as a context ' +
        'This is a course about "' + course.name + '". '
        + 'This is a lecture about "' + activity.name + '". '
        "Use this as a context "
        + 'This is a course about "'
        + course.name
        + '". '
        + 'This is a lecture about "'
        + activity.name
        + '". '
        'These are the headings: "'
        + serialized_headings
        + '" These are the callouts: "'
@@ -165,6 +165,7 @@ async def create_org(
            active=True,
        ),
        AIConfig=AIConfig(
            enabled=False,
            limits=AILimitsSettings(
                limits_enabled=False,
                max_asks=0,
@@ -210,6 +211,87 @@ async def create_org(
    return org


# Temporary pre-alpha code
async def create_org_with_config(
    request: Request,
    org_object: OrganizationCreate,
    current_user: PublicUser | AnonymousUser,
    db_session: Session,
    submitted_config: OrganizationConfigBase,
):
    statement = select(Organization).where(Organization.slug == org_object.slug)
    result = db_session.exec(statement)

    org = result.first()

    if org:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Organization already exists",
        )

    org = Organization.from_orm(org_object)

    if isinstance(current_user, AnonymousUser):
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="You should be logged in to be able to achieve this action",
        )

    # Complete the org object
    org.org_uuid = f"org_{uuid4()}"
    org.creation_date = str(datetime.now())
    org.update_date = str(datetime.now())

    db_session.add(org)
    db_session.commit()
    db_session.refresh(org)

    # Link user to org
    user_org = UserOrganization(
        user_id=int(current_user.id),
        org_id=int(org.id if org.id else 0),
        role_id=1,
        creation_date=str(datetime.now()),
        update_date=str(datetime.now()),
    )

    db_session.add(user_org)
    db_session.commit()
    db_session.refresh(user_org)

    org_config = submitted_config

    org_config = json.loads(org_config.json())

    # OrgSettings
    org_settings = OrganizationConfig(
        org_id=int(org.id if org.id else 0),
        config=org_config,
        creation_date=str(datetime.now()),
        update_date=str(datetime.now()),
    )

    db_session.add(org_settings)
    db_session.commit()
    db_session.refresh(org_settings)

    # Get org config
    statement = select(OrganizationConfig).where(OrganizationConfig.org_id == org.id)
    result = db_session.exec(statement)

    org_config = result.first()

    if org_config is None:
        logging.error(f"Organization {org.id} has no config")

    config = OrganizationConfig.from_orm(org_config)

    org = OrganizationRead(**org.dict(), config=config)

    return org


async def update_org(
    request: Request,
    org_object: OrganizationUpdate,