Mirror of https://github.com/rzmk/learnhouse.git (synced 2025-12-19 04:19:25 +00:00)

Merge pull request #150 from learnhouse/fix/upgrade-packages
Update Python & Dependencies + Retire MongoDB

Commit d70071c0cd
11 changed files with 550 additions and 985 deletions

@@ -27,7 +27,7 @@ Initiate a dev environment, please check the official guide [here](https://docs.
 - **Radix UI** - Accessible UI Components
 - **Tiptap** - An editor framework and headless wrapper around ProseMirror
 - **YJS** - Shared data types for building collaborative software
-- **MongoDB** - NoSQL Database
+- **PostgreSQL** - SQL Database
 - **React** - duh
 
 ### Get started

@@ -1,5 +1,5 @@
 #
-FROM python:3.11
+FROM python:3.12
 
 # poetry
 RUN pip install poetry

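As a quick aside (not part of the diff): after the base image moves to python:3.12, a startup assertion is an easy way to catch containers still built from a stale image. A minimal sketch:

```python
import sys

# Fail fast if the container was built from an interpreter older than the new base image.
if sys.version_info < (3, 12):
    raise RuntimeError(f"Expected Python 3.12+, got {sys.version.split()[0]}")

print(f"Running on Python {sys.version.split()[0]}")
```
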
@@ -53,7 +53,6 @@ class HostingConfig(BaseModel):
 
 class DatabaseConfig(BaseModel):
     sql_connection_string: Optional[str]
-    mongo_connection_string: Optional[str]
 
 class RedisConfig(BaseModel):
     redis_connection_string: Optional[str]

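For reference, here is a minimal sketch of the Postgres-only configuration models once mongo_connection_string is gone. It assumes the surrounding module keeps the pydantic v1-style BaseModel and Optional fields shown in the hunk above; the example value is taken from the sample config later in this diff.

```python
from typing import Optional

from pydantic import BaseModel


class DatabaseConfig(BaseModel):
    # Only the SQL (PostgreSQL) connection string remains after retiring MongoDB.
    sql_connection_string: Optional[str]


class RedisConfig(BaseModel):
    redis_connection_string: Optional[str]


# Example usage with the value from the sample configuration.
db_config = DatabaseConfig(
    sql_connection_string="postgresql://learnhouse:learnhouse@db:5432/learnhouse"
)
print(db_config.dict())
```
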
@@ -182,9 +181,7 @@ def get_learnhouse_config() -> LearnHouseConfig:
         "database_config", {}
     ).get("sql_connection_string")
 
-    mongo_connection_string = yaml_config.get("database_config", {}).get(
-        "mongo_connection_string"
-    )
 
     # Redis config
     env_redis_connection_string = os.environ.get("LEARNHOUSE_REDIS_CONNECTION_STRING")

@@ -244,7 +241,6 @@ def get_learnhouse_config() -> LearnHouseConfig:
     )
     database_config = DatabaseConfig(
         sql_connection_string=sql_connection_string,
-        mongo_connection_string=mongo_connection_string,
     )
 
     # AI Config

@@ -26,7 +26,6 @@ hosting_config:
 
 database_config:
   sql_connection_string: postgresql://learnhouse:learnhouse@db:5432/learnhouse
-  mongo_connection_string: mongodb://learnhouse:learnhouse@mongo:27017/
 
 redis_config:
   redis_connection_string: redis://redis:6379/learnhouse

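A rough sketch of how the trimmed database_config block can be read with pyyaml (already a project dependency). The config.yaml file name and the LEARNHOUSE_SQL_CONNECTION_STRING override are assumptions for illustration; the loader hunks above only show the Redis connection string being read from the environment this way.

```python
import os

import yaml

# Load the YAML configuration (file name assumed for this sketch).
with open("config.yaml") as f:
    yaml_config = yaml.safe_load(f) or {}

# Environment first, then YAML, mirroring the loader's .get() chain above.
sql_connection_string = os.environ.get(
    "LEARNHOUSE_SQL_CONNECTION_STRING"  # hypothetical variable name
) or yaml_config.get("database_config", {}).get("sql_connection_string")

print(sql_connection_string)  # e.g. postgresql://learnhouse:learnhouse@db:5432/learnhouse
```
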
apps/api/poetry.lock (generated): 1183 lines changed
File diff suppressed because it is too large

@@ -1,5 +1,4 @@
 [tool.ruff]
 # E501 line too long (82 > 79 characters)
 ignore = ["E501", "E712"]
 
 [tool.poetry]

@@ -10,13 +9,11 @@ authors = ["Badr B. (swve)"]
 readme = "README.md"
 
 [tool.poetry.dependencies]
-python = "^3.11"
-fastapi = "0.109.1"
+python = "^3.12"
+fastapi = "0.109.2"
 pydantic = {version = ">=1.8.0,<2.0.0", extras = ["email"]}
 sqlmodel = "0.0.10"
-uvicorn = "0.23.2"
-pymongo = "4.3.3"
-motor = "3.1.1"
+uvicorn = "0.27.1"
 psycopg2 = "^2.9.9"
 python-multipart = "^0.0.7"
 boto3 = "^1.34.17"

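After changing these pins, the lock file has to be regenerated (hence the large poetry.lock diff above, typically via poetry lock followed by poetry install). A rough runtime sanity check for the new versions, assuming nothing beyond the packages exposing __version__ (FastAPI and Uvicorn both do):

```python
import sys

import fastapi
import uvicorn

# Post-upgrade sanity check against the pins in pyproject.toml.
print("python :", sys.version.split()[0])  # expect 3.12.x
print("fastapi:", fastapi.__version__)     # expect 0.109.2
print("uvicorn:", uvicorn.__version__)     # expect 0.27.1
```
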
@@ -1,28 +0,0 @@
-fastapi==0.109.1
-pydantic>=1.8.0,<2.0.0
-sqlmodel==0.0.10
-uvicorn==0.23.2
-pymongo==4.3.3
-motor==3.1.1
-psycopg2
-python-multipart
-boto3
-botocore
-python-jose
-passlib
-fastapi-jwt-auth
-pytest
-httpx
-faker
-requests
-pyyaml
-sentry-sdk[fastapi]
-pydantic[email]>=1.8.0,<2.0.0
-langchain==0.1.0
-langchain-community
-langchain-openai
-tiktoken
-openai
-chromadb
-python-dotenv
-redis

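With requirements.txt removed, pyproject.toml and poetry.lock become the single source of truth for dependencies. A small sketch to confirm the Mongo client libraries are absent from an environment built from the new lock file:

```python
from importlib import metadata

# pymongo and motor were only needed for the retired MongoDB code paths.
for package in ("pymongo", "motor"):
    try:
        print(f"{package} {metadata.version(package)} is still installed")
    except metadata.PackageNotFoundError:
        print(f"{package} is not installed (expected after this change)")
```
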
@@ -2,7 +2,6 @@ import logging
 from config.config import get_learnhouse_config
 from fastapi import FastAPI
 from sqlmodel import SQLModel, Session, create_engine
-import motor.motor_asyncio
 
 learnhouse_config = get_learnhouse_config()
 engine = create_engine(

@@ -16,12 +15,7 @@ async def connect_to_db(app: FastAPI):
     logging.info("LearnHouse database has been started.")
     SQLModel.metadata.create_all(engine)
 
-    # MongoDB for migration purposes
-    # mongodb
-    app.mongodb_client = motor.motor_asyncio.AsyncIOMotorClient(  # type: ignore
-        app.learnhouse_config.database_config.mongo_connection_string  # type: ignore
-    )  # type: ignore
-    app.db = app.mongodb_client["learnhouse"]  # type: ignore
 
 
 def get_db_session():

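Pieced together from the two hunks above, the Postgres-only startup path reduces to roughly the following sketch. The inline connection string and the body of get_db_session are assumptions (in the real module the string comes from get_learnhouse_config() and the generator body is outside the shown hunks):

```python
import logging

from fastapi import FastAPI
from sqlmodel import Session, SQLModel, create_engine

# In the real module this comes from get_learnhouse_config(); shown inline for the sketch.
engine = create_engine("postgresql://learnhouse:learnhouse@db:5432/learnhouse")


async def connect_to_db(app: FastAPI):
    logging.info("LearnHouse database has been started.")
    SQLModel.metadata.create_all(engine)


def get_db_session():
    # Typical session-per-request dependency (assumed body, not shown in the diff).
    with Session(engine) as session:
        yield session
```
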
@@ -1,7 +1,4 @@
-from fastapi import APIRouter, Depends, Request
-from sqlmodel import Session
-from src.core.events.database import get_db_session
-from src.services.dev.migration_from_mongo import start_migrate_from_mongo
+from fastapi import APIRouter
 from config.config import get_learnhouse_config
 
 

@@ -14,9 +11,4 @@ async def config():
     return config.dict()
 
 
-@router.get("/migrate_from_mongo")
-async def migrate_from_mongo(
-    request: Request,
-    db_session: Session = Depends(get_db_session),
-):
-    return await start_migrate_from_mongo(request, db_session)

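With the migration route gone, the dev router keeps only its config endpoint. A minimal sketch of what remains, where the router setup and the "/config" path are assumptions; only the imports, the function name, and the return statement appear in the diff:

```python
from fastapi import APIRouter

from config.config import get_learnhouse_config

router = APIRouter()


@router.get("/config")  # path assumed for illustration
async def config():
    config = get_learnhouse_config()
    return config.dict()
```
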
@@ -1,275 +0,0 @@
-import datetime
-from fastapi import Request
-from sqlmodel import Session, select
-from src.db.blocks import Block, BlockTypeEnum
-from src.db.chapter_activities import ChapterActivity
-from src.db.activities import Activity, ActivitySubTypeEnum, ActivityTypeEnum
-from src.db.course_chapters import CourseChapter
-from src.db.resource_authors import ResourceAuthor, ResourceAuthorshipEnum
-from src.db.user_organizations import UserOrganization
-from src.db.chapters import Chapter
-from src.db.courses import Course
-from src.db.users import User
-
-from src.db.organizations import Organization
-
-
-async def start_migrate_from_mongo(request: Request, db_session: Session):
-    orgs = request.app.db["organizations"]
-
-    ## ----> Organizations migration
-    org_db_list = await orgs.find().to_list(length=100)
-
-    for org in org_db_list:
-        org_to_add = Organization(
-            name=org["name"],
-            description=org["description"],
-            slug=org["slug"],
-            logo_image=org["logo"],
-            email=org["email"],
-            org_uuid=org["org_id"],
-            creation_date=str(datetime.datetime.now()),
-            update_date=str(datetime.datetime.now()),
-        )
-        db_session.add(org_to_add)
-        db_session.commit()
-
-    print("Migrated organizations.")
-
-    ## ----> Users migration
-    users = request.app.db["users"]
-
-    users_db_list = await users.find().to_list(length=100)
-
-    for user in users_db_list:
-        user_to_add = User(
-            email=user["email"],
-            username=user["username"],
-            first_name="",
-            last_name="",
-            user_uuid=user["user_id"],
-            password=user["password"],
-            creation_date=user["creation_date"],
-            update_date=user["update_date"],
-        )
-        db_session.add(user_to_add)
-        db_session.commit()
-
-        # Link Orgs to users and make them owners
-        for org in user["orgs"]:
-            statement = select(Organization).where(
-                Organization.org_uuid == org["org_id"]
-            )
-            org_from_db = db_session.exec(statement).first()
-
-            statement = select(User).where(User.user_uuid == user["user_id"])
-            user_from_db = db_session.exec(statement).first()
-
-            user_org_object = UserOrganization(
-                user_id=user_from_db.id,  # type: ignore
-                org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                role_id=1,
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(user_org_object)
-            db_session.commit()
-
-    print("Migrated users and linked them to orgs.")
-
-    ## ----> Courses migration
-    courses = request.app.db["courses"]
-
-    courses_db_list = await courses.find().to_list(length=300)
-
-    for course in courses_db_list:
-        # Get the organization id
-        statement = select(Organization).where(
-            Organization.org_uuid == course["org_id"]
-        )
-        org_from_db = db_session.exec(statement).first()
-
-        course_to_add = Course(
-            name=course["name"],
-            description=course["description"],
-            about=course["description"],
-            learnings="",
-            course_uuid=course["course_id"],
-            thumbnail_image=course["thumbnail"],
-            tags="",
-            org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-            public=course["public"],
-            creation_date=str(course["creationDate"]),
-            update_date=str(course["updateDate"]),
-        )
-        db_session.add(course_to_add)
-        db_session.commit()
-
-        # Get this course
-        statement = select(Course).where(Course.course_uuid == course["course_id"])
-        course_from_db = db_session.exec(statement).first()
-
-        # Add Authorship
-        authors = course["authors"]
-
-        for author in authors:
-            # Get the user id
-            statement = select(User).where(User.user_uuid == author)
-            user_from_db = db_session.exec(statement).first()
-
-            authorship = ResourceAuthor(
-                resource_uuid=course_from_db.course_uuid,  # type: ignore
-                user_id=user_from_db.id if user_from_db is not None else None,  # type: ignore
-                authorship=ResourceAuthorshipEnum.CREATOR,
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(authorship)
-            db_session.commit()
-
-        print("Added authorship.")
-
-        ## ----> Chapters migration & Link
-
-        chapter_object = course["chapters_content"]
-        order = 0
-        for chapter in chapter_object:
-            chapter_to_add = Chapter(
-                name=chapter["name"],
-                description=chapter["description"],
-                chapter_uuid=chapter["coursechapter_id"].replace(
-                    "coursechapter", "chapter"
-                ),
-                org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                course_id=course_from_db.id,  # type: ignore
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(chapter_to_add)
-            db_session.commit()
-
-            # Get this chapter
-            statement = select(Chapter).where(
-                Chapter.chapter_uuid
-                == chapter["coursechapter_id"].replace("coursechapter", "chapter")
-            )
-            chapter_from_db = db_session.exec(statement).first()
-
-            # Link chapter to course
-            coursechapter_to_add = CourseChapter(
-                chapter_id=chapter_from_db.id,  # type: ignore
-                course_id=course_from_db.id,  # type: ignore
-                order=order,
-                org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-
-            db_session.add(coursechapter_to_add)
-            db_session.commit()
-
-            order += 1
-
-            ## ----> Activities migration
-            activities = request.app.db["activities"]
-            activities_db_list = await activities.find(
-                {"coursechapter_id": chapter["coursechapter_id"]}
-            ).to_list(length=100)
-
-            activity_order = 0
-
-            for activity in activities_db_list:
-                type_to_use = ActivityTypeEnum.TYPE_CUSTOM
-                sub_type_to_use = ActivityTypeEnum.TYPE_CUSTOM
-
-                if activity["type"] == "video":
-                    type_to_use = ActivityTypeEnum.TYPE_VIDEO
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_VIDEO_HOSTED
-
-                    if "external_video" in activity["content"]:
-                        type_to_use = ActivityTypeEnum.TYPE_VIDEO
-                        sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_VIDEO_YOUTUBE
-
-                if activity["type"] == "documentpdf":
-                    type_to_use = ActivityTypeEnum.TYPE_DOCUMENT
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_DOCUMENT_PDF
-
-                if activity["type"] == "dynamic":
-                    type_to_use = ActivityTypeEnum.TYPE_DYNAMIC
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_DYNAMIC_PAGE
-
-                activity_to_add = Activity(
-                    name=activity["name"],
-                    activity_uuid=activity["activity_id"],
-                    version=1,
-                    published_version=1,
-                    activity_type=type_to_use,
-                    content=activity["content"],
-                    activity_sub_type=sub_type_to_use,
-                    chapter_id=chapter_from_db.id,  # type: ignore
-                    org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                    course_id=course_from_db.id,  # type: ignore
-                    creation_date=str(activity["creationDate"]),
-                    update_date=str(activity["updateDate"]),
-                )
-                db_session.add(activity_to_add)
-                db_session.commit()
-
-                # Link activity to chapter
-                statement = select(Activity).where(
-                    Activity.activity_uuid == activity["activity_id"]
-                )
-
-                activity_from_db = db_session.exec(statement).first()
-
-                activitychapter_to_add = ChapterActivity(
-                    chapter_id=chapter_from_db.id,  # type: ignore
-                    activity_id=activity_from_db.id,  # type: ignore
-                    order=activity_order,
-                    course_id=course_from_db.id,  # type: ignore
-                    org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                    creation_date=str(datetime.datetime.now()),
-                    update_date=str(datetime.datetime.now()),
-                )
-
-                db_session.add(activitychapter_to_add)
-                db_session.commit()
-
-                activity_order += 1
-
-                ## ----> Blocks migration
-                blocks = request.app.db["blocks"]
-
-                blocks_db_list = await blocks.find(
-                    {"activity_id": activity["activity_id"]}
-                ).to_list(length=200)
-
-                for block in blocks_db_list:
-                    type_to_use = BlockTypeEnum.BLOCK_CUSTOM
-
-                    if block["block_type"] == "imageBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_IMAGE
-
-                    if block["block_type"] == "videoBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_VIDEO
-
-                    if block["block_type"] == "pdfBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_DOCUMENT_PDF
-
-                    print('block', block)
-
-                    block_to_add = Block(
-                        block_uuid=block["block_id"],
-                        content=block["block_data"],
-                        block_type=type_to_use,
-                        activity_id=activity_from_db.id,  # type: ignore
-                        org_id=org_from_db.id if org_from_db is not None else None,  # type: ignore
-                        course_id=course_from_db.id,  # type: ignore
-                        chapter_id=chapter_from_db.id,  # type: ignore
-                        creation_date=str(datetime.datetime.now()),
-                        update_date=str(datetime.datetime.now()),
-                    )
-                    db_session.add(block_to_add)
-                    db_session.commit()
-
-    return "Migration successfull."

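The one-off Mongo-to-Postgres migration script above is deleted together with its route, so anyone who still needs it has to run it from a checkout that predates this commit, while both databases are reachable. A rough sketch of triggering the old endpoint with httpx (already a dependency); the base URL and any router prefix are assumptions, only the /migrate_from_mongo path is visible in the router diff:

```python
import httpx

# Hypothetical base URL of a pre-upgrade API instance; adjust host, port and prefix as needed.
BASE_URL = "http://localhost:8000"

# The path comes from the removed @router.get("/migrate_from_mongo") handler.
response = httpx.get(f"{BASE_URL}/migrate_from_mongo", timeout=None)
print(response.status_code, response.text)  # the old handler returned "Migration successfull."
```
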
@@ -13,7 +13,6 @@ services:
         condition: service_healthy
       redis:
         condition: service_healthy
 
   db:
     image: postgres:16-alpine
     restart: always

@@ -38,11 +37,3 @@ services:
       interval: 5s
       timeout: 4s
       retries: 5
-  mongo:
-    image: mongo:5.0
-    restart: always
-    ports:
-      - "27017:27017"
-    environment:
-      - MONGO_INITDB_ROOT_USERNAME=learnhouse
-      - MONGO_INITDB_ROOT_PASSWORD=learnhous

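With the mongo service dropped from the compose file, Postgres is the only database container left. A small connectivity check from the host with psycopg2, assuming the db service publishes port 5432 to localhost (the port mapping is outside the hunks shown here):

```python
import psycopg2

# Credentials match the compose/config values above; host and port assume a published 5432.
conn = psycopg2.connect("postgresql://learnhouse:learnhouse@localhost:5432/learnhouse")
try:
    with conn.cursor() as cur:
        cur.execute("SELECT version();")
        print(cur.fetchone()[0])  # e.g. "PostgreSQL 16.x ..."
finally:
    conn.close()
```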