Mirror of https://github.com/rzmk/learnhouse.git (synced 2025-12-19 04:19:25 +00:00)

commit 9f7da491d8 (parent d8da7f6fd8)

    fix: retire mongodb

9 changed files with 7 additions and 443 deletions
@@ -27,7 +27,7 @@ Initiate a dev environment, please check the official guide [here](https://docs.
 - **Radix UI** - Accessible UI Components
 - **Tiptap** - An editor framework and headless wrapper around ProseMirror
 - **YJS** - Shared data types for building collaborative software
-- **MongoDB** - NoSQL Database
+- **PostgreSQL** - SQL Database
 - **React** - duh

 ### Get started
@@ -53,7 +53,6 @@ class HostingConfig(BaseModel):

 class DatabaseConfig(BaseModel):
     sql_connection_string: Optional[str]
-    mongo_connection_string: Optional[str]

 class RedisConfig(BaseModel):
     redis_connection_string: Optional[str]
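For reference, a minimal sketch of what the database configuration model looks like once this hunk is applied: only the SQL connection string field remains. The imports are assumed from typical Pydantic v1 usage (the pyproject.toml hunk below pins `pydantic >=1.8.0,<2.0.0`), and the example value is the one `config.yaml` ships with in this same commit:

```python
from typing import Optional

from pydantic import BaseModel


class DatabaseConfig(BaseModel):
    # After the MongoDB removal, only the SQL connection string is configurable.
    sql_connection_string: Optional[str]


# Example instantiation with the value found in config.yaml:
db_config = DatabaseConfig(
    sql_connection_string="postgresql://learnhouse:learnhouse@db:5432/learnhouse"
)
print(db_config.sql_connection_string)
```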
@@ -182,9 +181,7 @@ def get_learnhouse_config() -> LearnHouseConfig:
         "database_config", {}
     ).get("sql_connection_string")

-    mongo_connection_string = yaml_config.get("database_config", {}).get(
-        "mongo_connection_string"
-    )

     # Redis config
     env_redis_connection_string = os.environ.get("LEARNHOUSE_REDIS_CONNECTION_STRING")
@@ -244,7 +241,6 @@ def get_learnhouse_config() -> LearnHouseConfig:
     )
     database_config = DatabaseConfig(
         sql_connection_string=sql_connection_string,
-        mongo_connection_string=mongo_connection_string,
     )

     # AI Config
@@ -26,7 +26,6 @@ hosting_config:

 database_config:
   sql_connection_string: postgresql://learnhouse:learnhouse@db:5432/learnhouse
-  mongo_connection_string: mongodb://learnhouse:learnhouse@mongo:27017/

 redis_config:
   redis_connection_string: redis://redis:6379/learnhouse
apps/api/poetry.lock (generated, 135 lines changed)
@@ -143,17 +143,6 @@ files = [
 [package.extras]
 tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]

-[[package]]
-name = "async-timeout"
-version = "4.0.3"
-description = "Timeout context manager for asyncio programs"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
-    {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
-]
-
 [[package]]
 name = "attrs"
 version = "23.2.0"
@@ -1511,29 +1500,6 @@ files = [
     {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
 ]

-[[package]]
-name = "motor"
-version = "3.1.1"
-description = "Non-blocking MongoDB driver for Tornado or asyncio"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "motor-3.1.1-py3-none-any.whl", hash = "sha256:01d93d7c512810dcd85f4d634a7244ba42ff6be7340c869791fe793561e734da"},
-    {file = "motor-3.1.1.tar.gz", hash = "sha256:a4bdadf8a08ebb186ba16e557ba432aa867f689a42b80f2e9f8b24bbb1604742"},
-]
-
-[package.dependencies]
-pymongo = ">=4.1,<5"
-
-[package.extras]
-aws = ["pymongo[aws] (>=4.1,<5)"]
-encryption = ["pymongo[encryption] (>=4.1,<5)"]
-gssapi = ["pymongo[gssapi] (>=4.1,<5)"]
-ocsp = ["pymongo[ocsp] (>=4.1,<5)"]
-snappy = ["pymongo[snappy] (>=4.1,<5)"]
-srv = ["pymongo[srv] (>=4.1,<5)"]
-zstd = ["pymongo[zstd] (>=4.1,<5)"]
-
 [[package]]
 name = "mpmath"
 version = "1.3.0"
@@ -2213,100 +2179,6 @@ crypto = ["cryptography (>=1.4)"]
 flake8 = ["flake8", "flake8-import-order", "pep8-naming"]
 test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"]

-[[package]]
-name = "pymongo"
-version = "4.3.3"
-description = "Python driver for MongoDB <http://www.mongodb.org>"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"},
-    {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"},
-    {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"},
-    {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"},
-    {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"},
-    {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"},
-    {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"},
-    {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"},
-    {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"},
-    {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"},
-    {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"},
-    {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"},
-    {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"},
-    {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"},
-    {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"},
-    {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"},
-    {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"},
-    {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"},
-    {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"},
-    {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"},
-    {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"},
-    {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"},
-    {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"},
-    {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"},
-    {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"},
-]
-
-[package.dependencies]
-dnspython = ">=1.16.0,<3.0.0"
-
-[package.extras]
-aws = ["pymongo-auth-aws (<2.0.0)"]
-encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"]
-gssapi = ["pykerberos"]
-ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
-snappy = ["python-snappy"]
-zstd = ["zstandard"]
-
 [[package]]
 name = "pypika"
 version = "0.48.9"
@@ -2493,9 +2365,6 @@ files = [
     {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"},
 ]

-[package.dependencies]
-async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""}
-
 [package.extras]
 hiredis = ["hiredis (>=1.0.0)"]
 ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
@@ -3625,5 +3494,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p

 [metadata]
 lock-version = "2.0"
-python-versions = "^3.11"
-content-hash = "03cb227f0f2c84394f504d1fd7dea7c524759b4f3248397f06f67712f7a5f966"
+python-versions = "^3.12"
+content-hash = "909bc8706c915f93e93d0d27bcc69db648213249ecd53e293283a1a8369e9692"
@@ -14,8 +14,6 @@ fastapi = "0.109.2"
 pydantic = {version = ">=1.8.0,<2.0.0", extras = ["email"]}
 sqlmodel = "0.0.10"
 uvicorn = "0.27.1"
-pymongo = "4.3.3"
-motor = "3.1.1"
 psycopg2 = "^2.9.9"
 python-multipart = "^0.0.7"
 boto3 = "^1.34.17"
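With `pymongo` and `motor` dropped from the dependency list, the regenerated `apps/api/poetry.lock` above should no longer contain either package. A small, hypothetical sanity check (not part of the repository) that could be run from the repo root:

```python
# Hypothetical helper: verify the lock file no longer references MongoDB drivers.
from pathlib import Path

lock_text = Path("apps/api/poetry.lock").read_text()

for entry in ('name = "pymongo"', 'name = "motor"'):
    assert entry not in lock_text, f"unexpected lock entry: {entry}"

print("poetry.lock contains no MongoDB driver packages")
```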
@@ -2,7 +2,6 @@ import logging
 from config.config import get_learnhouse_config
 from fastapi import FastAPI
 from sqlmodel import SQLModel, Session, create_engine
-import motor.motor_asyncio

 learnhouse_config = get_learnhouse_config()
 engine = create_engine(
@@ -16,12 +15,7 @@ async def connect_to_db(app: FastAPI):
     logging.info("LearnHouse database has been started.")
     SQLModel.metadata.create_all(engine)

-    # MongoDB for migration purposes
-    # mongodb
-    app.mongodb_client = motor.motor_asyncio.AsyncIOMotorClient( # type: ignore
-        app.learnhouse_config.database_config.mongo_connection_string # type: ignore
-    ) # type: ignore
-    app.db = app.mongodb_client["learnhouse"] # type: ignore


 def get_db_session():
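Taken together with the import hunk above, the startup hook now only touches the SQL engine. Below is a self-contained sketch of the resulting behaviour, reconstructed from the context lines that remain in this hunk; the hard-coded connection string is only for illustration, since the real code builds the engine from `get_learnhouse_config()`:

```python
import logging

from fastapi import FastAPI
from sqlmodel import SQLModel, create_engine

# Illustrative connection string; the application reads it from
# database_config.sql_connection_string instead.
engine = create_engine("postgresql://learnhouse:learnhouse@db:5432/learnhouse")


async def connect_to_db(app: FastAPI):
    # No Mongo client is attached to the app anymore; startup just logs
    # and creates the SQLModel tables against the SQL engine.
    logging.info("LearnHouse database has been started.")
    SQLModel.metadata.create_all(engine)
```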
@@ -1,7 +1,4 @@
-from fastapi import APIRouter, Depends, Request
-from sqlmodel import Session
-from src.core.events.database import get_db_session
-from src.services.dev.migration_from_mongo import start_migrate_from_mongo
+from fastapi import APIRouter
 from config.config import get_learnhouse_config


@@ -14,9 +11,4 @@ async def config():
     return config.dict()


-@router.get("/migrate_from_mongo")
-async def migrate_from_mongo(
-    request: Request,
-    db_session: Session = Depends(get_db_session),
-):
-    return await start_migrate_from_mongo(request, db_session)
@@ -1,275 +0,0 @@
-import datetime
-from fastapi import Request
-from sqlmodel import Session, select
-from src.db.blocks import Block, BlockTypeEnum
-from src.db.chapter_activities import ChapterActivity
-from src.db.activities import Activity, ActivitySubTypeEnum, ActivityTypeEnum
-from src.db.course_chapters import CourseChapter
-from src.db.resource_authors import ResourceAuthor, ResourceAuthorshipEnum
-from src.db.user_organizations import UserOrganization
-from src.db.chapters import Chapter
-from src.db.courses import Course
-from src.db.users import User
-
-from src.db.organizations import Organization
-
-
-async def start_migrate_from_mongo(request: Request, db_session: Session):
-    orgs = request.app.db["organizations"]
-
-    ## ----> Organizations migration
-    org_db_list = await orgs.find().to_list(length=100)
-
-    for org in org_db_list:
-        org_to_add = Organization(
-            name=org["name"],
-            description=org["description"],
-            slug=org["slug"],
-            logo_image=org["logo"],
-            email=org["email"],
-            org_uuid=org["org_id"],
-            creation_date=str(datetime.datetime.now()),
-            update_date=str(datetime.datetime.now()),
-        )
-        db_session.add(org_to_add)
-        db_session.commit()
-
-    print("Migrated organizations.")
-
-    ## ----> Users migration
-    users = request.app.db["users"]
-
-    users_db_list = await users.find().to_list(length=100)
-
-    for user in users_db_list:
-        user_to_add = User(
-            email=user["email"],
-            username=user["username"],
-            first_name="",
-            last_name="",
-            user_uuid=user["user_id"],
-            password=user["password"],
-            creation_date=user["creation_date"],
-            update_date=user["update_date"],
-        )
-        db_session.add(user_to_add)
-        db_session.commit()
-
-        # Link Orgs to users and make them owners
-        for org in user["orgs"]:
-            statement = select(Organization).where(
-                Organization.org_uuid == org["org_id"]
-            )
-            org_from_db = db_session.exec(statement).first()
-
-            statement = select(User).where(User.user_uuid == user["user_id"])
-            user_from_db = db_session.exec(statement).first()
-
-            user_org_object = UserOrganization(
-                user_id=user_from_db.id, # type: ignore
-                org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                role_id=1,
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(user_org_object)
-            db_session.commit()
-
-    print("Migrated users and linked them to orgs.")
-
-    ## ----> Courses migration
-    courses = request.app.db["courses"]
-
-    courses_db_list = await courses.find().to_list(length=300)
-
-    for course in courses_db_list:
-        # Get the organization id
-        statement = select(Organization).where(
-            Organization.org_uuid == course["org_id"]
-        )
-        org_from_db = db_session.exec(statement).first()
-
-        course_to_add = Course(
-            name=course["name"],
-            description=course["description"],
-            about=course["description"],
-            learnings="",
-            course_uuid=course["course_id"],
-            thumbnail_image=course["thumbnail"],
-            tags="",
-            org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-            public=course["public"],
-            creation_date=str(course["creationDate"]),
-            update_date=str(course["updateDate"]),
-        )
-        db_session.add(course_to_add)
-        db_session.commit()
-
-        # Get this course
-        statement = select(Course).where(Course.course_uuid == course["course_id"])
-        course_from_db = db_session.exec(statement).first()
-
-        # Add Authorship
-        authors = course["authors"]
-
-        for author in authors:
-            # Get the user id
-            statement = select(User).where(User.user_uuid == author)
-            user_from_db = db_session.exec(statement).first()
-
-            authorship = ResourceAuthor(
-                resource_uuid=course_from_db.course_uuid, # type: ignore
-                user_id=user_from_db.id if user_from_db is not None else None, # type: ignore
-                authorship=ResourceAuthorshipEnum.CREATOR,
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(authorship)
-            db_session.commit()
-
-            print("Added authorship.")
-
-        ## ----> Chapters migration & Link
-
-        chapter_object = course["chapters_content"]
-        order = 0
-        for chapter in chapter_object:
-            chapter_to_add = Chapter(
-                name=chapter["name"],
-                description=chapter["description"],
-                chapter_uuid=chapter["coursechapter_id"].replace(
-                    "coursechapter", "chapter"
-                ),
-                org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                course_id=course_from_db.id, # type: ignore
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-            db_session.add(chapter_to_add)
-            db_session.commit()
-
-            # Get this chapter
-            statement = select(Chapter).where(
-                Chapter.chapter_uuid
-                == chapter["coursechapter_id"].replace("coursechapter", "chapter")
-            )
-            chapter_from_db = db_session.exec(statement).first()
-
-            # Link chapter to course
-            coursechapter_to_add = CourseChapter(
-                chapter_id=chapter_from_db.id, # type: ignore
-                course_id=course_from_db.id, # type: ignore
-                order=order,
-                org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                creation_date=str(datetime.datetime.now()),
-                update_date=str(datetime.datetime.now()),
-            )
-
-            db_session.add(coursechapter_to_add)
-            db_session.commit()
-
-            order += 1
-
-            ## ----> Activities migration
-            activities = request.app.db["activities"]
-            activities_db_list = await activities.find(
-                {"coursechapter_id": chapter["coursechapter_id"]}
-            ).to_list(length=100)
-
-            activity_order = 0
-
-            for activity in activities_db_list:
-                type_to_use = ActivityTypeEnum.TYPE_CUSTOM
-                sub_type_to_use = ActivityTypeEnum.TYPE_CUSTOM
-
-                if activity["type"] == "video":
-                    type_to_use = ActivityTypeEnum.TYPE_VIDEO
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_VIDEO_HOSTED
-
-                    if "external_video" in activity["content"]:
-                        type_to_use = ActivityTypeEnum.TYPE_VIDEO
-                        sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_VIDEO_YOUTUBE
-
-                if activity["type"] == "documentpdf":
-                    type_to_use = ActivityTypeEnum.TYPE_DOCUMENT
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_DOCUMENT_PDF
-
-                if activity["type"] == "dynamic":
-                    type_to_use = ActivityTypeEnum.TYPE_DYNAMIC
-                    sub_type_to_use = ActivitySubTypeEnum.SUBTYPE_DYNAMIC_PAGE
-
-                activity_to_add = Activity(
-                    name=activity["name"],
-                    activity_uuid=activity["activity_id"],
-                    version=1,
-                    published_version=1,
-                    activity_type=type_to_use,
-                    content=activity["content"],
-                    activity_sub_type=sub_type_to_use,
-                    chapter_id=chapter_from_db.id, # type: ignore
-                    org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                    course_id=course_from_db.id, # type: ignore
-                    creation_date=str(activity["creationDate"]),
-                    update_date=str(activity["updateDate"]),
-                )
-                db_session.add(activity_to_add)
-                db_session.commit()
-
-                # Link activity to chapter
-                statement = select(Activity).where(
-                    Activity.activity_uuid == activity["activity_id"]
-                )
-
-                activity_from_db = db_session.exec(statement).first()
-
-                activitychapter_to_add = ChapterActivity(
-                    chapter_id=chapter_from_db.id, # type: ignore
-                    activity_id=activity_from_db.id, # type: ignore
-                    order=activity_order,
-                    course_id=course_from_db.id, # type: ignore
-                    org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                    creation_date=str(datetime.datetime.now()),
-                    update_date=str(datetime.datetime.now()),
-                )
-
-                db_session.add(activitychapter_to_add)
-                db_session.commit()
-
-                activity_order += 1
-
-                ## ----> Blocks migration
-                blocks = request.app.db["blocks"]
-
-                blocks_db_list = await blocks.find(
-                    {"activity_id": activity["activity_id"]}
-                ).to_list(length=200)
-
-                for block in blocks_db_list:
-                    type_to_use = BlockTypeEnum.BLOCK_CUSTOM
-
-                    if block["block_type"] == "imageBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_IMAGE
-
-                    if block["block_type"] == "videoBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_VIDEO
-
-                    if block["block_type"] == "pdfBlock":
-                        type_to_use = BlockTypeEnum.BLOCK_DOCUMENT_PDF
-
-                    print('block', block)
-
-                    block_to_add = Block(
-                        block_uuid=block["block_id"],
-                        content=block["block_data"],
-                        block_type=type_to_use,
-                        activity_id=activity_from_db.id, # type: ignore
-                        org_id=org_from_db.id if org_from_db is not None else None, # type: ignore
-                        course_id=course_from_db.id, # type: ignore
-                        chapter_id=chapter_from_db.id, # type: ignore
-                        creation_date=str(datetime.datetime.now()),
-                        update_date=str(datetime.datetime.now()),
-                    )
-                    db_session.add(block_to_add)
-                    db_session.commit()
-
-    return "Migration successfull."
@@ -13,7 +13,6 @@ services:
         condition: service_healthy
       redis:
        condition: service_healthy
-
   db:
     image: postgres:16-alpine
     restart: always
@@ -38,11 +37,3 @@ services:
       interval: 5s
       timeout: 4s
       retries: 5
-  mongo:
-    image: mongo:5.0
-    restart: always
-    ports:
-      - "27017:27017"
-    environment:
-      - MONGO_INITDB_ROOT_USERNAME=learnhouse
-      - MONGO_INITDB_ROOT_PASSWORD=learnhous