mirror of https://github.com/beak-insights/felicity-lims.git
synced 2025-02-21 07:22:53 +08:00

commit be25d8a746 (parent 561be0c7a5)
fixed audit to mongo, updated event system

42 changed files with 1029 additions and 2221 deletions
.idea/workspace.xml

@@ -6,9 +6,47 @@
The ChangeListManager <list> (id="9f590b69-f929-45a1-8512-12ed6efbf028", comment="Added Invetory starter") is rewritten to track the files touched by this commit. Modified entries (beforePath/afterPath):

  .idea/workspace.xml
  .vscode/PythonImportHelper-v2-Completion.json
  docker-compose.dev.yml
  felicity/core/config.py
  felicity/api/gql/analysis/mutations/analysis_request.py
  felicity/api/gql/audit/query.py
  felicity/api/gql/audit/types.py
  felicity/api/gql/notification/subscription.py
  felicity/api/gql/notification/types.py
  felicity/api/gql/types/generic.py
  felicity/apps/abstract/events.py
  felicity/apps/abstract/trackable.py
  felicity/apps/analysis/services/analysis.py
  felicity/apps/analysis/utils.py
  felicity/apps/common/utils/serializer.py
  felicity/apps/iol/meilisearch/client.py
  felicity/apps/iol/meilisearch/handler.py
  felicity/apps/iol/minio/client.py
  felicity/apps/notification/services.py
  felicity/core/events.py
  felicity/database/base.py
  felicity/database/mongo.py
  webapp/components/audit/FelAuditLog.vue
  webapp/components/result/ResultDetail.vue
  webapp/graphql/operations/_queries.ts
  webapp/views/client/_id/ClientDetail.vue
  webapp/views/patient/PatientsCompact.vue
  webapp/views/patient/_id/PatientDetail.vue
  webapp/views/sample/_id/SampleDetail.vue
  webapp/views/shipment/_id/ShipmentDetail.vue
  webapp/views/worksheet/_id/WorkSheetDetail.vue

Deleted entries (beforePath only):

  felicity/apps/auditlog/__init__.py
  felicity/apps/auditlog/entities.py
  felicity/apps/auditlog/mixin.py
  felicity/apps/auditlog/repositories.py
  felicity/apps/auditlog/services.py
  felicity/apps/errlog/__init__.py
  felicity/apps/errlog/entities.py
  felicity/apps/errlog/repositories.py
  felicity/apps/errlog/schemas.py
  felicity/apps/errlog/services.py
  felicity/database/queryset/__init__.py
  felicity/database/queryset/builder.py

The hunk keeps <option name="SHOW_DIALOG" value="false" /> and <option name="HIGHLIGHT_CONFLICTS" value="true" />.
.vscode/PythonImportHelper-v2-Completion.json (vendored): 1647 lines changed
File diff suppressed because it is too large
docker-compose.dev.yml

@@ -2,15 +2,15 @@ version: '3.5'

 services:

-  felicity_fe:
-    container_name: felicity_fe
+  felicity-webapp:
+    container_name: felicity-webapp
     restart: unless-stopped
     build:
       context: ./
       target: webapp
       dockerfile: Dockerfile.dev
     volumes:
-      - ./webapp:/app/webapp
+      - ./webapp/:/app/webapp
     environment:
-      - NODE_ENV=development
+      - VITE_NODE_ENV=development

@@ -19,49 +19,49 @@ services:
     ports:
       - "3000:3000"
     depends_on:
-      - felicity_pg
+      - felicity-postgres
     networks:
       - felicitynet
     command: bash -c "pnpm webapp:dev --host --port 3000"

-  felicity_be:
-    container_name: felicity_be
+  felicity-api:
+    container_name: felicity-api
     restart: unless-stopped
     build:
       context: ./
       target: webapi
       dockerfile: Dockerfile.dev
     environment:
-      - POSTGRES_SERVER=felicity_pg
+      - POSTGRES_SERVER=felicity-postgres
       - POSTGRES_DB=felicity_lims
       - POSTGRES_USER=felicity
       - POSTGRES_PASSWORD=felicity
       - POSTGRES_HOST_AUTH_METHOD=trust
       - LOAD_SETUP_DATA=${LOAD_SETUP_DATA:-False}
       - SERVE_WEBAPP=${SERVE_WEBAPP:-False}
-      - REDIS_SERVER=felicity_df:6379
-      - MONGODB_SERVER=felicity_mg:27027
+      - REDIS_SERVER=felicity-dragonfly:6379
+      - MONGODB_SERVER=felicity-mongo:27017
       - MONGODB_USER=felicity
       - MONGODB_PASS=felicity
-      - MINIO_SERVER=felicity_min:9000
+      - MINIO_SERVER=felicity-minio:9000
       - MINIO_ACCESS=felicity
       - MINIO_SECRET=felicity
-      - MEILISEARCH_SERVER=http://felicity_ms:7700
-      - MEILISEARCH_API_KEY=http:api_key
+      - MEILISEARCH_SERVER=http://felicity-meilisearch:7700
+      - MEILISEARCH_API_KEY=masterKey
     volumes:
       - ./felicity/:/app/felicity
     ports:
       - 8000:8000
     depends_on:
-      - felicity_pg
+      - felicity-postgres
     networks:
       - felicitynet
     extra_hosts:
       - "host.docker.internal:host-gateway"
-    command: bash -c "cd felicity && alembic upgrade head && uvicorn main:felicity --host=0.0.0.0 --port=8000"
+    command: bash -c "cd felicity && alembic upgrade head && uvicorn main:felicity --reload --host=0.0.0.0 --port=8000"

-  felicity_min:
-    container_name: felicity_min
+  felicity-minio:
+    container_name: felicity-minio
     image: bitnami/minio:2024.7.31
     ports:
       - '9000:9000'

@@ -75,8 +75,8 @@ services:
       - MINIO_ROOT_PASSWORD=felicity
       - MINIO_DEFAULT_BUCKET=felicity

-  felicity_ms:
-    container_name: felicity_ms
+  felicity-meilisearch:
+    container_name: felicity-meilisearch
     image: getmeili/meilisearch:v1.8.4
     restart: unless-stopped
     volumes:

@@ -94,8 +94,17 @@ services:
     networks:
       - felicitynet

-  felicity_df:
-    container_name: felicity_df
+  felicity-meiliui:
+    container_name: felicity-meiliui
+    image: riccoxie/meilisearch-ui:latest
+    restart: unless-stopped
+    ports:
+      - ${MEILI_UI_PORT:-24900}:24900
+    networks:
+      - felicitynet
+
+  felicity-dragonfly:
+    container_name: felicity-dragonfly
     image: 'docker.dragonflydb.io/dragonflydb/dragonfly'
     ulimits:
       memlock: -1

@@ -110,8 +119,8 @@ services:
     networks:
       - felicitynet

-  felicity_pg:
-    container_name: felicity_pg
+  felicity-postgres:
+    container_name: felicity-postgres
     image: postgres:12
     restart: unless-stopped
     environment:

@@ -129,8 +138,8 @@ services:
     networks:
       - felicitynet

-  felicity_mg:
-    container_name: felicity_mg
+  felicity-mongo:
+    container_name: felicity-mongo
     image: mongo
     restart: always
     environment:

@@ -141,8 +150,8 @@ services:
     networks:
       - felicitynet

-  felicity_dg:
-    container_name: felicity_dg
+  felicity-dbgate:
+    container_name: felicity-dbgate
     image: dbgate/dbgate
     restart: always
     ports:

@@ -154,7 +163,7 @@ services:
       PASSWORD: felicity
       CONNECTIONS: pg,mg
       LABEL_pg: Felicity Postgres
-      SERVER_pg: felicity_pg
+      SERVER_pg: felicity-postgres
       USER_pg: felicity
       PASSWORD_pg: felicity
       DATABASE_pg: felicity_lims

@@ -162,7 +171,7 @@ services:
       ENGINE_pg: postgres@dbgate-plugin-postgres
       READONLY_pg: 1
       LABEL_mg: Felicity Mongo
-      URL_mg: mongodb://felicity:felicity@felicity_mg:27017/
+      URL_mg: mongodb://felicity:felicity@felicity-mongo:27017/
       DATABASE_mg: felicity
       ENGINE_mg: mongo@dbgate-plugin-mongo
       READONLY_mg: 1
felicity/api/gql/analysis/mutations/analysis_request.py

@@ -256,7 +256,7 @@ async def create_analysis_request(

     # initialise reflex action if exist
     logger.debug(f"ReflexUtil .... set_reflex_actions ...")
-    ReflexEngineService(created[0], felicity_user).set_reflex_actions(created)
+    await ReflexEngineService(created[0], felicity_user).set_reflex_actions(created)

     # ! paramount !
     await asyncio.sleep(1)

@@ -305,11 +305,13 @@ async def clone_samples(info, samples: List[str]) -> SampleActionResponse:
                 "status": ResultState.PENDING,
             }
             a_result_schema = schemas.AnalysisResultCreate(**a_result_in)
-            created = await AnalysisResultService().create(a_result_schema)
+            created = await AnalysisResultService().create(a_result_schema, related=["sample", "analysis"])
             await ReflexEngineService(created, felicity_user).set_reflex_actions(
                 [created]
             )

+    clones = [
+        (await SampleService().get_related(related=["sample_type"], uid=clone.uid)) for clone in clones
+    ]
     return SampleListingType(samples=clones)

@@ -482,11 +484,12 @@ async def publish_samples(

     # !important for frontend
     # unfreeze frontend and return sample to original state since it is a non final publish
-    ns_samples = await SampleService().get_by_uids([nf.uid for nf in not_final])
-    for sample in ns_samples:
-        await ActivityStreamService().stream(
-            sample, felicity_user, sample.status, "sample"
-        )
+    if not_final:
+        ns_samples = await SampleService().get_by_uids([nf.uid for nf in not_final])
+        for sample in ns_samples:
+            await ActivityStreamService().stream(
+                sample, felicity_user, sample.status, "sample"
+            )

     return OperationSuccess(
         message="Your results are being published in the background."
felicity/api/gql/audit/query.py

@@ -2,17 +2,29 @@ from typing import List

 import strawberry  # noqa

+from database.mongo import MongoService, MongoCollection
 from felicity.api.gql.audit.types import AuditLogType
 from felicity.api.gql.permissions import IsAuthenticated
-from felicity.apps.auditlog.services import AuditLogService


 @strawberry.type
 class AuditLogQuery:
     @strawberry.field(permission_classes=[IsAuthenticated])
     async def audit_logs_filter(
-        self, info, target_type: str, target_id: str
-    ) -> List[AuditLogType]:
-        return await AuditLogService().get_all(
-            target_type=target_type, target_id=target_id
-        )
+        self, info, target_type: str, target_uid: str
+    ) -> List[AuditLogType] | None:
+        documents = await MongoService().search(
+            MongoCollection.AUDIT_LOG,
+            filters={
+                "target_type": target_type,
+                "target_uid": target_uid
+            }
+        )
+
+        def _log(doc: dict) -> AuditLogType:
+            print(type(doc))
+            doc["uid"] = doc["_id"]
+            del doc["_id"]
+            return AuditLogType(**doc)
+
+        return [_log(doc) for doc in documents] if documents else None
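For reference, a minimal sketch of what the reworked resolver now amounts to: audit entries are read from MongoDB instead of the dropped Postgres auditlog app, and Mongo's `_id` is remapped onto the GraphQL `uid` field. The names come straight from the diff; a reachable MongoDB instance and a valid `target_type` such as "sample" are assumed:

    from database.mongo import MongoCollection, MongoService

    async def fetch_audit_trail(target_type: str, target_uid: str) -> list[dict]:
        # Same search call the resolver issues against the AUDIT_LOG collection.
        documents = await MongoService().search(
            MongoCollection.AUDIT_LOG,
            filters={"target_type": target_type, "target_uid": target_uid},
        )
        results = []
        for doc in documents or []:
            doc["uid"] = doc.pop("_id")  # Mongo's primary key -> GraphQL uid
            results.append(doc)
        return results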
felicity/api/gql/audit/types.py

@@ -1,14 +1,16 @@
-from typing import Optional, Text
+from typing import Optional

 import strawberry  # noqa

+from felicity.api.gql.types import JSONScalar
+

 @strawberry.type
 class AuditLogType:
     uid: str
-    user_id: str | None = None
+    user_uid: str | None = None
     target_type: str | None = None
-    target_id: str | None = None
+    target_uid: str | None = None
     action: int | None = None
-    state_before: Optional[Text] = None
-    state_after: Optional[Text] = None
+    state_before: Optional[JSONScalar] = None
+    state_after: Optional[JSONScalar] = None
felicity/api/gql/notification/subscription.py

@@ -1,11 +1,11 @@
 import asyncio
+import json
 import logging
 from typing import AsyncGenerator

 import strawberry  # noqa

+from felicity.api.gql.notification.types import ActivityStreamType
 # from felicity.api.gql.permissions import IsAuthenticated
 from felicity.apps.common.channel import broadcast
-from felicity.apps.notification.services import ActivityStreamService

@@ -22,7 +22,7 @@ class StreamSubscription:
         try:
             async for event in subscriber:
                 logger.info(event)
-                yield event.message
+                yield ActivityStreamType(**json.loads(event.message))
         finally:
             logger.info("Unsubscribed")
felicity/api/gql/notification/types.py

@@ -1,4 +1,4 @@
-from typing import List, Optional, Union
+from typing import Optional, Union

 import strawberry  # noqa

@@ -37,7 +37,7 @@ class ActivityFeedType:
 @strawberry.type
 class ActivityStreamType:
     uid: str
-    feeds: Optional[List[ActivityFeedType]] = None
+    feeds: Optional[list[ActivityFeedType]] = None
     actor_uid: str | None = None
     verb: str | None = None
     action_object_type: str | None = None

@@ -48,6 +48,9 @@ class ActivityStreamType:
     created_at: str | None = None
     created_by_uid: str | None = None
+    created_by: UserType | None = None
     updated_at: str | None = None
+    updated_by_uid: str | None = None
+    updated_by: UserType | None = None

     @strawberry.field
     async def actor(self) -> UserType:
felicity/api/gql/types/generic.py

@@ -4,11 +4,13 @@ from typing import Any, Generic, NewType, TypeVar

 import strawberry

+from felicity.apps.common.utils.serializer import marshaller
+
 T = TypeVar("T")

 JSONScalar = strawberry.scalar(
     NewType("JSONScalar", Any),
-    serialize=lambda v: v,
+    serialize=lambda v: marshaller(v),
     parse_value=lambda v: json.loads(v),
     description="json field",
 )
felicity/apps/abstract/events.py

@@ -1,11 +1,5 @@
-import datetime
-import json
 import logging

-from sqlalchemy import inspect
-from sqlalchemy.orm import class_mapper
-from sqlalchemy.orm.attributes import get_history
-
 from apps.iol.meilisearch.handler import MeilisearchSyncHandler
 from database.mongo import MongoService, MongoCollection
 from felicity.core.events import subscribe

@@ -14,75 +8,32 @@ logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)


-def custom_serial(o):
-    if isinstance(o, (datetime.date, datetime.datetime)):
-        return o.isoformat()
-
-    raise TypeError("Type %s not serializable" % type(o))
-
-
-def get_changes(target) -> dict:
-    state_before = {}
-    state_after = {}
-    inspector = inspect(target)
-    attrs = class_mapper(target.__class__).column_attrs
-
-    for attr in attrs:  # noqa
-        hist = getattr(inspector.attrs, attr.key).history
-        if hist.has_changes():
-            if isinstance(get_history(target, attr.key)[2], tuple):
-                continue
-            state_before[attr.key] = get_history(target, attr.key)[2].pop()
-            state_after[attr.key] = getattr(target, attr.key)
-        else:
-            if attr.key in ["updated_by_uid", "created_by_uid"]:
-                state_after[attr.key] = getattr(target, attr.key)
-                try:
-                    state_before[attr.key] = get_history(target, attr.key)[2].pop()
-                except Exception:  # noqa
-                    state_before[attr.key] = getattr(target, attr.key)
-
-    if len(state_after.keys()) == 1:
-        if "updated_at" in list(state_after.keys()):
-            return {}
-
-    return {
-        "uid": target.uid,
-        "state_before": json.dumps(state_before, default=custom_serial),
-        "state_after": json.dumps(state_after, default=custom_serial),
-    }
-
-
-def meilisearch_tracker(action: str, table_name: str, instance):
+def meilisearch_tracker(action: str, table_name: str, metadata):
     logger.info(f"Event fired: {action}:{table_name}")
-    data = instance
-    if action == "after-update":
-        changes = get_changes(instance)
-        data = {
-            "uid": changes["uid"],
-            **changes["state_after"]
-        }
-
-    if data:
-        MeilisearchSyncHandler().on_event(action, table_name, data)
-
-
-def auditlog_tracker(action: str, table_name: str, instance):
-    logger.info(f"Event fired: {action}:{table_name}")
-    if action != "after-update":
+    if not metadata:
         return

-    data = get_changes(instance)
-    if not data:
-        return
+    if action == "after-update":
+        metadata = {
+            "uid": metadata["uid"],
+            **metadata.get("state_after")
+        }
+
+    MeilisearchSyncHandler().on_event(action, table_name, metadata)
+
+
+async def auditlog_tracker(action: str, table_name: str, metadata):
+    logger.info(f"Event fired: {action}:{table_name}")
+    if action != "after-update" and not metadata:
+        return

     update = {
-        **data,
-        "user_uid": data["state_after"].get("updated_by_uid", None),
+        **metadata,
+        "user_uid": metadata["state_after"].get("updated_by_uid", None),
         "target_type": table_name,
-        "target_id": instance.uid,
+        "target_uid": metadata.get("uid"),
     }
-    MongoService().create(MongoCollection.AUDIT_LOG, update)
+    await MongoService().create(MongoCollection.AUDIT_LOG, update)


 def init_entity_tracker_events():
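The body of `init_entity_tracker_events` falls outside this hunk; presumably it still registers both trackers on the `entity-tracker` event, roughly as follows (a sketch, not the actual diff):

    subscribe("entity-tracker", meilisearch_tracker)
    subscribe("entity-tracker", auditlog_tracker)

With the reworked event system (see felicity/core/events.py below) able to dispatch coroutines, the now-async `auditlog_tracker` can be posted through the same `post_event` call as the synchronous Meilisearch tracker.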
felicity/apps/abstract/trackable.py

@@ -1,7 +1,10 @@
 import logging

-from sqlalchemy import event
+from sqlalchemy import event, inspect
+from sqlalchemy.orm import class_mapper
+from sqlalchemy.orm.attributes import get_history

+from felicity.apps.common.utils.serializer import marshaller
 from felicity.core.events import post_event

 logging.basicConfig(level=logging.INFO)

@@ -12,13 +15,13 @@ class TrackableEntity:
     """Allow a model to be automatically tracked"""

     @staticmethod
-    def put_out(action, table_name, target):
+    def put_out(action, table_name, metadata):
         """"handle an event that something has happened"""
         post_event(
             event_type="entity-tracker",
             action=action,
             table_name=table_name,
-            instance=target
+            metadata=metadata
         )

     @classmethod

@@ -30,12 +33,44 @@ class TrackableEntity:

     @staticmethod
     def handle_insert(mapper, connection, target):
-        target.put_out("after-insert", target.__tablename__, target)
+        target.put_out("after-insert", target.__tablename__, marshaller(target))

     @staticmethod
     def handle_delete(mapper, connection, target):
-        target.put_out("after-delete", target.__tablename__, target)
+        target.put_out("after-delete", target.__tablename__, marshaller(target))

     @staticmethod
     def handle_update(mapper, connection, target):
-        target.put_out("after-update", target.__tablename__, target)
+        target.put_out("after-update", target.__tablename__, target.get_changes(target))
+
+    @staticmethod
+    def get_changes(target) -> dict:
+        state_before = {}
+        state_after = {}
+        inspector = inspect(target)
+        attrs = class_mapper(target.__class__).column_attrs
+
+        for attr in attrs:  # noqa
+            hist = getattr(inspector.attrs, attr.key).history
+            if hist.has_changes():
+                if isinstance(get_history(target, attr.key)[2], tuple):
+                    continue
+                state_before[attr.key] = get_history(target, attr.key)[2].pop()
+                state_after[attr.key] = getattr(target, attr.key)
+            else:
+                if attr.key in ["updated_by_uid", "created_by_uid"]:
+                    state_after[attr.key] = getattr(target, attr.key)
+                    try:
+                        state_before[attr.key] = get_history(target, attr.key)[2].pop()
+                    except Exception:  # noqa
+                        state_before[attr.key] = getattr(target, attr.key)
+
+        if len(state_after.keys()) == 1:
+            if "updated_at" in list(state_after.keys()):
+                return {}
+
+        return {
+            "uid": target.uid,
+            "state_before": state_before,
+            "state_after": state_after
+        }
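To make the new contract concrete: the SQLAlchemy hooks now publish plain dicts instead of live ORM instances, so subscribers never touch the session. Going by `get_changes` above, an `after-update` payload for a status change would look roughly like this (field values are illustrative):

    {
        "uid": "sample-uid",
        "state_before": {"status": "received", "updated_by_uid": "user-1"},
        "state_after": {"status": "verified", "updated_by_uid": "user-2"},
    }

This is exactly the shape `auditlog_tracker` expects when it reads `metadata["state_after"]` and writes the record to Mongo.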
felicity/apps/analysis/services/analysis.py

@@ -482,28 +482,28 @@ class SampleService(BaseService[Sample, SampleCreate, SampleUpdate]):
         return await super().create(data, related)

     async def duplicate_unique(self, uid: str, duplicator):
-        sample = await self.get(uid=uid)
+        sample = await self.get_related(related=["profiles", "analyses"], uid=uid)
         data = sample.to_dict(nested=False)
         data["sample_id"] = self.copy_sample_id_unique(sample)
         for key, _ in list(data.items()):
             if key not in self.copy_include_keys():
                 del data[key]
         data["status"] = SampleState.RECEIVED
-        data["profiles"] = self.profiles
-        data["analyses"] = self.analyses
-        data["parent_id"] = self.uid
+        data["profiles"] = sample.profiles
+        data["analyses"] = sample.analyses
+        data["parent_id"] = sample.uid
         data["created_by_uid"] = duplicator.uid
         return await super().create(data)

     async def clone_afresh(self, uid: str, cloner):
-        sample = await self.get(uid=uid)
+        sample = await self.get_related(related=["profiles", "analyses"], uid=uid)
         data = sample.to_dict(nested=False)
         for key, _ in list(data.items()):
             if key not in self.copy_include_keys():
                 del data[key]
         data["status"] = SampleState.RECEIVED
-        data["profiles"] = self.profiles
-        data["analyses"] = self.analyses
-        data["parent_id"] = self.uid
+        data["profiles"] = sample.profiles
+        data["analyses"] = sample.analyses
+        data["parent_id"] = sample.uid
         data["created_by_uid"] = cloner.uid
         return await self.create(obj_in=data)
felicity/apps/analysis/utils.py

@@ -40,7 +40,6 @@ from felicity.utils import has_value_or_is_truthy
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

-
 QC_SAMPLE = {"name": "QC Sample", "description": "QC Sample", "abbr": "QCS"}


@@ -67,7 +66,7 @@ async def get_last_verificator(result_uid: str) -> User | None:

 async def sample_search(
-    model, status: str, text: str, client_uid: str
+        model, status: str, text: str, client_uid: str
 ) -> list[SampleType]:
     """No pagination"""
     sample_service = SampleService()

@@ -100,7 +99,7 @@ async def sample_search(

 async def retest_from_result_uids(
-    uids: list[str], user: User
+        uids: list[str], user: User
 ) -> tuple[list[AnalysisResult], list[AnalysisResult]]:
     analysis_result_service = AnalysisResultService()
     analysis_result_wf = AnalysisResultWorkFlow()

@@ -124,7 +123,7 @@ async def retest_from_result_uids(

 async def results_submitter(
-    analysis_results: List[dict], submitter: User
+        analysis_results: List[dict], submitter: User
 ) -> list[AnalysisResult]:
     analysis_result_service = AnalysisResultService()
     sample_wf = SampleWorkFlow()

@@ -178,9 +177,9 @@ async def verify_from_result_uids(uids: list[str], user: User) -> list[AnalysisR
     logger.info(f"ReflexUtil .... done")

     # try to verify associated sample
-    sample_verified=False
+    sample_verified = False
     try:
-        sample_verified, _ = await sample_wf.approve([a_result.sample_uid], submitted_by=user)
+        sample_verified, _ = await sample_wf.approve(a_result.sample_uid, approved_by=user)
     except Exception as e:
         await sample_wf.revert(a_result.sample_uid, by_uid=user.uid)
         logger.warning(e)

@@ -188,7 +187,7 @@ async def verify_from_result_uids(uids: list[str], user: User) -> list[AnalysisR
     # try to submit associated worksheet
     if a_result.worksheet_uid:
         try:
-            await worksheet_wf.approve(a_result.worksheet_uid, verified_by=user)
+            await worksheet_wf.approve(a_result.worksheet_uid, approved_by=user)
         except Exception as e:
             await worksheet_wf.revert(a_result.worksheet_uid, by_uid=user.uid)
             logger.warning(e)

@@ -248,11 +247,11 @@ async def result_mutator(result: AnalysisResult) -> None:
     # Correction factor
     for cf in correction_factors:
         if (
-            cf.instrument_uid == result.laboratory_instrument_uid
-            and cf.method_uid == result.method_uid
+                cf.instrument_uid == result.laboratory_instrument_uid
+                and cf.method_uid == result.method_uid
         ):
             await result_mutation_service.create(
-                obj_in={
+                c={
                     "result_uid": result.uid,
                     "before": result.result,
                     "after": result.result * cf.factor,

@@ -267,7 +266,7 @@ async def result_mutator(result: AnalysisResult) -> None:
             # Min
             if result.result < spec.min_warn:
                 await result_mutation_service.create(
-                    obj_in={
+                    c={
                         "result_uid": result.uid,
                         "before": result.result,
                         "after": spec.min_report,

@@ -283,7 +282,7 @@ async def result_mutator(result: AnalysisResult) -> None:
             # Max
             if result.result > spec.max_warn:
                 await result_mutation_service.create(
-                    obj_in={
+                    c={
                         "result_uid": result.uid,
                         "before": result.result,
                         "after": spec.max_report,

@@ -300,7 +299,7 @@ async def result_mutator(result: AnalysisResult) -> None:
     for dlim in detection_limits:
         if result.result < dlim.lower_limit:
             await result_mutation_service.create(
-                obj_in={
+                c={
                     "result_uid": result.uid,
                     "before": result.result,
                     "after": f"< {dlim.lower_limit}",

@@ -312,7 +311,7 @@ async def result_mutator(result: AnalysisResult) -> None:

         if result.result > dlim.upper_limit:
             await result_mutation_service.create(
-                obj_in={
+                c={
                     "result_uid": result.uid,
                     "before": result.result,
                     "after": f"> {dlim.upper_limit}",

@@ -327,7 +326,7 @@ async def result_mutator(result: AnalysisResult) -> None:
     for uncert in uncertainties:
         if uncert.min <= result.result <= uncert.max:
             await result_mutation_service.create(
-                obj_in={
+                c={
                     "result_uid": result.uid,
                     "before": result.result,
                     "after": f"{result.result} +/- {uncert.value}",

@@ -341,7 +340,7 @@ async def result_mutator(result: AnalysisResult) -> None:
     for spec in specifications:
         if result.result in spec.warn_values.split(","):
             await result_mutation_service.create(
-                obj_in={
+                c={
                     "result_uid": result.uid,
                     "before": result.result,
                     "after": spec.warn_report,
felicity/apps/auditlog/__init__.py (deleted)

@@ -1 +0,0 @@
-# https://github.com/accent-starlette/starlette-audit/blob/master/starlette_audit/tables.py
felicity/apps/auditlog/entities.py (deleted)

@@ -1,90 +0,0 @@
-import json
-
-from sqlalchemy import Column, Integer, String, UnicodeText
-
-from felicity.apps.abstract.entity import BaseEntity
-
-
-class AuditLog(BaseEntity):
-    """Model an audit log of user actions"""
-
-    __tablename__ = "audit_log"
-
-    user_id = Column(String, doc="The ID of the user who made the change")
-    target_type = Column(
-        String(100), nullable=False, doc="The table name of the altered object"
-    )
-    target_id = Column(String, doc="The ID of the altered object")
-    action = Column(Integer, doc="Create (1), update (2), or delete (3)")
-    state_before = Column(
-        UnicodeText,
-        doc="Stores a JSON string representation of a dict containing the altered "
-        "column names and original values",
-    )
-    state_after = Column(
-        UnicodeText,
-        doc="Stores a JSON string representation of a dict containing the altered "
-        "column names and new values",
-    )
-
-    def __init__(self, target_type, target_id, action, state_before, state_after):
-        self.state_after = state_after
-        self.target_type = target_type
-        self.target_id = target_id
-        self.action = action
-        self.state_before = state_before
-
-        if isinstance(state_after, str):
-            state_after = json.loads(state_after)
-
-        try:
-            updated_by_uid = state_after["updated_by_uid"]
-        except (KeyError, TypeError):
-            updated_by_uid = None
-
-        self.user_id = updated_by_uid if updated_by_uid else None
-
-    def __repr__(self):
-        return "<AuditLog %r: %r -> %r>" % (self.user_id, self.target_type, self.action)
-
-    def save(self, connection):
-        state_after = self.state_after
-        if isinstance(state_after, str):
-            state_after = json.loads(state_after)
-
-        state_before = self.state_before
-        if isinstance(state_before, str):
-            state_before = json.loads(state_before)
-
-        if state_after:
-            to_delete = []
-            for key in state_after.keys():
-                if state_after[key] == state_before[key]:
-                    to_delete.append(key)
-
-            for _key in to_delete:
-                del state_after[_key]
-                del state_before[_key]
-
-            if len(state_after.keys()) == 1:
-                if list(state_after.keys())[0] == "updated_at":
-                    return
-
-        state_after = json.dumps(state_after) if state_after else json.dumps({})
-        state_before = json.dumps(state_before) if state_before else json.dumps({})
-
-        connection.execute(
-            self.__table__.insert(),
-            [
-                {
-                    "user_id": self.user_id,
-                    "target_type": self.target_type,
-                    "target_id": self.target_id,
-                    "action": self.action,
-                    "state_before": state_before,
-                    "state_after": state_after,
-                }
-            ],
-        )
felicity/apps/auditlog/mixin.py (deleted)

@@ -1,111 +0,0 @@
-import datetime
-import json
-import logging
-
-from sqlalchemy import event, inspect
-from sqlalchemy.orm import class_mapper
-from sqlalchemy.orm.attributes import get_history
-
-from felicity.apps.auditlog.entities import AuditLog
-
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-ACTION_CREATE = 1
-ACTION_UPDATE = 2
-ACTION_DELETE = 3
-
-# Only audit the events in this list
-PLEASE_AUDIT = [ACTION_UPDATE]
-
-
-def custom_serial(o):
-    if isinstance(o, (datetime.date, datetime.datetime)):
-        return o.isoformat()
-
-    raise TypeError("Type %s not serializable" % type(o))
-
-
-class AuditHistoryMixin:
-    """Allow a model to be automatically audited"""
-
-    @staticmethod
-    def create_audit(connection, object_type, object_id, action, **kwargs):
-        audit = AuditLog(
-            object_type,
-            object_id,
-            action,
-            kwargs.get("state_before"),
-            kwargs.get("state_after"),
-        )
-
-        audit.save(connection)
-
-    @classmethod
-    def __declare_last__(cls):
-        logger.debug("trigger")
-        if ACTION_CREATE in PLEASE_AUDIT:
-            event.listens_for(cls, "after_insert", cls.audit_insert)
-
-        if ACTION_DELETE in PLEASE_AUDIT:
-            event.listen(cls, "after_delete", cls.audit_delete)
-
-        if ACTION_UPDATE in PLEASE_AUDIT:
-            event.listen(cls, "after_update", cls.audit_update)
-
-    @staticmethod
-    def audit_insert(mapper, connection, target):
-        """Listen for the `after_insert` event and create an AuditLog entry"""
-        target.create_audit(connection, target.__tablename__, target.uid, ACTION_CREATE)
-
-    @staticmethod
-    def audit_delete(mapper, connection, target):
-        """Listen for the `after_delete` event and create an AuditLog entry"""
-        target.create_audit(connection, target.__tablename__, target.uid, ACTION_DELETE)
-
-    @staticmethod
-    def audit_update(mapper, connection, target):
-        """Listen for the `after_update` event and create an AuditLog entry with before and after state changes"""
-        state_before = {}
-        state_after = {}
-        inspector = inspect(target)
-        attrs = class_mapper(target.__class__).column_attrs
-
-        for attr in attrs:
-            hist = getattr(inspector.attrs, attr.key).history
-            if hist.has_changes():
-                if isinstance(get_history(target, attr.key)[2], tuple):
-                    continue
-                state_before[attr.key] = get_history(target, attr.key)[2].pop()
-                state_after[attr.key] = getattr(target, attr.key)
-            else:
-                if attr.key in ["updated_by_uid", "created_by_uid"]:
-                    state_after[attr.key] = getattr(target, attr.key)
-                    try:
-                        state_before[attr.key] = get_history(target, attr.key)[2].pop()
-                    except Exception:
-                        state_before[attr.key] = getattr(target, attr.key)
-
-        if len(state_after.keys()) == 1:
-            if "updated_at" in list(state_after.keys()):
-                return
-
-        # clean up
-        if "json_content" in state_before:
-            del state_before["json_content"]
-        if "json_content" in state_after:
-            del state_after["json_content"]
-
-        if "pdf_content" in state_after:
-            del state_after["pdf_content"]
-        if "pdf_content" in state_before:
-            del state_before["pdf_content"]
-
-        target.create_audit(
-            connection,
-            target.__tablename__,
-            target.uid,
-            ACTION_UPDATE,
-            state_before=json.dumps(state_before, default=custom_serial),
-            state_after=json.dumps(state_after, default=custom_serial),
-        )
felicity/apps/auditlog/repositories.py (deleted)

@@ -1,7 +0,0 @@
-from felicity.apps.abstract import BaseRepository
-from felicity.apps.auditlog.entities import AuditLog
-
-
-class AuditLogRepository(BaseRepository[AuditLog]):
-    def __init__(self) -> None:
-        super().__init__(AuditLog)
felicity/apps/auditlog/services.py (deleted)

@@ -1,9 +0,0 @@
-from felicity.apps.abstract import BaseService
-from felicity.apps.auditlog.entities import AuditLog
-from felicity.apps.auditlog.repositories import AuditLogRepository
-from felicity.apps.common.schemas.dummy import Dummy
-
-
-class AuditLogService(BaseService[AuditLog, Dummy, Dummy]):
-    def __init__(self) -> None:
-        super().__init__(AuditLogRepository)
felicity/apps/common/utils/serializer.py

@@ -1,14 +1,16 @@
 from datetime import datetime
+from enum import Enum

-from felicity.core.dtz import format_datetime
+# from felicity.core.dtz import format_datetime


 def marshaller(
-    obj, path=None, memoize=None, exclude: list[str] = None, depth=2
+        obj, path=None, memoize=None, exclude: list[str] = None, depth=2
 ) -> dict | str:
-    """Notes:
-    1. We use memoization To prevent marshalling the same object again hence speed things up
-    2. We use path tracking To stop marshalling when a path starts to repeat itself or meets a certain path restriction
-    """
+    """
+    Custom marshaller function to convert objects to dictionaries or strings with proper handling for
+    StrEnum, datetime, and other custom objects.
+    """
     if memoize is None:
         memoize = {}

@@ -22,34 +24,38 @@ def marshaller(
     if id(obj) in memoize:
        return memoize[id(obj)]

-    if not hasattr(obj, "__dict__"):
+    # Handling non-dict objects a.k.a values
+    if not (hasattr(obj, "__dict__") or isinstance(obj, dict)):
         if obj is None:
             return ""
-        if isinstance(obj, datetime):
-            return format_datetime(obj, human_format=False, with_time=True)
-        if hasattr(obj, "__str__"):
-            return obj.__str__()
-        return obj
+        elif isinstance(obj, datetime):
+            return obj.isoformat()  # format_datetime(obj, human_format=False, with_time=True)
+        elif hasattr(obj, "__str__"):
+            return obj.__str__()  # Convert other objects to their string representation
+        else:
+            return obj
+
+    # Handling dict and objects with __dict__
+    if isinstance(obj, Enum):  # enums are dicts
+        return obj.value  # Convert StrEnum to its value

     if depth <= 0:
         return {}

+    items = obj.__dict__.items() if hasattr(obj, "__dict__") else obj.items()
     result = {}
-    for key, val in obj.__dict__.items():
+
+    for key, val in items:
         if (key.startswith("_") or key in exclude) or (path and path[-1] == key):
             continue

-        element = []
-        # if isinstance(val, bool):
-        #     element = val
+        # Process lists and other values
         if isinstance(val, list):
-            for item in val:
-                element.append(
-                    marshaller(item, path + [key], memoize, exclude, depth - 1)
-                )
+            result[key] = [
+                marshaller(item, path + [key], memoize, exclude, depth - 1) for item in val
+            ]
         else:
-            element = marshaller(val, path + [key], memoize, exclude, depth - 1)
-            result[key] = element
+            result[key] = marshaller(val, path + [key], memoize, exclude, depth - 1)

-        memoize[id(obj)] = result
+    memoize[id(obj)] = result
     return result
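A quick illustration of the revised `marshaller` on the value types it now special-cases: `Enum` members collapse to their value, `datetime` to an ISO string, and plain dicts are now traversed. The sample enum is hypothetical, and the unshown top of the function is assumed to still normalise `path` and `exclude` to empty containers:

    from datetime import datetime
    from enum import Enum

    class Color(Enum):
        RED = "red"

    marshaller(Color.RED)                      # -> "red"
    marshaller(datetime(2024, 8, 1))           # -> "2024-08-01T00:00:00"
    marshaller({"color": Color.RED, "n": 1})   # -> {"color": "red", "n": "1"}

Note the last case: bare ints fall through to the `__str__` branch, so scalar values come back as strings.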
felicity/apps/errlog/entities.py (deleted)

@@ -1,10 +0,0 @@
-from sqlalchemy import Column
-from sqlalchemy.dialects.postgresql import JSONB
-
-from felicity.apps.abstract import AuditUser
-
-
-class ErrorLog(AuditUser):
-    __tablename__ = "error_log"
-
-    content = Column(JSONB)
felicity/apps/errlog/repositories.py (deleted)

@@ -1,7 +0,0 @@
-from felicity.apps.abstract.repository import BaseRepository
-from felicity.apps.errlog.entities import ErrorLog
-
-
-class ErrorLogRepository(BaseRepository[ErrorLog]):
-    def __init__(self) -> None:
-        super().__init__(ErrorLog)
felicity/apps/errlog/schemas.py (deleted)

@@ -1,24 +0,0 @@
-from typing import Any, Optional
-
-from pydantic import BaseModel, ConfigDict
-
-
-#
-#  Job Schemas
-#
-class ErrorLogBase(BaseModel):
-    content: Optional[Any] = None
-
-
-class ErrorLog(ErrorLogBase):
-    uid: str | None = None
-
-    model_config = ConfigDict(from_attributes=True)
-
-
-class ErrorLogCreate(ErrorLogBase):
-    pass
-
-
-class ErrorLogUpdate(ErrorLogBase):
-    pass
felicity/apps/errlog/services.py (deleted)

@@ -1,9 +0,0 @@
-from felicity.apps.abstract.service import BaseService
-from felicity.apps.errlog.entities import ErrorLog
-from felicity.apps.errlog.repositories import ErrorLogRepository
-from felicity.apps.errlog.schemas import ErrorLogCreate, ErrorLogUpdate
-
-
-class ErrorLogService(BaseService[ErrorLog, ErrorLogCreate, ErrorLogUpdate]):
-    def __int__(self):
-        super().__init__(ErrorLogRepository)
felicity/apps/iol/meilisearch/client.py

@@ -1,26 +1,34 @@
 import logging
 from datetime import datetime

 import meilisearch

 from felicity.core.config import settings

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)


 class MeiliSearchClient:
     def __init__(self):
         self.client = meilisearch.Client(settings.MEILISEARCH_SERVER, settings.MEILISEARCH_API_KEY)

     def create_index(self, index_name: str):
-        return self.client.create_index(uid=index_name, options={"primary_key": "uid"})
+        logger.info("meilisearch -- create index --")
+        return self.client.create_index(uid=index_name, options={"primaryKey": "uid"})

     def add_documents(self, index_name: str, docs: list):
+        logger.info("meilisearch -- add documents --")
         index = self.client.index(index_name)
         return index.add_documents(docs)

     def update_documents(self, index_name: str, docs: list):
+        logger.info("meilisearch -- update documents --")
         index = self.client.index(index_name)
         return index.update_documents(docs)

     def delete_document(self, index_name: str, doc_id: str):
+        logger.info("meilisearch -- delete document --")
         index = self.client.index(index_name)
         return index.delete_document(doc_id)

@@ -33,6 +41,7 @@ class MeiliSearchClient:
         date_gt: datetime = None,
         date_lt: datetime = None
     ):
+        logger.info("meilisearch -- search --")
         index = self.client.index(index_name)

         # Build the filter string

@@ -55,7 +64,9 @@ class MeiliSearchClient:
         return index.search(query, options)

     def delete_index(self, index_name: str):
+        logger.info("meilisearch -- delete index --")
         return self.client.delete_index(index_name)

     def list_indexes(self):
+        logger.info("meilisearch -- list indexes --")
         return self.client.get_indexes()
felicity/apps/iol/meilisearch/handler.py

@@ -1,7 +1,20 @@
+import json
+from datetime import datetime
+from enum import StrEnum
+
 from apps.common.utils.serializer import marshaller
 from apps.iol.meilisearch.client import MeiliSearchClient


+class CustomJSONEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, StrEnum):
+            return obj.value  # Serialize StrEnum to its value
+        if isinstance(obj, datetime):
+            return obj.isoformat()  # Serialize datetime to ISO format string
+        return super().default(obj)
+
+
 class MeilisearchSyncHandler:
     def __init__(self):
         self.client = MeiliSearchClient()

@@ -11,19 +24,19 @@ class MeilisearchSyncHandler:
         return table_name in self.tables_of_interest

     @staticmethod
-    def get_document(instance):
-        return marshaller(instance)
+    def get_document(metadata):
+        return marshaller(metadata)

     def on_event(self, event_type, table_name, tracked):
         if not self.should_sync(table_name):
             return

-        document = self.get_document(tracked)
+        doc = self.get_document(tracked)

         self.client.create_index(table_name)
         if event_type == 'after-insert':
-            self.client.add_documents(table_name, [document])
+            self.client.add_documents(table_name, [doc])
         elif event_type == 'after-update':
-            self.client.update_documents(table_name, [document])
+            self.client.update_documents(table_name, [doc])
         elif event_type == 'after-delete':
-            self.client.delete_document(table_name, document["uid"])
+            self.client.delete_document(table_name, doc["uid"])
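End to end, an `after-insert` event now reaches Meilisearch roughly like this (document and table name are illustrative; a reachable Meilisearch server is assumed, and `should_sync` only lets tables listed in `tables_of_interest` through):

    from apps.iol.meilisearch.handler import MeilisearchSyncHandler

    doc = {"uid": "s-001", "sample_id": "S24-0001", "status": "received"}
    # Creates/targets the "sample" index (primaryKey "uid") and upserts the document.
    MeilisearchSyncHandler().on_event("after-insert", "sample", doc)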
felicity/apps/iol/minio/client.py

@@ -1,4 +1,5 @@
+import io
 import logging
 from typing import BinaryIO

 from minio import Minio

@@ -7,11 +8,14 @@ from minio.error import S3Error
 from felicity.core.config import settings
 from .enum import MinioBucket

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)


 class MinioClient:
     def __init__(self):
         self.client = Minio(
-            settings.MINIO_SERVER,
+            endpoint=settings.MINIO_SERVER,
             access_key=settings.MINIO_ACCESS,
             secret_key=settings.MINIO_SECRET,
             secure=False

@@ -21,10 +25,12 @@ class MinioClient:
         return self.client.bucket_exists(bucket)

     def make_bucket(self, bucket: MinioBucket):
+        logger.info(f"minio -- add {bucket} bucket --")
         if not self.bucket_exists(bucket):
             self.client.make_bucket(bucket)

     def put_object(self, bucket: MinioBucket, object_name: str, data, metadata, content_type="application/pdf"):
+        logger.info(f"minio -- put {bucket} object --")
         self.make_bucket(bucket)

         if not isinstance(data, BinaryIO):

@@ -43,6 +49,7 @@ class MinioClient:
             raise Exception(f"Failed to upload file: {str(e)}")

     def get_object(self, bucket: MinioBucket, object_names: list[str]):
+        logger.info(f"minio -- get {bucket} object --")
         objects = []
         try:
             for obj_name in object_names:
felicity/apps/notification/services.py

@@ -1,5 +1,7 @@
+import json
 from typing import Any, List, Optional

+from apps.common.utils.serializer import marshaller
 from felicity.apps.abstract.service import BaseService
 from felicity.apps.common.channel import broadcast
 from felicity.apps.notification.entities import (ActivityFeed, ActivityStream,

@@ -29,13 +31,13 @@ class ActivityFeedService(
         return await super().save(activity_feed)

     async def remove_subscriber(
-        self, activity_feed: ActivityFeed, user: User
+            self, activity_feed: ActivityFeed, user: User
     ) -> ActivityFeed:
         activity_feed.subscribers.remove(user)
         return await super().save(activity_feed)

     async def add_subscriber(
-        self, activity_feed: ActivityFeed, user: User
+            self, activity_feed: ActivityFeed, user: User
     ) -> ActivityFeed:
         if user not in activity_feed.viewers:
             activity_feed.subscribers.append(user)

@@ -50,12 +52,12 @@ class ActivityStreamService(
         super().__init__(ActivityStreamRepository)

     async def stream(
-        self,
-        obj: Any,
-        actor: User,
-        verb: str,
-        object_type: str,
-        feeds: List[ActivityFeed] = None,
+            self,
+            obj: Any,
+            actor: User,
+            verb: str,
+            object_type: str,
+            feeds: List[ActivityFeed] = None,
     ):
         if feeds is None:
             feeds = []

@@ -68,20 +70,20 @@ class ActivityStreamService(
             target_uid=None,
         )
         stream = await self.create(s_in)
-        await broadcast.publish(NotificationChannel.ACTIVITIES, stream)
+        await broadcast.publish(NotificationChannel.ACTIVITIES, json.dumps(marshaller(stream)))

     async def reset_feeds(self, activity_stream: ActivityStream) -> ActivityStream:
         activity_stream.feeds.clear()
         return await super().save(activity_stream)

     async def remove_feed(
-        self, activity_stream: ActivityStream, feed: ActivityFeed
+            self, activity_stream: ActivityStream, feed: ActivityFeed
     ) -> ActivityStream:
         activity_stream.feeds.remove(feed)
         return await super().save(activity_stream)

     async def add_feed(
-        self, activity_stream: ActivityStream, feed: ActivityFeed
+            self, activity_stream: ActivityStream, feed: ActivityFeed
     ) -> ActivityStream:
         if feed not in activity_stream.feeds:
             activity_stream.feeds.append(feed)

@@ -93,13 +95,13 @@ class ActivityStreamService(
         return await super().save(activity_stream)

     async def remove_viewer(
-        self, activity_stream: ActivityStream, viewer: User
+            self, activity_stream: ActivityStream, viewer: User
     ) -> ActivityStream:
         activity_stream.viewers.remove(viewer)
         return await super().save(activity_stream)

     async def add_viewer(
-        self, activity_stream: ActivityStream, viewer: User
+            self, activity_stream: ActivityStream, viewer: User
     ) -> ActivityStream:
         if viewer not in activity_stream.viewers:
             activity_stream.viewers.append(viewer)

@@ -111,7 +113,7 @@ class ActivityStreamService(

     @classmethod
     async def for_viewer(
-        self, activity_stream: ActivityStream, viewer: User, seen=False
+            self, activity_stream: ActivityStream, viewer: User, seen=False
     ) -> Optional[List[ActivityStream]]:
         """Streams for user: seen or unseen"""

@@ -121,8 +123,8 @@ class NotificationService(
 ):
     def __init__(self):
         super().__init__(NotificationRepository)

-    async def notify(self,message: str, users, departments=None, groups=None) -> None:
+    async def notify(self, message: str, users, departments=None, groups=None) -> None:
         n_in = NotificationCreate(message=message)
         notification = await super().create(n_in, related=["users", "departments", "groups"])
         if not isinstance(users, list):

@@ -131,13 +133,13 @@ class NotificationService(
         notification.departments = departments if departments else []
         notification.groups = groups if groups else []
         notification = await self.save(notification)
-        await broadcast.publish(NotificationChannel.NOTIFICATIONS, notification)
+        await broadcast.publish(NotificationChannel.NOTIFICATIONS, json.dumps(marshaller(notification)))

     async def filter(
-        self,
-        group_uid: str | None,
-        department_uid: str | None,
-        user_uid: str | None,
+            self,
+            group_uid: str | None,
+            department_uid: str | None,
+            user_uid: str | None,
     ) -> List[Notification]:
         filters = {}

@@ -157,7 +159,7 @@ class NotificationService(
         return await super().save(notification)

     async def remove_viewer(
-        self, notification: Notification, user: User
+            self, notification: Notification, user: User
     ) -> Notification:
         notification.viewers.remove(user)
         return await super().save(notification)

@@ -173,13 +175,13 @@ class NotificationService(
         return await super().save(notification)

     async def remove_department(
-        self, notification: Notification, department: Department
+            self, notification: Notification, department: Department
     ) -> Notification:
         notification.departments.remove(department)
         return await super().save(notification)

     async def add_department(
-        self, notification: Notification, department: Department
+            self, notification: Notification, department: Department
     ) -> Notification:
         if department not in self.departments:
             notification.departments.append(department)

@@ -191,7 +193,7 @@ class NotificationService(
         return await super().save(notification)

     async def remove_group(
-        self, notification: Notification, group: Group
+            self, notification: Notification, group: Group
     ) -> Notification:
         notification.groups.remove(group)
         return await super().save(notification)

@@ -207,7 +209,7 @@ class NotificationService(
         return await super().save(notification)

     async def remove_users(
-        self, notification: Notification, user: User
+            self, notification: Notification, user: User
     ) -> Notification:
         notification.users.remove(user)
         return await super().save(notification)
@@ -1,91 +1,114 @@
 import asyncio
+import inspect
 import logging
+import traceback
 from asyncio import create_task, gather, Lock as ALock
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from threading import Lock
-from typing import Callable, List, Dict
+from typing import Callable, List, Dict, Any

 # Initialize logger
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

-subscribers: Dict[str, List[Callable]] = {}
-lock = Lock()  # Synchronous lock
-alock = ALock()  # Asynchronous lock
+subscribers: Dict[str, List[Callable[..., Any]]] = {}
+sync_lock = Lock()  # Synchronous lock
+async_lock = ALock()  # Asynchronous lock


+def partition_functions(functions: List[Callable[..., Any]]) -> (List[Callable[..., Any]], List[Callable[..., Any]]):
+    async_funcs = [fn for fn in functions if inspect.iscoroutinefunction(fn)]
+    sync_funcs = [fn for fn in functions if not inspect.iscoroutinefunction(fn)]
+    return async_funcs, sync_funcs
+
+
+def get_or_create_event_loop() -> asyncio.AbstractEventLoop:
+    try:
+        return asyncio.get_event_loop()
+    except RuntimeError:
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        return loop


 # Synchronous Event System
-def subscribe(event_type: str, fn: Callable):
-    with lock:
-        if event_type not in subscribers:
-            subscribers[event_type] = []
-        subscribers[event_type].append(fn)
+def subscribe(event_type: str, fn: Callable[..., Any]) -> None:
+    with sync_lock:
+        subscribers.setdefault(event_type, []).append(fn)


-def unsubscribe(event_type: str, fn: Callable):
-    with lock:
+def unsubscribe(event_type: str, fn: Callable[..., Any]) -> None:
+    with sync_lock:
         if event_type in subscribers:
             try:
                 subscribers[event_type].remove(fn)
             except ValueError:
-                logger.warning(f"Function not found in subscribers for event type: {event_type}")
+                logger.error(
+                    f"Function not found in subscribers for event type: {event_type}\n{traceback.format_exc()}")


-def post_event(event_type: str, **kwargs):
-    with lock:
-        if event_type not in subscribers:
-            return
-        current_subscribers: List[Callable] = subscribers[event_type][:]
+def post_event(event_type: str, **kwargs: Any) -> None:
+    with sync_lock:
+        current_subscribers = subscribers.get(event_type, [])

+    # Separate sync from async functions
+    async_funcs, sync_funcs = partition_functions(current_subscribers)
+
+    # Handle async functions
+    loop = get_or_create_event_loop()
+    for fn in async_funcs:
+        if loop.is_running():
+            loop.create_task(fn(**kwargs))
+        else:
+            asyncio.run(fn(**kwargs))

     # Handle sync functions
     with ThreadPoolExecutor() as executor:
-        futures = []
-        for fn in current_subscribers:
-            futures.append(executor.submit(safe_execute, fn, **kwargs))
+        futures = [executor.submit(safe_execute, fn, **kwargs) for fn in sync_funcs]

         for future in as_completed(futures):
-            if future.exception() is not None:
-                logger.error(f"Error executing event handler: {future.exception()}")
+            try:
+                future.result()  # Re-raise any exception from the executed function
+            except Exception as e:
+                logger.error(f"Error executing event handler: {e}\n{traceback.format_exc()}")


-def safe_execute(fn: Callable, **kwargs):
+def safe_execute(fn: Callable[..., Any], **kwargs: Any) -> None:
     try:
         fn(**kwargs)
     except Exception as e:
-        logger.error(f"Error in event subscriber {fn.__name__}: {e}")
+        logger.error(f"Error in event subscriber {fn.__name__}: {e}\n{traceback.format_exc()}")


 # Asynchronous Event System
-async def asubscribe(event_type: str, fn: Callable):
-    async with alock:
-        if event_type not in subscribers:
-            subscribers[event_type] = []
-        subscribers[event_type].append(fn)
+async def asubscribe(event_type: str, fn: Callable[..., Any]) -> None:
+    async with async_lock:
+        subscribers.setdefault(event_type, []).append(fn)


-async def aunsubscribe(event_type: str, fn: Callable):
-    async with alock:
+async def aunsubscribe(event_type: str, fn: Callable[..., Any]) -> None:
+    async with async_lock:
         if event_type in subscribers:
             try:
                 subscribers[event_type].remove(fn)
             except ValueError:
-                logger.warning(f"Function not found in subscribers for event type: {event_type}")
+                logger.error(
+                    f"Function not found in subscribers for event type: {event_type}\n{traceback.format_exc()}")


-async def apost_event(event_type: str, **kwargs):
-    async with alock:
-        if event_type not in subscribers:
-            return
-        current_subscribers: List[Callable] = subscribers[event_type][:]
+async def apost_event(event_type: str, **kwargs: Any) -> None:
+    async with async_lock:
+        current_subscribers = subscribers.get(event_type, [])

     tasks = [create_task(asafe_execute(fn, **kwargs)) for fn in current_subscribers]
     await gather(*tasks)


-async def asafe_execute(fn: Callable, **kwargs):
+async def asafe_execute(fn: Callable[..., Any], **kwargs: Any) -> None:
     try:
-        if callable(fn):
-            if hasattr(fn, "__await__"):  # Check if it's an async function
-                await fn(**kwargs)
-            else:
-                fn(**kwargs)
+        if inspect.iscoroutinefunction(fn):
+            await fn(**kwargs)
+        else:
+            fn(**kwargs)
     except Exception as e:
-        logger.error(f"Error in async event subscriber {fn.__name__}: {e}")
+        logger.error(f"Error in event subscriber {fn.__name__}: {e}\n{traceback.format_exc()}")
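For orientation, a minimal usage sketch of the reworked event API above, with the functions assumed to be in scope; the event name and both handlers are hypothetical. `post_event` snapshots the subscriber list under `sync_lock`, schedules coroutine subscribers on an event loop, and runs plain functions through a thread pool via `safe_execute`:

    import asyncio

    def on_sample_created(**kwargs):            # hypothetical sync subscriber
        print("sync handler:", kwargs)

    async def notify_sample_created(**kwargs):  # hypothetical async subscriber
        await asyncio.sleep(0)                  # stand-in for real I/O
        print("async handler:", kwargs)

    subscribe("sample_created", on_sample_created)
    subscribe("sample_created", notify_sample_created)
    post_event("sample_created", uid="S-001")

    # From already-async code, the awaitable variants cover both kinds:
    # await asubscribe("sample_created", notify_sample_created)
    # await apost_event("sample_created", uid="S-001")

One caveat visible in the new code itself: when `post_event` runs while a loop is already running, it calls `loop.create_task` from synchronous code, which is only safe if the caller is executing on that loop's own thread.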
@@ -4,8 +4,6 @@ from felicity.apps.analysis.entities.analysis import AnalysisCategory # noqa
 from felicity.apps.analysis.entities.analysis import AnalysisCoding # noqa
 from felicity.apps.analysis.entities.analysis import AnalysisInterim # noqa
 from felicity.apps.analysis.entities.analysis import AnalysisRequest # noqa
 from felicity.apps.analysis.entities.analysis import \
     AnalysisUncertainty # noqa
 from felicity.apps.analysis.entities.analysis import CodingStandard # noqa
 from felicity.apps.analysis.entities.analysis import Profile # noqa
 from felicity.apps.analysis.entities.analysis import ProfileCoding # noqa

@@ -20,7 +18,6 @@ from felicity.apps.analysis.entities.qc import QCTemplate # noqa
 from felicity.apps.analysis.entities.results import AnalysisResult # noqa
 from felicity.apps.analysis.entities.results import ResultMutation # noqa
 from felicity.apps.analytics.entities import ReportMeta # noqa
-from felicity.apps.auditlog.entities import AuditLog # noqa
 from felicity.apps.billing.entities import AnalysisDiscount # noqa
 from felicity.apps.billing.entities import AnalysisPrice # noqa
 from felicity.apps.billing.entities import ProfileDiscount # noqa
@@ -1,11 +1,15 @@
 import logging
 from enum import StrEnum
-from typing import Optional
+from typing import Optional, Any

 from bson import ObjectId
 from motor.motor_asyncio import AsyncIOMotorClient

 from felicity.core.config import settings

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

 client = AsyncIOMotorClient(
     f"mongodb://{settings.MONGODB_USER}:{settings.MONGODB_PASS}@{settings.MONGODB_SERVER}"
 )

@@ -22,22 +26,47 @@ class MongoService:
         self.db = client.felicity

     async def create(self, collection: MongoCollection, data: dict) -> Optional[dict]:
         logger.info(f"mongodb -- create:{collection} --")
         collection = self.db.get_collection(collection)
         created = await collection.insert_one(data)
         return await collection.find_one({"_id": created.inserted_id})

+    async def upsert(self, collection: MongoCollection, uid: str, data: dict) -> Optional[dict]:
+        logger.info(f"mongodb -- upsert:{collection} --")
+        collection = self.db.get_collection(collection)
+        result = await collection.update_one({'_id': self.oid(uid)}, {'$set': data}, upsert=True)
+        return await collection.find_one({"_id": result.upserted_id})
+
     async def retrieve(self, collection: MongoCollection, uid: str):
         logger.info(f"mongodb -- retrieve:{collection} --")
         collection = self.db.get_collection(collection)
         item = await collection.find_one({"_id": self.oid(uid)})
-        item['_id'] = self.flake_id_from_hex(str(item['_id']))
+        if item:
+            item['_id'] = self.flake_id_from_hex(str(item['_id']))
         return item

+    async def search(
+            self, collection: MongoCollection, filters: dict[str, Any], projection: dict[str, int] = None,
+            limit: int = 100
+    ) -> list[dict[str, Any]]:
+        """
+        Search documents in MongoDB based on user-defined filters.
+
+        :param filters: A dictionary of filters to apply.
+        :param projection: A dictionary specifying fields to include or exclude (1 to include, 0 to exclude).
+        :param limit: Maximum number of documents to return.
+        :return: A list of documents matching the filters.
+        """
+        logger.info(f"mongodb -- search:{collection} --")
+        collection = self.db.get_collection(collection)
+        cursor = collection.find(filters, projection).limit(limit)
+        results = []
+        async for document in cursor:
+            results.append(document)
+        return results
+
     async def update(self, collection: MongoCollection, uid: str, data: dict) -> Optional[bool]:
         logger.info(f"mongodb -- update:{collection} --")
         collection = self.db.get_collection(collection)
         if len(data) < 1:
             return None

@@ -50,6 +79,7 @@ class MongoService:
             return False

     async def delete(self, collection: MongoCollection, uid: str) -> bool:
         logger.info(f"mongodb -- delete:{collection} --")
         collection = self.db.get_collection(collection)
         item = await collection.find_one({"_id": self.oid(uid)})
         if item:
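A short usage sketch of the MongoService shown above, assuming a no-argument constructor (as implied by the `self.db = client.felicity` initializer) and a hypothetical collection value; filters and projections use plain MongoDB query syntax:

    service = MongoService()

    async def recent_creates():
        # "audit_log" is a hypothetical MongoCollection value for illustration.
        return await service.search(
            "audit_log",
            filters={"action": "create"},
            projection={"action": 1, "state_after": 1},
            limit=20,
        )

Worth noting from the hunk itself: `update_one(..., upsert=True)` only sets `upserted_id` when a new document is inserted, so the trailing `find_one` in `upsert` returns None whenever an existing document was updated rather than created.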
@@ -1,4 +0,0 @@
-from .builder import QueryBuilder, settable_attributes, smart_query
-from .builderx import SmartQueryMixin, EagerLoadMixin
-
-__all__ = ["QueryBuilder", "settable_attributes", "smart_query", "SmartQueryMixin"]
@@ -1,646 +0,0 @@
-from collections import OrderedDict, abc
-from typing import TypeVar
-
-from sqlalchemy import asc, desc, inspect, select
-from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property
-from sqlalchemy.orm import (RelationshipProperty, aliased, joinedload,
-                            subqueryload)
-from sqlalchemy.orm.state import InstanceState
-from sqlalchemy.orm.util import AliasedClass
-from sqlalchemy.sql import extract, operators
-
-M = TypeVar("M")
-
-JOINED = "joined"
-SUBQUERY = "subquery"
-
-RELATION_SPLITTER = "___"
-OPERATOR_SPLITTER = "__"
-
-DESC_PREFIX = "-"
-
-
-class ModelNotFoundError(ValueError):
-    pass
-
-
-def _flatten_filter_keys(filters):
-    """
-    :type filters: dict|list
-    Flatten the nested filters, extracting keys where they correspond
-    to smart_query paths, e.g.
-    {or_: {'id__gt': 1000, and_ : {
-        'id__lt': 500,
-        'related___property__in': (1,2,3)
-    }}}
-
-    Yields:
-
-    'id__gt', 'id__lt', 'related___property__in'
-
-    Also allow lists (any abc.Sequence subclass) to enable support
-    of expressions like.
-
-    (X OR Y) AND (W OR Z)
-
-    { and_: [
-        {or_: {'id__gt': 5, 'related_id__lt': 10}},
-        {or_: {'related_id2__gt': 1, 'name__like': 'Bob' }}
-    ]}
-    """
-
-    if isinstance(filters, abc.Mapping):
-        for key, value in filters.items():
-            if callable(key):
-                yield from _flatten_filter_keys(value)
-            else:
-                yield key
-
-    elif isinstance(filters, abc.Sequence):
-        for f in filters:
-            yield from _flatten_filter_keys(f)
-
-    else:
-        raise TypeError(
-            "Unsupported type (%s) in filters: %r", (type(filters), filters)
-        )
-
-
-def _parse_path_and_make_aliases(entity, entity_path, attrs, aliases):
-    """
-    :type entity: InspectionMixin
-    :type entity_path: str
-    :type attrs: list
-    :type aliases: OrderedDict
-
-    Sample values:
-
-    attrs: ['product__subject_ids', 'user_id', '-group_id',
-            'user__name', 'product__name', 'product__grade_from__order']
-    relations: {'product': ['subject_ids', 'name'], 'user': ['name']}
-
-    """
-    relations = {}
-    # take only attributes that have magic RELATION_SPLITTER
-    for attr in attrs:
-        # from attr (say, 'product__grade__order') take
-        # relationship name ('product') and nested attribute ('grade__order')
-        if RELATION_SPLITTER in attr:
-            relation_name, nested_attr = attr.split(RELATION_SPLITTER, 1)
-            if relation_name in relations:
-                relations[relation_name].append(nested_attr)
-            else:
-                relations[relation_name] = [nested_attr]
-
-    for relation_name, nested_attrs in relations.items():
-        path = (
-            entity_path + RELATION_SPLITTER + relation_name
-            if entity_path
-            else relation_name
-        )
-        if relation_name not in entity.relations:
-            raise KeyError(
-                "Incorrect path `{}`: "
-                "{} doesnt have `{}` relationship ".format(path, entity, relation_name)
-            )
-        relationship = getattr(entity, relation_name)
-        alias = aliased(relationship.property.mapper.class_)
-        aliases[path] = alias, relationship
-        _parse_path_and_make_aliases(alias, path, nested_attrs, aliases)
-
-
-def _get_root_cls(query):
-    # sqlalchemy < 1.4.0
-    if hasattr(query, "_entity_zero"):
-        return query._entity_zero().class_
-
-    # sqlalchemy >= 1.4.0
-    else:
-        if hasattr(query, "_entity_from_pre_ent_zero"):
-            return query._entity_from_pre_ent_zero().class_
-
-        # sqlalchemy 2.x
-        else:
-            if query.__dict__["_propagate_attrs"]["plugin_subject"].class_:
-                return query.__dict__["_propagate_attrs"]["plugin_subject"].class_
-
-    raise ValueError("Cannot get a root class from`{}`".format(query))
-
-
-def filter_expr(model, **filters):
-    """
-    forms expressions like [Product.age_from = 5,
-                            Product.subject_ids.in_([1,2])]
-    from filters like {'age_from': 5, 'subject_ids__in': [1,2]}
-
-    Example 1:
-        db.query(Product).filter(
-            *Product.filter_expr(age_from = 5, subject_ids__in=[1, 2]))
-
-    Example 2:
-        filters = {'age_from': 5, 'subject_ids__in': [1,2]}
-        db.query(Product).filter(*Product.filter_expr(**filters))
-
-
-    ### About alias ###:
-    If we will use alias:
-        alias = aliased(Product) # table name will be product_1
-    we can't just write query like
-        db.query(alias).filter(*Product.filter_expr(age_from=5))
-    because it will be compiled to
-        SELECT * FROM product_1 WHERE product.age_from=5
-    which is wrong: we select from 'product_1' but filter on 'product'
-    such filter will not work
-
-    We need to obtain
-        SELECT * FROM product_1 WHERE product_1.age_from=5
-    For such case, we can call filter_expr ON ALIAS:
-        alias = aliased(Product)
-        db.query(alias).filter(*alias.filter_expr(age_from=5))
-
-    Alias realization details:
-      * we allow to call this method
-        either ON ALIAS (say, alias.filter_expr())
-        or on class (Product.filter_expr())
-      * when method is called on alias, we need to generate SQL using
-        aliased table (say, product_1), but we also need to have a real
-        class to call methods on (say, Product.relations)
-      * so, we have 'mapper' that holds table name
-        and 'cls' that holds real class
-
-        when we call this method ON ALIAS, we will have:
-            mapper = <product_1 table>
-            cls = <Product>
-        when we call this method ON CLASS, we will simply have:
-            mapper = <Product> (or we could write <Product>.__mapper__.
-                                It doesn't matter because when we call
-                                <Product>.getattr, SA will magically
-                                call <Product>.__mapper__.getattr())
-            cls = <Product>
-    """
-    if isinstance(model, AliasedClass):
-        mapper, cls = model, inspect(model).mapper.class_
-    else:
-        mapper = cls = model
-
-    expressions = []
-    valid_attributes = filterable_attributes(model)
-    for attr, value in filters.items():
-        # if attribute is filtered by method, call this method
-        if attr in hybrid_methods(model):
-            method = getattr(cls, attr)
-            expressions.append(method(value, mapper=mapper))
-        # else just add simple condition (== for scalars or IN for lists)
-        else:
-            # determine attrbitute name and operator
-            # if they are explicitly set (say, id___between), take them
-            if OPERATOR_SPLITTER in attr:
-                attr_name, op_name = attr.rsplit(OPERATOR_SPLITTER, 1)
-                if op_name not in _operators:
-                    raise KeyError(
-                        "Expression `{}` has incorrect "
-                        "operator `{}`".format(attr, op_name)
-                    )
-                op = _operators[op_name]
-            # assume equality operator for other cases (say, id=1)
-            else:
-                attr_name, op = attr, operators.eq
-
-            if attr_name not in valid_attributes:
-                raise KeyError(
-                    "Expression `{}` "
-                    "has incorrect attribute `{}`".format(attr, attr_name)
-                )
-
-            column = getattr(mapper, attr_name)
-            expressions.append(op(column, value))
-
-    return expressions
-
-
-def smart_query(query, filters=None, sort_attrs=None, schema=None):
-    """
-    Does magic Django-ish joins like post___user___name__startswith='Bob'
-    (see https://goo.gl/jAgCyM)
-    Does filtering, sorting and eager loading at the same time.
-    And if, say, filters and sorting need the same joinm it will be done
-    only one. That's why all stuff is combined in single method
-
-    :param query: sqlalchemy.orm.query.Query
-    :param filters: dict
-    :param sort_attrs: List[basestring]
-    :param schema: dict
-    """
-    if not filters:
-        filters = {}
-    if not sort_attrs:
-        sort_attrs = []
-
-    # sqlalchemy >= 1.4.0, should probably a. check something else to determine if we need to convert
-    # AppenderQuery to a query, b. probably not hack it like this
-    # noinspection PyProtectedMember
-    if type(query).__name__ == "AppenderQuery" and query._statement:
-        sess = query.session
-        # noinspection PyProtectedMember
-        query = query._statement
-        query.session = sess
-
-    root_cls = _get_root_cls(query)  # for example, User or Post
-    attrs = list(_flatten_filter_keys(filters)) + list(
-        map(lambda s: s.lstrip(DESC_PREFIX), sort_attrs)
-    )
-    aliases = OrderedDict({})
-    _parse_path_and_make_aliases(root_cls, "", attrs, aliases)
-
-    loaded_paths = []
-    for path, al in aliases.items():
-        relationship_path = path.replace(RELATION_SPLITTER, ".")
-        query = query.outerjoin(al[0], al[1])
-        loaded_paths.append(relationship_path)
-
-    def recurse_filters(_filters):
-        if isinstance(_filters, abc.Mapping):
-            for attr, value in _filters.items():
-                if callable(attr):
-                    # E.g. or_, and_, or other sqlalchemy expression
-                    yield attr(*recurse_filters(value))
-                    continue
-                if RELATION_SPLITTER in attr:
-                    parts = attr.rsplit(RELATION_SPLITTER, 1)
-                    entity, attr_name = aliases[parts[0]][0], parts[1]
-                else:
-                    entity, attr_name = root_cls, attr
-                try:
-                    yield from filter_expr(entity, **{attr_name: value})
-                except KeyError as e:
-                    raise KeyError("Incorrect filter path `{}`: {}".format(attr, e))
-
-        elif isinstance(_filters, abc.Sequence):
-            for f in _filters:
-                yield from recurse_filters(f)
-
-    query = query.filter(*recurse_filters(filters))
-
-    for attr in sort_attrs:
-        if RELATION_SPLITTER in attr:
-            prefix = ""
-            if attr.startswith(DESC_PREFIX):
-                prefix = DESC_PREFIX
-                attr = attr.lstrip(DESC_PREFIX)
-            parts = attr.rsplit(RELATION_SPLITTER, 1)
-            entity, attr_name = aliases[parts[0]][0], prefix + parts[1]
-        else:
-            entity, attr_name = root_cls, attr
-        try:
-            query = query.order_by(*order_expr(entity, attr_name))
-        except KeyError as e:
-            raise KeyError("Incorrect order path `{}`: {}".format(attr, e))
-
-    if schema:
-        query = query.options(*_eager_expr_from_schema(schema))
-
-    return query
-
-
-def eager_expr(schema):
-    """
-    :type schema: dict
-    """
-    return _eager_expr_from_schema(schema)
-
-
-def _flatten_schema(schema):
-    """
-    :type schema: dict
-    """
-
-    def _flatten(schema, parent_path, result):
-        """
-        :type schema: dict
-        """
-        for path, value in schema.items():
-            # for supporting schemas like Product.user: {...},
-            # we transform, say, Product.user to 'user' string
-            attr = path
-            path = path.key
-
-            if isinstance(value, tuple):
-                join_method, inner_schema = value[0], value[1]
-            elif isinstance(value, dict):
-                join_method, inner_schema = JOINED, value
-            else:
-                join_method, inner_schema = value, None
-
-            full_path = parent_path + "." + path if parent_path else path
-            result[attr] = join_method
-
-            if inner_schema:
-                _flatten(inner_schema, full_path, result)
-
-    result = {}
-    _flatten(schema, "", result)
-    return result
-
-
-def _eager_expr_from_flat_schema(flat_schema):
-    """
-    :type flat_schema: dict
-    """
-    result = []
-    for path, join_method in flat_schema.items():
-        if join_method == JOINED:
-            result.append(joinedload(path))
-        elif join_method == SUBQUERY:
-            result.append(subqueryload(path))
-        else:
-            raise ValueError("Bad join method `{}` in `{}`".format(join_method, path))
-    return result
-
-
-def _eager_expr_from_schema(schema):
-    def _get_expr(schema, result):
-        for path, value in schema.items():
-            if isinstance(value, tuple):
-                join_method, inner_schema = value[0], value[1]
-                load_option = _create_eager_load_option(path, join_method)
-                result.append(
-                    load_option.options(*_eager_expr_from_schema(inner_schema))
-                )
-            elif isinstance(value, dict):
-                join_method, inner_schema = JOINED, value
-                load_option = _create_eager_load_option(path, join_method)
-                result.append(
-                    load_option.options(*_eager_expr_from_schema(inner_schema))
-                )
-                # load_option = _create_eager_load_option(path, value)
-            else:
-                result.append(_create_eager_load_option(path, value))
-
-    result = []
-    _get_expr(schema, result)
-    return result
-
-
-def _create_eager_load_option(path, join_method):
-    if join_method == JOINED:
-        return joinedload(path)
-    elif join_method == SUBQUERY:
-        return subqueryload(path)
-    else:
-        raise ValueError("Bad join method `{}` in `{}`".format(join_method, path))
-
-
-def order_expr(model, *columns):
-    """
-    Forms expressions like [desc(User.first_name), asc(User.phone)]
-    from list like ['-first_name', 'phone']
-
-    Example for 1 column:
-        db.query(User).order_by(*User.order_expr('-first_name'))
-        # will compile to ORDER BY user.first_name DESC
-
-    Example for multiple columns:
-        columns = ['-first_name', 'phone']
-        db.query(User).order_by(*User.order_expr(*columns))
-        # will compile to ORDER BY user.first_name DESC, user.phone ASC
-
-    About cls_or_alias, mapper, cls: read in filter_expr method description
-    """
-
-    expressions = []
-    for attr in columns:
-        fn, attr = (desc, attr[1:]) if attr.startswith(DESC_PREFIX) else (asc, attr)
-        if attr not in sortable_attributes(model):
-            raise KeyError("Cant order {} by {}".format(model, attr))
-
-        expr = fn(getattr(model, attr))
-        expressions.append(expr)
-    return expressions
-
-
-_operators = {
-    "isnull": lambda c, v: (c == None) if v else (c != None),
-    "exact": operators.eq,
-    "ne": operators.ne,  # not equal or is not (for None)
-    "gt": operators.gt,  # greater than , >
-    "ge": operators.ge,  # greater than or equal, >=
-    "lt": operators.lt,  # lower than, <
-    "le": operators.le,  # lower than or equal, <=
-    "in": operators.in_op,
-    "notin": operators.notin_op,
-    "between": lambda c, v: c.between(v[0], v[1]),
-    "like": operators.like_op,
-    "ilike": operators.ilike_op,
-    "startswith": operators.startswith_op,
-    "istartswith": lambda c, v: c.ilike(v + "%"),
-    "endswith": operators.endswith_op,
-    "iendswith": lambda c, v: c.ilike("%" + v),
-    "contains": lambda c, v: c.ilike("%{v}%".format(v=v)),
-    "year": lambda c, v: extract("year", c) == v,
-    "year_ne": lambda c, v: extract("year", c) != v,
-    "year_gt": lambda c, v: extract("year", c) > v,
-    "year_ge": lambda c, v: extract("year", c) >= v,
-    "year_lt": lambda c, v: extract("year", c) < v,
-    "year_le": lambda c, v: extract("year", c) <= v,
-    "month": lambda c, v: extract("month", c) == v,
-    "month_ne": lambda c, v: extract("month", c) != v,
-    "month_gt": lambda c, v: extract("month", c) > v,
-    "month_ge": lambda c, v: extract("month", c) >= v,
-    "month_lt": lambda c, v: extract("month", c) < v,
-    "month_le": lambda c, v: extract("month", c) <= v,
-    "day": lambda c, v: extract("day", c) == v,
-    "day_ne": lambda c, v: extract("day", c) != v,
-    "day_gt": lambda c, v: extract("day", c) > v,
-    "day_ge": lambda c, v: extract("day", c) >= v,
-    "day_lt": lambda c, v: extract("day", c) < v,
-    "day_le": lambda c, v: extract("day", c) <= v,
-}
-
-
-def _klass(obj):
-    if isinstance(inspect(obj), InstanceState):  # type(inspect(obj)) ==InstanceState
-        return obj._sa_instance_state.mapper.class_
-    return obj
-
-
-def columns(model):
-    """classproperty"""
-    cls = _klass(model)
-    return inspect(cls).columns.keys()
-
-
-def primary_keys_full(model):
-    """classproperty
-    Get primary key properties for a SQLAlchemy cls.
-    Taken from marshmallow_sqlalchemy
-    """
-    cls = _klass(model)
-    mapper = cls.__mapper__
-    return [mapper.get_property_by_column(column) for column in mapper.primary_key]
-
-
-def primary_keys(model):
-    """classproperty"""
-    cls = _klass(model)
-    return [pk.key for pk in primary_keys_full(cls)]
-
-
-def relations(model):
-    """classproperty
-    Return a `list` of relationship names or the given model"""
-    cls = _klass(model)
-    return [c.key for c in cls.__mapper__.attrs if isinstance(c, RelationshipProperty)]
-
-
-def settable_relations(model):
-    """classproperty
-    Return a `list` of relationship names or the given model"""
-    cls = _klass(model)
-    return [r for r in relations(cls) if getattr(model, r).property.viewonly is False]
-
-
-def hybrid_properties(model):
-    """classproperty"""
-    cls = _klass(model)
-    items = inspect(cls).all_orm_descriptors
-    return [item.__name__ for item in items if isinstance(item, hybrid_property)]
-
-
-def hybrid_methods_full(model):
-    """classproperty"""
-    cls = _klass(model)
-    items = inspect(cls).all_orm_descriptors
-    return {item.func.__name__: item for item in items if type(item) == hybrid_method}
-
-
-def hybrid_methods(model):
-    """classproperty"""
-    cls = _klass(model)
-    return list(hybrid_methods_full(cls).keys())
-
-
-def filterable_attributes(model):
-    cls = _klass(model)
-    return relations(cls) + columns(cls) + hybrid_properties(cls) + hybrid_methods(cls)
-
-
-def sortable_attributes(model):
-    cls = _klass(model)
-    return columns(cls) + hybrid_properties(cls)
-
-
-def settable_attributes(model):
-    cls = _klass(model)
-    return sortable_attributes(cls) + settable_relations(cls)
-
-
-class QueryBuilder:
-    def __init__(self, model: M) -> None:
-        self.model = model
-
-    @property
-    def query(self):
-        return select(self.model)
-
-    def with_(self, schema):
-        """
-        Query class and eager load schema at once.
-        :type schema: dict
-
-        Example:
-            schema = {
-                Post.user: JOINED,  # joinedload user
-                Post.comments: (SUBQUERY, {  # load comments in separate query
-                    Comment.user: JOINED  # but, in this separate query, join user
-                })
-            }
-            User.with_(schema).first()
-        """
-        return self.query.options(*eager_expr(schema or {}))
-
-    def with_joined(self, *paths):
-        """
-        Eagerload for simple cases where we need to just
-        joined load some relations
-        You can only load direct relationships.
-
-        :type paths: *List[QueryableAttribute]
-
-        Example 1:
-            Comment.with_joined(Comment.user, Comment.post).first()
-        """
-        options = [joinedload(path) for path in paths]
-        return self.query.options(*options)
-
-    def with_subquery(self, *paths):
-        """
-        Eagerload for simple cases where we need to just
-        joined load some relations
-        You can only load direct relationships.
-
-        :type paths: *List[QueryableAttribute]
-
-        Example 1:
-            User.with_subquery(User.posts, User.comments).all()
-        """
-        options = [subqueryload(path) for path in paths]
-        return self.query.options(*options)
-
-    def smart_query(self, filters=None, sort_attrs=None, schema=None):
-        """
-        Does magic Django-ish joins like post___user___name__startswith='Bob'
-        (see https://goo.gl/jAgCyM)
-        Does filtering, sorting and eager loading at the same time.
-        And if, say, filters and sorting need the same joinm it will be done
-        only one. That's why all stuff is combined in single method
-
-        :param filters: dict
-        :param sort_attrs: List[basestring]
-        :param schema: dict
-        """
-        return smart_query(self.query, filters, sort_attrs, schema)
-
-    def where(self, **filters):
-        """
-        Shortcut for smart_query() method
-        Example 1:
-            Product.where(subject_ids__in=[1,2], grade_from_id=2).all()
-
-        Example 2:
-            filters = {'subject_ids__in': [1,2], 'grade_from_id': 2}
-            Product.where(**filters).all()
-
-        Example 3 (with joins):
-            Post.where(public=True, user___name__startswith='Bi').all()
-        """
-        return self.smart_query(filters)
-
-    def sort(self, *columns):
-        """
-        Shortcut for smart_query() method
-        Example 1:
-            User.sort('first_name','-user_id')
-            This is equal to
-                db.query(User).order_by(*User.order_expr('first_name','-user_id'))
-
-        Example 2:
-            columns = ['first_name','-user_id']
-            User.sort(*columns)
-            This is equal to
-                columns = ['first_name','-user_id']
-                db.query(User).order_by(*User.order_expr(*columns))
-
-        Exanple 3 (with joins):
-            Post.sort('comments___rating', 'user___name').all()
-        """
-        return self.smart_query({}, columns)
-
-    async def fill(self, **kwargs):
-        model = self.model
-        for name in kwargs.keys():
-            if name in settable_attributes(model):
-                setattr(model, name, kwargs[name])
-            else:
-                raise KeyError("Attribute '{}' doesn't exist".format(name))
-
-        return model
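For readers skimming the deletion above: the removed builder implemented the Django-ish filter and sort syntax its docstrings describe. A sketch of that syntax, reusing the Post/User examples from those docstrings (the models and the `-created_at` column are illustrative only, not part of this repository):

    qb = QueryBuilder(Post)              # Post is the docstrings' example model
    stmt = qb.smart_query(
        filters={"public": True, "user___name__startswith": "Bob"},  # ___ walks the relation
        sort_attrs=["-created_at"],      # "-" prefix sorts descending (hypothetical column)
        schema={Post.user: JOINED},      # eager-load the relation alongside
    )

The `__` splitter selects an operator from the `_operators` table (for example `__in`, `__ilike`, `__between`), while `___` triggers an outer join through the named relationship.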
@@ -8,10 +8,10 @@ const LoadingMessage = defineAsyncComponent(
 )

 const props = defineProps({
-  targetId: String,
+  targetUid: String,
   targetType: String,
 });
-const { targetType, targetId } = toRefs(props);
+const { targetType, targetUid } = toRefs(props);

 const userStore = useUserStore();
 const auditLogStore = useAuditLogStore();

@@ -22,14 +22,14 @@ const { auditLogs, fetchingAudits } = storeToRefs(auditLogStore);
 userStore.fetchUsers({});
 auditLogStore.fetchAuditLogs({
   targetType: targetType?.value,
-  targetId: targetId?.value,
+  targetUid: targetUid?.value,
 });

 let users = computed(() => userStore.getUsers);

-function translateUser(userId: any): string {
+function translateUser(userUid: any): string {
   const user = users?.value?.find(
-    (u: any) => u["uid"]?.toString() === userId?.toString()
+    (u: any) => u["uid"]?.toString() === userUid?.toString()
   );
   if (!user) return "";
   return (user as any)["userName"];

@@ -104,7 +104,7 @@ function changes(log: any): any {
 <div class="bg-indigo-600 rounded-full h-4 w-4 border-gray-200 border-2 z-10"></div>
 <div class="ml-4 font-medium italic">
   <span>
-    {{ translateUser(log?.userId) }} {{ translateAction(log?.action) }}
+    {{ translateUser(log?.userUid) }} {{ translateAction(log?.action) }}
     {{ log?.targetType }}
   </span>
   on <span> {{ parseDate(log?.stateAfter?.updated_at) }}</span>

@@ -61,5 +61,5 @@ const columns = [
 <span v-else>No Mutations for this result</span>
 <h4 class="mt-4 font-bold text-xl text-gray-500">Result Audit Log</h4>
 <hr>
-<FelAuditLog targetType="analysis_result" :targetId="analysisResultesultUid" />
+<FelAuditLog targetType="analysis_result" :targetUid="analysisResultesultUid" />
 </template>

@@ -103,12 +103,12 @@ export const GET_GROUPS_AND_PERMISSIONS = gql`
 `;

 export const GET_AUDIT_LOG_FOR_TARGET = gql`
-  query getAuditLogs($targetType: String!, $targetId: String!) {
-    auditLogsFilter(targetType: $targetType, targetId: $targetId) {
+  query getAuditLogs($targetType: String!, $targetUid: String!) {
+    auditLogsFilter(targetType: $targetType, targetUid: $targetUid) {
       uid
-      userId
+      userUid
       targetType
-      targetId
+      targetUid
       action
       stateBefore
       stateAfter

@@ -30,7 +30,7 @@ let client = computed(() => clientStore.getClient);
 <div class="pt-4">
   <tab-samples v-if="currentTab === 'samples'" />
   <tab-contacts v-if="currentTab === 'contacts'" :clientUid="client?.uid" />
-  <tab-logs v-if="currentTab === 'logs'" targetType="client" :targetId="client?.uid" />
+  <tab-logs v-if="currentTab === 'logs'" targetType="client" :targetUid="client?.uid" />
 </div>
 </section>
 </template>

@@ -179,7 +179,7 @@ const updatePatient = (patient: IPatient) => {
 <tab-samples v-if="currentTab === 'samples'" target="patient-samples" :targetUid="patientForm.uid" />
 <tab-cases v-if="currentTab === 'cases'" />
 <tab-patient-bills v-if="currentTab === 'billing'" :patientUid="patientForm?.uid" />
-<tab-logs v-if="currentTab === 'logs'" targetType="patient" :targetId="patientForm?.uid" />
+<tab-logs v-if="currentTab === 'logs'" targetType="patient" :targetUid="patientForm?.uid" />
 </section>
 </div>

@@ -64,7 +64,7 @@ function addSample(patient?: IPatient): void {
 <div>
   <tab-samples v-if="currentTab === 'samples'" target="patient-samples" :targetUid="patient?.uid" />
   <tab-cases v-if="currentTab === 'cases'" />
-  <tab-logs v-if="currentTab === 'logs'" targetType="patient" :targetId="patient?.uid" />
+  <tab-logs v-if="currentTab === 'logs'" targetType="patient" :targetUid="patient?.uid" />
   <tab-patient-bills v-if="currentTab === 'billing'" :patientUid="patient?.uid" />
 </div>
 </section>

@@ -55,7 +55,7 @@ let currentTabComponent = computed(() => "tab-" + currentTab);
 <tab-logs
   v-if="currentTab === 'logs'"
   targetType="sample"
-  :targetId="sampleStore.sample?.uid"
+  :targetUid="sampleStore.sample?.uid"
 />
 <tab-impress v-if="currentTab === 'impress-reports'" />
 </div>

@@ -56,7 +56,7 @@
 <tab-logs
   v-if="currentTab === 'logs'"
   targetType="shipment"
-  :targetId="shipment?.uid"
+  :targetUid="shipment?.uid"
 />
 </div>

@@ -47,7 +47,7 @@
 <tab-logs
   v-if="currentTab === 'logs'"
   targetType="worksheet"
-  :targetId="worksheet?.uid"
+  :targetUid="worksheet?.uid"
 />
 </div>
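The frontend hunks above consistently rename targetId/userId to targetUid/userUid. The matching backend resolver is not shown in this excerpt, so the following is only a hedged sketch of what the renamed strawberry field presumably looks like, with names inferred from the GraphQL document above (strawberry exposes snake_case fields as camelCase):

    import strawberry

    @strawberry.type
    class AuditLogQuery:
        @strawberry.field
        async def audit_logs_filter(
            self, target_type: str, target_uid: str
        ) -> list["AuditLogType"]:  # "AuditLogType" is a hypothetical type name
            ...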