mirror of https://github.com/beak-insights/felicity-lims.git
synced 2025-02-16 13:02:53 +08:00
removed intermediary states and replaced with process tracker + redis
This commit is contained in:
parent
60916b62de
commit
d9e13221af
25 changed files with 197 additions and 105 deletions
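
In short: instead of flipping records into transient SUBMITTING / APPROVING / PUBLISHING states before queuing a background job, the mutations in this diff now queue the job and register each affected object with the Redis-backed process tracker, which the background worker releases on completion; the frontend listens on the processing channel to lock tracked objects. A minimal sketch of the new flow, assembled from the pieces below (queue_job is a hypothetical stand-in for the JobService().create(job_schema) call):

    from felicity.apps.iol.redis import process_tracker
    from felicity.apps.iol.redis.enum import TrackableObject

    async def submit(uids: list[str]) -> None:
        # previously: bulk-update every result to ResultState.SUBMITTING first
        await queue_job(uids)  # hypothetical stand-in for JobService().create(...)
        for uid in uids:
            # mark the object as busy; the UI locks it until it is released
            await process_tracker.process(uid=uid, object_type=TrackableObject.RESULT)

    async def worker(uids: list[str]) -> None:
        ...  # perform the actual submission
        for uid in uids:
            # unlock the object once the background work is done
            await process_tracker.release(uid=uid, object_type=TrackableObject.RESULT)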
@@ -18,5 +18,6 @@ RUN apt-get update && apt-get install -y git libpq-dev gcc
COPY ./requirements.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /tmp/requirements.txt
WORKDIR /app
COPY ./felicity /app/felicity
COPY . /app
RUN pip install -e .
ENV PYTHONPATH=/app

@@ -47,7 +47,7 @@ services:
      - MINIO_ACCESS=felicity
      - MINIO_SECRET=felicity
    volumes:
      - ./felicity/:/app/felicity
      - ./felicity:/app/felicity
    ports:
      - 8000:8000
    depends_on:

@@ -29,6 +29,8 @@ from felicity.apps.analysis.workflow.analysis_result import AnalysisResultWorkFlow
from felicity.apps.analysis.workflow.sample import SampleWorkFlow
from felicity.apps.billing.utils import bill_order
from felicity.apps.client.services import ClientService
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.job import schemas as job_schemas
from felicity.apps.job.enum import (JobAction, JobCategory, JobPriority,
                                    JobState)

@@ -461,13 +463,6 @@ async def publish_samples(
    # set status of these samples to PUBLISHING for those whose action is "publish" !important
    final_publish = list(filter(lambda p: p.action == "publish", samples))
    not_final = list(filter(lambda p: p.action != "publish", samples))
    if final_publish:
        await SampleService().bulk_update_with_mappings(
            [
                {"uid": sample.uid, "status": SampleState.PUBLISHING}
                for sample in final_publish
            ]
        )

    data = [{"uid": s.uid, "action": s.action} for s in samples]  # noqa
    job_schema = job_schemas.JobCreate(

@@ -481,7 +476,11 @@ async def publish_samples(
    )

    await JobService().create(job_schema)
    if final_publish:
        for sample in final_publish:
            await process_tracker.process(uid=sample.uid, object_type=TrackableObject.SAMPLE)

    # TODO: clean up below - probably no longer necessary - needs checking
    # !important for frontend
    # unfreeze frontend and return sample to original state since it is a non final publish
    if not_final:

@@ -8,18 +8,17 @@ from felicity.api.gql.auth import auth_from_info
from felicity.api.gql.permissions import (CanVerifyAnalysisResult,
                                          IsAuthenticated)
from felicity.api.gql.types import OperationError, OperationSuccess
from felicity.apps.analysis.enum import ResultState
from felicity.apps.analysis.services.analysis import SampleService
from felicity.apps.analysis.services.result import AnalysisResultService
from felicity.apps.analysis.utils import retest_from_result_uids
from felicity.apps.analysis.workflow.analysis_result import AnalysisResultWorkFlow
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.job import schemas as job_schemas
from felicity.apps.job.enum import (JobAction, JobCategory, JobPriority,
                                    JobState)
from felicity.apps.job.services import JobService
from felicity.apps.notification.services import ActivityStreamService
from felicity.apps.worksheet.enum import WorkSheetState
from felicity.apps.worksheet.services import WorkSheetService
from felicity.apps.analysis.workflow.analysis_result import AnalysisResultWorkFlow

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

@@ -54,10 +53,10 @@ AnalysisResultOperationResponse = strawberry.union(

@strawberry.mutation(permission_classes=[IsAuthenticated])
async def submit_analysis_results(
    info,
    analysis_results: List[ARResultInputType],
    source_object: str,
    source_object_uid: str,
    info,
    analysis_results: List[ARResultInputType],
    source_object: str,
    source_object_uid: str,
) -> AnalysisResultOperationResponse:
    felicity_user = await auth_from_info(info)

@@ -66,11 +65,6 @@ async def submit_analysis_results(

    an_results = [result.__dict__ for result in analysis_results]

    # set status of these analysis_results to SUBMITTING
    await AnalysisResultService().bulk_update_with_mappings(
        [{"uid": _ar["uid"], "status": ResultState.SUBMITTING} for _ar in an_results]
    )

    # submit an results as jobs
    job_schema = job_schemas.JobCreate(  # noqa
        action=JobAction.RESULT_SUBMIT,

@@ -83,15 +77,13 @@ async def submit_analysis_results(
    )

    await JobService().create(job_schema)
    for _ar in an_results:
        await process_tracker.process(uid=_ar["uid"], object_type=TrackableObject.RESULT)

    if source_object == "worksheet" and source_object_uid:
        ws = await WorkSheetService().get(uid=source_object_uid)
        await WorkSheetService().change_state(
            ws.uid, WorkSheetState.SUBMITTING, felicity_user.uid
        )
        # elif source_object == "sample" and source_object_uid:
        #     sa = await SampleService().get(uid=source_object_uid)
        #     await sa.change_status("processing", felicity_user.uid)
        await process_tracker.process(uid=source_object_uid, object_type=TrackableObject.WORKSHEET)
    elif source_object == "sample" and source_object_uid:
        await process_tracker.process(uid=source_object_uid, object_type=TrackableObject.SAMPLE)

    return OperationSuccess(
        message="Your results are being submitted in the background."

@@ -100,18 +92,13 @@ async def submit_analysis_results(

@strawberry.mutation(permission_classes=[CanVerifyAnalysisResult])
async def verify_analysis_results(
    info, analyses: list[str], source_object: str, source_object_uid: str
    info, analyses: list[str], source_object: str, source_object_uid: str
) -> AnalysisResultOperationResponse:
    felicity_user = await auth_from_info(info)

    if len(analyses) == 0:
        return OperationError(error="No analyses to verify were provided!")

    # set status of these analysis_results to PROCESSING
    await AnalysisResultService().bulk_update_with_mappings(
        [{"uid": uid, "status": ResultState.APPROVING} for uid in analyses]
    )

    job_schema = job_schemas.JobCreate(  # noqa
        action=JobAction.RESULT_VERIFY,
        category=JobCategory.RESULT,

@@ -123,15 +110,14 @@ async def verify_analysis_results(
    )

    await JobService().create(job_schema)
    for uid in analyses:
        await process_tracker.process(uid=uid, object_type=TrackableObject.RESULT)

    if source_object == "worksheet" and source_object_uid:
        ws = await WorkSheetService().get(uid=source_object_uid)
        await WorkSheetService().change_state(
            ws.uid, WorkSheetState.APPROVING, felicity_user.uid
        )
        # elif source_object == "sample" and source_object_uid:
        #     sa = await SampleService().get(uid=source_object_uid)
        #     await sa.change_status("APPROVING", felicity_user.uid)
        await process_tracker.process(uid=source_object_uid, object_type=TrackableObject.WORKSHEET)
    elif source_object == "sample" and source_object_uid:
        # TODO: ? we might not need to lock the sample
        await process_tracker.process(uid=source_object_uid, object_type=TrackableObject.SAMPLE)

    return OperationSuccess(
        message="Your results are being verified in the background."

@@ -155,7 +141,6 @@ async def retract_analysis_results(info, analyses: list[str]) -> AnalysisResultResponse:
            a_result.uid, retested_by=felicity_user, action="retract"
        )


        # monkeypatch -> notify of sample state
        sample = await SampleService().get(uid=a_result.sample_uid)
        await ActivityStreamService().stream(

@@ -233,7 +218,7 @@ async def cancel_analysis_results(info, analyses: list[str]) -> AnalysisResultResponse:

@strawberry.mutation(permission_classes=[IsAuthenticated])
async def re_instate_analysis_results(
    info, analyses: list[str]
    info, analyses: list[str]
) -> AnalysisResultResponse:
    felicity_user = await auth_from_info(info)

@@ -1,6 +1,5 @@
from typing import List, Optional, Self

import strawberry  # noqa
from typing import List, Optional, Self

from felicity.api.gql.setup.types.department import DepartmentType
from felicity.api.gql.types import PageInfo

@@ -12,6 +11,13 @@ class PermissionType:
    action: str | None = None
    target: str | None = None
    active: bool | None = None
    #
    created_by_uid: str | None = None
    created_by: Self | None = None
    created_at: str | None = None
    updated_by_uid: str | None = None
    updated_by: Self | None = None
    updated_at: str | None = None


@strawberry.type

@@ -23,6 +29,13 @@ class GroupType:
    permissions: Optional[List[PermissionType]] = None
    active: bool | None = None
    pages: str | None = None
    #
    created_by_uid: str | None = None
    created_by: Self | None = None
    created_at: str | None = None
    updated_by_uid: str | None = None
    updated_by: Self | None = None
    updated_at: str | None = None


@strawberry.type

@@ -89,3 +102,10 @@ class UserPreferenceType:
    expanded_menu: bool | None = None
    departments: list[DepartmentType] | None = None
    theme: str | None = None
    #
    created_by_uid: str | None = None
    created_by: Self | None = None
    created_at: str | None = None
    updated_by_uid: str | None = None
    updated_by: Self | None = None
    updated_at: str | None = None

@@ -7,7 +7,6 @@ class SampleState(StrEnum):
    RECEIVED = auto()  # received in the laboratory and ready for processing
    AWAITING = auto()  # pending approval
    APPROVED = auto()  # authorised for release
    PUBLISHING = auto()  # printed samples, ready for dispatch
    PUBLISHED = auto()  # printed samples, ready for dispatch
    INVALIDATED = auto()  # approval condemnation
    CANCELLED = auto()  # no longer required <in other words deleted>

@@ -19,13 +18,9 @@ class SampleState(StrEnum):

class ResultState(StrEnum):
    PENDING = auto()  # analytes that are pending results
    SUBMITTING = auto()
    RESULTED = (
        auto()
    )  # analytes that have results but not approved yet. 'to_be_verified' / 'un-authorised'
    RESULTED = auto()  # analytes that have results but not approved yet. 'to_be_verified' / 'un-authorised'
    RETRACTED = auto()  # analytes with erroneous results for correction
    CANCELLED = auto()  # analytes that are no longer required <in other words deleted>
    APPROVING = auto()
    APPROVED = auto()  # analytes that are authorised/approved
    REFERRED = auto()  # analytes that are referred

@@ -291,9 +291,7 @@ class SampleService(BaseService[Sample, SampleCreate, SampleUpdate]):

    async def get_incomplete_analysis_results(self, uid: str):
        pending_states = [
            ResultState.SUBMITTING,
            ResultState.PENDING,
            ResultState.APPROVING,
            ResultState.REFERRED,
        ]
        analysis = await self.get_analysis_results(uid)

@@ -2,6 +2,8 @@ import logging
from typing import NoReturn

from felicity.apps.analysis import utils
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.job.enum import JobState
from felicity.apps.job.services import JobService
from felicity.apps.notification.services import NotificationService

@@ -31,6 +33,8 @@ async def submit_results(job_uid: str) -> NoReturn:
    try:
        await utils.results_submitter(job.data, user)
        await job_service.change_status(job.uid, new_status=JobState.FINISHED)
        for res in job.data:
            await process_tracker.release(uid=res['uid'], object_type=TrackableObject.RESULT)
        await notification_service.notify(
            "Your results were successfully submitted", user
        )

@@ -63,6 +67,8 @@ async def verify_results(job_uid: str) -> NoReturn:
    try:
        await utils.verify_from_result_uids(job.data, user)
        await job_service.change_status(job.uid, new_status=JobState.FINISHED)
        for res in job.data:
            await process_tracker.release(uid=res['uid'], object_type=TrackableObject.RESULT)
        await notification_service.notify(
            "Your results were successfully verified", user
        )

@@ -105,4 +111,4 @@ async def setup_billing(job_uid: str) -> NoReturn:
            f"Failed to setup billing in job with uid: {job.uid} with error: {str(e)}",
            user,
        )
        raise
        raise

@@ -1,5 +1,4 @@
import logging
import re
from typing import TYPE_CHECKING

from felicity.apps.analysis.entities.results import AnalysisResult

@@ -22,7 +21,7 @@ class AnalysisResultWorkFlow:
    Defines a set of guards that allow or prevent actions taken on AnalysisResult
    """

    def __init__(self):
    def __init__(self):
        self.analysis_result_service = AnalysisResultService()

    async def revert(self, uid: str, by_uid: str) -> None:

@@ -54,8 +53,8 @@ class AnalysisResultWorkFlow:
    async def _guard_assign(analysis_result: AnalysisResult) -> bool:
        allow = False
        if (
            analysis_result.status == ResultState.PENDING
            and analysis_result.assigned is False
            analysis_result.status == ResultState.PENDING
            and analysis_result.assigned is False
        ):
            allow = True

@@ -72,8 +71,8 @@ class AnalysisResultWorkFlow:
    async def _guard_un_assign(analysis_result: AnalysisResult) -> bool:
        allow = False
        if (
            analysis_result.status == ResultState.PENDING
            and analysis_result.assigned is True
            analysis_result.status == ResultState.PENDING
            and analysis_result.assigned is True
        ):
            allow = True

@@ -121,7 +120,7 @@ class AnalysisResultWorkFlow:
        if not allow:
            raise AnalysisResultWorkFlowException("Cannot re-instate this Result")
        return True

    async def submit(self, data: list[dict], submitter) -> tuple[list[AnalysisResult], list[AnalysisResult]]:
        _skipped = []
        _submitted = []

@@ -136,34 +135,35 @@ class AnalysisResultWorkFlow:
        return _skipped, _submitted

    async def _guard_submit(self, result: AnalysisResult) -> bool:
        if result.status not in [ResultState.PENDING, ResultState.SUBMITTING]:
        if result.status not in [ResultState.PENDING]:
            return False
        return True

    async def approve(self, result_uids: list[str], approved_by) -> list[AnalysisResult]:
        results = await self.analysis_result_service.get_all(uid__in=result_uids)  # get_related(related=["analysis"], uid__in=result_uids)
        results = await self.analysis_result_service.get_all(
            uid__in=result_uids)  # get_related(related=["analysis"], uid__in=result_uids)
        await self._guard_approve(results, approved_by.uid)
        return [(await self.analysis_result_service.verify(r.uid, approved_by)) for r in results]

    async def _guard_approve(
        self, analysis_results: list[AnalysisResult], approved_by_uid
        self, analysis_results: list[AnalysisResult], approved_by_uid
    ) -> bool:
        laboratory = await LaboratoryService().get_by_setup_name("felicity")
        settings = await LaboratorySettingService().get(
            laboratory_uid=laboratory.uid
        )
        states = [ResultState.RESULTED, ResultState.APPROVING]
        states = [ResultState.RESULTED]

        for result in analysis_results:
            # Self Verification check
            if (
                settings.allow_self_verification is False
                and result.analysis.self_verification is False
                settings.allow_self_verification is False
                and result.analysis.self_verification is False
            ):
                # First time verifier must not be the submitter
                if (
                    len(result.verified_by) == 0
                    and result.submitted_by_uid == approved_by_uid
                    len(result.verified_by) == 0
                    and result.submitted_by_uid == approved_by_uid
                ):
                    raise AnalysisResultWorkFlowException(
                        "Cannot approve a result of your own work"

@@ -1,4 +1,3 @@
import felicity.api.gql.analysis.types
from felicity.apps.analysis.entities.analysis import Sample
from felicity.apps.analysis.enum import ResultState, SampleState
from felicity.apps.analysis.services.analysis import SampleService

@@ -18,9 +17,9 @@ class SampleWorkFlow:
    async def revert(self, uid: str, by_uid: str) -> None:
        to_status = ResultState.PENDING
        results = await self.sample_service.get_analysis_results(uid)
        awaiting_satatuses = [ResultState.RESULTED, ResultState.APPROVING, ResultState.RETRACTED, ResultState.CANCELLED]
        awaiting_satatuses = [ResultState.RESULTED, ResultState.RETRACTED, ResultState.CANCELLED]
        approved_satatuses = [ResultState.APPROVED, ResultState.RETRACTED, ResultState.CANCELLED]

        if any([result.status in ResultState.PENDING for result in results]):
            to_status = SampleState.RECEIVED
        elif all([result.status == ResultState.CANCELLED for result in results]):

@@ -30,7 +29,7 @@ class SampleWorkFlow:
        elif all([result.status in approved_satatuses for result in results]):
            to_status = SampleState.APPROVED
        await self.sample_service.change_status(uid, to_status, by_uid)

    async def receive(self, uid, received_by):
        sample = await self.sample_service.get(uid=uid)
        await self._guard_receive(sample)

@@ -177,7 +176,7 @@ class SampleWorkFlow:

    @staticmethod
    async def _guard_publish(sample):
        allow = sample.status in [SampleState.APPROVED, SampleState.PUBLISHING]
        allow = sample.status in [SampleState.APPROVED]
        if not allow:
            raise SampleWorkFlowException("Cannot publish this Sample")
        return True

@@ -234,7 +233,7 @@ class SampleWorkFlow:
        # Are there results in referred state, or some in pending state?
        analysis, referred = await self.sample_service.get_referred_analyses(sample.uid)
        if not referred and list(  # and has pending results then :)
            filter(lambda an: an.status in [ResultState.PENDING], analysis)
            filter(lambda an: an.status in [ResultState.PENDING], analysis)
        ):
            allow = False

@@ -1,5 +1,6 @@
from pydantic import BaseModel, ConfigDict


#
# Client Schemas
#

@@ -76,6 +77,7 @@ class ClientContactBaseInDB(ClientContactBase):
class ClientContactCreate(ClientContactBase):
    client_uid: str
    email: str
    user_name: str


# Properties to receive via API on update

@@ -5,6 +5,8 @@ from felicity.apps.analysis.entities.analysis import Sample
from felicity.apps.analysis.enum import SampleState
from felicity.apps.analysis.services.analysis import SampleService
from felicity.apps.impress.sample import utils
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.job import schemas as job_schemas
from felicity.apps.job.enum import JobAction, JobCategory, JobState
from felicity.apps.job.enum import JobPriority

@@ -35,6 +37,7 @@ async def impress_results(job_uid: str):
        await utils.impress_samples(job.data, user)
    try:
        await JobService().change_status(job.uid, new_status=JobState.FINISHED)
        await process_tracker.release(uid=job.uid, object_type=TrackableObject.SAMPLE)
        await NotificationService().notify("Your results were successfully published", user)
    except Exception as e:
        await JobService().change_status(job.uid, new_status=JobState.FAILED)

@@ -48,11 +51,6 @@ async def impress_results(job_uid: str):
async def prepare_for_impress():
    samples: List[Sample] = await SampleService().get_all(status__in=[SampleState.APPROVED])
    sample_uids = [sample.uid for sample in samples]

    await SampleService().bulk_update_with_mappings(
        [{"uid": uid, "status": SampleState.PUBLISHING} for uid in sample_uids]
    )

    system_daemon: User = await UserService().get(email=settings.SYSTEM_DAEMON_EMAIL)

    job_schema = job_schemas.JobCreate(

@@ -66,3 +64,5 @@ async def prepare_for_impress():
    )

    await JobService().create(job_schema)
    for uid in sample_uids:
        await process_tracker.process(uid=uid, object_type=TrackableObject.SAMPLE)

@@ -44,7 +44,6 @@ async def impress_samples(sample_meta: List[any], user):
            SampleState.PAIRED,
            SampleState.AWAITING,
            SampleState.APPROVED,
            SampleState.PUBLISHING,
            SampleState.PUBLISHED,
        ]:
            impress_meta = marshaller(sample, exclude=exclude, depth=3)

@@ -0,0 +1,7 @@
from .client import create_redis_pool
from .tracking import process_tracker

__all__ = [
    "create_redis_pool",
    "process_tracker",
]

felicity/apps/iol/redis/enum.py Normal file (+8)

@@ -0,0 +1,8 @@
from enum import StrEnum, auto


class TrackableObject(StrEnum):
    WORKSHEET = auto()
    SAMPLE = auto()
    RESULT = auto()
    SHIPMENT = auto()

@@ -7,6 +7,14 @@ from .client import create_redis_pool


class ProcessingTracker:
    """Track background processing.

    This helps notify the frontend to lock tracked objects and prevent users
    from taking action on them while they are being processed.

    Note: The easiest implementation is via the Job service, since all
    background tasks go through jobs - samples, worksheets :)
    However, for minute objects like Result actions we might act on the
    specific implementations :)
    """

    def __init__(self):
        self._store = {}

@@ -52,8 +60,11 @@ class ProcessingTracker:
            pool.release(redis)
        else:
            exists = uid in self._store

        await broadcast.publish(NotificationChannel.PROCESSING, json.dumps({
            "uid": uid, "object_type": object_type, "status": "processing"
        }))
        return exists


process_tracker = ProcessingTracker()

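For reference, a minimal in-memory sketch of the contract ProcessingTracker implements (the real class stores uids in Redis when a pool is available and publishes on NotificationChannel.PROCESSING so the UI can lock and unlock objects; publish() and the "released" status below are illustrative assumptions, not the exact implementation):

    import json

    class TrackerSketch:
        def __init__(self):
            self._store: dict[str, str] = {}

        async def process(self, uid: str, object_type: str) -> None:
            # register the object as busy and tell the frontend to lock it
            self._store[uid] = object_type
            await publish(json.dumps({"uid": uid, "object_type": object_type, "status": "processing"}))

        async def release(self, uid: str, object_type: str) -> None:
            # forget the object and tell the frontend to unlock it
            self._store.pop(uid, None)
            await publish(json.dumps({"uid": uid, "object_type": object_type, "status": "released"}))
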
@@ -9,7 +9,6 @@ class ShipmentState(StrEnum):
    """

    DUE = auto()  # shipment received from external labs
    RECEIVING = auto()
    EMPTY = auto()  # shipment without samples
    PREPERATION = auto()  # shipment containing at least a single sample
    READY = auto()  # shipment finalised

@@ -2,6 +2,8 @@ import logging

from felicity.apps.iol.fhir.utils import (get_diagnostic_report_resource,
                                          get_shipment_bundle_resource)
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.iol.relay import post_data
from felicity.apps.job.enum import JobState
from felicity.apps.job.services import JobService

@@ -60,6 +62,7 @@ async def populate_shipment_manually(job_uid: str):
    await shipment_assign(shipment.uid, job.data, job.creator_uid)

    await job_service.change_status(job.uid, new_status=JobState.FINISHED)
    await process_tracker.release(uid=job.uid, object_type=TrackableObject.SHIPMENT)
    logger.info(f"Done !! Job {job_uid} was executed successfully :)")

@@ -132,6 +135,7 @@ async def receive_shipment(job_uid: str):
    await shipment_receive(shipment.uid, job.data, job.creator_uid)

    await job_service.change_status(job.uid, new_status=JobState.FINISHED)
    await process_tracker.release(uid=job.uid, object_type=TrackableObject.SHIPMENT)
    logger.info(f"Done !! Job {job_uid} was executed successfully :)")

@@ -17,6 +17,8 @@ from felicity.apps.client.services import ClientService
from felicity.apps.common.utils.serializer import marshaller
from felicity.apps.impress.shipment.utils import gen_pdf_manifest
from felicity.apps.iol.fhir.utils import get_shipment_bundle_resource
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.iol.relay import post_data
from felicity.apps.job import schemas as job_schemas
from felicity.apps.job.enum import (JobAction, JobCategory, JobPriority,

@@ -404,10 +406,8 @@ async def add_sh_receive_task(shipment_uid: str, actor_uid):
        data=None,
    )
    await job_service.create(job_schema)

    return await shipment_service.change_state(
        shipment.uid, ShipmentState.RECEIVING, actor_uid
    )
    await process_tracker.process(uid=shipment.uid, object_type=TrackableObject.SHIPMENT)
    return shipment


async def shipment_cancel(shipment_uid: str, actor_uid):

@@ -10,6 +10,8 @@ from felicity.apps.analysis.services.quality_control import (QCSetService,
from felicity.apps.analysis.services.result import AnalysisResultService
from felicity.apps.analysis.utils import get_qc_sample_type
from felicity.apps.analysis.workflow.analysis_result import AnalysisResultWorkFlow
from felicity.apps.iol.redis import process_tracker
from felicity.apps.iol.redis.enum import TrackableObject
from felicity.apps.job.enum import JobState
from felicity.apps.job.services import JobService
from felicity.apps.worksheet.entities import WorkSheet

@@ -19,6 +21,7 @@ from felicity.apps.worksheet.services import WorkSheetService
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


async def populate_worksheet_plate(job_uid: str):
    logger.info(f"starting job {job_uid} ....")
    job_service = JobService()

@@ -65,7 +68,7 @@ async def populate_worksheet_plate(job_uid: str):
            job.uid,
            new_status=JobState.FAILED,
            change_reason=f"WorkSheet {ws_uid} - contains at least a "
                          f"processed sample",
                          f"processed sample",
        )
        logger.warning(f"WorkSheet {ws_uid} - contains at least a processed sample")
        return

@@ -76,7 +79,7 @@ async def populate_worksheet_plate(job_uid: str):
            job.uid,
            new_status=JobState.FAILED,
            change_reason=f"WorkSheet {ws_uid} already has "
                          f"{ws.assigned_count} assigned samples",
                          f"{ws.assigned_count} assigned samples",
        )
        logger.warning(
            f"WorkSheet {ws_uid} already has {ws.assigned_count} assigned samples"

@@ -109,7 +112,7 @@ async def populate_worksheet_plate(job_uid: str):

    position = 1
    for key, sample in enumerate(
        sorted(samples, key=lambda s: s.uid, reverse=True)
        sorted(samples, key=lambda s: s.uid, reverse=True)
    ):

        while position in reserved:

@@ -150,6 +153,7 @@ async def populate_worksheet_plate(job_uid: str):
    await setup_ws_quality_control(ws)

    await job_service.change_status(job.uid, new_status=JobState.FINISHED)
    await process_tracker.release(uid=job.uid, object_type=TrackableObject.WORKSHEET)
    logger.info(f"Done !! Job {job_uid} was executed successfully :)")

@@ -348,7 +352,7 @@ async def populate_worksheet_plate_manually(job_uid: str):
            job.uid,
            new_status=JobState.FAILED,
            change_reason=f"WorkSheet {ws_uid} - contains at least a "
                          f"processed sample",
                          f"processed sample",
        )
        logger.warning(f"WorkSheet {ws_uid} - contains at least a processed sample")
        return

@@ -372,7 +376,7 @@ async def populate_worksheet_plate_manually(job_uid: str):

    position = 1
    for key, sample in enumerate(
        sorted(samples, key=lambda s: s.uid, reverse=True)
        sorted(samples, key=lambda s: s.uid, reverse=True)
    ):

        while position in reserved:

@@ -431,4 +435,5 @@ async def populate_worksheet_plate_manually(job_uid: str):
    await setup_ws_quality_control_manually(ws, data["qc_template_uid"])

    await job_service.change_status(job.uid, new_status=JobState.FINISHED)
    await process_tracker.release(uid=job.uid, object_type=TrackableObject.WORKSHEET)
    logger.info(f"Done !! Job {job_uid} was executed successfully :)")

@@ -21,7 +21,7 @@ class WorkSheetWorkFlow:
        results = _results + qc_results
        print("arrrrrrrrrrrrrr: ", results)

        awaiting_states = [ResultState.RESULTED, ResultState.APPROVING, ResultState.RETRACTED]
        awaiting_states = [ResultState.RESULTED, ResultState.RETRACTED]
        if all([(ar.status in awaiting_states) for ar in results]):
            to_status = WorkSheetState.AWAITING
        await self.worksheet_service.change_state(uid, to_status, by_uid)

@@ -43,12 +43,12 @@ class WorkSheetWorkFlow:
            worksheet.uid
        )
        if (
            len(
                list(
                    filter(lambda ar: ar.status in result_states, results + qc_results)
            len(
                list(
                    filter(lambda ar: ar.status in result_states, results + qc_results)
                )
            )
        )
            > 0
            > 0
        ):
            raise WorksheetWorkFlowException(
                "Cannot submit a Worksheet with pending results"

@@ -61,7 +61,7 @@ class WorkSheetWorkFlow:
        await self.worksheet_service.verify(worksheet.uid, approved_by)

    async def _guard_approve(self, worksheet: WorkSheet) -> bool:
        if worksheet.state not in [WorkSheetState.AWAITING, WorkSheetState.APPROVING]:
        if worksheet.state not in [WorkSheetState.AWAITING]:
            raise WorksheetWorkFlowException(
                f"Cannot approve a {worksheet.state} WorkSheet"
            )

@@ -2,11 +2,11 @@

import asyncio
import subprocess

import typer
from uvicorn import Config, Server

from felicity.almigrator import FelicityMigrator
from felicity.logcong import LOGGING_CONFIG

alembic_service = FelicityMigrator()
app = typer.Typer()

@@ -22,8 +22,8 @@ def runserver(host: str = "0.0.0.0", port: int = 8000, workers: int = 1, reload:
        port=port,
        workers=workers,
        reload=reload,
        log_level="info",
        use_colors=colors
        use_colors=colors,
        log_config=LOGGING_CONFIG
    )
    Server(config).run()

@@ -9,7 +9,6 @@ from felicity.apps.setup.services import (CountryService, DepartmentService,
                                          LaboratorySettingService,
                                          ProvinceService)
from felicity.core.config import get_settings

from .data import get_seeds

settings = get_settings()

@@ -93,7 +92,11 @@ async def seed_clients() -> None:
        contacts = await client_contact_Service.get(client_uid=client.uid)
        if not contacts:
            cc_in = client_schemas.ClientContactCreate(
                first_name="Sr", last_name="in Charge", client_uid=client.uid, email=f"{uuid4().hex}@dummy.inc"
                first_name="Sr",
                last_name="in Charge",
                client_uid=client.uid,
                email=f"{uuid4().hex}@dummy.inc",
                user_name=uuid4().hex
            )
            await client_contact_Service.create(cc_in)

felicity/logcong.py Normal file (+50)

@@ -0,0 +1,50 @@
from typing import Any

LOGGING_CONFIG: dict[str, Any] = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(asctime)s - %(name)s - %(levelprefix)s %(message)s",
            "use_colors": None,
        },
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": '%(asctime)s - %(name)s - %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
            # noqa: E501
        },
        "access_file": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": '%(asctime)s - %(name)s - %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
            # noqa: E501
            "use_colors": False,
        },
    },
    "handlers": {
        "file_handler": {
            "formatter": "access_file",
            "class": "logging.handlers.RotatingFileHandler",
            "filename": "./app.log",
            "mode": "a+",
            "maxBytes": 10 * 1024 * 1024,
            "backupCount": 0,
        },

        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
        "access": {
            "formatter": "access",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        "uvicorn": {"handlers": ["default"], "level": "INFO", "propagate": False},
        "uvicorn.error": {"level": "INFO"},
        "uvicorn.access": {"handlers": ["access", "file_handler"], "level": "INFO", "propagate": False},
    },
}

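For context, this dict is consumed by uvicorn through the log_config parameter, as wired into runserver() above; a standalone equivalent would be (the "felicity.main:flims" application path is an assumption for illustration):

    from uvicorn import Config, Server

    from felicity.logcong import LOGGING_CONFIG

    # the app path is assumed; substitute the project's actual ASGI app
    config = Config("felicity.main:flims", host="0.0.0.0", port=8000, log_config=LOGGING_CONFIG)
    Server(config).run()
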
@@ -41,6 +41,7 @@ typing-extensions==4.11.0
async-cache==1.1.1
minio==7.2.7
motor==3.5.1
pymongo==4.8.0
redis[hiredis]==5.0.8
asyncio_redis==0.16.0
typer==0.12.3