felicity-lims/felicity/api/gql/analytics/query/dashboard.py

import logging
from typing import Optional

import strawberry  # noqa

from felicity.api.gql.analytics import types
from felicity.apps.analysis.conf import states
from felicity.apps.analysis.models.analysis import Sample
from felicity.apps.analysis.models.results import AnalysisResult
from felicity.apps.analytics import SampleAnalyticsInit
from felicity.apps.setup.models import Instrument
from felicity.apps.user.models import User
from felicity.apps.worksheet.conf import worksheet_states
from felicity.apps.worksheet.models import WorkSheet
from felicity.utils import has_value_or_is_truthy

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def group_exists(val):
    """Return val as a string, or "unknown" when the grouping key is missing."""
    if has_value_or_is_truthy(val):
        return str(val)
    return "unknown"


async def get_username(val):
    """Resolve a user uid to its user_name, passing "unknown" through untouched."""
    if val == "unknown":
        return val
    user = await User.get(uid=val)
    return user.auth.user_name


async def get_instrument(val):
    """Resolve an instrument uid to its name, passing "unknown" through untouched."""
    if val == "unknown":
        return val
    instrument = await Instrument.get(uid=val)
    return instrument.name
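
# Both lookups above use the `Model.get(uid=...)` accessor shared by felicity's
# models. Rows whose foreign key is NULL are first normalised to the literal
# string "unknown" by group_exists, which short-circuits the database hit,
# e.g. (illustrative values only):
#
#   await get_username(group_exists(None))     # -> "unknown", no query issued
#   await get_username(group_exists("u-001"))  # -> that user's user_name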


@strawberry.field
async def count_sample_group_by_status(info) -> types.GroupedCounts:
    """Count samples grouped by status for the dashboard-tracked states."""
    analytics = SampleAnalyticsInit(Sample)
    state_in = [
        states.sample.SCHEDULED,
        states.sample.EXPECTED,
        states.sample.RECEIVED,
        states.sample.AWAITING,
        states.sample.APPROVED,
    ]
    results = await analytics.get_counts_group_by(
        "status", ("", ""), ("", ""), state_in
    )
    stats = []
    for row in results:
        stats.append(types.GroupCount(group=group_exists(row[0]), count=row[1]))
    return types.GroupedCounts(data=stats)


@strawberry.field
async def count_analyte_group_by_status(info) -> types.GroupedCounts:
    """Count analysis results grouped by status (pending and resulted)."""
    analytics = SampleAnalyticsInit(AnalysisResult)
    state_in = [
        states.result.PENDING,
        states.result.RESULTED,
    ]
    results = await analytics.get_counts_group_by(
        "status", ("", ""), ("", ""), state_in
    )
    stats = []
    for row in results:
        stats.append(types.GroupCount(group=group_exists(row[0]), count=row[1]))
    return types.GroupedCounts(data=stats)


@strawberry.field
async def count_extras_group_by_status(info) -> types.GroupedCounts:
    """Count exception states: cancelled, rejected and invalidated samples,
    retracted results, and retested analyses."""
    sample_analytics = SampleAnalyticsInit(Sample)
    sample_states = [
        states.sample.CANCELLED,
        states.sample.REJECTED,
        states.sample.INVALIDATED,
    ]
    sample_results = await sample_analytics.get_counts_group_by(
        "status", ("", ""), ("", ""), sample_states
    )

    result_analytics = SampleAnalyticsInit(AnalysisResult)
    result_states = [
        states.result.RETRACTED,
    ]
    result_results = await result_analytics.get_counts_group_by(
        "status", ("", ""), ("", ""), result_states
    )
    retests = await result_analytics.count_analyses_retests(("", ""), ("", ""))

    stats = []
    for s_row in sample_results:
        stats.append(
            types.GroupCount(
                group=f"sample {group_exists(s_row[0])}", count=s_row[1]
            )
        )
    for r_row in result_results:
        stats.append(
            types.GroupCount(
                group=f"analysis {group_exists(r_row[0])}", count=r_row[1]
            )
        )
    if retests:
        val = retests[0][0]
        if val > 0:
            stats.append(types.GroupCount(group="analysis retested", count=val))
    return types.GroupedCounts(data=stats)
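
# Illustrative payload shape for the resolver above (values are made up):
#   GroupedCounts(data=[
#       GroupCount(group="sample cancelled", count=3),
#       GroupCount(group="analysis retracted", count=1),
#       GroupCount(group="analysis retested", count=2),
#   ])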


@strawberry.field
async def count_worksheet_group_by_status(info) -> types.GroupedCounts:
    """Count worksheets grouped by state (empty, awaiting, pending)."""
    analytics = SampleAnalyticsInit(WorkSheet)
    state_in = [
        worksheet_states.EMPTY,
        worksheet_states.AWAITING,
        worksheet_states.PENDING,
    ]
    results = await analytics.get_counts_group_by(
        "state", ("", ""), ("", ""), state_in
    )
    stats = []
    for row in results:
        stats.append(types.GroupCount(group=group_exists(row[0]), count=row[1]))
    return types.GroupedCounts(data=stats)


@strawberry.field
async def count_analyte_group_by_instrument(
    info, start_date: Optional[str] = None, end_date: Optional[str] = None
) -> types.GroupedCounts:
    """Count analysis results grouped by instrument, optionally bounded by
    date_submitted."""
    analytics = SampleAnalyticsInit(AnalysisResult)
    results = await analytics.get_counts_group_by(
        "instrument_uid",
        ("date_submitted", start_date),
        ("date_submitted", end_date),
    )
    stats = []
    for row in results:
        # get_instrument is a coroutine and must be awaited; without the await
        # the group would be a coroutine object rather than the instrument name.
        stats.append(
            types.GroupCount(
                group=await get_instrument(group_exists(row[0])), count=row[1]
            )
        )
    return types.GroupedCounts(data=stats)


@strawberry.field
async def count_sample_group_by_action(
    info, start_date: Optional[str] = None, end_date: Optional[str] = None
) -> types.GroupedData:
    """Count samples per user for each lifecycle action: registration,
    submission, verification and publication."""
    analytics = SampleAnalyticsInit(Sample)
    created = await analytics.get_counts_group_by(
        "created_by_uid", ("created_at", start_date), ("created_at", end_date)
    )
    submitted = await analytics.get_counts_group_by(
        "submitted_by_uid",
        ("date_submitted", start_date),
        ("date_submitted", end_date),
    )
    verified = await analytics.get_counts_group_by(
        "verified_by_uid",
        ("date_verified", start_date),
        ("date_verified", end_date),
    )
    published = await analytics.get_counts_group_by(
        "published_by_uid",
        ("date_published", start_date),
        ("date_published", end_date),
    )

    stats = []
    # get_username is a coroutine and must be awaited in each loop below.
    registration = types.GroupData(group="registration", counts=[])
    for row in created:
        registration.counts.append(
            types.GroupCount(
                group=await get_username(group_exists(row[0])), count=row[1]
            )
        )
    stats.append(registration)

    submission = types.GroupData(group="submission", counts=[])
    for row in submitted:
        submission.counts.append(
            types.GroupCount(
                group=await get_username(group_exists(row[0])), count=row[1]
            )
        )
    stats.append(submission)

    verification = types.GroupData(group="verification", counts=[])
    for row in verified:
        verification.counts.append(
            types.GroupCount(
                group=await get_username(group_exists(row[0])), count=row[1]
            )
        )
    stats.append(verification)

    publication = types.GroupData(group="publication", counts=[])
    for row in published:
        publication.counts.append(
            types.GroupCount(
                group=await get_username(group_exists(row[0])), count=row[1]
            )
        )
    stats.append(publication)
    return types.GroupedData(data=stats)
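
# Illustrative result shape for the resolver above (names are made up):
#   GroupedData(data=[
#       GroupData(group="registration",
#                 counts=[GroupCount(group="jdoe", count=12), ...]),
#       GroupData(group="submission", counts=[...]),
#       GroupData(group="verification", counts=[...]),
#       GroupData(group="publication", counts=[...]),
#   ])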


@strawberry.field
async def sample_process_performance(
    info, start_date: str, end_date: str
) -> types.ProcessStatistics:
    """Summarise turnaround performance for each stage of the sample lifecycle
    between start_date and end_date."""
    analytics = SampleAnalyticsInit(Sample)
    received_to_published = await analytics.get_sample_process_performance(
        start=("date_received", start_date), end=("date_published", end_date)
    )
    received_to_submitted = await analytics.get_sample_process_performance(
        start=("date_received", start_date), end=("date_submitted", end_date)
    )
    submitted_to_verified = await analytics.get_sample_process_performance(
        start=("date_submitted", start_date), end=("date_verified", end_date)
    )
    verified_to_published = await analytics.get_sample_process_performance(
        start=("date_verified", start_date), end=("date_published", end_date)
    )

    final_data = []
    # Each query yields a single summary row:
    # (total_samples, total_late, total_not_late, process_average, avg_extra_days)
    rtp_process = types.ProcessData(
        process="received_to_published", counts=None, groups=[]
    )
    for row in received_to_published:
        rtp_process.counts = types.ProcessCounts(
            total_samples=row[0],
            total_late=row[1],
            total_not_late=row[2],
            process_average=row[3],
            avg_extra_days=row[4],
        )
    final_data.append(rtp_process)

    rts_process = types.ProcessData(
        process="received_to_submitted", counts=None, groups=[]
    )
    for row in received_to_submitted:
        rts_process.counts = types.ProcessCounts(
            total_samples=row[0],
            total_late=row[1],
            total_not_late=row[2],
            process_average=row[3],
            avg_extra_days=row[4],
        )
    final_data.append(rts_process)

    stv_process = types.ProcessData(
        process="submitted_to_verified", counts=None, groups=[]
    )
    for row in submitted_to_verified:
        stv_process.counts = types.ProcessCounts(
            total_samples=row[0],
            total_late=row[1],
            total_not_late=row[2],
            process_average=row[3],
            avg_extra_days=row[4],
        )
    final_data.append(stv_process)

    vtp_process = types.ProcessData(
        process="verified_to_published", counts=None, groups=[]
    )
    for row in verified_to_published:
        vtp_process.counts = types.ProcessCounts(
            total_samples=row[0],
            total_late=row[1],
            total_not_late=row[2],
            process_average=row[3],
            avg_extra_days=row[4],
        )
    final_data.append(vtp_process)
    return types.ProcessStatistics(data=final_data)
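
# The four blocks above are structurally identical; a minimal refactor sketch
# (assuming the same ProcessData/ProcessCounts types) could be:
#
#   def _to_process_data(process: str, rows) -> types.ProcessData:
#       data = types.ProcessData(process=process, counts=None, groups=[])
#       for row in rows:
#           data.counts = types.ProcessCounts(
#               total_samples=row[0], total_late=row[1], total_not_late=row[2],
#               process_average=row[3], avg_extra_days=row[4],
#           )
#       return data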


@strawberry.field
async def analysis_process_performance(
    info, process: str, start_date: str, end_date: str
) -> types.ProcessStatistics:
    """Summarise per-service turnaround performance for a single lifecycle
    process between start_date and end_date."""
    analytics = SampleAnalyticsInit(Sample)
    processes = [
        "received_to_published",
        "received_to_submitted",
        "submitted_to_verified",
        "verified_to_published",
    ]
    if process not in processes:
        logger.warning(f"invalid process {process}")
        raise ValueError(f"invalid process {process}")

    performance = []
    if process == "received_to_published":
        performance = await analytics.get_analysis_process_performance(
            start=("date_received", start_date), end=("date_published", end_date)
        )
    if process == "received_to_submitted":
        performance = await analytics.get_analysis_process_performance(
            start=("date_received", start_date), end=("date_submitted", end_date)
        )
    if process == "submitted_to_verified":
        performance = await analytics.get_analysis_process_performance(
            start=("date_submitted", start_date), end=("date_verified", end_date)
        )
    if process == "verified_to_published":
        performance = await analytics.get_analysis_process_performance(
            start=("date_verified", start_date), end=("date_published", end_date)
        )

    final_data = []
    process_data = types.ProcessData(process=process, counts=None, groups=[])
    for row in performance:
        process_data.groups.append(
            types.ProcessCounts(
                service=row[0],
                total_samples=row[1],
                total_late=row[2],
                total_not_late=row[3],
                process_average=row[4],
                avg_extra_days=row[5],
            )
        )
    final_data.append(process_data)
    return types.ProcessStatistics(data=final_data)


@strawberry.field
async def sample_laggards(info) -> types.LaggardStatistics:
    """Break down delayed samples: those already authorised late and those
    still incomplete, bucketed by how many days they are delayed."""
    analytics = SampleAnalyticsInit(Sample)
    not_complete, complete = await analytics.get_laggards()

    final_data = []
    complete_laggards = types.LaggardData(
        category="authorised_already_delayed", counts=[]
    )
    for row in complete:
        complete_laggards.counts = types.LaggardCounts(
            total_delayed=row[0],
            lessThanTen=row[1],
            tenToTwenty=row[2],
            twentyToThirty=row[3],
            graterThanThirty=row[4],  # spelling matches the LaggardCounts field
        )
    final_data.append(complete_laggards)

    in_complete_laggards = types.LaggardData(
        category="delayed_and_incomplete", counts=[]
    )
    for row in not_complete:
        in_complete_laggards.counts = types.LaggardCounts(
            total_incomplete=row[0],
            total_delayed=row[1],
            total_not_delayed=row[2],
            lessThanTen=row[3],
            tenToTwenty=row[4],
            twentyToThirty=row[5],
            graterThanThirty=row[6],  # spelling matches the LaggardCounts field
        )
    final_data.append(in_complete_laggards)
    return types.LaggardStatistics(data=final_data)
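

# A minimal sketch of how these field resolvers might be mounted on the
# GraphQL schema. The class name below is an illustrative assumption; the
# project's real Query type is assembled elsewhere in felicity/api/gql.
#
# @strawberry.type
# class AnalyticsQuery:
#     count_sample_group_by_status = count_sample_group_by_status
#     count_analyte_group_by_status = count_analyte_group_by_status
#     sample_process_performance = sample_process_performance
#     sample_laggards = sample_laggards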