Mirror of https://github.com/beak-insights/felicity-lims.git (synced 2025-02-16 13:02:53 +08:00)
updated user preferences and tests
commit 0fe5f6da63 (parent c09321a382)
20 changed files with 200 additions and 119 deletions
al_revision.sh  (new file, +3)
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+alembic revision --autogenerate -m "$1"
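The helper is just a thin wrapper around Alembic's autogenerate step. Assuming it is run from the directory that holds alembic.ini, an invocation might look like:

    sh al_revision.sh "remodelled user preferences"
    # expands to: alembic revision --autogenerate -m "remodelled user preferences"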
@@ -2,7 +2,7 @@
 [alembic]
 # path to migration scripts
-script_location = migrations
+script_location = felicity/migrations
 
 # template used to generate migration files
 # file_template = %%(rev)s_%%(slug)s
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-alembic revision --autogenerate -m $1
@@ -88,16 +88,16 @@ def simple_task(message: str):
 class UserMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     async def create_user(
         self,
         info,
         first_name: str,
         last_name: str,
         email: str,
         user_name: str,
         password: str,
         passwordc: str,
         group_uid: str | None = None,
         open_reg: bool | None = False,
     ) -> UserResponse:
         user_service = UserService()
         group_service = GroupService()
@@ -124,6 +124,7 @@ class UserMutations:
             "user_name": user_name,
             "password": password,
             "is_superuser": False,
+            "login_retry": 0,
             "created_by_uid": felicity_user.uid,
             "updated_by_uid": felicity_user.uid,
         }
@@ -135,11 +136,10 @@ class UserMutations:
         user = await user_service.save(user)
 
         # initial user-preferences
-        pref_in = user_schemas.UserPreferenceCreate(expanded_menu=False, theme="LIGHT")
-        preference = await user_preference_service.create(pref_in)
-        user = await user_service.link_preference(
-            user.uid, preference_uid=preference.uid
-        )
+        pref = user_preference_service.get(user_uid=user.uid)
+        if not pref:
+            pref_in = user_schemas.UserPreferenceCreate(user_uid=user.uid, expanded_menu=False, theme="LIGHT")
+            await user_preference_service.create(pref_in)
 
         if user_in.email:
             logger.info("Handle email sending in a standalone service")
@ -147,17 +147,17 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation(permission_classes=[IsAuthenticated])
|
||||
async def update_user(
|
||||
self,
|
||||
info,
|
||||
user_uid: str,
|
||||
first_name: str | None,
|
||||
last_name: str | None,
|
||||
mobile_phone: str | None,
|
||||
email: str | None,
|
||||
group_uid: str | None,
|
||||
is_active: bool | None,
|
||||
password: str | None = None,
|
||||
passwordc: str | None = None,
|
||||
self,
|
||||
info,
|
||||
user_uid: str,
|
||||
first_name: str | None,
|
||||
last_name: str | None,
|
||||
mobile_phone: str | None,
|
||||
email: str | None,
|
||||
group_uid: str | None,
|
||||
is_active: bool | None,
|
||||
password: str | None = None,
|
||||
passwordc: str | None = None,
|
||||
) -> UserResponse:
|
||||
user_service = UserService()
|
||||
group_service = GroupService()
|
||||
|
@ -202,7 +202,7 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation
|
||||
async def authenticate_user(
|
||||
self, info, username: str, password: str
|
||||
self, info, username: str, password: str
|
||||
) -> AuthenticatedDataResponse:
|
||||
user_service = UserService()
|
||||
|
||||
|
@ -248,7 +248,7 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation()
|
||||
async def validate_password_reset_token(
|
||||
self, info, token: str
|
||||
self, info, token: str
|
||||
) -> PasswordResetValidityResponse:
|
||||
user_service = UserService()
|
||||
email = verify_password_reset_token(token)
|
||||
|
@ -265,11 +265,11 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation()
|
||||
async def reset_password(
|
||||
self,
|
||||
info,
|
||||
user_uid: str,
|
||||
password: str,
|
||||
passwordc: str,
|
||||
self,
|
||||
info,
|
||||
user_uid: str,
|
||||
password: str,
|
||||
passwordc: str,
|
||||
) -> MessageResponse:
|
||||
user_service = UserService()
|
||||
|
||||
|
@ -311,7 +311,7 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation(permission_classes=[IsAuthenticated])
|
||||
async def update_group(
|
||||
self, info, uid: str, payload: GroupInputType
|
||||
self, info, uid: str, payload: GroupInputType
|
||||
) -> GroupResponse:
|
||||
group_service = GroupService()
|
||||
|
||||
|
@ -336,7 +336,7 @@ class UserMutations:
|
|||
|
||||
@strawberry.mutation(permission_classes=[IsAuthenticated])
|
||||
async def update_group_permissions(
|
||||
self, info, group_uid: str, permission_uid: str
|
||||
self, info, group_uid: str, permission_uid: str
|
||||
) -> UpdatedGroupPermsResponse:
|
||||
group_service = GroupService()
|
||||
permission_service = PermissionService()
|
||||
|
|
|
@@ -217,7 +217,7 @@ class BaseRepository(Generic[M]):
         await session.flush()
 
     async def query_table(
         self, table: Table, columns: list[str] | None = None, **kwargs
     ):
         """
         Query a specific table with optional column selection and filters.

@@ -444,11 +444,11 @@ class BaseRepository(Generic[M]):
         return list(combined)
 
     async def filter(
         self,
         filters: dict | list[dict],
         sort_attrs: list[str] | None = None,
         limit: int | None = None,
         either: bool = False,
     ) -> list[M]:
         """
         Filter model instances based on the given conditions.

@@ -471,13 +471,13 @@ class BaseRepository(Generic[M]):
         return found
 
     async def paginate(
         self,
         page_size: int | None,
         after_cursor: str | None,
         before_cursor: str | None,
         filters: dict | list[dict] | None,
         sort_by: list[str] | None,
         **kwargs,
     ) -> PageCursor:
         """
         Paginate model instances based on the given conditions.

@@ -575,10 +575,10 @@ class BaseRepository(Generic[M]):
 
     @staticmethod
     def build_page_info(
         start_cursor: str | None = None,
         end_cursor: str | None = None,
         has_next_page: bool = False,
         has_previous_page: bool = False,
     ) -> PageInfo:
         """
         Build a PageInfo object with the given parameters.
@@ -77,7 +77,6 @@ class ClientContactBaseInDB(ClientContactBase):
 class ClientContactCreate(ClientContactBase):
     client_uid: str
     email: str
-    user_name: str
 
 
 # Properties to receive via API on update
@@ -3,11 +3,11 @@ from enum import Enum
 
 
 def _marshall_object(
     obj,
     path: list[str] | None = None,
     memoize: dict | None = None,
     exclude: list[str] | None = None,
     depth: int = 2,
 ) -> dict | str:
     """
     Custom marshaller function to convert objects to dictionaries or strings with proper handling for

@@ -62,18 +62,18 @@ def _marshall_object(
 
 
 def marshaller(
     obj,
     path: list[str] | None = None,
     memoize: dict | None = None,
     exclude: list[str] | None = None,
     depth: int = 2,
 ) -> dict:
-    if isinstance(obj, str):
-        raise TypeError("Unsupported object of type 'str'")
+    # if isinstance(obj, str):
+    #     return obj
 
     output = _marshall_object(obj, path, memoize, exclude, depth)
 
-    if isinstance(output, str):
-        raise TypeError("Unexpected return type 'str' while marshalling")
+    #
+    # if isinstance(output, str):
+    #     raise TypeError("Unexpected return type 'str' while marshalling")
 
     return output
@@ -2,7 +2,7 @@ import logging
 from operator import gt, lt, eq, ge, le, ne
 from typing import List, Optional
 
-from cachetools import TTLCache, cached
+from cachetools import TTLCache
 
 from felicity.apps.abstract.service import BaseService
 from felicity.apps.analysis.entities.analysis import Analysis, Sample

@@ -173,9 +173,9 @@ class ReflexEngineService:
         logger.info(f"Reflex actions set for {result}")
 
     @staticmethod
-    @cached(cache=reflex_action_cache)
+    # @cached(cache=reflex_action_cache)
     async def get_reflex_action(
         analysis_uid: str, level: int
     ) -> Optional[ReflexAction]:
         """
         Get reflex action with caching to improve performance.
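The @cached decorator is disabled here, presumably because cachetools is synchronous: applied to an async method it caches the coroutine object returned by the first call rather than its awaited value, so a later cache hit hands back an already-awaited coroutine. A minimal, self-contained sketch of that failure mode (names are illustrative, not from this commit):

    import asyncio
    from cachetools import TTLCache, cached

    reflex_cache = TTLCache(maxsize=128, ttl=60)

    @cached(cache=reflex_cache)
    async def get_action(analysis_uid: str, level: int):
        # stand-in for the real database lookup
        return {"analysis_uid": analysis_uid, "level": level}

    async def main():
        print(await get_action("a1", 1))      # first call: fresh coroutine, awaits fine
        try:
            print(await get_action("a1", 1))  # cache hit returns the same coroutine object
        except RuntimeError as exc:
            print(f"second call fails: {exc}")  # cannot reuse already awaited coroutine

    asyncio.run(main())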
@@ -278,7 +278,7 @@ class ReflexEngineService:
         await self.apply_actions(brain.actions, results_pool)
 
     async def evaluate(
         self, conditions: list[ReflexBrainCondition], results_pool: List[AnalysisResult]
     ) -> bool:
         """
         Evaluate conditions for decision-making.

@@ -298,7 +298,7 @@ class ReflexEngineService:
 
     @staticmethod
     async def _eval_condition(
         condition: ReflexBrainCondition, results_pool: List[AnalysisResult]
     ) -> bool:
         """
         Evaluate a single condition against the results pool.

@@ -401,7 +401,7 @@ class ReflexEngineService:
         return all(evaluations)
 
     async def apply_actions(
         self, actions: list[ReflexBrainAction], results_pool: List[AnalysisResult]
     ) -> None:
         """
         Execute actions for a matching reflex brain.

@@ -436,7 +436,7 @@ class ReflexEngineService:
         await self.analysis_result_service.hide_report(r.uid)
 
     async def get_results_pool(
         self, conditions: list[ReflexBrainCondition]
     ) -> List[AnalysisResult]:
         """
         Get a pool of relevant analysis results for the given conditions.
@@ -4,16 +4,13 @@ from sqlalchemy import Boolean, Column, ForeignKey, String, Table
 from sqlalchemy.orm import Mapped, relationship
 
 from felicity.apps.abstract.entity import BaseEntity
-
 from .abstract import AbstractBaseUser
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
-
 # TODO: Refactor User to LaboratoryContact, UserAuth to ContactAuth
 
-
 """
 Many to Many Link between Group and User
 """
@@ -38,13 +35,19 @@ permission_groups = Table(
 class User(AbstractBaseUser):
     __tablename__ = "user"
 
+    # One-to-one relationship with UserPreference
+    preference: Mapped["UserPreference"] = relationship(
+        "UserPreference",
+        back_populates="user",
+        lazy="selectin",
+        uselist=False,
+        cascade="all, delete-orphan",
+        foreign_keys="[UserPreference.user_uid]"
+    )
+
     groups = relationship(
         "Group", secondary=user_groups, back_populates="members", lazy="selectin"
     )
-    preference_uid = Column(String, ForeignKey("user_preference.uid"))
-    preference = relationship(
-        "UserPreference", foreign_keys=[preference_uid], lazy="selectin"
-    )
 
 
 class Permission(BaseEntity):

@@ -85,6 +88,14 @@ class UserPreference(BaseEntity):
 
     __tablename__ = "user_preference"
 
+    user_uid = Column(String, ForeignKey("user.uid", ondelete="CASCADE"), unique=True)
+    user: Mapped["User"] = relationship(
+        "User",
+        back_populates="preference",
+        foreign_keys=[user_uid],
+        single_parent=True
+    )
+
     expanded_menu = Column(Boolean(), default=False)
     departments = relationship(
         "Department", secondary=department_preference, lazy="selectin"
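Net effect of these entity changes: the foreign key moves from user.preference_uid to user_preference.user_uid, so each user owns at most one preference row and deleting the user removes it. A rough usage sketch under those definitions (illustrative only; it assumes an AsyncSession and that theme is a column, as the UserPreference schema below suggests):

    # Illustrative only: assumes an AsyncSession `session` and the User / UserPreference entities above
    from sqlalchemy import select

    async def show_user_theme(session, user_uid: str) -> None:
        user = (
            await session.execute(select(User).where(User.uid == user_uid))
        ).scalar_one()
        # one-to-one: user.preference is a single UserPreference or None (uselist=False),
        # already loaded because of lazy="selectin"
        if user.preference:
            print(user.preference.theme, user.preference.expanded_menu)
        # the reverse side, preference.user, points back via back_populates="preference"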
@@ -5,6 +5,7 @@ from pydantic import ConfigDict, EmailStr
 
 from felicity.apps.common.schemas import BaseAuditModel, BaseModel
 
+
 #
 # Permission Schema
 #

@@ -87,6 +88,7 @@ class GroupInDB(GroupInDBBase):
 # User Preferences
 #
 class UserPreferenceBase(BaseAuditModel):
+    user_uid: str
     expanded_menu: bool | None = False
     theme: str | None = "light"
 
@@ -27,7 +27,7 @@ class UserService(BaseService[User, UserCreate, UserUpdate]):
         super().__init__(UserRepository())
 
     async def create(
         self, user_in: UserCreate, related: list[str] | None = None
     ) -> User:
         by_username = await self.get_by_username(user_in.user_name)
         if by_username:

@@ -119,11 +119,6 @@ class UserService(BaseService[User, UserCreate, UserUpdate]):
         user_in = UserUpdate(**{**user_obj, "is_active": False})
         await super().update(user_uid, user_in)
 
-    async def link_preference(self, user_uid: str, preference_uid):
-        _update = {"preference_uid": preference_uid}
-        update_in = UserUpdate(**_update)
-        return await super().update(user_uid, update_in)
-
 
 class GroupService(BaseService[Group, GroupCreate, GroupUpdate]):
     def __init__(self):
@@ -1,4 +1,3 @@
-from felicity.apps.abstract.entity import BaseEntity # noqa
 from felicity.apps.analysis.entities.analysis import Analysis # noqa
 from felicity.apps.analysis.entities.analysis import AnalysisCategory # noqa
 from felicity.apps.analysis.entities.analysis import AnalysisCoding # noqa

@@ -32,7 +31,6 @@ from felicity.apps.client.entities import Client # noqa
 from felicity.apps.client.entities import ClientContact # noqa
 from felicity.apps.idsequencer.entities import IdSequence # noqa
 from felicity.apps.impress.entities import ReportImpress # noqa
-
 # from felicity.apps.setup.entities import MethodValidation # noqa
 # from felicity.apps.setup.entities import InstrumentCompetence # noqa
 from felicity.apps.instrument.entities import CalibrationCertificate # noqa

@@ -88,3 +86,4 @@ from felicity.apps.user.entities import User # noqa
 from felicity.apps.user.entities import UserPreference # noqa
 from felicity.apps.worksheet.entities import WorkSheet # noqa
 from felicity.apps.worksheet.entities import WorkSheetTemplate # noqa
+from felicity.apps.abstract.entity import BaseEntity # noqa
@@ -3,7 +3,6 @@ import logging
 from felicity.apps.user import schemas
 from felicity.apps.user.services import GroupService, UserPreferenceService, UserService
 from felicity.core.config import get_settings
-
 from .groups_perms import FGroup
 
 settings = get_settings()

@@ -39,13 +38,10 @@ async def seed_daemon_user() -> None:
         await user_service.save(system_daemon)
 
     # initial user-preferences
-    if not system_daemon.preference_uid:
-        pref_in = schemas.UserPreferenceCreate(expanded_menu=False, theme="light")
-        preference = await preference_service.create(pref_in)
-        logger.info(
-            f"linking system daemon {system_daemon.uid} to preference {preference.uid}"
-        )
-        await user_service.link_preference(system_daemon.uid, preference.uid)
+    preference = preference_service.get(user_uid=system_daemon.uid)
+    if not preference:
+        pref_in = schemas.UserPreferenceCreate(user_uid=system_daemon.uid, expanded_menu=False, theme="light")
+        await preference_service.create(pref_in)
 
     logger.info("Done Setting up system daemon")
 

@@ -78,9 +74,9 @@ async def seed_super_user() -> None:
         await user_service.save(super_user)
 
     # initial user-preferences
-    if not super_user.preference_uid:
-        pref_in = schemas.UserPreferenceCreate(expanded_menu=False, theme="light")
-        preference = await preference_service.create(pref_in)
-        await user_service.link_preference(super_user.uid, preference.uid)
+    preference = preference_service.get(user_uid=super_user.uid)
+    if not preference:
+        pref_in = schemas.UserPreferenceCreate(user_uid=super_user.uid, expanded_menu=False, theme="light")
+        await preference_service.create(pref_in)
 
     logger.info("Done Setting up system admin")
@@ -0,0 +1,72 @@
+"""remodelled user preferences
+
+Revision ID: 9d42a09f0754
+Revises: c03167691623
+Create Date: 2024-10-27 06:28:17.903319
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '9d42a09f0754'
+down_revision = 'c03167691623'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('ix_analysis_result_level', table_name='analysis_result')
+    op.drop_index('ix_analysis_result_lft', table_name='analysis_result')
+    op.drop_index('ix_analysis_result_rgt', table_name='analysis_result')
+    op.create_index('analysis_result_level_idx', 'analysis_result', ['level'], unique=False)
+    op.create_index('analysis_result_lft_idx', 'analysis_result', ['lft'], unique=False)
+    op.create_index('analysis_result_rgt_idx', 'analysis_result', ['rgt'], unique=False)
+    op.drop_index('ix_message_level', table_name='message')
+    op.drop_index('ix_message_lft', table_name='message')
+    op.drop_index('ix_message_rgt', table_name='message')
+    op.create_index('message_level_idx', 'message', ['level'], unique=False)
+    op.create_index('message_lft_idx', 'message', ['lft'], unique=False)
+    op.create_index('message_rgt_idx', 'message', ['rgt'], unique=False)
+    op.drop_index('ix_sample_level', table_name='sample')
+    op.drop_index('ix_sample_lft', table_name='sample')
+    op.drop_index('ix_sample_rgt', table_name='sample')
+    op.create_index('sample_level_idx', 'sample', ['level'], unique=False)
+    op.create_index('sample_lft_idx', 'sample', ['lft'], unique=False)
+    op.create_index('sample_rgt_idx', 'sample', ['rgt'], unique=False)
+    op.drop_constraint('user_preference_uid_fkey', 'user', type_='foreignkey')
+    op.drop_column('user', 'preference_uid')
+    op.add_column('user_preference', sa.Column('user_uid', sa.String(), nullable=True))
+    op.create_unique_constraint(None, 'user_preference', ['user_uid'])
+    op.create_foreign_key(None, 'user_preference', 'user', ['user_uid'], ['uid'], ondelete='CASCADE')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'user_preference', type_='foreignkey')
+    op.drop_constraint(None, 'user_preference', type_='unique')
+    op.drop_column('user_preference', 'user_uid')
+    op.add_column('user', sa.Column('preference_uid', sa.VARCHAR(), autoincrement=False, nullable=True))
+    op.create_foreign_key('user_preference_uid_fkey', 'user', 'user_preference', ['preference_uid'], ['uid'])
+    op.drop_index('sample_rgt_idx', table_name='sample')
+    op.drop_index('sample_lft_idx', table_name='sample')
+    op.drop_index('sample_level_idx', table_name='sample')
+    op.create_index('ix_sample_rgt', 'sample', ['rgt'], unique=False)
+    op.create_index('ix_sample_lft', 'sample', ['lft'], unique=False)
+    op.create_index('ix_sample_level', 'sample', ['level'], unique=False)
+    op.drop_index('message_rgt_idx', table_name='message')
+    op.drop_index('message_lft_idx', table_name='message')
+    op.drop_index('message_level_idx', table_name='message')
+    op.create_index('ix_message_rgt', 'message', ['rgt'], unique=False)
+    op.create_index('ix_message_lft', 'message', ['lft'], unique=False)
+    op.create_index('ix_message_level', 'message', ['level'], unique=False)
+    op.drop_index('analysis_result_rgt_idx', table_name='analysis_result')
+    op.drop_index('analysis_result_lft_idx', table_name='analysis_result')
+    op.drop_index('analysis_result_level_idx', table_name='analysis_result')
+    op.create_index('ix_analysis_result_rgt', 'analysis_result', ['rgt'], unique=False)
+    op.create_index('ix_analysis_result_lft', 'analysis_result', ['lft'], unique=False)
+    op.create_index('ix_analysis_result_level', 'analysis_result', ['level'], unique=False)
+    # ### end Alembic commands ###
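With the standard Alembic workflow (and script_location pointing at felicity/migrations as configured above), the new revision is applied and rolled back with the usual commands:

    alembic upgrade head               # applies 9d42a09f0754 on top of c03167691623
    alembic downgrade c03167691623     # reverts to the previous revision if needed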
@@ -1,9 +1,9 @@
 import logging
 
 import pytest
-from felicity.tests.integration.utils.user import make_password, make_username
 
 from felicity.apps.analysis.tasks import submit_results, verify_results
+from felicity.tests.integration.utils.user import make_password, make_username
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
 @pytest.mark.asyncio
 @pytest.mark.order(80)
 async def test_submit_results(
     app_gql, app_api, auth_data, samples, worksheets, laboratory_instruments, methods
 ):
     add_gql = """
     mutation SubmitAnalysisResults($analysisResults: [ARResultInputType!]!, $sourceObject: String!, $sourceObjectUid: String!) {

@@ -404,6 +404,14 @@ async def test_verify_sample_results(app_gql, users, samples):
     logger.info(f"samples::::::::::::::: {samples}")
     results = samples[0]["analysisResults"]
 
+    sample = None
+    results = None
+    for _sample in samples:
+        results = list(filter(lambda r: r["status"] == "resulted", _sample["analysisResults"]))
+        if len(results) > 0:
+            sample = _sample
+            break
+
     response = await app_gql.post(
         "/felicity-gql",
         json={

@@ -411,7 +419,7 @@ async def test_verify_sample_results(app_gql, users, samples):
             "variables": {
                 "analyses": [results[0]["uid"]],
                 "sourceObject": "sample",
-                "sourceObjectUid": samples[0]["uid"],
+                "sourceObjectUid": sample["uid"],
             },
         },
         headers={"Authorization": f"bearer {user_data['token']}"},
@@ -1,7 +1,7 @@
-from pydantic import BaseModel
-import pytest
 from unittest.mock import AsyncMock, create_autospec
 
+import pytest
+from pydantic import BaseModel
 from sqlalchemy import String
 from sqlalchemy.orm import Mapped, mapped_column
 

@@ -85,7 +85,7 @@ async def test_get(test_service):
     test_service.repository.get = AsyncMock(return_value=mock_entity)
     result = await test_service.get(uid="1")
     assert result == mock_entity
-    test_service.repository.get.assert_awaited_once_with(uid="1")
+    test_service.repository.get.assert_awaited_once_with(uid="1", related=None)
 
 
 @pytest.mark.asyncio

@@ -103,10 +103,10 @@ async def test_get_by_uids(test_service):
 @pytest.mark.asyncio
 async def test_get_related(test_service):
     mock_entity = MockEntity(uid="1", name="test", bio="test bio")
-    test_service.repository.get_related = AsyncMock(return_value=mock_entity)
+    test_service.repository.get = AsyncMock(return_value=mock_entity)
     result = await test_service.get(related=["related1"], uid="1")
     assert result == mock_entity
-    test_service.repository.get_related.assert_awaited_once_with(
+    test_service.repository.get.assert_awaited_once_with(
         related=["related1"], uid="1"
     )
 
@@ -35,7 +35,7 @@ class UserServiceTestCase(unittest.IsolatedAsyncioTestCase):
         )
         result = await self.user_service.create(UserCreate(**self.user_data))
         self.repository.get.assert_called_once_with(
-            user_name=self.user_data["user_name"]
+            user_name=self.user_data["user_name"], related=None
         )
         self.repository.create.assert_called_once()
         self.assertIsNotNone(result.return_value.uid)
@@ -44,7 +44,7 @@ async def test_add_user(user_service, mocker, user_data):
     result = await user_service.create(UserCreate(**user_data))
 
     user_service.repository.get.assert_called_once_with(
-        user_name=user_data["user_name"]
+        user_name=user_data["user_name"], related=None
     )
     user_service.repository.create.assert_called_once()
 

@@ -84,7 +84,6 @@ async def test_add_user_password_policy_weak(user_service, mocker, user_data):
         )
     )
 
-
 # @pytest.mark.asyncio
 # async def test_add_user_password_mismatch(user_service, mocker, user_data):
 #     mocker.patch.object(user_service.repository, "get", return_value=None)
@@ -52,7 +52,7 @@ async def test_access_token(trial_data):
 @pytest.mark.asyncio
 async def test_access_token_timedelta(trial_data):
     with pytest.raises(TypeError):
-        create_access_token(trial_data["username"], timedelta(minutes=5))
+        create_access_token(trial_data["username"], 5)
 
 
 @pytest.mark.asyncio
@@ -7,7 +7,7 @@ python-jose[cryptography]==3.3.0
 passlib[bcrypt]==1.7.4
 emails==0.6
 apscheduler==3.10.4
-sqlalchemy_mptt==0.2.5
+sqlalchemy_mptt@git+https://github.com/lowatt/sqlalchemy_mptt@sqlalchemy
 sqlalchemy_mixins@git+https://github.com/absent1706/sqlalchemy-mixins
 tenacity==8.2.3
 pydantic[email]==2.5.3
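Requirement lines of the form name@git+URL@ref are PEP 508 direct references, so sqlalchemy_mptt now installs straight from the lowatt fork's sqlalchemy branch instead of the 0.2.5 release. Installing just that dependency by hand would be:

    pip install "sqlalchemy_mptt @ git+https://github.com/lowatt/sqlalchemy_mptt@sqlalchemy"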