# -*- coding: utf-8 -*-
"""
Yuuki_Libs
(c) 2020 Star Inc.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
|
import json
import os
import random
import time

import requests
from yuuki_core.ttypes import OpType

from .data_mds import listen as msd_listen
from .thread_control import Yuuki_Multiprocess
from .thread_control import Yuuki_Thread


class Yuuki_Data:
    """JSON-backed data store and HTML event logger for a Yuuki bot.

    When ``threading`` is true, the authoritative copy of ``Data`` lives in a
    separate MDS listener process (started here via ``Yuuki_Multiprocess``)
    and every read/write goes through ``_mdsShake`` over HTTP.  Otherwise all
    state is kept in-process on ``self.Data``.
    """

    def __init__(self, threading):
        """Load (or create) the JSON data files and the HTML log files.

        :param threading: use the multiprocess MDS backend when true
        """
        self.threading = threading
        self.ThreadControl = Yuuki_Thread()
        MdsThreadControl = Yuuki_Multiprocess()

        # Data
        self.Data = {}

        # Template of the persisted top-level structure; one JSON file each.
        self.DataType = {
            "Global": {
                "LastResetLimitTime": None,
            },
            "Group": {},
            "LimitInfo": {},
            "BlackList": []
        }

        # Default record for a newly-seen group (see getGroup).
        self.GroupType = {
            "SEGroup": None,
            "Ext_Admin": [],
            "GroupTicket": {}
        }

        # Default per-user limit counters (see limitDecrease).
        self.LimitType = {
            "KickLimit": {},
            "CancelLimit": {}
        }

        # Default security-event switches for a group, all disabled.
        self.SEGrouptype = {
            OpType.NOTIFIED_UPDATE_GROUP: False,
            OpType.NOTIFIED_INVITE_INTO_GROUP: False,
            OpType.NOTIFIED_ACCEPT_GROUP_INVITATION: False,
            OpType.NOTIFIED_KICKOUT_FROM_GROUP: False
        }

        self.DataPath = "data/"
        self.DataName = "{}.json"

        if not os.path.isdir(self.DataPath):
            os.mkdir(self.DataPath)

        # Create missing data files; verify existing ones contain valid JSON.
        for Type in self.DataType:
            name = self.DataPath + self.DataName.format(Type)
            if not os.path.isfile(name):
                with open(name, "w") as f:
                    f.write("")
            else:
                with open(name, "r") as f:
                    try:
                        json.load(f)
                    except ValueError:
                        # Fail fast so a corrupt file is never silently
                        # overwritten by the initializer below.  A direct
                        # raise (same type/message as the old `assert`)
                        # also survives `python -O`.
                        raise AssertionError(
                            "{}\nJson Test Error".format(name))

        # Data Initialize
        for Type in self.DataType:
            name = self.DataPath + self.DataName.format(Type)
            with open(name, "r+") as f:
                text = f.read()
                if text != "":
                    self.Data[Type] = json.loads(text)
                else:
                    # Deep-copy the template (JSON round-trip) so later
                    # mutations of the live data cannot corrupt DataType.
                    self.Data[Type] = json.loads(
                        json.dumps(self.DataType[Type]))
                    f.write(json.dumps(self.Data[Type]))

        # Python MDS
        if self.threading:
            self.mdsHost = "http://localhost:2019/"
            # Per-session shared secret between this process and the listener.
            self.mdsCode = "{}.{}".format(random.random(), time.time())
            MdsThreadControl.add(msd_listen, (self.mdsCode,))

            # MDS Sync
            time.sleep(1)  # give the listener a moment to start accepting
            requests.post(
                url=self.mdsHost,
                json={
                    "code": self.mdsCode,
                    "do": "SYC",
                    "path": self.Data
                }
            )

        # Log
        self.LogType = {
            "JoinGroup": "<li>%s: %s(%s) -> Inviter: %s</li>",
            "KickEvent": "<li>%s: %s(%s) -(%s)> Kicker: %s | Kicked: %s | Status: %s</li>",
            "CancelEvent": "<li>%s: %s(%s) -(%s)> Inviter: %s | Canceled: %s</li>",
            "BlackList": "<li>%s: %s(%s)</li>"
        }

        self.LogPath = "logs/"
        self.LogName = "{}.html"

        self.initHeader = "<title>{} - SYB</title>" \
                          "<meta charset='utf-8' />"

        if not os.path.isdir(self.LogPath):
            os.mkdir(self.LogPath)

        for Type in self.LogType:
            name = self.LogPath + self.LogName.format(Type)
            if not os.path.isfile(name):
                with open(name, "w") as f:
                    f.write(self.initHeader.format(Type))

    def ThreadExec(self, Function, args):
        """Run ``Function(*args)`` — queued on the thread pool when
        threading, synchronously otherwise."""
        if self.threading:
            self.ThreadControl.lock.acquire()
            try:
                self.ThreadControl.add(Function, args)
            finally:
                # BUGFIX: the lock used to stay held if add() raised.
                self.ThreadControl.lock.release()
        else:
            Function(*args)

    def _mdsShake(self, do, path, data=None):
        """POST one command to the MDS process and return its reply dict.

        :param do:   command verb understood by the listener (GET/UPT/YLD/…)
        :param path: key path (or other payload) the command operates on
        :param data: optional value for write commands
        :raises AssertionError: if the listener does not answer status 200
        """
        if self.threading:
            mds = requests.post(
                url=self.mdsHost,
                json={
                    "code": self.mdsCode,
                    "do": do,
                    "path": path,
                    "data": data
                }
            )
            over = mds.json()
            assert_result = "mds - ERROR\n{} on {}".format(do, path)
            assert over["status"] == 200, assert_result
            return over
        # BUGFIX: this branch used to return json.dumps(status) — a *str* —
        # while the branch above returns a dict; return the dict itself.
        return {"status": 0}

    def _local_query(self, query_data):
        """Walk ``self.Data`` along the key list *query_data*.

        Returns the value found, or an int error code:
        0 — *query_data* is not a list,
        1 — a non-terminal key holds a non-dict value,
        2 — a key on the path is missing.
        """
        if type(query_data) is list:
            result = self.Data
            query_len = len(query_data) - 1
            for count, key in enumerate(query_data):
                if key in result:
                    if count < query_len:
                        if type(result.get(key)) is not dict:
                            result = 1
                            break
                    result = result.get(key)
                else:
                    result = 2
                    break
            return result
        return 0

    def _local_update(self, path, data):
        """Merge dict *data* into the dict at *path* (no-op when the query
        yields an int error code).  Always returns False."""
        over = self._local_query(path)
        # Error codes from _local_query are plain ints; real hits are dicts.
        if not str(over).isnumeric():
            over.update(data)
        return False

    def file(self, Type, Mode, Format):
        """Open the backing file for *Type*.

        :param Type:   key of DataType / LogType
        :param Mode:   mode string passed straight to ``open``
        :param Format: "Data" for the JSON store, "Log" for the HTML log
        :raises ValueError: on an unknown *Format*
        """
        if Format == "Data":
            return open(self.DataPath + self.DataName.format(Type), Mode)
        elif Format == "Log":
            return open(self.LogPath + self.LogName.format(Type), Mode)
        # BUGFIX: an unknown Format used to fall through and return None,
        # deferring the crash to the caller's `with` statement.
        raise ValueError("Unknown file format: {}".format(Format))

    def syncData(self):
        """Flush the live data to the JSON files and return Global/Power."""
        if self.threading:
            # Pull the authoritative copy back from the MDS process first.
            self.Data = self.getData([])
        for Type in self.DataType:
            with self.file(Type, "w", "Data") as f:
                f.write(json.dumps(self.Data[Type]))
        return self.getData(["Global", "Power"])

    def updateData(self, path, data):
        """Write *data* at key path *path*, asynchronously when threading."""
        if self.threading:
            self.ThreadExec(self._updateData, (path, data))
        else:
            self._updateData(path, data)

    def _updateData(self, path, data):
        """Replace the value at *path* with *data* by rewriting its parent.

        :raises AssertionError: on an empty/non-list path or when the parent
                                node is not a dict
        """
        assert path and type(path) is list, "Empty path - updateData"
        if len(path) == 1:
            # Parent is the top level: rewrite the whole root mapping.
            origin_data = self.getData([])
            assert type(origin_data) is dict, "Error origin data type (1) - updateData"
            origin = origin_data.copy()
            origin[path[0]] = data
            path = []
        else:
            origin_data = self.getData(path[:-1])
            assert type(origin_data) is dict, "Error origin data type (2) - updateData"
            origin = origin_data.copy()
            origin[path[-1]] = data
            path = path[:-1]
        assert type(origin) is dict, "Error request data type - updateData"
        if self.threading:
            self._mdsShake("UPT", path, origin)
        else:
            self._local_update(path, origin)

    def updateLog(self, Type, Data):
        """Append one event to the *Type* HTML log, async when threading."""
        if self.threading:
            self.ThreadExec(self._updateLog, (Type, Data))
        else:
            self._updateLog(Type, Data)

    def _updateLog(self, Type, Data):
        """Format *Data* with the LogType template and append it to the log."""
        with self.file(Type, "a", "Log") as f:
            f.write(self.LogType[Type] % Data)

    @staticmethod
    def getTime(time_format="%b %d %Y %H:%M:%S %Z"):
        """Return the current local time formatted per *time_format*."""
        Time = time.localtime(time.time())
        return time.strftime(time_format, Time)

    def getData(self, path):
        """Read the value at key path *path* (empty list = whole store)."""
        if self.threading:
            return self._mdsShake("GET", path).get("data")
        else:
            return self._local_query(path)

    def getGroup(self, GroupID):
        """Return the record for *GroupID*, creating a default one if new."""
        Groups = self.getData(["Group"])
        if GroupID not in Groups:
            # BUGFIX: this used to store and return the shared self.GroupType
            # template itself, so every defaulted group aliased one dict and
            # mutating any of them corrupted the template.  Deep-copy it
            # (JSON round-trip; the template is plain JSON data).
            new_group = json.loads(json.dumps(self.GroupType))
            self.updateData(["Group", GroupID], new_group)
            return new_group
        return Groups.get(GroupID)

    def getSEGroup(self, GroupID):
        """Return the group's security-event switches keyed by int OpType,
        or None when the group has none configured."""
        GroupData = self.getGroup(GroupID)
        SEMode = GroupData.get("SEGroup")
        if SEMode is None:
            return None
        SEMode_ = {}
        # JSON storage stringifies the int OpType keys; convert them back.
        for Mode in SEMode:
            SEMode_[int(Mode)] = SEMode[Mode]
        return SEMode_

    def limitDecrease(self, limit_type, userId):
        """Consume one unit of *userId*'s *limit_type* quota."""
        if self.threading:
            self._mdsShake("YLD", limit_type, userId)
        else:
            self.Data["LimitInfo"][limit_type][userId] -= 1
|