mirror of
https://github.com/Tencent/Metis.git
synced 2025-12-26 04:02:48 +00:00
refactor(local): refactor code
This commit is contained in:
parent
42fc9e64b8
commit
3ffa06599d
|
|
@ -4,6 +4,7 @@ import json
|
|||
from django.shortcuts import render
|
||||
from django.http import FileResponse
|
||||
from common.render import render_json
|
||||
from functools import wraps
|
||||
from app.service.time_series_detector.anomaly_service import *
|
||||
from app.service.time_series_detector.sample_service import *
|
||||
from app.service.time_series_detector.task_service import *
|
||||
|
|
@ -11,90 +12,58 @@ from app.service.time_series_detector.detect_service import *
|
|||
from app.config.errorcode import *
|
||||
from app.utils.utils import *
|
||||
|
||||
def check_post(func):
    """Decorator for view functions that only accept POST.

    Non-POST requests are answered with the standard NOT_POST envelope;
    POST requests are delegated to *func*, whose returned dict is
    serialized through render_json.
    """
    @wraps(func)
    def f(request):
        if request.method != "POST":
            return render_json(build_ret_data(NOT_POST))
        return render_json(func(request))
    return f
|
||||
|
||||
|
||||
@check_post
def search_anomaly(request):
    """POST endpoint: query anomaly records; request.body is the JSON filter."""
    anomaly_service = AnomalyService()
    return anomaly_service.query_anomaly(request.body)
|
||||
|
||||
|
||||
@check_post
def import_sample(request):
    """POST endpoint: import uploaded sample files (request.FILES)."""
    sample_service = SampleService()
    return sample_service.import_file(request.FILES)
|
||||
|
||||
|
||||
@check_post
def update_sample(request):
    """POST endpoint: update a sample record; request.body is JSON."""
    sample_service = SampleService()
    return sample_service.update_sample(request.body)
|
||||
|
||||
|
||||
@check_post
def query_sample(request):
    """POST endpoint: query sample records; request.body is the JSON filter."""
    sample_service = SampleService()
    return sample_service.query_sample(request.body)
|
||||
|
||||
|
||||
@check_post
def update_anomaly(request):
    """POST endpoint: update an anomaly record; request.body is JSON."""
    # Renamed local from the misleading `sample_service` — it is an AnomalyService.
    anomaly_service = AnomalyService()
    return anomaly_service.update_anomaly(request.body)
|
||||
|
||||
|
||||
@check_post
def train(request):
    """POST endpoint: launch a model-training task from the JSON body."""
    detect_service = DetectService()
    return detect_service.process_train(json.loads(request.body))
|
||||
|
||||
|
||||
def download_sample(request):
|
||||
if request.method == "GET":
|
||||
try:
|
||||
sample_service = SampleService()
|
||||
file_name = sample_service.sample_download(request.GET['id'])
|
||||
ret_code, file_name = sample_service.sample_download(request.GET['id'])
|
||||
files = open(file_name, 'rb')
|
||||
response = FileResponse(files)
|
||||
response['Content-Type'] = 'application/octet-stream'
|
||||
|
|
@ -108,92 +77,43 @@ def download_sample(request):
|
|||
return render_json(return_dict)
|
||||
|
||||
|
||||
@check_post
def predict_rate(request):
    """POST endpoint: run rate-type anomaly prediction on the JSON body."""
    detect_service = DetectService()
    return detect_service.rate_predict(json.loads(request.body))
|
||||
|
||||
|
||||
@check_post
def predict_value(request):
    """POST endpoint: run value-type anomaly prediction on the JSON body."""
    detect_service = DetectService()
    return detect_service.value_predict(json.loads(request.body))
|
||||
|
||||
|
||||
@check_post
def query_train_task(request):
    """POST endpoint: query training tasks; request.body is the JSON filter."""
    train_service = TrainService()
    return train_service.query_train(request.body)
|
||||
|
||||
|
||||
@check_post
def query_train_source(request):
    """POST endpoint: list available sample sources for training.

    NOTE(review): despite the name, this delegates to
    SampleService.query_sample_source — confirm that is intentional.
    """
    sample_service = SampleService()
    return sample_service.query_sample_source(request.body)
|
||||
|
||||
|
||||
@check_post
def delete_train_task(request):
    """POST endpoint: delete a training task; request.body is JSON."""
    train_service = TrainService()
    return train_service.delete_train(request.body)
|
||||
|
||||
|
||||
@check_post
def delete_sample(request):
    """POST endpoint: delete sample records; request.body is JSON."""
    sample_service = SampleService()
    return sample_service.delete_sample(request.body)
|
||||
|
||||
|
||||
@check_post
def count_sample(request):
    """POST endpoint: count sample records matching the JSON filter."""
    sample_service = SampleService()
    return sample_service.count_sample(request.body)
|
||||
|
|
|
|||
|
|
@ -153,7 +153,7 @@ class SampleOperation(object):
|
|||
"negative_count": int(row[2])
|
||||
})
|
||||
|
||||
return OP_SUCCESS, sample_list
|
||||
return OP_SUCCESS, {"count": sample_list}
|
||||
|
||||
def download_sample(self, data):
|
||||
sample_list = []
|
||||
|
|
@ -185,7 +185,7 @@ class SampleOperation(object):
|
|||
writer = csv.writer(pfile)
|
||||
writer.writerow(head)
|
||||
writer.writerows(sample_list)
|
||||
return download_file_path
|
||||
return 0, download_file_path
|
||||
|
||||
def query_sample(self, data):
|
||||
item_per_page = data['itemPerPage']
|
||||
|
|
@ -261,7 +261,7 @@ class SampleOperation(object):
|
|||
command = "delete from sample_dataset where id = %s "
|
||||
num = self.__cur.execute(command, id_num)
|
||||
self.__conn.commit()
|
||||
return OP_SUCCESS, num
|
||||
return OP_SUCCESS, {"count":num}
|
||||
|
||||
def delete_sample_by_anomaly_id(self, data):
|
||||
id_num = data['id']
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ Unless required by applicable law or agreed to in writing, software distributed
|
|||
"""
|
||||
|
||||
import json
|
||||
import traceback
|
||||
from app.dao.time_series_detector.anomaly_op import *
|
||||
from app.utils.utils import *
|
||||
|
||||
|
|
@ -19,28 +18,10 @@ class AnomalyService(object):
|
|||
def __init__(self):
    # Single DAO handle shared by every anomaly operation on this service.
    self.__anomaly = AbnormalOperation()
|
||||
|
||||
@exce_service
def query_anomaly(self, body):
    """Return anomaly records matching the JSON filter in *body*.

    Error handling / envelope construction is done by @exce_service.
    """
    return self.__anomaly.get_anomaly(json.loads(body))
|
||||
|
||||
@exce_service
def update_anomaly(self, body):
    """Update an anomaly record from the JSON payload in *body*.

    Error handling / envelope construction is done by @exce_service.
    """
    return self.__anomaly.update_anomaly(json.loads(body))
|
||||
|
|
|
|||
|
|
@ -193,9 +193,6 @@ class DetectService(object):
|
|||
return build_ret_data(OP_SUCCESS, ret_data)
|
||||
|
||||
def rate_predict(self, data):
|
||||
ret_code, ret_data = check_value(data)
|
||||
if ret_code != OP_SUCCESS:
|
||||
return build_ret_data(ret_code, ret_data)
|
||||
combined_data = data["dataC"] + "," + data["dataB"] + "," + data["dataA"]
|
||||
time_series = map(float, combined_data.split(','))
|
||||
statistic_result = self.statistic_obj.predict(time_series)
|
||||
|
|
|
|||
|
|
@ -24,14 +24,9 @@ class SampleService(object):
|
|||
uuid_str = uuid.uuid4().hex[:8]
|
||||
self.__upload_file_path = UPLOAD_PATH % uuid_str
|
||||
|
||||
@exce_service
def import_sample(self, data):
    """Insert sample rows; returns the DAO's (ret_code, ret_data) pair,
    wrapped into the standard envelope by @exce_service."""
    return self.__sample.import_sample(data)
|
||||
|
||||
def import_file(self, file_data):
|
||||
try:
|
||||
|
|
@ -61,9 +56,6 @@ class SampleService(object):
|
|||
"updateTime": int(row[11]),
|
||||
"time": int(row[11]),
|
||||
"anomalyId": "0"}
|
||||
check_code, check_msg = check_value(one_item)
|
||||
if OP_SUCCESS != check_code:
|
||||
return build_ret_data(check_code, check_msg)
|
||||
data.append(one_item)
|
||||
if row[6] == "positive":
|
||||
positive_count = positive_count + 1
|
||||
|
|
@ -81,70 +73,33 @@ class SampleService(object):
|
|||
import_ret["data"] = ret_data
|
||||
return import_ret
|
||||
|
||||
@exce_service
def update_sample(self, body):
    """Update a sample record from the JSON payload in *body*."""
    return self.__sample.update_sample(json.loads(body))
|
||||
|
||||
@exce_service
def query_sample(self, body):
    """Query sample records matching the JSON filter in *body*."""
    return self.__sample.query_sample(json.loads(body))
|
||||
|
||||
def sample_download(self, body):
    """Build the download file for the sample id in *body*.

    Always returns a (ret_code, ret_data) pair so the caller can safely
    unpack it: on success ret_data is the file path; on failure it is an
    error payload. (The previous version sometimes returned a bare ""
    and left ret_code unbound on exception.)
    """
    try:
        if len(body) > VALUE_LEN_MAX:
            # Oversized input: reject with a consistent two-value return.
            return CHECK_PARAM_FAILED, ""
        ret_code, ret_data = self.__sample.download_sample(body)
    except Exception as ex:
        traceback.print_exc()
        ret_code, ret_data = THROW_EXP, build_ret_data(THROW_EXP, str(ex))
    return ret_code, ret_data
|
||||
|
||||
@exce_service
def delete_sample(self, body):
    """Delete sample records identified by the JSON payload in *body*."""
    return self.__sample.delete_sample(json.loads(body))
|
||||
|
||||
@exce_service
def count_sample(self, body):
    """Count sample records matching the JSON filter in *body*."""
    return self.__sample.sample_count(json.loads(body))
|
||||
|
||||
@exce_service
def query_sample_source(self, body):
    """List the distinct sample sources.

    *body* is accepted for interface uniformity with the other service
    entry points but is not used by the underlying DAO call.
    """
    return self.__sample.query_sample_source()
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ Unless required by applicable law or agreed to in writing, software distributed
|
|||
"""
|
||||
|
||||
import json
|
||||
import traceback
|
||||
from app.dao.time_series_detector.train_op import *
|
||||
from app.config.errorcode import *
|
||||
from app.utils.utils import *
|
||||
|
|
@ -20,35 +19,10 @@ class TrainService(object):
|
|||
def __init__(self):
    # Single DAO handle shared by every training-task operation.
    self.__train_op = TrainOperation()
|
||||
|
||||
@exce_service
def query_train(self, body):
    """Query training tasks matching the JSON filter in *body*.

    Error handling / envelope construction is done by @exce_service.
    """
    return self.__train_op.query_train(json.loads(body))
|
||||
|
||||
@exce_service
def delete_train(self, body):
    """Delete the training task identified by the JSON payload in *body*."""
    return self.__train_op.delete_train(json.loads(body))
|
||||
|
|
|
|||
|
|
@ -1,161 +1,102 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
Tencent is pleased to support the open source community by making Metis available.
|
||||
Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
|
||||
Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
|
||||
https://opensource.org/licenses/BSD-3-Clause
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from app.config.errorcode import *
|
||||
from app.config.common import *
|
||||
|
||||
|
||||
def is_standard_time_series(time_series, window=180):
|
||||
"""
|
||||
Check the length of time_series. If window = 180, then the length of time_series should be 903.
|
||||
The mean value of last window should be larger than 0.
|
||||
|
||||
:param time_series: the time series to check, like [data_c, data_b, data_a]
|
||||
:type time_series: pandas.Series
|
||||
:param window: the length of window
|
||||
:return: True or False
|
||||
:return type: boolean
|
||||
"""
|
||||
if len(time_series) == 5 * window + 3 and np.mean(time_series[(4 * window + 2):]) > 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def split_time_series(time_series, window=180):
|
||||
"""
|
||||
Spilt the time_series into five parts. Each has a length of window + 1
|
||||
|
||||
:param time_series: [data_c, data_b, data_a]
|
||||
:param window: the length of window
|
||||
:return: spilt list [[data_c_left], [data_c_right], [data_b_left], [data_b_right], [data_a]]
|
||||
"""
|
||||
data_c_left = time_series[0:(window + 1)]
|
||||
data_c_right = time_series[window:(2 * window + 1)]
|
||||
data_b_left = time_series[(2 * window + 1):(3 * window + 2)]
|
||||
data_b_right = time_series[(3 * window + 1):(4 * window + 2)]
|
||||
data_a = time_series[(4 * window + 2):]
|
||||
split_time_series = []
|
||||
split_time_series.append(data_c_left)
|
||||
split_time_series.append(data_c_right)
|
||||
split_time_series.append(data_b_left)
|
||||
split_time_series.append(data_b_right)
|
||||
split_time_series.append(data_a)
|
||||
return split_time_series
|
||||
|
||||
|
||||
def normalize_time_series(split_time_series):
|
||||
"""
|
||||
Normalize the split_time_series.
|
||||
|
||||
:param split_time_series: [[data_c_left], [data_c_right], [data_b_left], [data_b_right], [data_a]]
|
||||
:return: all list / mean(split_time_series)
|
||||
"""
|
||||
value = np.mean(split_time_series[4])
|
||||
if value > 1:
|
||||
normalized_data_c_left = list(split_time_series[0] / value)
|
||||
normalized_data_c_right = list(split_time_series[1] / value)
|
||||
normalized_data_b_left = list(split_time_series[2] / value)
|
||||
normalized_data_b_right = list(split_time_series[3] / value)
|
||||
normalized_data_a = list(split_time_series[4] / value)
|
||||
else:
|
||||
normalized_data_c_left = split_time_series[0]
|
||||
normalized_data_c_right = split_time_series[1]
|
||||
normalized_data_b_left = split_time_series[2]
|
||||
normalized_data_b_right = split_time_series[3]
|
||||
normalized_data_a = split_time_series[4]
|
||||
normalized_split_time_series = []
|
||||
normalized_split_time_series.append(normalized_data_c_left)
|
||||
normalized_split_time_series.append(normalized_data_c_right)
|
||||
normalized_split_time_series.append(normalized_data_b_left)
|
||||
normalized_split_time_series.append(normalized_data_b_right)
|
||||
normalized_split_time_series.append(normalized_data_a)
|
||||
return normalized_split_time_series
|
||||
|
||||
|
||||
def build_ret_data(ret_code, data=""):
|
||||
return {"code": ret_code, "msg": ERR_CODE[ret_code], "data": data}
|
||||
|
||||
|
||||
def validate_value(data):
|
||||
if isinstance(data, unicode):
|
||||
if len(data) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED
|
||||
elif isinstance(data, str):
|
||||
if len(data) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED
|
||||
elif isinstance(data, list):
|
||||
if len(data) > INPUT_LIST_LEN_MAX:
|
||||
return CHECK_PARAM_FAILED
|
||||
for item in data:
|
||||
ret_code = validate_value(item)
|
||||
if ret_code != 0:
|
||||
return ret_code
|
||||
return 0
|
||||
|
||||
|
||||
def check_value(data):
|
||||
if 'attrId' in data:
|
||||
ret_code = validate_value(data['attrId'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "attrId too long"
|
||||
if 'attrName' in data:
|
||||
ret_code = validate_value(data['attrName'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "attrName too long"
|
||||
if 'viewId' in data:
|
||||
ret_code = validate_value(data['viewId'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "viewId too long"
|
||||
if 'viewName' in data:
|
||||
ret_code = validate_value(data['viewName'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "viewName too long"
|
||||
if 'itemPerPage' in data:
|
||||
if data['itemPerPage'] > INPUT_ITEM_PER_PAGE_MAX:
|
||||
return CHECK_PARAM_FAILED, "itemPerPage too big"
|
||||
if 'beginTime' in data:
|
||||
if len(str(data['beginTime'])) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED, "beginTime too long"
|
||||
if 'endTime' in data:
|
||||
if len(str(data['endTime'])) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED, "endTime too long"
|
||||
if 'updateTime' in data:
|
||||
if len(str(data['updateTime'])) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED, "updateTime too long"
|
||||
if 'source' in data:
|
||||
ret_code = validate_value(data['source'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "source too long"
|
||||
if 'trainOrTest' in data:
|
||||
ret_code = validate_value(data['source'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "trainOrTest too long"
|
||||
if 'positiveOrNegative' in data:
|
||||
ret_code = validate_value(data['positiveOrNegative'])
|
||||
if ret_code != 0:
|
||||
return CHECK_PARAM_FAILED, "positiveOrNegative too long"
|
||||
if 'window' in data:
|
||||
if len(str(data['window'])) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED, "window"
|
||||
if 'dataTime' in data:
|
||||
if len(str(data['dataTime'])) > INPUT_LEN_ENG_MAX:
|
||||
return CHECK_PARAM_FAILED, "dataTime too long"
|
||||
if 'dataC' in data:
|
||||
if len(str(data['dataC'])) > VALUE_LEN_MAX:
|
||||
return CHECK_PARAM_FAILED, "dataC too long"
|
||||
if 'dataB' in data:
|
||||
if len(str(data['dataB'])) > VALUE_LEN_MAX:
|
||||
return CHECK_PARAM_FAILED, "dataB too long"
|
||||
if 'dataA' in data:
|
||||
if len(str(data['dataA'])) > VALUE_LEN_MAX:
|
||||
return CHECK_PARAM_FAILED, "dataA too long"
|
||||
return 0, ""
|
||||
#!/usr/bin/python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
Tencent is pleased to support the open source community by making Metis available.
|
||||
Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
|
||||
Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
|
||||
https://opensource.org/licenses/BSD-3-Clause
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
import traceback
|
||||
import json
|
||||
from functools import wraps
|
||||
from app.config.errorcode import *
|
||||
from app.config.common import *
|
||||
|
||||
|
||||
def is_standard_time_series(time_series, window=180):
    """
    Check that *time_series* has the standard concatenated length and that
    its trailing window (the "data_a" segment) has a positive mean.
    For window = 180 the expected length is 5 * 180 + 3 = 903.

    :param time_series: the time series to check, like [data_c, data_b, data_a]
    :param window: the length of one window
    :return: True if length and trailing mean are both valid, else False
    :return type: boolean
    """
    expected_length = 5 * window + 3
    if len(time_series) != expected_length:
        return False
    return bool(np.mean(time_series[(4 * window + 2):]) > 0)
|
||||
|
||||
|
||||
def split_time_series(time_series, window=180):
    """
    Split *time_series* into five overlapping parts, each of length window + 1.

    :param time_series: [data_c, data_b, data_a]
    :param window: the length of one window
    :return: [[data_c_left], [data_c_right], [data_b_left], [data_b_right], [data_a]]
    """
    # Slice boundaries follow the layout produced by is_standard_time_series:
    # two overlapping halves for data_c and data_b, then the trailing data_a.
    return [
        time_series[0:(window + 1)],
        time_series[window:(2 * window + 1)],
        time_series[(2 * window + 1):(3 * window + 2)],
        time_series[(3 * window + 1):(4 * window + 2)],
        time_series[(4 * window + 2):],
    ]
|
||||
|
||||
|
||||
def normalize_time_series(split_time_series):
    """
    Normalize each part of *split_time_series* by the mean of its last part.

    :param split_time_series: [[data_c_left], [data_c_right], [data_b_left], [data_b_right], [data_a]]
    :return: every part divided by mean(data_a) when that mean exceeds 1,
             otherwise the parts unchanged
    """
    baseline = np.mean(split_time_series[4])
    if baseline > 1:
        # Elementwise division relies on the parts supporting `/` with a
        # scalar (e.g. numpy arrays), matching the original behavior.
        return [list(part / baseline) for part in split_time_series]
    return [part for part in split_time_series]
|
||||
|
||||
|
||||
def build_ret_data(ret_code, data=""):
|
||||
return {"code": ret_code, "msg": ERR_CODE[ret_code], "data": data}
|
||||
|
||||
|
||||
def exce_service(func):
    """Decorator for service entry points.

    The wrapped function must return a (ret_code, ret_data) pair; the
    decorator converts it into the standard response dict, and turns any
    raised exception into a THROW_EXP response after printing the traceback.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            ret_code, ret_data = func(*args, **kwargs)
            return_dict = build_ret_data(ret_code, ret_data)
        except Exception as ex:  # `as` syntax: valid on Python 2.6+ AND 3 (old `, ex` form is 2-only)
            traceback.print_exc()
            return_dict = build_ret_data(THROW_EXP, str(ex))
        return return_dict
    return wrapper
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue