Mirror of https://github.com/Tencent/Metis.git

Commit 446cd94b8b: Merge branch 'dev'
@@ -12,6 +12,7 @@ from app.service.time_series_detector.detect_service import *
 from app.config.errorcode import *
+from app.utils.utils import *


 def check_post(func):
     @wraps(func)
     def f(request):
@@ -69,7 +70,7 @@ def download_sample(request):
             response['Content-Type'] = 'application/octet-stream'
             response['Content-Disposition'] = 'attachment;filename = "SampleExport.csv"'
             return response
-        except Exception, ex:
+        except Exception as ex:
             return_dict = build_ret_data(THROW_EXP, str(ex))
             return render_json(return_dict)
     else:
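The only change in this hunk is the exception clause: `except Exception, ex:` is Python 2-only syntax and a SyntaxError on Python 3, while `except Exception as ex:` parses on Python 2.6+ and Python 3 alike. A minimal, runnable sketch of the pattern, with placeholder error codes and helpers rather than the project's own:

# Minimal sketch of the "except ... as" pattern adopted above.
# THROW_EXP, build_ret_data and risky_operation are stand-ins, not the
# project's real helpers.
THROW_EXP = 5001

def build_ret_data(code, data=""):
    return {"code": code, "msg": data}

def risky_operation():
    raise ValueError("boom")

def handler():
    try:
        return build_ret_data(0, risky_operation())
    except Exception as ex:  # valid on Python 2.6+ and Python 3
        return build_ret_data(THROW_EXP, str(ex))

print(handler())  # {'code': 5001, 'msg': 'boom'}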
@@ -2,9 +2,10 @@ import json
 from django.http import HttpResponse


-def render_json(dictionary={}):
+def render_json(dictionary=None):
+    dictionary = {} if dictionary is None else dictionary
     response = HttpResponse(json.dumps(dictionary), content_type="application/json")
     response['Access-Control-Allow-Origin'] = '*'
     response["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type"
     response["Access-Control-Allow-Methods"] = "GET, POST, PUT, OPTIONS"
     return response
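The render_json change replaces a mutable default argument with a None sentinel. A default `{}` is created once, when the function is defined, and shared by every call that omits the argument, so mutations leak between calls; `dictionary=None` plus an explicit re-bind gives each call a fresh dict. A small illustration of the pitfall (the function names here are made up):

# Demonstrates why `def f(d={})` is risky and how the None sentinel fixes it.
def append_bad(item, bucket=[]):      # single list shared by every call
    bucket.append(item)
    return bucket

def append_good(item, bucket=None):   # fresh list per call unless one is passed
    bucket = [] if bucket is None else bucket
    bucket.append(item)
    return bucket

print(append_bad("a"))   # ['a']
print(append_bad("b"))   # ['a', 'b']  <- state leaked from the first call
print(append_good("a"))  # ['a']
print(append_good("b"))  # ['b']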
@@ -22,6 +22,7 @@ def wait_child(signum, frame):
             raise
     print('handle SIGCHLD end')

+
 if __name__ == "__main__":
     signal.signal(signal.SIGCHLD, wait_child)
     os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.settings")
@@ -81,7 +81,7 @@ class AbnormalOperation(object):
     def update_anomaly(self, data):
         update_str = "UPDATE anomaly set mark_flag = %s where id = %s"
         params = [data['markFlag'], data['id']]
-        record_num = self.__cur.execute(update_str, params)
+        self.__cur.execute(update_str, params)
         self.__conn.commit()

         if MARK_NEGATIVE == data['markFlag'] or MARK_POSITIVE == data['markFlag']:
@@ -191,7 +191,7 @@ class SampleOperation(object):
         item_per_page = data['itemPerPage']
         request_page = data['requestPage']
         beg_limit = (item_per_page * (request_page - 1))
-        limit = (item_per_page)
+        limit = item_per_page
         params = []
         query_str = ""
@@ -82,7 +82,7 @@ class Gbdt(object):
             grd.fit(X_train, y_train)
             model_name = MODEL_PATH + task_id + "_model"
             joblib.dump(grd, model_name)
-        except Exception, ex:
+        except Exception as ex:
             return TRAIN_ERR, str(ex)
         return OP_SUCCESS, ""
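For context on the surrounding lines: the trainer fits a scikit-learn gradient boosting model and persists it with joblib.dump, keyed by task id. A rough, self-contained sketch of that flow, with synthetic data and a temporary path standing in for MODEL_PATH and task_id:

# Rough sketch of the fit-and-persist flow above; the data, parameters and
# output path are illustrative, not the project's.
import joblib  # newer scikit-learn no longer bundles sklearn.externals.joblib
from sklearn.ensemble import GradientBoostingClassifier

X_train = [[0, 0], [1, 1], [0, 1], [1, 0]] * 10
y_train = [0, 1, 1, 0] * 10

grd = GradientBoostingClassifier(n_estimators=10)
grd.fit(X_train, y_train)

model_name = "/tmp/example_task_model"  # plays the role of MODEL_PATH + task_id + "_model"
joblib.dump(grd, model_name)

restored = joblib.load(model_name)
print(restored.predict([[0, 0], [1, 1]]))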
@@ -74,7 +74,7 @@ class XGBoosting(object):
         """
         try:
             f = open(feature_file_name, "w")
-        except Exception, ex:
+        except Exception as ex:
             return CAL_FEATURE_ERR, str(ex)
         times = 0
         for temp in data:
@@ -104,7 +104,7 @@ class XGBoosting(object):
             features.append(temp)
         try:
             ret_code, ret_data = self.__save_libsvm_format(features, feature_file_name)
-        except Exception, ex:
+        except Exception as ex:
             ret_code = CAL_FEATURE_ERR
             ret_data = str(ex)
         return ret_code, ret_data
@@ -124,7 +124,7 @@ class XGBoosting(object):
             return ret_code, ret_data
         try:
             dtrain = xgb.DMatrix(feature_file_name)
-        except Exception, ex:
+        except Exception as ex:
             return READ_FEATURE_FAILED, str(ex)
         params = {
             'max_depth': self.max_depth,
@@ -141,7 +141,7 @@ class XGBoosting(object):
         try:
             bst = xgb.train(params, dtrain, num_round)
             bst.save_model(model_name)
-        except Exception, ex:
+        except Exception as ex:
             return TRAIN_ERR, str(ex)
         return OP_SUCCESS, ""
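These XGBoosting hunks only touch the exception syntax, but the surrounding flow is: write features to a libsvm-format file, load it with xgb.DMatrix, train with xgb.train, and save the booster. A rough sketch of that pipeline with made-up features and paths (newer xgboost releases may want an explicit "?format=libsvm" suffix when loading text files):

# Sketch of the train-and-save flow around the hunks above; the feature file,
# labels, parameters and paths are invented for illustration.
import xgboost as xgb

feature_file_name = "/tmp/example_features.libsvm"
with open(feature_file_name, "w") as f:
    # libsvm format: label index:value ...
    f.write("1 1:0.5 2:1.2\n0 1:0.1 2:0.4\n1 1:0.7 2:1.0\n0 1:0.2 2:0.3\n")

dtrain = xgb.DMatrix(feature_file_name)  # some versions prefer feature_file_name + "?format=libsvm"
params = {"max_depth": 3, "eta": 0.3, "objective": "binary:logistic"}
bst = xgb.train(params, dtrain, num_boost_round=5)
bst.save_model("/tmp/example_xgb_model")
print(bst.predict(dtrain))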
@@ -78,9 +78,10 @@ def time_series_binned_entropy(x):


 def get_classification_features(x):
-    classification_features = []
-    classification_features.append(time_series_autocorrelation(x))
-    classification_features.append(time_series_coefficient_of_variation(x))
+    classification_features = [
+        time_series_autocorrelation(x),
+        time_series_coefficient_of_variation(x)
+    ]
     classification_features.extend(time_series_binned_entropy(x))
     # append yourself classification features here...
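This refactor (and the larger one in get_statistical_features below) turns a chain of append calls into a single list literal, which states the feature order in one place and avoids repeating the variable name; extend is kept for helpers that return several values at once. The same shape on stand-in feature functions:

# Same refactor shape as above, on made-up feature functions.
def ts_max(x):
    return max(x)

def ts_min(x):
    return min(x)

def ts_mean(x):
    return sum(x) / float(len(x))

def get_features(x):
    features = [  # one literal instead of repeated .append() calls
        ts_max(x),
        ts_min(x),
        ts_mean(x),
    ]
    features.extend([len(x)])  # extend() still fits helpers that return a list
    return features

print(get_features([1, 2, 3, 4]))  # [4, 1, 2.5, 4]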
@@ -410,37 +410,38 @@ def time_series_range(x):


 def get_statistical_features(x):
-    statistical_features = []
-    statistical_features.append(time_series_maximum(x))
-    statistical_features.append(time_series_minimum(x))
-    statistical_features.append(time_series_mean(x))
-    statistical_features.append(time_series_variance(x))
-    statistical_features.append(time_series_standard_deviation(x))
-    statistical_features.append(time_series_skewness(x))
-    statistical_features.append(time_series_kurtosis(x))
-    statistical_features.append(time_series_median(x))
-    statistical_features.append(time_series_abs_energy(x))
-    statistical_features.append(time_series_absolute_sum_of_changes(x))
-    statistical_features.append(time_series_variance_larger_than_std(x))
-    statistical_features.append(time_series_count_above_mean(x))
-    statistical_features.append(time_series_count_below_mean(x))
-    statistical_features.append(time_series_first_location_of_maximum(x))
-    statistical_features.append(time_series_first_location_of_minimum(x))
-    statistical_features.append(time_series_last_location_of_maximum(x))
-    statistical_features.append(time_series_last_location_of_minimum(x))
-    statistical_features.append(int(time_series_has_duplicate(x)))
-    statistical_features.append(int(time_series_has_duplicate_max(x)))
-    statistical_features.append(int(time_series_has_duplicate_min(x)))
-    statistical_features.append(time_series_longest_strike_above_mean(x))
-    statistical_features.append(time_series_longest_strike_below_mean(x))
-    statistical_features.append(time_series_mean_abs_change(x))
-    statistical_features.append(time_series_mean_change(x))
-    statistical_features.append(time_series_percentage_of_reoccurring_datapoints_to_all_datapoints(x))
-    statistical_features.append(time_series_ratio_value_number_to_time_series_length(x))
-    statistical_features.append(time_series_sum_of_reoccurring_data_points(x))
-    statistical_features.append(time_series_sum_of_reoccurring_values(x))
-    statistical_features.append(time_series_sum_values(x))
-    statistical_features.append(time_series_range(x))
+    statistical_features = [
+        time_series_maximum(x),
+        time_series_minimum(x),
+        time_series_mean(x),
+        time_series_variance(x),
+        time_series_standard_deviation(x),
+        time_series_skewness(x),
+        time_series_kurtosis(x),
+        time_series_median(x),
+        time_series_abs_energy(x),
+        time_series_absolute_sum_of_changes(x),
+        time_series_variance_larger_than_std(x),
+        time_series_count_above_mean(x),
+        time_series_count_below_mean(x),
+        time_series_first_location_of_maximum(x),
+        time_series_first_location_of_minimum(x),
+        time_series_last_location_of_maximum(x),
+        time_series_last_location_of_minimum(x),
+        int(time_series_has_duplicate(x)),
+        int(time_series_has_duplicate_max(x)),
+        int(time_series_has_duplicate_min(x)),
+        time_series_longest_strike_above_mean(x),
+        time_series_longest_strike_below_mean(x),
+        time_series_mean_abs_change(x),
+        time_series_mean_change(x),
+        time_series_percentage_of_reoccurring_datapoints_to_all_datapoints(x),
+        time_series_ratio_value_number_to_time_series_length(x),
+        time_series_sum_of_reoccurring_data_points(x),
+        time_series_sum_of_reoccurring_values(x),
+        time_series_sum_values(x),
+        time_series_range(x)
+    ]
     # append yourself statistical features here...

     return statistical_features
@@ -62,7 +62,7 @@ class SampleService(object):
                 elif row[6] == "negative":
                     negative_count = negative_count + 1
                 count = count + 1
-        except Exception, ex:
+        except Exception as ex:
             traceback.print_exc()
             return_dict = build_ret_data(FILE_FORMAT_ERR, str(ex))
             return return_dict
@@ -82,12 +82,12 @@ class SampleService(object):
         return self.__sample.query_sample(json.loads(body))

     def sample_download(self, body):
         ret_data = ""
         ret_code = 1000
         try:
             if len(body) > VALUE_LEN_MAX:
                 return ""
             ret_code, ret_data = self.__sample.download_sample(body)
-        except Exception, ex:
+        except Exception as ex:
             traceback.print_exc()
             ret_data = build_ret_data(THROW_EXP, str(ex))
         return ret_code, ret_data
File diff suppressed because one or more lines are too long
@@ -27,10 +27,7 @@ def is_standard_time_series(time_series, window=180):
     :return: True or False
     :return type: boolean
     """
-    if len(time_series) == 5 * window + 3 and np.mean(time_series[(4 * window + 2):]) > 0:
-        return True
-    else:
-        return False
+    return bool(len(time_series) == 5 * window + 3 and np.mean(time_series[(4 * window + 2):]) > 0)


 def split_time_series(time_series, window=180):
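Collapsing the if/else into `return bool(...)` keeps the same truth table; the bool() wrapper also normalises the numpy.bool_ produced by the np.mean comparison into a plain Python bool. A tiny self-contained check, with the function renamed here and an arbitrary small window:

# Tiny check of the single-expression form; mirrors the function above.
import numpy as np

def is_standard(time_series, window=180):
    return bool(len(time_series) == 5 * window + 3 and np.mean(time_series[(4 * window + 2):]) > 0)

window = 2
series = [1.0] * (5 * window + 3)
print(is_standard(series, window))       # True
print(is_standard(series[:-1], window))  # False: wrong length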
@@ -46,12 +43,13 @@ def split_time_series(time_series, window=180):
     data_b_left = time_series[(2 * window + 1):(3 * window + 2)]
     data_b_right = time_series[(3 * window + 1):(4 * window + 2)]
     data_a = time_series[(4 * window + 2):]
-    split_time_series = []
-    split_time_series.append(data_c_left)
-    split_time_series.append(data_c_right)
-    split_time_series.append(data_b_left)
-    split_time_series.append(data_b_right)
-    split_time_series.append(data_a)
+    split_time_series = [
+        data_c_left,
+        data_c_right,
+        data_b_left,
+        data_b_right,
+        data_a
+    ]
     return split_time_series
@@ -75,12 +73,13 @@ def normalize_time_series(split_time_series):
     normalized_data_b_left = split_time_series[2]
     normalized_data_b_right = split_time_series[3]
     normalized_data_a = split_time_series[4]
-    normalized_split_time_series = []
-    normalized_split_time_series.append(normalized_data_c_left)
-    normalized_split_time_series.append(normalized_data_c_right)
-    normalized_split_time_series.append(normalized_data_b_left)
-    normalized_split_time_series.append(normalized_data_b_right)
-    normalized_split_time_series.append(normalized_data_a)
+    normalized_split_time_series = [
+        normalized_data_c_left,
+        normalized_data_c_right,
+        normalized_data_b_left,
+        normalized_data_b_right,
+        normalized_data_a
+    ]
     return normalized_split_time_series
@@ -94,9 +93,8 @@ def exce_service(func):
         try:
             ret_code, ret_data = func(*args, **kwargs)
             return_dict = build_ret_data(ret_code, ret_data)
-        except Exception, ex:
+        except Exception as ex:
             traceback.print_exc()
             return_dict = build_ret_data(THROW_EXP, str(ex))
         return return_dict
     return wrapper
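Beyond the syntax fix, this hunk shows the exce_service decorator the service layer uses to turn a (code, data) pair into a response dict and to catch exceptions uniformly. A self-contained sketch of that decorator shape, with placeholder error codes and helpers instead of the project's own:

# Standalone sketch of the exce_service decorator pattern shown above.
import functools
import traceback

THROW_EXP = 5001  # stand-in for the project's error code

def build_ret_data(ret_code, ret_data=""):
    return {"code": ret_code, "data": ret_data}

def exce_service(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            ret_code, ret_data = func(*args, **kwargs)
            return_dict = build_ret_data(ret_code, ret_data)
        except Exception as ex:
            traceback.print_exc()
            return_dict = build_ret_data(THROW_EXP, str(ex))
        return return_dict
    return wrapper

@exce_service
def sample_service(ok=True):
    if not ok:
        raise RuntimeError("failed")
    return 0, "result"

print(sample_service())          # {'code': 0, 'data': 'result'}
print(sample_service(ok=False))  # {'code': 5001, 'data': 'failed'}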
@@ -11,6 +11,7 @@ Unless required by applicable law or agreed to in writing, software distributed
 from unittest import TestCase
+import random


 class DataTestCase(TestCase):
     def create_test_data_a(self):
         return [850600,889768,883237,896313,870407,868385,865300,889802,894983,836835,937571,904475,892846,878769,886624,892638,894804,889133,908860,
@@ -19,10 +19,12 @@ class FeatureTestCase(DataTestCase):
         self.assertTrue(time_series_maximum(testdata_a) == 1020900)
         self.assertTrue(time_series_minimum(testdata_a) == 824757)
         self.assertTrue((time_series_mean(testdata_a) - 919324.34) < 1e-2)

+    def test_two(self):
+        x = "hello"
+        assert 'hello' in x


 if __name__ == '__main__':
     a = FeatureTestCase()
     a.test_features()