Created
June 10, 2020 09:09
-
-
Save daydaygo/ffe6bb257a3d388b941b0d3aff333843 to your computer and use it in GitHub Desktop.
kong: db -> dbless
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
#-*- coding: UTF-8 -*- | |
import requests | |
import yaml | |
import os | |
import sys | |
import getopt | |
import shutil | |
import re | |
from collections import OrderedDict | |
# Globals set from the -i/-o command-line options in __main__:
#   input_name:  Kong admin-API address (dump/check_health) or input directory (merge/check_*)
#   output_name: output directory for dumped or merged YAML files
input_name=''
output_name=''
def usage():
    """Print the command-line help text (modes and usage examples) to stdout."""
    print(
    """
    Usage:sys.args[0] [option]
    -h or --help:显示帮助信息
    -m or --module:模式: dump/merge/check_route/check_dup_name/check_health. eg:kongdl.py -m dump
    -i or --input:dump:kong-api地址,eg:"127.0.0.1:8001"; merge:yaml文件到输入目录, eg:".";
    -o or --output:dump:yaml文件的输出目录,eg:"."; merge:合成后的yaml文件输出目录,eg:"."
    eg:
    ./kongdl.py -m merge -i ./yaml/ -o ./bin/
    ./kongdl.py -m check_route -i ./bin/kong.yaml
    ./kongdl.py -m check_dup_name -i ./bin/kong.yaml
    ./kongdl.py -m check_health -i 127.0.0.1:8001
    """
    )
class UnsortableList(list):
    """A list whose sort() is a deliberate no-op.

    yaml.dump sorts mapping items before emitting them; handing it this
    list type keeps items in insertion order instead.
    """

    def sort(self, *_args, **_kwargs):
        """Ignore all sort requests so element order is never changed."""
class UnsortableOrderedDict(OrderedDict):
    """An OrderedDict whose items() come back as an UnsortableList.

    PyYAML calls .items() and then sorts the result before dumping;
    returning an UnsortableList makes that sort a no-op, preserving the
    insertion order of keys in the emitted YAML.
    """

    def items(self, *args, **kwargs):
        """Return the parent's items wrapped in a sort-proof list."""
        return UnsortableList(super(UnsortableOrderedDict, self).items(*args, **kwargs))
# Dump UnsortableOrderedDict like a plain dict, but with key order preserved
# (see UnsortableOrderedDict.items above).
yaml.add_representer(UnsortableOrderedDict, yaml.representer.SafeRepresenter.represent_dict)

def unicode_representer(dumper, uni):
    """Represent a text value as a plain (untagged) YAML string scalar."""
    node = yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)
    return node

# BUG FIX: yaml.add_representer expects (data_type, representer). The
# original passed the function as BOTH arguments, registering the
# representer for the function's own type, so it was never applied to
# text values. Register it for the text type instead (unicode on
# Python 2, str on Python 3).
try:
    _text_type = unicode  # Python 2
except NameError:
    _text_type = str  # Python 3
yaml.add_representer(_text_type, unicode_representer)
def dump_services():
    """Fetch every service from the Kong admin API and write one YAML file
    per service into <output>/services/ (via dump_service)."""
    print('dump_services...')
    resp = requests.get('http://' + input_name + '/services')
    payload = resp.json()
    out_dir = output_name + '/services'
    # Recreate the directory from scratch so files for deleted services
    # do not linger from a previous run.
    if os.path.exists(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)
    copied_keys = ('name', 'host', 'port', 'protocol',
                   'connect_timeout', 'read_timeout', 'write_timeout', 'retries')
    for svc in payload['data']:
        formatted = OrderedDict()
        formatted['name'] = svc['name']
        print(svc['name'])
        for key in copied_keys[1:]:
            formatted[key] = svc[key]
        dump_service(formatted)
def _format_plugin(p):
    """Format one Kong plugin record as an order-preserving dict for YAML.

    Callers are expected to filter out the internal 'manual-downgrade'
    plugin before calling this.
    """
    fmt = OrderedDict()
    fmt['name'] = p['name']
    fmt['enabled'] = p['enabled']
    # 'protocols'/'tags' only exist on kong >= 1.1
    if 'protocols' in p:
        fmt['protocols'] = p['protocols']
    fmt['run_on'] = p['run_on']
    fmt['config'] = p['config']
    if 'tags' in p:
        fmt['tags'] = p['tags']
    return UnsortableOrderedDict(fmt)

def dump_service(s):
    """Attach routes (with their plugins) and service-level plugins to the
    service dict *s*, then write it to <output>/services/<name>.yaml.

    Fixes vs. original: dict.has_key() (Python-2-only) replaced with 'in';
    the loop variable 'r' was reused for both route dicts and HTTP
    responses; the plugin-formatting code was duplicated twice and is now
    shared via _format_plugin().
    """
    # get routes
    resp = requests.get('http://' + input_name + '/services/' + s['name'] + '/routes')
    routes = resp.json()
    fmtRoutes = []
    for route in routes['data']:
        fmtR = OrderedDict()
        if route['name']:
            fmtR['name'] = route['name']
        fmtR['hosts'] = route['hosts']
        fmtR['paths'] = route['paths']
        fmtR['methods'] = route['methods']
        if 'protocols' in route:
            fmtR['protocols'] = route['protocols']
        fmtR['preserve_host'] = route['preserve_host']
        fmtR['strip_path'] = route['strip_path']
        fmtR['regex_priority'] = route['regex_priority']
        if route['snis']:
            fmtR['snis'] = route['snis']
        # not tags if kong-version < 1.1
        if 'tags' in route:
            fmtR['tags'] = route['tags']
        # look plugins up by route id: route's name may be none
        resp = requests.get('http://' + input_name + '/routes/' + route['id'] + '/plugins')
        plugins = resp.json()
        fmtR['plugins'] = [_format_plugin(p) for p in plugins['data']
                           if p['name'] != 'manual-downgrade']
        fmtRoutes.append(UnsortableOrderedDict(fmtR))
    s['routes'] = fmtRoutes
    # get service's plugins
    resp = requests.get('http://' + input_name + '/services/' + s['name'] + '/plugins')
    plugins = resp.json()
    s['plugins'] = [_format_plugin(p) for p in plugins['data']
                    if p['name'] != 'manual-downgrade']
    unsortS = UnsortableOrderedDict(s)
    outFile = output_name + '/services/' + s['name'] + '.yaml'
    with open(outFile, 'w') as fstream:
        yaml.dump(unsortS, fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def dump_upstreams():
    """Fetch every upstream from the Kong admin API; write one YAML file
    per upstream plus its healthcheck under <output>/upstreams/.

    Fix vs. original: dict.has_key() is Python-2-only; use 'in'.
    """
    print('dump_upstreams...')
    r = requests.get('http://' + input_name + '/upstreams')
    upstreams = r.json()
    path = output_name + '/upstreams'
    # Recreate the output directory from scratch so removed upstreams
    # do not leave stale files behind.
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)
    hcpath = output_name + '/upstreams/healthchecks'
    if not os.path.exists(hcpath):
        os.makedirs(hcpath)
    for s in upstreams['data']:
        fmtS = OrderedDict()
        fmtS['name'] = s['name']
        fmtS['slots'] = s['slots']
        # 'tags' only exists on kong >= 1.1
        if 'tags' in s:
            fmtS['tags'] = s['tags']
        fmtS['hash_on'] = s['hash_on']
        fmtS['hash_on_cookie_path'] = s['hash_on_cookie_path']
        fmtS['hash_fallback'] = s['hash_fallback']
        dump_upstream(fmtS)
        dump_healthcheck(s['name'], s['healthchecks'])
def dump_upstream(s):
    """Collect all targets of upstream *s*, following the admin API's
    pagination, and write the upstream to <output>/upstreams/<name>.yaml."""
    collected = []
    next_uri = '/upstreams/' + s['name'] + '/targets'
    pages_seen = 0
    while next_uri:
        resp = requests.get('http://' + input_name + next_uri)
        targets = resp.json()
        next_uri = targets['next']
        if next_uri:
            print('targets.next:' + next_uri)
        for t in targets['data']:
            entry = OrderedDict()
            entry['target'] = t['target']
            entry['weight'] = t['weight']
            collected.append(UnsortableOrderedDict(entry))
        pages_seen += 1
        # Safety valve: never follow more than 10 pages of pagination.
        if pages_seen > 10:
            break
    s['targets'] = collected
    out_file = output_name + '/upstreams/' + s['name'] + '.yaml'
    with open(out_file, 'w') as fstream:
        yaml.dump(UnsortableOrderedDict(s), fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def dump_healthcheck(name, s):
    """Write the healthcheck config *s* of upstream *name* to
    <output>/upstreams/healthchecks/<name>.yaml."""
    destination = output_name + '/upstreams/healthchecks/' + name + '.yaml'
    with open(destination, 'w') as fstream:
        yaml.dump(s, fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def _entity_name(kind, entity_id):
    """Resolve a Kong entity id (e.g. 'routes', 'consumers') to a readable name.

    Consumers expose 'username' rather than 'name'; fall back to the raw
    id when neither field is present.
    """
    r = requests.get('http://' + input_name + '/' + kind + '/' + entity_id)
    obj = r.json()
    return obj.get('name') or obj.get('username') or entity_id

def dump_plugins():
    """Write plugins NOT scoped to exactly one entity (global plugins, or
    plugins bound to several entities at once) to global_plugins.yaml.

    Fixes vs. original: the route name was resolved through
    get_consumer_name(), a function that does not exist anywhere in this
    file (NameError at runtime, and semantically wrong for routes);
    routes and consumers are now resolved via the admin API directly.
    dict.has_key() (Python-2-only) replaced with 'in'.
    """
    print('dump_plugins...')
    r = requests.get('http://' + input_name + '/plugins')
    plugins = r.json()
    fmtPlugins = []
    for p in plugins['data']:
        # Skip plugins bound to exactly one entity (those are dumped with
        # that entity) and the internal 'manual-downgrade' plugin.
        if entity_count(p) != 1 and p['name'] != 'manual-downgrade':
            fmtP = OrderedDict()
            fmtP['name'] = p['name']
            fmtP['enabled'] = p['enabled']
            if p['service']:
                fmtP['service'] = get_service_name(p['service']['id'])
            if p['route']:
                fmtP['route'] = _entity_name('routes', p['route']['id'])
            if p['consumer']:
                fmtP['consumer'] = _entity_name('consumers', p['consumer']['id'])
            # 'protocols'/'tags' only exist on kong >= 1.1
            if 'protocols' in p:
                fmtP['protocols'] = p['protocols']
            fmtP['run_on'] = p['run_on']
            fmtP['config'] = p['config']
            if 'tags' in p:
                fmtP['tags'] = p['tags']
            fmtPlugins.append(UnsortableOrderedDict(fmtP))
    outFile = output_name + '/global_plugins.yaml'
    with open(outFile, 'w') as fstream:
        yaml.dump(fmtPlugins, fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def dump_certificates():
    """Dump all certificates (with their SNIs) to global_certificates.yaml.

    Fix vs. original: dict.has_key() is Python-2-only; use 'in'.
    """
    print('dump_certificates...')
    r = requests.get('http://' + input_name + '/certificates')
    certificates = r.json()
    fmtCerts = []
    for c in certificates['data']:
        fmtC = OrderedDict()
        # convert: [a.com,b.com] to [name: a.com, name: b.com]
        snisRecord = []
        for sni in c['snis']:
            snisRecord.append({'name': sni})
        fmtC['snis'] = snisRecord
        fmtC['cert'] = c['cert']
        fmtC['key'] = c['key']
        # 'tags' only exists on kong >= 1.1
        if 'tags' in c:
            fmtC['tags'] = c['tags']
        fmtCerts.append(UnsortableOrderedDict(fmtC))
    outFile = output_name + '/global_certificates.yaml'
    with open(outFile, 'w') as fstream:
        yaml.dump(fmtCerts, fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def entity_count(p):
    """Return how many entities (service, route, consumer) the plugin
    record *p* is bound to (0 means it is a global plugin)."""
    return sum(1 for key in ('service', 'route', 'consumer') if p[key])
def get_service_name(id):
    """Look up and return a service's name by its id via the Kong admin API."""
    response = requests.get('http://' + input_name + '/services/' + id)
    return response.json()['name']
def merge_yaml():
    """Merge the per-entity YAML files from the input directory into one
    declarative dbless config (<output>/kong.yaml, format 1.1)."""
    print('merge_yaml')
    kong = {
        '_format_version': "1.1",
        'services': load_services(),
        'upstreams': load_upstreams(),
        'plugins': load_plugins(),
        'certificates': load_certificates(),
    }
    with open(output_name + '/kong.yaml', 'w') as fstream:
        yaml.dump(kong, fstream, default_flow_style=False, encoding='utf-8', allow_unicode=True)
def load_services():
    """Load every YAML file under <input>/services into a list of service
    dicts, filling Kong defaults into each one."""
    services = []
    base = input_name + '/services'
    for entry in os.listdir(base):
        full = base + "/" + entry
        if not os.path.isfile(full):
            continue
        with open(full, 'r') as stream:
            try:
                svc = yaml.safe_load(stream)
                fill_def_service(svc)
                services.append(svc)
            except yaml.YAMLError as exc:
                print(exc)
    return services
def fill_def_service(s): | |
if not s.has_key('port'): | |
s['port'] = 80 | |
if not s.has_key('protocol'): | |
s['protocol'] = 'http' | |
if not s.has_key('connect_timeout'): | |
s['connect_timeout'] = 60000 | |
if not s.has_key('read_timeout'): | |
s['read_timeout'] = 60000 | |
if not s.has_key('write_timeout'): | |
s['write_timeout'] = 60000 | |
if not s.has_key('retries'): | |
s['retries'] = 0 | |
if s.has_key('routes'): | |
for r in s['routes']: | |
if not r.has_key('paths'): | |
r['paths'] = ['/'] | |
if not r.has_key('methods'): | |
r['methods'] = [] | |
if not r.has_key('protocols'): | |
r['protocols'] = ['http','https'] | |
if not r.has_key('preserve_host'): | |
r['preserve_host'] = True | |
if not r.has_key('strip_path'): | |
r['strip_path'] = False | |
if not r.has_key('regex_priority'): | |
r['regex_priority'] = 0 | |
if not r.has_key('plugins'): | |
r['plugins'] = [] | |
if not s.has_key('plugins'): | |
s['plugins'] = [] | |
def load_upstreams():
    """Load every YAML file directly under <input>/upstreams, fill Kong
    defaults, and attach each upstream's healthcheck config."""
    upstreams = []
    base = input_name + '/upstreams'
    for entry in os.listdir(base):
        full = base + "/" + entry
        if not os.path.isfile(full):
            continue  # skips the healthchecks/ subdirectory
        with open(full, 'r') as stream:
            try:
                ups = yaml.safe_load(stream)
                fill_def_upstream(ups)
                ups['healthchecks'] = load_healthcheck(ups['name'])
                upstreams.append(ups)
            except yaml.YAMLError as exc:
                print(exc)
    return upstreams
def fill_def_upstream(u):
    """Fill defaults into upstream dict *u* in place and size its slot count.

    slots is grown so there are at least 100 slots per target, rounded up
    in steps of 1000 (steps of 10000 above 100 targets).

    Fixes vs. original:
    - dict.has_key() (Python-2-only) replaced with get/setdefault.
    - reading u['slots'] raised KeyError when the key was absent.
    - '/' yields a float on Python 3; use integer division '//'.
    """
    slots = u.get('slots', 1000)
    target_count = len(u.get('targets', []))
    if slots < target_count * 100:
        slots = (target_count // 10 + 1) * 10 * 100
    if target_count > 100:
        slots = (target_count // 100 + 1) * 100 * 100
    if slots != u.get('slots'):
        print('rewrite upstream(' + u['name'] + ') slots:' + str(slots))
    u['slots'] = slots
    u.setdefault('hash_on', 'none')
    u.setdefault('hash_on_cookie_path', '/')
    u.setdefault('hash_fallback', 'none')
def load_healthcheck(name):
    """Load <input>/upstreams/healthchecks/<name>.yaml (empty dict when the
    file is missing) and fill in the Kong healthcheck defaults.

    Fixes vs. original:
    - a file without an 'active' section raised KeyError on the https_sni
      cleanup; guard with get().
    - an empty YAML file made safe_load return None and crash later;
      normalize to {}.
    - dict.has_key() (Python-2-only) replaced with 'in'.
    """
    hc = {}
    f = input_name + "/upstreams/healthchecks/" + name + '.yaml'
    if os.path.isfile(f):
        with open(f, 'r') as stream:
            try:
                hc = yaml.safe_load(stream) or {}
                # An empty https_sni must not be emitted; drop it.
                active = hc.get('active')
                if active and 'https_sni' in active and not active['https_sni']:
                    active.pop('https_sni')
            except yaml.YAMLError as exc:
                print(exc)
    fill_def_healthcheck(hc)
    return hc
def fill_def_healthcheck(hc):
    """Fill Kong default values into healthcheck dict *hc* in place.

    Only missing keys are set; existing values are never overwritten.
    Defaults: active probes hit /ping over http; passive checks default
    to counting nothing (all thresholds 0).
    Fix vs. original: dict.has_key() is Python-2-only; use setdefault.
    """
    active = hc.setdefault('active', {})
    active.setdefault('concurrency', 10)
    active.setdefault('http_path', '/ping')
    active.setdefault('type', 'http')
    active.setdefault('https_verify_certificate', False)
    active.setdefault('timeout', 1)
    a_healthy = active.setdefault('healthy', {})
    a_healthy.setdefault('http_statuses', [200, 302])
    a_healthy.setdefault('interval', 5)
    a_healthy.setdefault('successes', 3)
    a_unhealthy = active.setdefault('unhealthy', {})
    a_unhealthy.setdefault('http_statuses', [429, 404, 500, 501, 502, 503, 504, 505])
    a_unhealthy.setdefault('interval', 5)
    a_unhealthy.setdefault('http_failures', 3)
    a_unhealthy.setdefault('tcp_failures', 3)
    a_unhealthy.setdefault('timeouts', 3)
    passive = hc.setdefault('passive', {})
    passive.setdefault('type', 'http')
    p_healthy = passive.setdefault('healthy', {})
    p_healthy.setdefault('http_statuses', [200, 201, 202, 203, 204, 205, 206, 207, 208, 226,
                                           300, 301, 302, 303, 304, 305, 306, 307, 308])
    p_healthy.setdefault('successes', 0)
    p_unhealthy = passive.setdefault('unhealthy', {})
    p_unhealthy.setdefault('http_statuses', [429, 500, 503])
    p_unhealthy.setdefault('http_failures', 0)
    p_unhealthy.setdefault('tcp_failures', 0)
    p_unhealthy.setdefault('timeouts', 0)
def load_plugins():
    """Load global plugins from <input>/global_plugins.yaml.

    Returns None when the file does not exist or cannot be parsed
    (matching the original behavior relied on by merge_yaml).
    """
    path = input_name + "/global_plugins.yaml"
    if not os.path.isfile(path):
        return None
    with open(path, 'r') as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
def load_certificates():
    """Load certificates from <input>/global_certificates.yaml.

    Returns None when the file does not exist or cannot be parsed
    (matching the original behavior relied on by merge_yaml).
    """
    path = input_name + "/global_certificates.yaml"
    if not os.path.isfile(path):
        return None
    with open(path, 'r') as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
def check_route(name):
    """Scan the merged YAML file *name* for route path conflicts.

    Returns {'conflict_paths': [...], 'confuse_paths': [...]}:
    conflict_paths are likely routing errors (same or overlapping path
    claimed by different services/routes at a priority that cannot
    disambiguate); confuse_paths are duplicate paths within one service,
    worth a warning only.

    Fix vs. original: dict.has_key() (Python-2-only) replaced with 'in';
    the matching logic itself is deliberately unchanged. NOTE: the dict
    key 'servive' is a pre-existing typo kept for compatibility.
    """
    with open(name, 'r') as stream:
        try:
            s = yaml.safe_load(stream)
            host_paths = {}
            conflict_paths = []
            confuse_paths = []
            if 'services' in s:
                services = s['services']
                for srv in services:
                    service = srv['name']
                    if service == '':
                        service = srv['host']
                    if 'routes' in srv:
                        routes = srv['routes']
                        for r in routes:
                            route = 'default'
                            if 'name' in r:
                                route = r['name']
                            hosts = ['all']
                            if 'hosts' in r:
                                hosts = r['hosts']
                            prio = r['regex_priority']
                            for h in hosts:
                                if h not in host_paths:
                                    host_paths[h] = {}
                                paths = r['paths']
                                for p in paths:
                                    simple_p = simple_path(p)
                                    for sp in host_paths[h]:
                                        known = host_paths[h][sp]
                                        if simple_p == sp:
                                            # different service, equal priority or bare "/": error
                                            if known['servive'] != service:
                                                if known['priority'] == prio or simple_p == "/":
                                                    conflict_paths.append({simple_p + ":" + service: prio,
                                                                           sp + ":" + known['servive']: known['priority']})
                                            # same service: warn
                                            else:
                                                confuse_paths.append({"1: " + simple_p + ":" + service: prio,
                                                                      "2: " + sp + ":" + known['servive']: known['priority']})
                                        elif known['servive'] != service:
                                            # a prefix of the new path is already owned by another
                                            # service at a priority that would shadow it
                                            if re.match(sp, simple_p) and known['priority'] >= prio and sp != '/':
                                                conflict_paths.append({simple_p + ":" + service: prio,
                                                                       sp + ":" + known['servive']: known['priority']})
                                                print("1", simple_p)
                                            elif re.match(simple_p, sp) and known['priority'] <= prio and simple_p != '/':
                                                conflict_paths.append({simple_p + ":" + service: prio,
                                                                       sp + ":" + known['servive']: known['priority']})
                                                print("2", simple_p)
                                        elif known['route'] != route:
                                            # same service but a different route overlaps
                                            if re.match(sp, simple_p) and known['priority'] >= prio and sp != '/':
                                                conflict_paths.append({simple_p + ":" + service: prio,
                                                                       sp + ":" + known['servive']: known['priority']})
                                                print("3", simple_p)
                                            elif re.match(simple_p, sp) and known['priority'] <= prio and simple_p != '/':
                                                conflict_paths.append({simple_p + ":" + service: prio,
                                                                       sp + ":" + known['servive']: known['priority']})
                                                print("4", simple_p)
                                    host_paths[h][simple_p] = {'priority': prio, 'servive': service, 'route': route}
            return {"conflict_paths": conflict_paths, "confuse_paths": confuse_paths}
        except yaml.YAMLError as exc:
            print(exc)
def check_dup_name(name):
    """Check the merged YAML file *name* for duplicated names.

    Returns {'service': [...], 'route': {service: [...]}, 'upstream': [...]}
    listing every duplicate. Route names only need to be unique within a
    single service.

    Fix vs. original: dict.has_key() is Python-2-only; use 'in'.
    """
    with open(name, 'r') as stream:
        try:
            s = yaml.safe_load(stream)
            dup_name = {}
            dup_name['service'] = []
            dup_name['route'] = {}
            dup_name['upstream'] = []
            if 'services' in s:
                services = s['services']
                service_map = {}
                for srv in services:
                    route_service = {}
                    service = srv['name']
                    if service == '':
                        service = 'default'
                    if service not in service_map:
                        service_map[service] = True
                    else:
                        dup_name['service'].append(service)
                    dup_name['route'][service] = []
                    if 'routes' in srv:
                        routes = srv['routes']
                        for r in routes:
                            route_name = 'default'
                            if 'name' in r:
                                route_name = r['name']
                            if route_name not in route_service:
                                route_service[route_name] = service
                            else:
                                dup_name['route'][service].append(route_name)
            if 'upstreams' in s:
                upstreams = s['upstreams']
                upstream_map = {}
                for ups in upstreams:
                    upstream = ups['name']
                    if upstream == '':
                        upstream = 'default'
                    if upstream not in upstream_map:
                        upstream_map[upstream] = True
                    else:
                        dup_name['upstream'].append(upstream)
            return dup_name
        except yaml.YAMLError as exc:
            print(exc)
def check_health(name):
    """Query the Kong admin API at *name* and print every unhealthy target.

    Also prints the total number of targets seen across all upstreams.
    Fix vs. original: dict.has_key() is Python-2-only; use 'in'.
    """
    r = requests.get('http://' + name + '/upstreams')
    upstreams = r.json()
    target_count = 0
    for s in upstreams['data']:
        r = requests.get('http://' + name + '/upstreams/' + s['id'] + '/health')
        hc_ret = r.json()
        for hc in hc_ret['data']:
            target_count = target_count + 1
            if hc['health'] != 'HEALTHY':
                # Prefer the upstream's name; fall back to its id.
                if 'name' in s:
                    print(s['name'] + "-" + hc['target'] + " -> " + hc['health'])
                else:
                    print(s['id'] + "-" + hc['target'] + " -> " + hc['health'])
    print('target count: ' + str(target_count))
def simple_path(path):
    """Normalize a route path regex for comparison.

    Removes the first '(?i)' case-insensitivity marker, then strips one
    trailing '*' and afterwards one trailing '+' quantifier.
    """
    path = path.replace('(?i)', '', 1)
    if path.endswith('*'):
        path = path[:-1]
    if path.endswith('+'):
        path = path[:-1]
    return path
if __name__=="__main__": | |
opts, args = getopt.getopt(sys.argv[1:], "hm:i:o:", ["help", "module=", "input=", "output="]) | |
module_name='' | |
for cmd, arg in opts: | |
if cmd in ("-h", "--help"): | |
usage() | |
sys.exit() | |
elif cmd in ("-m", "--module"): | |
module_name = arg | |
elif cmd in ("-i", "--input"): | |
input_name = arg | |
elif cmd in ("-o", "--output"): | |
output_name = arg | |
print('kongdl module=' + module_name + ', input=' + input_name + ', output=' + output_name) | |
if module_name == 'dump': | |
if input_name == '': | |
print('input is null') | |
sys.exit() | |
if output_name == '': | |
output_name = '.' | |
dump_services() | |
dump_upstreams() | |
dump_plugins() | |
dump_certificates() | |
elif module_name == 'merge': | |
if input_name == '': | |
input_name = '.' | |
if output_name == '': | |
output_name = '.' | |
merge_yaml() | |
elif module_name == 'check_route': | |
if input_name == '': | |
print('input is null') | |
sys.exit() | |
check_paths = check_route(input_name) | |
if len(check_paths['confuse_paths']) > 0: | |
print("[warn] confuse paths in same service:") | |
print(check_paths['confuse_paths']) | |
if len(check_paths['conflict_paths']) > 0: | |
print("[error]possibly conflict paths:") | |
print(check_paths['conflict_paths']) | |
sys.exit(1) | |
elif module_name == 'check_dup_name': | |
if input_name == '': | |
print('input is null') | |
sys.exit() | |
dup_name = check_dup_name(input_name) | |
# dup service name check | |
if len(dup_name['service']) > 0: | |
print("[error]possibly duplicated service names:") | |
print(dup_name['service']) | |
sys.exit(1) | |
# dup upstream name check | |
if len(dup_name['upstream']) > 0: | |
print("[error]possibly duplicated upstream names:") | |
print(dup_name['upstream']) | |
sys.exit(1) | |
# dup route name check | |
count = 0 | |
for k, v in dup_name['route'].items(): | |
count = count+len(v) | |
if len(v) == 0: | |
del dup_name['route'][k] | |
if count > 0: | |
print("[error]possibly duplicated route names:") | |
print(dup_name['route']) | |
sys.exit(1) | |
elif module_name == 'check_health': | |
if input_name == '': | |
print('input is null. eg: 127.0.0.1:8001') | |
sys.exit() | |
check_health(input_name) | |
#print(host_paths) | |
else: | |
print('module is null') | |
sys.exit() | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment