# manage.py
import os
import io
import sys
import platform
import shutil
import time
import subprocess
import requests
import json
import datetime
import socket
import re
from threading import Thread
from api.utils import shell_execute, docker, const
from api.model.app import App
from api.model.response import Response
from api.model.config import Config
from api.model.status_reason import StatusReason
from api.utils.common_log import myLogger
from redis import Redis
from rq import Queue, Worker, Connection
from rq.registry import StartedJobRegistry, FinishedJobRegistry, DeferredJobRegistry, FailedJobRegistry, ScheduledJobRegistry, CanceledJobRegistry
from api.exception.command_exception import CommandException
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger

# Hostname and port of the websoft9 Redis container
redis_conn = Redis(host='websoft9-redis', port=6379)
# Create the RQ queue on that Redis connection
q = Queue(connection=redis_conn, default_timeout=3600)

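# How `q` is drained: the jobs enqueued below are expected to be picked up by an
# RQ worker attached to the same Redis instance and queue. The exact worker
# invocation is not part of this module; a typical (assumed) way to start one is:
#   rq worker --url redis://websoft9-redis:6379 default
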

def auto_update():
    shell_execute.execute_command_output_all(
        "wget https://websoft9.github.io/StackHub/install/update_appstore.sh && bash update_appstore.sh 1>/dev/null 2>&1")


# The scheduler runs auto_update daily at 01:00; it is only started and stopped
# on demand by AppAutoUpdate() below.
scheduler = BackgroundScheduler()
scheduler.add_job(auto_update, CronTrigger(hour=1, minute=0))


# Fetch file contents from GitHub Pages (websoft9.github.io)
def get_github_content(repo, path):
    url = 'https://websoft9.github.io/{repo}/{path}'
    url = url.format(repo=repo, path=path)
    response = requests.get(url)
    response.encoding = 'utf-8'
    contents = response.text
    return contents


def AppAutoUpdate(auto_update):
    if auto_update:
        scheduler.start()
        return "App store auto-update is enabled"
    else:
        scheduler.shutdown()
        return "App store auto-update is disabled"


# Update the app store
def AppStoreUpdate():
    local_path = '/data/library/install/version.json'
    local_version = "0"
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['VERSION']
    except Exception:
        local_version = "0.1.0"
    repo = 'docker-library'
    version_contents = get_github_content(repo, 'install/version.json')
    version = json.loads(version_contents)['VERSION']
    if compared_version(local_version, version) == -1:
        content = []
        change_log_contents = get_github_content(repo, 'CHANGELOG.md')
        change_log = change_log_contents.split('## ')[1].split('\n')
        for change in change_log[1:]:
            if change != '':
                content.append(change)
        shell_execute.execute_command_output_all(
            "wget https://websoft9.github.io/StackHub/install/update_appstore.sh && bash update_appstore.sh 1>/dev/null 2>&1")
        return content
    else:
        return None


# Get update info
def get_update_list():
    local_path = '/data/apps/stackhub/install/version.json'
    repo = 'StackHub'
    op = shell_execute.execute_command_output_all("cat " + local_path)['result']
    local_version = json.loads(op)['VERSION']
    version_contents = get_github_content(repo, 'install/version.json')
    version = json.loads(version_contents)['VERSION']
    if compared_version(local_version, version) == -1:
        content = []
        change_log_contents = get_github_content(repo, 'CHANGELOG.md')
        change_log = change_log_contents.split('## ')[1].split('\n')
        date = change_log[0].split()[-1]
        for change in change_log[1:]:
            if change != '':
                content.append(change)
        ret = {}
        ret['version'] = version
        ret['date'] = date
        ret['content'] = content
        return ret
    else:
        return None


def conbine_list(installing_list, installed_list):
    app_list = installing_list + installed_list
    result_list = []
    appid_list = []
    for app in app_list:
        app_id = app['app_id']
        if app_id in appid_list:
            continue
        else:
            appid_list.append(app_id)
            result_list.append(app)
    return result_list


# Get info for all apps (or a single app when app_id is given)
def get_my_app(app_id):
    installed_list = get_apps_from_compose()
    installing_list = get_apps_from_queue()
    app_list = conbine_list(installing_list, installed_list)
    find = False
    ret = {}
    if app_id != None:
        for app in app_list:
            if app_id == app['app_id']:
                ret = app
                find = True
                break
        if not find:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", "")
    else:
        ret = app_list
    myLogger.info_logger("app list result ok")
    return ret


# Get the status of a specific app
def get_app_status(app_id):
    code, message = docker.check_app_id(app_id)
    if code == None:
        app = get_my_app(app_id)
        # Trim the app info down to the fields required by the API doc
        ret = {}
        ret['app_id'] = app['app_id']
        ret['status'] = app['status']
        ret['status_reason'] = app['status_reason']
    else:
        raise CommandException(code, message, '')
    return ret


def install_app(app_name, customer_name, app_version):
    myLogger.info_logger("Install app ...")
    ret = {}
    ret['ResponseData'] = {}
    app_id = app_name + "_" + customer_name
    ret['ResponseData']['app_id'] = app_id
    code, message = check_app(app_name, customer_name, app_version)
    if code == None:
        q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
    else:
        ret['Error'] = get_error_info(code, message, "")
    return ret


def start_app(app_id):
    info, flag = app_exits_in_docker(app_id)
    if flag:
        app_path = info.split()[-1].rsplit('/', 1)[0]
        cmd = "docker compose -f " + app_path + "/docker-compose.yml start"
        shell_execute.execute_command_output_all(cmd)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")


def stop_app(app_id):
    info, flag = app_exits_in_docker(app_id)
    if flag:
        app_path = info.split()[-1].rsplit('/', 1)[0]
        cmd = "docker compose -f " + app_path + "/docker-compose.yml stop"
        shell_execute.execute_command_output_all(cmd)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")


def restart_app(app_id):
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            app_path = info.split()[-1].rsplit('/', 1)[0]
            cmd = "docker compose -f " + app_path + "/docker-compose.yml restart"
            shell_execute.execute_command_output_all(cmd)
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")


def delete_app_failedjob(job_id):
    myLogger.info_logger("delete_app_failedjob")
    failed = FailedJobRegistry(queue=q)
    failed.remove(job_id, delete_job=True)


def delete_app(app_id):
    try:
        app_name = app_id.split('_')[0]
        customer_name = app_id.split('_')[1]
        app_path = ""
        info, code_exist = app_exits_in_docker(app_id)
        if code_exist:
            app_path = info.split()[-1].rsplit('/', 1)[0]
            cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
            lib_path = '/data/library/apps/' + app_name
            if app_path != lib_path:
                cmd = cmd + " && sudo rm -rf " + app_path
            try:
                myLogger.info_logger("Install failed, bring the app down and delete its files")
                shell_execute.execute_command_output_all(cmd)
            except Exception:
                myLogger.info_logger("Delete app compose exception")
            # Force-remove containers that cannot be removed by docker compose down
            try:
                myLogger.info_logger("If compose down fails, force-delete the containers")
                force_cmd = "docker rm -f $(docker ps -f name=^" + customer_name + " -aq)"
                shell_execute.execute_command_output_all(force_cmd)
            except Exception:
                myLogger.info_logger("Force delete app compose exception")
        else:
            if check_app_rq(app_id):
                delete_app_failedjob(app_id)
            else:
                raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID does not exist", "")
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")


def uninstall_app(app_id):
    app_name = app_id.split('_')[0]
    customer_name = app_id.split('_')[1]
    app_path = ""
    info, code_exist = app_exits_in_docker(app_id)
    if code_exist:
        app_path = info.split()[-1].rsplit('/', 1)[0]
        cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
        lib_path = '/data/library/apps/' + app_name
        if app_path != lib_path:
            cmd = cmd + " && sudo rm -rf " + app_path
        shell_execute.execute_command_output_all(cmd)
    else:
        if check_app_rq(app_id):
            delete_app_failedjob(app_id)
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID does not exist", "")
    # Force-delete the app directory even if compose down failed
    try:
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")
    # Delete the proxy config when uninstalling the app
    app_proxy_delete(app_id)


def check_app(app_name, customer_name, app_version):
    message = ""
    code = None
    app_id = app_name + "_" + customer_name
    if app_name == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_name is null"
    elif customer_name == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name is null"
    elif len(customer_name) < 2:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name must be at least 2 characters"
    elif app_version == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif app_version == "undefined" or app_version == "":
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif not docker.check_app_websoft9(app_name):
        code = const.ERROR_CLIENT_PARAM_NOTEXIST
        message = "Installing " + app_name + " is not supported"
    elif re.match('^[a-z0-9]+$', customer_name) == None:
        code = const.ERROR_CLIENT_PARAM_Format
        message = "App name can only be composed of numbers and lowercase letters"
    elif docker.check_directory("/data/apps/" + customer_name):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif not docker.check_vm_resource(app_name):
        code = const.ERROR_SERVER_RESOURCE
        message = "Insufficient system resources (cpu, memory, disk space)"
    elif check_app_docker(app_id):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif check_app_rq(app_id):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    return code, message


def prepare_app(app_name, customer_name):
    library_path = "/data/library/apps/" + app_name
    install_path = "/data/apps/" + customer_name
    shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path)


def install_app_delay(app_name, customer_name, app_version):
    myLogger.info_logger("-------RQ install start --------")
    job_id = app_name + "_" + customer_name
    try:
        # The app folder has not been copied yet at this point; the check reads the
        # json under /data/library, so it must use app_name rather than customer_name.
        resource_flag = docker.check_vm_resource(app_name)
        if resource_flag == True:
            myLogger.info_logger("job check ok, continue to install app")
            env_path = "/data/apps/" + customer_name + "/.env"
            # prepare_app(app_name, customer_name)
            docker.check_app_compose(app_name, customer_name)
            myLogger.info_logger("start JobID=" + job_id)
            docker.modify_env(env_path, 'APP_NAME', customer_name)
            docker.modify_env(env_path, "APP_VERSION", app_version)
            docker.check_app_url(customer_name)
            cmd = "cd /data/apps/" + customer_name + " && sudo docker compose pull && sudo docker compose up -d"
            output = shell_execute.execute_command_output_all(cmd)
            myLogger.info_logger("-------Install result--------")
            myLogger.info_logger(output["code"])
            myLogger.info_logger(output["result"])
            try:
                shell_execute.execute_command_output_all("bash /data/apps/" + customer_name + "/src/after_up.sh")
            except Exception as e:
                myLogger.info_logger(str(e))
        else:
            error_info = "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)"
            myLogger.info_logger(error_info)
            raise Exception(error_info)
    except CommandException as ce:
        myLogger.info_logger(customer_name + " install failed(docker)!")
        delete_app(job_id)
        error_info = "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail
        myLogger.info_logger(error_info)
        raise Exception(error_info)
    except Exception as e:
        myLogger.info_logger(customer_name + " install failed(system)!")
        delete_app(job_id)
        error_info = "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str(e)
        myLogger.info_logger(error_info)
        raise Exception(error_info)


def app_exits_in_docker(app_id):
    customer_name = app_id.split('_')[1]
    app_name = app_id.split('_')[0]
    flag = False
    info = ""
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        output = shell_execute.execute_command_output_all(cmd)
        if int(output["code"]) == 0:
            info = output["result"]
            app_path = info.split()[-1].rsplit('/', 1)[0]
            is_official = check_if_official_app(app_path + '/variables.json')
            if is_official:
                name = docker.read_var(app_path + '/variables.json', 'name')
                if name == app_name:
                    flag = True
            elif app_name == customer_name:
                flag = True
            myLogger.info_logger("APP in docker")
    except CommandException as ce:
        myLogger.info_logger("APP not in docker")
    return info, flag


def split_app_id(app_id):
    return app_id.split("_")[1]


def get_createtime(official_app, app_path, customer_name):
    data_time = ""
    try:
        if official_app:
            cmd = "docker ps -f name=" + customer_name + " --format {{.RunningFor}} | head -n 1"
            result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
            data_time = result
        else:
            cmd_all = "cd " + app_path + " && docker compose ps -a --format json"
            output = shell_execute.execute_command_output_all(cmd_all)
            container_name = json.loads(output["result"])[0]["Name"]
            cmd = "docker ps -f name=" + container_name + " --format {{.RunningFor}} | head -n 1"
            result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
            data_time = result
    except Exception as e:
        myLogger.info_logger(str(e))
    myLogger.info_logger("get_createtime success: " + data_time)
    return data_time


def get_apps_from_compose():
    myLogger.info_logger("Search all of apps ...")
    cmd = "docker compose ls -a --format json"
    output = shell_execute.execute_command_output_all(cmd)
    output_list = json.loads(output["result"])
    myLogger.info_logger(len(output_list))
    ip = "localhost"
    try:
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip = ip_result["result"].rstrip('\n')
    except Exception:
        ip = "127.0.0.1"
    app_list = []
    for app_info in output_list:
        volume = app_info["ConfigFiles"]
        app_path = volume.rsplit('/', 1)[0]
        customer_name = volume.split('/')[-2]
        app_id = ""
        app_name = ""
        trade_mark = ""
        port = 0
        url = ""
        admin_url = ""
        image_url = ""
        user_name = ""
        password = ""
        official_app = False
        app_version = ""
        create_time = ""
        volume_data = ""
        config_path = app_path
        app_https = False
        app_replace_url = False
        default_domain = ""
        admin_path = ""
        admin_domain_url = ""
        if customer_name in ['w9appmanage', 'w9nginxproxymanager', 'w9redis', 'w9kopia',
                             'w9portainer'] or app_path == '/data/apps/w9services/' + customer_name:
            continue
        var_path = app_path + "/variables.json"
        official_app = check_if_official_app(var_path)
        status_show = app_info["Status"]
        status = app_info["Status"].split("(")[0]
        if status == "running" or status == "exited" or status == "restarting":
            if "exited" in status_show and "running" in status_show:
                if status == "exited":
                    cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited"
                    result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
                    container = result.split("#Exited")[0]
                    if container != customer_name:
                        status = "running"
            if "restarting" in status_show:
                about_time = get_createtime(official_app, app_path, customer_name)
                if "seconds" in about_time:
                    status = "restarting"
                else:
                    status = "failed"
        elif status == "created":
            status = "failed"
        else:
            continue
        if official_app:
            app_name = docker.read_var(var_path, 'name')
            app_id = app_name + "_" + customer_name  # app_id
            # get trade_mark
            trade_mark = docker.read_var(var_path, 'trademark')
            image_url = get_Image_url(app_name)
            # get env info
            path = app_path + "/.env"
            env_map = docker.get_map(path)
            try:
                myLogger.info_logger("get domain for APP_URL")
                domain = env_map.get("APP_URL")
                if "appname.example.com" in domain or ip in domain:
                    default_domain = ""
                else:
                    default_domain = domain
            except Exception:
                myLogger.info_logger("domain exception")
            try:
                app_version = env_map.get("APP_VERSION")
                volume_data = "/data/apps/" + customer_name + "/data"
                user_name = env_map.get("APP_USER", "")
                password = env_map.get("POWER_PASSWORD", "")
                admin_path = env_map.get("APP_ADMIN_PATH")
                if admin_path:
                    myLogger.info_logger(admin_path)
                    admin_path = admin_path.replace("\"", "")
                else:
                    admin_path = ""
                if default_domain != "" and admin_path != "":
                    admin_domain_url = "http://" + default_domain + admin_path
            except Exception:
                myLogger.info_logger("APP_USER POWER_PASSWORD exception")
            try:
                replace = env_map.get("APP_URL_REPLACE", "false")
                myLogger.info_logger("replace=" + replace)
                if replace == "true":
                    app_replace_url = True
                https = env_map.get("APP_HTTPS_ACCESS", "false")
                if https == "true":
                    app_https = True
            except Exception:
                myLogger.info_logger("APP_HTTPS_ACCESS exception")
            try:
                http_port = env_map.get("APP_HTTP_PORT", "0")
                if http_port:
                    port = int(http_port)
            except Exception:
                pass
            if port != 0:
                try:
                    if app_https:
                        easy_url = "https://" + ip + ":" + str(port)
                    else:
                        easy_url = "http://" + ip + ":" + str(port)
                    url = easy_url
                    admin_url = get_admin_url(customer_name, url)
                except Exception:
                    pass
            else:
                try:
                    db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0]
                    port = int(db_port)
                except Exception:
                    pass
        else:
            app_name = customer_name
            app_id = customer_name + "_" + customer_name
        create_time = get_createtime(official_app, app_path, customer_name)
        if status in ['running', 'exited']:
            config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,
                            admin_domain_url=admin_domain_url,
                            admin_path=admin_path, admin_username=user_name, admin_password=password,
                            default_domain=default_domain)
        else:
            config = None
        if status == "failed":
            status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error",
                                         Detail="unknown error")
        else:
            status_reason = None
        app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
                  app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
                  status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
                  app_https=app_https, app_replace_url=app_replace_url, config=config)
        app_list.append(app.dict())
    return app_list


def check_if_official_app(var_path):
    if docker.check_directory(var_path):
        if docker.read_var(var_path, 'name') != "" and docker.read_var(var_path, 'trademark') != "" and docker.read_var(
                var_path, 'requirements') != "":
            requirements = docker.read_var(var_path, 'requirements')
            try:
                cpu = requirements['cpu']
                mem = requirements['memory']
                disk = requirements['disk']
                return True
            except KeyError:
                return False
        else:
            return False


def check_app_docker(app_id):
    customer_name = app_id.split('_')[1]
    app_name = app_id.split('_')[0]
    flag = False
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        shell_execute.execute_command_output_all(cmd)
        flag = True
        myLogger.info_logger("APP in docker")
    except CommandException as ce:
        myLogger.info_logger("APP not in docker")
    return flag


def check_app_rq(app_id):
    myLogger.info_logger("check_app_rq")
    started = StartedJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    run_job_ids = started.get_job_ids()
    failed_job_ids = failed.get_job_ids()
    queue_job_ids = q.job_ids
    myLogger.info_logger(queue_job_ids)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_job_ids)
    if queue_job_ids and app_id in queue_job_ids:
        myLogger.info_logger("App in RQ")
        return True
    if failed_job_ids and app_id in failed_job_ids:
        myLogger.info_logger("App in RQ")
        return True
    if run_job_ids and app_id in run_job_ids:
        myLogger.info_logger("App in RQ")
        return True
    myLogger.info_logger("App not in RQ")
    return False


def get_apps_from_queue():
    myLogger.info_logger("get queue apps...")
    # Job registries for each state of the queue
    started = StartedJobRegistry(queue=q)
    finish = FinishedJobRegistry(queue=q)
    deferred = DeferredJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    scheduled = ScheduledJobRegistry(queue=q)
    cancel = CanceledJobRegistry(queue=q)
    # Job id lists for each state
    run_job_ids = started.get_job_ids()
    finish_job_ids = finish.get_job_ids()
    wait_job_ids = deferred.get_job_ids()
    failed_jobs = failed.get_job_ids()
    scheduled_jobs = scheduled.get_job_ids()
    cancel_jobs = cancel.get_job_ids()
    myLogger.info_logger(q.jobs)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_jobs)
    myLogger.info_logger(cancel_jobs)
    myLogger.info_logger(wait_job_ids)
    myLogger.info_logger(finish_job_ids)
    myLogger.info_logger(scheduled_jobs)
    installing_list = []
    for job_id in run_job_ids:
        app = get_rq_app(job_id, 'installing', "", "", "")
        installing_list.append(app)
    for job in q.jobs:
        app = get_rq_app(job.id, 'installing', "", "", "")
        installing_list.append(app)
    for job_id in failed_jobs:
        job = q.fetch_job(job_id)
        exc_info = job.exc_info
        code = exc_info.split('##websoft9##')[1]
        message = exc_info.split('##websoft9##')[2]
        detail = exc_info.split('##websoft9##')[3]
        app = get_rq_app(job_id, 'failed', code, message, detail)
        installing_list.append(app)
    return installing_list


def get_rq_app(id, status, code, message, detail):
    app_name = id.split('_')[0]
    customer_name = id.split('_')[1]
    # While the app is still in RQ its folder may not have been created yet,
    # so trade_mark and the other fields below cannot be read.
    trade_mark = ""
    app_version = ""
    create_time = ""
    volume_data = ""
    config_path = ""
    image_url = get_Image_url(app_name)
    config = None
    if status == "installing":
        status_reason = None
    else:
        status_reason = StatusReason(Code=code, Message=message, Detail=detail)
    app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
              app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
              status=status, status_reason=status_reason, official_app=True, image_url=image_url,
              app_https=False, app_replace_url=False, config=config)
    return app.dict()


def get_Image_url(app_name):
    image_url = "static/images/" + app_name + "-websoft9.png"
    return image_url


def get_url(app_name, easy_url):
    url = easy_url
    return url


def get_admin_url(customer_name, url):
    admin_url = ""
    path = "/data/apps/" + customer_name + "/.env"
    try:
        admin_path = list(docker.read_env(path, "APP_ADMIN_PATH").values())[0]
        admin_path = admin_path.replace("\"", "")
        admin_url = url + admin_path
    except IndexError:
        pass
    return admin_url


def get_error_info(code, message, detail):
    error = {}
    error['Code'] = code
    error['Message'] = message
    error['Detail'] = detail
    return error


def app_domain_list(app_id):
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_list]")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")
    domains = get_all_domains(app_id)
    myLogger.info_logger(domains)
    ret = {}
    ret['domains'] = domains
    default_domain = ""
    if domains != None and len(domains) > 0:
        customer_name = app_id.split('_')[1]
        app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
        if "APP_URL" in app_url:
            url = shell_execute.execute_command_output_all(
                "cat /data/apps/" + customer_name + "/.env |grep APP_URL=")["result"].rstrip('\n')
            default_domain = url.split('=')[1]
    ret['default_domain'] = default_domain
    myLogger.info_logger(ret)
    return ret


def app_proxy_delete(app_id):
    customer_name = app_id.split('_')[1]
    proxy_host = None
    token = get_token()
    url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)
    for proxy in response.json():
        portainer_name = proxy["forward_host"]
        if customer_name == portainer_name:
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)


def app_domain_delete(app_id, domain):
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_delete]")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")
    if domain is None or domain == "undefined":
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domain is blank", "")
    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("The domain to delete is not bound")
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bound.", "")
    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy != None:
        myLogger.info_logger(proxy)
        myLogger.info_logger("before update")
        domains_old = proxy["domain_names"]
        myLogger.info_logger(domains_old)
        domains_old.remove(domain)
        myLogger.info_logger("after update")
        myLogger.info_logger(domains_old)
        if len(domains_old) == 0:
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)
            try:
                if response.json().get("error"):
                    raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            except Exception:
                myLogger.info_logger(response.json())
            set_domain("", app_id)
        else:
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            port = get_container_port(app_id.split('_')[1])
            host = app_id.split('_')[1]
            data = {
                "domain_names": domains_old,
                "forward_scheme": "http",
                "forward_host": host,
                "forward_port": port,
                "access_list_id": "0",
                "certificate_id": 0,
                "meta": {
                    "letsencrypt_agree": False,
                    "dns_challenge": False
                },
                "advanced_config": "",
                "locations": [],
                "block_exploits": False,
                "caching_enabled": False,
                "allow_websocket_upgrade": False,
                "http2_support": False,
                "hsts_enabled": False,
                "hsts_subdomains": False,
                "ssl_forced": False
            }
            response = requests.put(url, data=json.dumps(data), headers=headers)
            if response.json().get("error"):
                raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            domain_set = app_domain_list(app_id)
            default_domain = domain_set['default_domain']
            # If the deleted domain was the default domain, use the first remaining domain as the new default
            if default_domain == domain:
                set_domain(domains_old[0], app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "The domain to delete is not bound", "")


def app_domain_update(app_id, domain_old, domain_new):
    myLogger.info_logger("app_domain_update")
    domain_list = []
    domain_list.append(domain_old)
    domain_list.append(domain_new)
    check_domains(domain_list)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")
    proxy = get_proxy_domain(app_id, domain_old)
    if proxy != None:
        domains_old = proxy["domain_names"]
        index = domains_old.index(domain_old)
        domains_old[index] = domain_new
        proxy_id = proxy["id"]
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domains_old,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        domain_set = app_domain_list(app_id)
        default_domain = domain_set['default_domain']
        myLogger.info_logger("default_domain=" + default_domain + ",domain_old=" + domain_old)
        # If the edited domain was the default domain, keep the new domain as the default
        if default_domain == domain_old:
            set_domain(domain_new, app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "The domain to edit does not exist", "")


def app_domain_add(app_id, domain):
    temp_domains = []
    temp_domains.append(domain)
    check_domains(temp_domains)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")
    old_domains = get_all_domains(app_id)
    if domain in old_domains:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")
    proxy = get_proxy(app_id)
    if proxy != None:
        domains_old = proxy["domain_names"]
        domain_list = domains_old
        domain_list.append(domain)
        proxy_id = proxy["id"]
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domain_list,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    else:
        # No existing proxy host for this app: create a new one
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": temp_domains,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.post(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    set_domain(domain, app_id)
    return domain


def check_domains(domains):
    myLogger.info_logger(domains)
    if domains is None or len(domains) == 0:
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains are blank", "")
    else:
        for domain in domains:
            if is_valid_domain(domain):
                if check_real_domain(domain) == False:
                    raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server do not match", "")
            else:
                raise CommandException(const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")


def is_valid_domain(domain):
    if domain.startswith("http"):
        return False
    return True


def check_real_domain(domain):
    domain_real = True
    try:
        cmd = "ping -c 1 " + domain + r" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq"
        domain_ip = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip_save = ip_result["result"].rstrip('\n')
        if domain_ip == ip_save:
            myLogger.info_logger("Domain check ok!")
        else:
            domain_real = False
    except CommandException as ce:
        domain_real = False
    return domain_real


def get_token():
    url = 'http://172.17.0.1:9092/api/tokens'
    headers = {'Content-type': 'application/json'}
    cmd = "cat /usr/share/cockpit/myapps/config.json | jq -r '.NGINXPROXYMANAGER.NGINXPROXYMANAGER_PASSWORD'"
    password = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
    myLogger.info_logger(password)
    param = {
        "identity": "help@websoft9.com",
        "scope": "user",
        "secret": password
    }
    response = requests.post(url, data=json.dumps(param), headers=headers)
    token = "Bearer " + response.json()["token"]
    return token


def get_proxy(app_id):
    customer_name = app_id.split('_')[1]
    proxy_host = None
    token = get_token()
    url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)
    for proxy in response.json():
        portainer_name = proxy["forward_host"]
        if customer_name == portainer_name:
            proxy_host = proxy
            break
    return proxy_host


def get_proxy_domain(app_id, domain):
    customer_name = app_id.split('_')[1]
    proxy_host = None
    token = get_token()
    url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)
    myLogger.info_logger(response.json())
    for proxy in response.json():
        portainer_name = proxy["forward_host"]
        domain_list = proxy["domain_names"]
        if customer_name == portainer_name:
            myLogger.info_logger("-------------------")
            if domain in domain_list:
                myLogger.info_logger("find the domain proxy")
                proxy_host = proxy
                break
    return proxy_host


def get_all_domains(app_id):
    customer_name = app_id.split('_')[1]
    domains = []
    token = get_token()
    url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)
    for proxy in response.json():
        portainer_name = proxy["forward_host"]
        if customer_name == portainer_name:
            for domain in proxy["domain_names"]:
                domains.append(domain)
    return domains


def app_domain_set(domain, app_id):
    temp_domains = []
    temp_domains.append(domain)
    check_domains(temp_domains)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "App does not exist", "")
    else:
        raise CommandException(code, message, "")
    set_domain(domain, app_id)


def set_domain(domain, app_id):
    myLogger.info_logger("set_domain start")
    old_domains = get_all_domains(app_id)
    if domain != "":
        if domain not in old_domains:
            message = domain + " is not in use"
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, message, "")
    customer_name = app_id.split('_')[1]
    app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
    if "APP_URL" in app_url:
        myLogger.info_logger("APP_URL exists")
        if domain == "":
            ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
            domain = ip_result["result"].rstrip('\n')
            cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name + "/.env"
            shell_execute.execute_command_output_all(cmd)
            if "APP_URL_REPLACE=true" in app_url:
                myLogger.info_logger("need up")
                shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
        else:
            cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name + "/.env"
            shell_execute.execute_command_output_all(cmd)
            if "APP_URL_REPLACE=true" in app_url:
                myLogger.info_logger("need up")
                shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
    else:
        myLogger.info_logger("APP_URL does not exist")
        if domain == "":
            ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
            domain = ip_result["result"].rstrip('\n')
        cmd = "sed -i '/APP_NETWORK/a APP_URL=" + domain + "' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
    myLogger.info_logger("set_domain success")


def get_container_port(container_name):
    port = "80"
    cmd = "docker port " + container_name + " |grep ::"
    result = shell_execute.execute_command_output_all(cmd)["result"]
    myLogger.info_logger(result)
    port = result.split('/')[0]
    myLogger.info_logger(port)
    return port


def compared_version(ver1, ver2):
    list1 = str(ver1).split(".")
    list2 = str(ver2).split(".")
    # Loop over the length of the shorter list
    for i in range(len(list1)) if len(list1) < len(list2) else range(len(list2)):
        if int(list1[i]) == int(list2[i]):
            pass
        elif int(list1[i]) < int(list2[i]):
            return -1
        else:
            return 1
    # After the loop, the longer list means the higher version
    if len(list1) == len(list2):
        return 0
    elif len(list1) < len(list2):
        return -1
    else:
        return 1
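

if __name__ == "__main__":
    # Minimal self-check sketch (illustrative only, not part of the project's API):
    # compared_version() returns -1 when ver1 < ver2, 0 when equal, 1 when ver1 > ver2.
    assert compared_version("0.8.2", "0.8.10") == -1   # numeric, not lexicographic, compare
    assert compared_version("1.0", "1.0.0") == -1      # shorter version string is lower
    assert compared_version("2.0.0", "2.0.0") == 0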