manage.py 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063
  1. import os
  2. import io
  3. import sys
  4. import platform
  5. import shutil
  6. import time
  7. import subprocess
  8. import requests
  9. import json
  10. import datetime
  11. import socket
  12. import re
  13. from threading import Thread
  14. from api.utils import shell_execute, docker, const
  15. from api.model.app import App
  16. from api.model.response import Response
  17. from api.model.config import Config
  18. from api.model.status_reason import StatusReason
  19. from api.utils.common_log import myLogger
  20. from redis import Redis
  21. from rq import Queue, Worker, Connection
  22. from rq.registry import StartedJobRegistry, FinishedJobRegistry, DeferredJobRegistry, FailedJobRegistry, ScheduledJobRegistry, CanceledJobRegistry
  23. from api.exception.command_exception import CommandException
# Hostname and port of the Redis container that backs the job queue.
redis_conn = Redis(host='websoft9-redis', port=6379)
# RQ queue on that Redis connection; jobs time out after 3600 s (1 h).
q = Queue(connection=redis_conn,default_timeout=3600)
  28. def conbine_list(installing_list, installed_list):
  29. app_list = installing_list + installed_list
  30. result_list = []
  31. appid_list = []
  32. for app in app_list:
  33. app_id = app['app_id']
  34. if app_id in appid_list:
  35. continue
  36. else:
  37. appid_list.append(app_id)
  38. result_list.append(app)
  39. return result_list
  40. # 获取所有app的信息
  41. def get_my_app(app_id):
  42. installed_list = get_apps_from_compose()
  43. installing_list = get_apps_from_queue()
  44. app_list = conbine_list(installing_list, installed_list)
  45. find = False
  46. ret = {}
  47. if app_id != None:
  48. for app in app_list:
  49. if app_id == app['app_id']:
  50. ret = app
  51. find = True
  52. break
  53. if not find:
  54. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", "")
  55. else:
  56. ret = app_list
  57. myLogger.info_logger("app list result ok")
  58. return ret
  59. # 获取具体某个app的信息
  60. def get_app_status(app_id):
  61. code, message = docker.check_app_id(app_id)
  62. if code == None:
  63. app = get_my_app(app_id)
  64. # 将app_list 过滤出app_id的app,并缩减信息,使其符合文档的要求
  65. ret = {}
  66. ret['app_id'] = app['app_id']
  67. ret['status'] = app['status']
  68. ret['status_reason'] = app['status_reason']
  69. else:
  70. raise CommandException(code, message, '')
  71. return ret
  72. def install_app(app_name, customer_name, app_version):
  73. myLogger.info_logger("Install app ...")
  74. ret = {}
  75. ret['ResponseData'] = {}
  76. app_id = app_name + "_" + customer_name
  77. ret['ResponseData']['app_id'] = app_id
  78. code, message = check_app(app_name, customer_name, app_version)
  79. if code == None:
  80. q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
  81. else:
  82. ret['Error'] = get_error_info(code, message, "")
  83. return ret
  84. def start_app(app_id):
  85. info, flag = app_exits_in_docker(app_id)
  86. if flag:
  87. app_path = info.split()[-1].rsplit('/', 1)[0]
  88. cmd = "docker compose -f " + app_path + "/docker-compose.yml start"
  89. shell_execute.execute_command_output_all(cmd)
  90. else:
  91. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  92. def stop_app(app_id):
  93. info, flag = app_exits_in_docker(app_id)
  94. if flag:
  95. app_path = info.split()[-1].rsplit('/', 1)[0]
  96. cmd = "docker compose -f " + app_path + "/docker-compose.yml stop"
  97. shell_execute.execute_command_output_all(cmd)
  98. else:
  99. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  100. def restart_app(app_id):
  101. code, message = docker.check_app_id(app_id)
  102. if code == None:
  103. info, flag = app_exits_in_docker(app_id)
  104. if flag:
  105. app_path = info.split()[-1].rsplit('/', 1)[0]
  106. cmd = "docker compose -f " + app_path + "/docker-compose.yml restart"
  107. shell_execute.execute_command_output_all(cmd)
  108. else:
  109. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  110. else:
  111. raise CommandException(code, message, "")
  112. def delete_app_failedjob(job_id):
  113. myLogger.info_logger("delete_app_failedjob")
  114. failed = FailedJobRegistry(queue=q)
  115. failed.remove(job_id, delete_job=True)
  116. def uninstall_app(app_id):
  117. app_name = app_id.split('_')[0]
  118. customer_name = app_id.split('_')[1]
  119. app_path = ""
  120. info, code_exist = app_exits_in_docker(app_id)
  121. if code_exist:
  122. app_path = info.split()[-1].rsplit('/', 1)[0]
  123. cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
  124. lib_path = '/data/library/apps/' + app_name
  125. if app_path != lib_path:
  126. cmd = cmd + " && sudo rm -rf " + app_path
  127. shell_execute.execute_command_output_all(cmd)
  128. else:
  129. if check_app_rq(app_id):
  130. delete_app_failedjob(app_id)
  131. else:
  132. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
  133. # Force to delete docker compose
  134. try:
  135. cmd = " sudo rm -rf /data/apps/" + customer_name
  136. shell_execute.execute_command_output_all(cmd)
  137. except CommandException as ce:
  138. myLogger.info_logger("Delete app compose exception")
  139. # Delete proxy config when uninstall app
  140. app_proxy_delete(app_id)
  141. def check_app(app_name, customer_name, app_version):
  142. message = ""
  143. code = None
  144. app_id = app_name + "_" + customer_name
  145. if app_name == None:
  146. code = const.ERROR_CLIENT_PARAM_BLANK
  147. message = "app_name is null"
  148. elif customer_name == None:
  149. code = const.ERROR_CLIENT_PARAM_BLANK
  150. message = "customer_name is null"
  151. elif app_version == None:
  152. code = const.ERROR_CLIENT_PARAM_BLANK
  153. message = "app_version is null"
  154. elif app_version == "undefined" or app_version == "":
  155. code = const.ERROR_CLIENT_PARAM_BLANK
  156. message = "app_version is null"
  157. elif not docker.check_app_websoft9(app_name):
  158. code = const.ERROR_CLIENT_PARAM_NOTEXIST
  159. message = "It is not support to install " + app_name
  160. elif re.match('^[a-z0-9]+$', customer_name) == None:
  161. code = const.ERROR_CLIENT_PARAM_Format
  162. message = "APP name can only be composed of numbers and lowercase letters"
  163. elif docker.check_directory("/data/apps/" + customer_name):
  164. code = const.ERROR_CLIENT_PARAM_REPEAT
  165. message = "Repeat installation: " + customer_name
  166. elif not docker.check_vm_resource(app_name):
  167. code = const.ERROR_SERVER_RESOURCE
  168. message = "Insufficient system resources (cpu, memory, disk space)"
  169. elif check_app_docker(app_id):
  170. code = const.ERROR_CLIENT_PARAM_REPEAT
  171. message = "Repeat installation: " + customer_name
  172. elif check_app_rq(app_id):
  173. code = const.ERROR_CLIENT_PARAM_REPEAT
  174. message = "Repeat installation: " + customer_name
  175. return code, message
  176. def prepare_app(app_name, customer_name):
  177. library_path = "/data/library/apps/" + app_name
  178. install_path = "/data/apps/" + customer_name
  179. shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path)
def install_app_delay(app_name, customer_name, app_version):
    """RQ worker entry point: actually install the app (compose pull + up).

    Runs inside an RQ job whose job_id is "<app_name>_<customer_name>".
    On any failure the partially created app is uninstalled and the error is
    re-raised as "##websoft9##<code>##websoft9##<message>##websoft9##<detail>"
    so get_apps_from_queue() can later parse it back out of job.exc_info.
    """
    myLogger.info_logger("-------RQ install start --------")
    job_id = app_name + "_" + customer_name
    try:
        # The app folder has not been copied yet at this point; the resource
        # check reads the json from /data/library, so it must use app_name,
        # not customer_name.
        resource_flag = docker.check_vm_resource(app_name)
        if resource_flag == True:
            myLogger.info_logger("job check ok, continue to install app")
            env_path = "/data/apps/" + customer_name + "/.env"
            # prepare_app(app_name, customer_name)
            docker.check_app_compose(app_name, customer_name)
            myLogger.info_logger("start JobID=" + job_id)
            docker.modify_env(env_path, 'APP_NAME', customer_name)
            docker.modify_env(env_path, "APP_VERSION", app_version)
            docker.check_app_url(customer_name)
            cmd = "cd /data/apps/" + customer_name + " && sudo docker compose pull && sudo docker compose up -d"
            output = shell_execute.execute_command_output_all(cmd)
            myLogger.info_logger("-------Install result--------")
            myLogger.info_logger(output["code"])
            myLogger.info_logger(output["result"])
        else:
            # NOTE: this raise is inside the try, so it is caught again by the
            # generic handler below, which uninstalls and re-wraps the message.
            error_info= "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)"
            myLogger.info_logger(error_info)
            raise Exception(error_info)
    except CommandException as ce:
        # Shell/docker failure: clean up, then re-raise in the marker format.
        myLogger.info_logger(customer_name + " install failed(docker)!")
        uninstall_app(job_id)
        error_info= "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail
        myLogger.info_logger(error_info)
        raise Exception(error_info)
    except Exception as e:
        # Any other failure: clean up, then re-raise with a generic system code.
        myLogger.info_logger(customer_name + " install failed(system)!")
        uninstall_app(job_id)
        error_info= "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str(e)
        myLogger.info_logger(error_info)
        raise Exception(error_info)
  216. def app_exits_in_docker(app_id):
  217. customer_name = app_id.split('_')[1]
  218. app_name = app_id.split('_')[0]
  219. flag = False
  220. info = ""
  221. cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
  222. try:
  223. output = shell_execute.execute_command_output_all(cmd)
  224. if int(output["code"]) == 0:
  225. info = output["result"]
  226. app_path = info.split()[-1].rsplit('/', 1)[0]
  227. is_official = check_if_official_app(app_path + '/variables.json')
  228. if is_official:
  229. name = docker.read_var(app_path + '/variables.json', 'name')
  230. if name == app_name:
  231. flag = True
  232. elif app_name == customer_name:
  233. flag = True
  234. myLogger.info_logger("APP in docker")
  235. except CommandException as ce:
  236. myLogger.info_logger("APP not in docker")
  237. return info, flag
  238. def split_app_id(app_id):
  239. return app_id.split("_")[1]
  240. def get_createtime(official_app, app_path, customer_name):
  241. data_time = ""
  242. try:
  243. if official_app:
  244. cmd = "docker ps -f name=" + customer_name + " --format {{.RunningFor}} | head -n 1"
  245. result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
  246. data_time = result
  247. else:
  248. cmd_all = "cd " + app_path + " && docker compose ps -a --format json"
  249. output = shell_execute.execute_command_output_all(cmd_all)
  250. container_name = json.loads(output["result"])[0]["Name"]
  251. cmd = "docker ps -f name=" + customer_name + " --format {{.RunningFor}} | head -n 1"
  252. result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
  253. data_time = result
  254. except Exception as e:
  255. myLogger.info_logger(str(e))
  256. myLogger.info_logger("get_createtime get success"+data_time)
  257. return data_time
def get_apps_from_compose():
    """Build App dicts for every compose project docker knows about.

    Reads `docker compose ls -a --format json`, skips websoft9's own
    infrastructure containers, derives status/URLs/credentials from each
    app's variables.json and .env, and returns a list of App().dict().
    """
    myLogger.info_logger("Search all of apps ...")
    cmd = "docker compose ls -a --format json"
    output = shell_execute.execute_command_output_all(cmd)
    output_list = json.loads(output["result"])
    myLogger.info_logger(len(output_list))
    # Public IP used to build access URLs; falls back to 127.0.0.1.
    ip = "localhost"
    try:
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/stackhub/docker/w9appmanage/public_ip")
        ip = ip_result["result"].rstrip('\n')
    except Exception:
        ip = "127.0.0.1"
    app_list = []
    for app_info in output_list:
        # ConfigFiles is the compose file path; its parent dir names the app.
        volume = app_info["ConfigFiles"]
        app_path = volume.rsplit('/', 1)[0]
        customer_name = volume.split('/')[-2]
        # Per-app fields, reset for every project in the listing.
        app_id = ""
        app_name = ""
        trade_mark = ""
        port = 0
        url = ""
        admin_url = ""
        image_url = ""
        user_name = ""
        password = ""
        official_app = False
        app_version = ""
        create_time = ""
        volume_data = ""
        config_path = app_path
        app_https = False
        app_replace_url = False
        default_domain = ""
        admin_path = ""
        admin_domain_url = ""
        # Skip websoft9's own infrastructure projects.
        if customer_name in ['w9appmanage', 'w9nginxproxymanager','w9redis','w9portainer'] and app_path == '/data/apps/stackhub/docker/' + customer_name:
            continue
        status_show = app_info["Status"]
        status = app_info["Status"].split("(")[0]
        if status == "running" or status == "exited" or status == "restarting":
            # Mixed state: if any non-main container exited but others run,
            # still report the app as running.
            if "exited" in status_show and "running" in status_show:
                if status == "exited":
                    cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited"
                    result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
                    container = result.split("#Exited")[0]
                    if container != customer_name:
                        status = "running"
        elif status == "created":
            status = "failed"
        else:
            continue
        var_path = app_path + "/variables.json"
        official_app = check_if_official_app(var_path)
        if official_app:
            app_name = docker.read_var(var_path, 'name')
            app_id = app_name + "_" + customer_name  # app_id
            # get trade_mark
            trade_mark = docker.read_var(var_path, 'trademark')
            image_url = get_Image_url(app_name)
            # get env info
            path = app_path + "/.env"
            env_map = docker.get_map(path)
            # Custom domain: placeholder/IP values count as "no domain".
            try:
                myLogger.info_logger("get domain for APP_URL")
                domain = env_map.get("APP_URL")
                if "appname.example.com" in domain or ip in domain:
                    default_domain = ""
                else:
                    default_domain = domain
            except IndexError:
                myLogger.info_logger("domain exception")
            # Version, data volume, admin credentials and admin path.
            try:
                app_version = env_map.get("APP_VERSION")
                volume_data = "/data/apps/" + customer_name + "/data"
                user_name = env_map.get("APP_USER","")
                password = env_map.get("POWER_PASSWORD","")
                admin_path = env_map.get("APP_ADMIN_PATH")
                if admin_path:
                    myLogger.info_logger(admin_path)
                    admin_path = admin_path.replace("\"","")
                else:
                    admin_path =""
                if default_domain != "" and admin_path != "":
                    admin_domain_url = "http://" + default_domain + admin_path
            except IndexError:
                myLogger.info_logger("APP_USER POWER_PASSWORD exception")
            # HTTPS access and URL-replacement flags.
            try:
                replace = env_map.get("APP_URL_REPLACE","false")
                myLogger.info_logger("replace="+replace)
                if replace == "true":
                    app_replace_url = True
                https = env_map.get("APP_HTTPS_ACCESS","false")
                if https == "true":
                    app_https = True
            except IndexError:
                myLogger.info_logger("APP_HTTPS_ACCESS exception")
            # HTTP port; falls back to a DB port below when absent.
            try:
                http_port = env_map.get("APP_HTTP_PORT","0")
                if http_port:
                    port = int(http_port)
            except IndexError:
                pass
            if port != 0:
                try:
                    if app_https:
                        easy_url = "https://" + ip + ":" + str(port)
                    else:
                        easy_url = "http://" + ip + ":" + str(port)
                    url = easy_url
                    admin_url = get_admin_url(customer_name, url)
                except IndexError:
                    pass
            else:
                # No HTTP port: expose the first APP_DB*_PORT instead.
                try:
                    db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0]
                    port = int(db_port)
                except IndexError:
                    pass
        else:
            # Non-official app: identity is just the folder name, doubled.
            app_name = customer_name
            app_id = customer_name + "_" + customer_name
        create_time = get_createtime(official_app, app_path, customer_name)
        # Config is only meaningful for running/exited apps.
        if status in ['running', 'exited']:
            config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,admin_domain_url=admin_domain_url,
                            admin_path=admin_path,admin_username=user_name, admin_password=password, default_domain=default_domain)
        else:
            config = None
        if status == "failed":
            status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error", Detail="unknown error")
        else:
            status_reason = None
        app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
                  app_version=app_version,create_time=create_time,volume_data=volume_data,config_path=config_path,
                  status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
                  app_https=app_https,app_replace_url=app_replace_url,config=config)
        app_list.append(app.dict())
    return app_list
  396. def check_if_official_app(var_path):
  397. if docker.check_directory(var_path):
  398. if docker.read_var(var_path, 'name') != "" and docker.read_var(var_path, 'trademark') != "" and docker.read_var(
  399. var_path, 'requirements') != "":
  400. requirements = docker.read_var(var_path, 'requirements')
  401. try:
  402. cpu = requirements['cpu']
  403. mem = requirements['memory']
  404. disk = requirements['disk']
  405. return True
  406. except KeyError:
  407. return False
  408. else:
  409. return False
  410. def check_app_docker(app_id):
  411. customer_name = app_id.split('_')[1]
  412. app_name = app_id.split('_')[0]
  413. flag = False
  414. cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
  415. try:
  416. shell_execute.execute_command_output_all(cmd)
  417. flag = True
  418. myLogger.info_logger("APP in docker")
  419. except CommandException as ce:
  420. myLogger.info_logger("APP not in docker")
  421. return flag
  422. def check_app_rq(app_id):
  423. myLogger.info_logger("check_app_rq")
  424. started = StartedJobRegistry(queue=q)
  425. failed = FailedJobRegistry(queue=q)
  426. run_job_ids = started.get_job_ids()
  427. failed_job_ids = failed.get_job_ids()
  428. queue_job_ids = q.job_ids
  429. myLogger.info_logger(queue_job_ids)
  430. myLogger.info_logger(run_job_ids)
  431. myLogger.info_logger(failed_job_ids)
  432. if queue_job_ids and app_id in queue_job_ids:
  433. myLogger.info_logger("App in RQ")
  434. return True
  435. if failed_job_ids and app_id in failed_job_ids:
  436. myLogger.info_logger("App in RQ")
  437. return True
  438. if run_job_ids and app_id in run_job_ids:
  439. myLogger.info_logger("App in RQ")
  440. return True
  441. myLogger.info_logger("App not in RQ")
  442. return False
def get_apps_from_queue():
    """Build App dicts for jobs still inside RQ (queued, running or failed).

    Failed jobs carry their error inside job.exc_info as
    "##websoft9##<code>##websoft9##<message>##websoft9##<detail>"
    (written by install_app_delay) and are parsed back apart here.
    """
    myLogger.info_logger("get queque apps...")
    # One registry per job state RQ tracks.
    started = StartedJobRegistry(queue=q)
    finish = FinishedJobRegistry(queue=q)
    deferred = DeferredJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    scheduled = ScheduledJobRegistry(queue=q)
    cancel = CanceledJobRegistry(queue=q)
    # Job-id lists per state (only running/queued/failed are used below;
    # the rest are logged for diagnostics only).
    run_job_ids = started.get_job_ids()
    finish_job_ids = finish.get_job_ids()
    wait_job_ids = deferred.get_job_ids()
    failed_jobs = failed.get_job_ids()
    scheduled_jobs = scheduled.get_job_ids()
    cancel_jobs = cancel.get_job_ids()
    myLogger.info_logger(q.jobs)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_jobs)
    myLogger.info_logger(cancel_jobs)
    myLogger.info_logger(wait_job_ids)
    myLogger.info_logger(finish_job_ids)
    myLogger.info_logger(scheduled_jobs)
    installing_list = []
    # Currently executing jobs.
    for job_id in run_job_ids:
        app = get_rq_app(job_id, 'installing', "", "", "")
        installing_list.append(app)
    # Jobs still waiting in the queue.
    for job in q.jobs:
        app = get_rq_app(job.id, 'installing', "", "", "")
        installing_list.append(app)
    # Failed jobs: recover code/message/detail from the marker-delimited exc_info.
    for job_id in failed_jobs:
        job = q.fetch_job(job_id)
        exc_info = job.exc_info
        code = exc_info.split('##websoft9##')[1]
        message = exc_info.split('##websoft9##')[2]
        detail = exc_info.split('##websoft9##')[3]
        app = get_rq_app(job_id, 'failed', code, message, detail)
        installing_list.append(app)
    return installing_list
  482. def get_rq_app(id, status, code, message, detail):
  483. app_name = id.split('_')[0]
  484. customer_name = id.split('_')[1]
  485. # 当app还在RQ时,可能文件夹还没创建,无法获取trade_mark
  486. trade_mark = ""
  487. app_version = ""
  488. create_time = ""
  489. volume_data = ""
  490. config_path = ""
  491. image_url = get_Image_url(app_name)
  492. config = None
  493. if status == "installing" :
  494. status_reason = None
  495. else:
  496. status_reason = StatusReason(Code=code, Message=message, Detail=detail)
  497. app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
  498. app_version=app_version,create_time=create_time,volume_data=volume_data,config_path=config_path,
  499. status=status, status_reason=status_reason, official_app=True, image_url=image_url,
  500. app_https=False,app_replace_url=False,config=config)
  501. return app.dict()
  502. def get_Image_url(app_name):
  503. image_url = "static/images/" + app_name + "-websoft9.png"
  504. return image_url
  505. def get_url(app_name, easy_url):
  506. url = easy_url
  507. return url
  508. def get_admin_url(customer_name, url):
  509. admin_url = ""
  510. path = "/data/apps/" + customer_name + "/.env"
  511. try:
  512. admin_path = list(docker.read_env(path, "APP_ADMIN_PATH").values())[0]
  513. admin_path = admin_path.replace("\"","")
  514. admin_url = url + admin_path
  515. except IndexError:
  516. pass
  517. return admin_url
  518. def get_error_info(code, message, detail):
  519. error = {}
  520. error['Code'] = code
  521. error['Message'] = message
  522. error['Detail'] = detail
  523. return error
  524. def app_domain_list(app_id):
  525. code, message = docker.check_app_id(app_id)
  526. if code == None:
  527. info, flag = app_exits_in_docker(app_id)
  528. if flag:
  529. myLogger.info_logger("Check app_id ok[app_domain_list]")
  530. else:
  531. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  532. else:
  533. raise CommandException(code, message, "")
  534. domains = get_all_domains(app_id)
  535. myLogger.info_logger(domains)
  536. ret = {}
  537. ret['domains'] = domains
  538. default_domain = ""
  539. if domains != None and len(domains) > 0:
  540. customer_name = app_id.split('_')[1]
  541. app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name +"/.env")["result"]
  542. if "APP_URL" in app_url:
  543. url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name +"/.env |grep APP_URL=")["result"].rstrip('\n')
  544. default_domain = url.split('=')[1]
  545. ret['default_domain'] = default_domain
  546. myLogger.info_logger(ret)
  547. return ret
  548. def app_proxy_delete(app_id):
  549. customer_name = app_id.split('_')[1]
  550. proxy_host = None
  551. token = get_token()
  552. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
  553. headers = {
  554. 'Authorization': token,
  555. 'Content-Type': 'application/json'
  556. }
  557. response = requests.get(url, headers=headers)
  558. for proxy in response.json():
  559. portainer_name = proxy["forward_host"]
  560. if customer_name == portainer_name:
  561. proxy_id = proxy["id"]
  562. token = get_token()
  563. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
  564. headers = {
  565. 'Authorization': token,
  566. 'Content-Type': 'application/json'
  567. }
  568. response = requests.delete(url, headers=headers)
def app_domain_delete(app_id, domain):
    """Unbind one domain from an app's nginx proxy host.

    If the domain was the proxy's last one, the whole proxy host is deleted
    and the app's default domain is cleared; otherwise the proxy host is
    updated and, when the deleted domain was the default, the first remaining
    domain becomes the new default.

    Raises:
        CommandException: bad app_id, app not in docker, blank domain,
            domain not bound, or an nginx API error.
    """
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_delete]")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    if domain is None or domain == "undefined":
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("delete domain is not binded")
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bind.", "")
    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy != None:
        myLogger.info_logger(proxy)
        myLogger.info_logger("before update")
        domains_old = proxy["domain_names"]
        myLogger.info_logger(domains_old)
        # Mutates the proxy's own list in place.
        domains_old.remove(domain)
        myLogger.info_logger("after update")
        myLogger.info_logger(domains_old)
        if len(domains_old) == 0:
            # Last domain gone: delete the proxy host entirely.
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)
            try:
                if response.json().get("error"):
                    raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            except Exception:
                # Response body may not be JSON on success; log whatever came back.
                myLogger.info_logger(response.json())
            set_domain("", app_id)
        else:
            # Domains remain: update the proxy host with the reduced list.
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            port = get_container_port(app_id.split('_')[1])
            host = app_id.split('_')[1]
            data = {
                "domain_names": domains_old,
                "forward_scheme": "http",
                "forward_host": host,
                "forward_port": port,
                "access_list_id": "0",
                "certificate_id": 0,
                "meta": {
                    "letsencrypt_agree": False,
                    "dns_challenge": False
                },
                "advanced_config": "",
                "locations": [],
                "block_exploits": False,
                "caching_enabled": False,
                "allow_websocket_upgrade": False,
                "http2_support": False,
                "hsts_enabled": False,
                "hsts_subdomains": False,
                "ssl_forced": False
            }
            response = requests.put(url, data=json.dumps(data), headers=headers)
            if response.json().get("error"):
                raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            domain_set = app_domain_list(app_id)
            default_domain = domain_set['default_domain']
            # If the deleted domain was the default one, promote the first
            # remaining domain to default.
            if default_domain == domain:
                set_domain(domains_old[0], app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Delete domain is not bind", "")
def app_domain_update(app_id, domain_old, domain_new):
    """Replace one bound domain with another on the app's nginx proxy host.

    If the replaced domain was the app's default domain, the new domain
    becomes the default.

    Raises:
        CommandException: bad app_id, app not in docker, domain_old not
            bound, or an nginx API error.
    """
    myLogger.info_logger("app_domain_update")
    domain_list = []
    domain_list.append(domain_old)
    domain_list.append(domain_new)
    check_domains(domain_list)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    proxy = get_proxy_domain(app_id, domain_old)
    if proxy != None:
        # Swap the old domain for the new one in place.
        domains_old = proxy["domain_names"]
        index = domains_old.index(domain_old)
        domains_old[index] = domain_new
        proxy_id = proxy["id"]
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domains_old,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        domain_set = app_domain_list(app_id)
        default_domain = domain_set['default_domain']
        myLogger.info_logger("default_domain=" + default_domain + ",domain_old="+domain_old)
        # If the edited domain was the default domain, keep the new one as default.
        if default_domain == domain_old:
            set_domain(domain_new, app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "edit domain is not exist", "")
  712. def app_domain_add(app_id, domain):
  713. temp_domains = []
  714. temp_domains.append(domain)
  715. check_domains(temp_domains)
  716. code, message = docker.check_app_id(app_id)
  717. if code == None:
  718. info, flag = app_exits_in_docker(app_id)
  719. if flag:
  720. myLogger.info_logger("Check app_id ok")
  721. else:
  722. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  723. else:
  724. raise CommandException(code, message, "")
  725. old_domains = get_all_domains(app_id)
  726. if domain in old_domains:
  727. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")
  728. proxy = get_proxy(app_id)
  729. if proxy != None:
  730. domains_old = proxy["domain_names"]
  731. domain_list = domains_old
  732. domain_list.append(domain)
  733. proxy_id = proxy["id"]
  734. token = get_token()
  735. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
  736. headers = {
  737. 'Authorization': token,
  738. 'Content-Type': 'application/json'
  739. }
  740. port = get_container_port(app_id.split('_')[1])
  741. host = app_id.split('_')[1]
  742. data = {
  743. "domain_names": domain_list,
  744. "forward_scheme": "http",
  745. "forward_host": host,
  746. "forward_port": port,
  747. "access_list_id": "0",
  748. "certificate_id": 0,
  749. "meta": {
  750. "letsencrypt_agree": False,
  751. "dns_challenge": False
  752. },
  753. "advanced_config": "",
  754. "locations": [],
  755. "block_exploits": False,
  756. "caching_enabled": False,
  757. "allow_websocket_upgrade": False,
  758. "http2_support": False,
  759. "hsts_enabled": False,
  760. "hsts_subdomains": False,
  761. "ssl_forced": False
  762. }
  763. response = requests.put(url, data=json.dumps(data), headers=headers)
  764. if response.json().get("error"):
  765. raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
  766. else:
  767. # 追加
  768. token = get_token()
  769. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
  770. headers = {
  771. 'Authorization': token,
  772. 'Content-Type': 'application/json'
  773. }
  774. port = get_container_port(app_id.split('_')[1])
  775. host = app_id.split('_')[1]
  776. data = {
  777. "domain_names": temp_domains,
  778. "forward_scheme": "http",
  779. "forward_host": host,
  780. "forward_port": port,
  781. "access_list_id": "0",
  782. "certificate_id": 0,
  783. "meta": {
  784. "letsencrypt_agree": False,
  785. "dns_challenge": False
  786. },
  787. "advanced_config": "",
  788. "locations": [],
  789. "block_exploits": False,
  790. "caching_enabled": False,
  791. "allow_websocket_upgrade": False,
  792. "http2_support": False,
  793. "hsts_enabled": False,
  794. "hsts_subdomains": False,
  795. "ssl_forced": False
  796. }
  797. response = requests.post(url, data=json.dumps(data), headers=headers)
  798. if response.json().get("error"):
  799. raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
  800. set_domain(domain, app_id)
  801. return domain
  802. def check_domains(domains):
  803. myLogger.info_logger(domains)
  804. if domains is None or len(domains) == 0:
  805. raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
  806. else:
  807. for domain in domains:
  808. if is_valid_domain(domain):
  809. if check_real_domain(domain) == False:
  810. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server not match", "")
  811. else:
  812. raise CommandException(const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")
  813. def is_valid_domain(domain):
  814. if domain.startswith("http"):
  815. return False
  816. return True
  817. def check_real_domain(domain):
  818. domain_real = True
  819. try:
  820. cmd = "ping -c 1 " + domain + " | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq"
  821. domain_ip = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
  822. ip_result = shell_execute.execute_command_output_all("cat /data/apps/stackhub/docker/w9appmanage/public_ip")
  823. ip_save = ip_result["result"].rstrip('\n')
  824. if domain_ip == ip_save:
  825. myLogger.info_logger("Domain check ok!")
  826. else:
  827. domain_real = False
  828. except CommandException as ce:
  829. domain_real = False
  830. return domain_real
  831. def get_token():
  832. url = 'http://172.17.0.1:9092/api/tokens'
  833. headers = {'Content-type': 'application/json'}
  834. cmd = "cat /usr/share/cockpit/nginx/config.json | jq -r '.NGINXPROXYMANAGER_PASSWORD'"
  835. password = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
  836. myLogger.info_logger(password)
  837. param = {
  838. "identity": "help@websoft9.com",
  839. "scope": "user",
  840. "secret": password
  841. }
  842. response = requests.post(url, data=json.dumps(param), headers=headers)
  843. token = "Bearer " + response.json()["token"]
  844. return token
  845. def get_proxy(app_id):
  846. customer_name = app_id.split('_')[1]
  847. proxy_host = None
  848. token = get_token()
  849. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
  850. headers = {
  851. 'Authorization': token,
  852. 'Content-Type': 'application/json'
  853. }
  854. response = requests.get(url, headers=headers)
  855. for proxy in response.json():
  856. portainer_name = proxy["forward_host"]
  857. if customer_name == portainer_name:
  858. proxy_host = proxy
  859. break
  860. return proxy_host
  861. def get_proxy_domain(app_id, domain):
  862. customer_name = app_id.split('_')[1]
  863. proxy_host = None
  864. token = get_token()
  865. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
  866. headers = {
  867. 'Authorization': token,
  868. 'Content-Type': 'application/json'
  869. }
  870. response = requests.get(url, headers=headers)
  871. myLogger.info_logger(response.json())
  872. for proxy in response.json():
  873. portainer_name = proxy["forward_host"]
  874. domain_list = proxy["domain_names"]
  875. if customer_name == portainer_name:
  876. myLogger.info_logger("-------------------")
  877. if domain in domain_list:
  878. myLogger.info_logger("find the domain proxy")
  879. proxy_host = proxy
  880. break
  881. return proxy_host
  882. def get_all_domains(app_id):
  883. customer_name = app_id.split('_')[1]
  884. domains = []
  885. token = get_token()
  886. url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
  887. headers = {
  888. 'Authorization': token,
  889. 'Content-Type': 'application/json'
  890. }
  891. response = requests.get(url, headers=headers)
  892. for proxy in response.json():
  893. portainer_name = proxy["forward_host"]
  894. if customer_name == portainer_name:
  895. for domain in proxy["domain_names"]:
  896. domains.append(domain)
  897. return domains
  898. def app_domain_set(domain, app_id):
  899. temp_domains = []
  900. temp_domains.append(domain)
  901. check_domains(temp_domains)
  902. code, message = docker.check_app_id(app_id)
  903. if code == None:
  904. info, flag = app_exits_in_docker(app_id)
  905. if flag:
  906. myLogger.info_logger("Check app_id ok")
  907. else:
  908. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
  909. else:
  910. raise CommandException(code, message, "")
  911. set_domain(domain, app_id)
  912. def set_domain(domain, app_id):
  913. myLogger.info_logger("set_domain start")
  914. old_domains = get_all_domains(app_id)
  915. if domain != "":
  916. if domain not in old_domains:
  917. message = domain + " is not in use"
  918. raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, message, "")
  919. customer_name = app_id.split('_')[1]
  920. app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name +"/.env")["result"]
  921. if "APP_URL" in app_url:
  922. myLogger.info_logger("APP_URL is exist")
  923. if domain == "":
  924. ip_result = shell_execute.execute_command_output_all("cat /data/apps/stackhub/docker/w9appmanage/public_ip")
  925. domain = ip_result["result"].rstrip('\n')
  926. cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name +"/.env"
  927. shell_execute.execute_command_output_all(cmd)
  928. if "APP_URL_REPLACE=true" in app_url:
  929. myLogger.info_logger("need up")
  930. shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
  931. else:
  932. cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name +"/.env"
  933. shell_execute.execute_command_output_all(cmd)
  934. if "APP_URL_REPLACE=true" in app_url:
  935. myLogger.info_logger("need up")
  936. shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
  937. myLogger.info_logger("set_domain success")
  938. def get_container_port(container_name):
  939. port = "80"
  940. cmd = "docker port "+ container_name + " |grep ::"
  941. result = shell_execute.execute_command_output_all(cmd)["result"]
  942. myLogger.info_logger(result)
  943. port = result.split('/')[0]
  944. myLogger.info_logger(port)
  945. return port