diff --git a/README.md b/README.md
index 6a9cd95..1d99ed5 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,22 @@
 # Fastapi Admin
+
 A backend admin framework built as a frontend/backend-separated project
+
 * Frontend: vue3 + element plus
 * Backend: fastapi + sqlmodel
+* Task management: rpyc + apscheduler
 
 **A personal learning project: only obvious bugs will be fixed, and few new features will be added**
 
 Branches:
+
 * main: the main branch; every feature is merged here
 * test: the testing branch
 
 ## Running the services in a test environment
+
 1. Add the nginx configuration:
+
 ```
 server {
         listen 80;
@@ -49,21 +55,36 @@
 }
 ```
+
 2. Start the frontend:
+
 ```
 cd into the www directory and run: npm run serve
 ```
+
 3. Start the backend:
+
 ```
 uvicorn server.main:app --reload
+uvicorn --host 192.168.137.129 server.main:app --reload --reload-exclude www
+```
+
+4. Start the task scheduler:
+
+```
+cd /mnt/hgfs/simple_ams/rpyc_scheduler/ && python3 scheduler-server.py
 ```
 
 ## Production deployment
+
 1. Build the frontend bundle:
+
 ```
 npm run build
 ```
+
 2. Add the nginx configuration:
+
 ```
 server {
     listen 80;
@@ -85,33 +106,39 @@ server {
     }
 }
 ```
+
 3. Start the backend service
-In production, starting the service through gunicorn is recommended:
+   In production, starting the service through gunicorn is recommended:
+
 ```
 nohup gunicorn server.main:app -b 127.0.0.1:8000 -w 4 -k uvicorn.workers.UvicornWorker >gunicorn.log 2>&1&
 ```
 
 ## Conventions
+
 1. Booleans sent to the backend are stored in the database as tinyint: 0 is false, 1 is true
 2. All frontend booleans follow the same rule: 0 is false, 1 is true
 
 # Feature notes
 
 ## Paginated search
+
 ### Frontend
+
 ```js
 import usePagination from '@/composables/usePagination'
-
+// the fields available to the search form
 const searchForm = {
     name: null,
     email: null,
     enable: null
-  }
+}
 
-
-  const {
+
+const {
     search,
     tableData,
     currentPage,
@@ -120,11 +147,13 @@ const searchForm = {
     total,
     freshCurrentPage,
     handleSearch
-  } = usePagination('/api/users/search', searchForm)
+} = usePagination('/api/users/search', searchForm)
 ```
 
 ### Backend
+
 A pagination model is defined:
+
 ```python
 from typing import Optional, Generic, TypeVar
 from pydantic import BaseModel
@@ -140,13 +169,14 @@ class Pagination(BaseModel, Generic[T]):
 ```
 
 Usage:
+
 ```python
 @router.post('/dict/item/search', summary="字典列表查询", response_model=ApiResponse[SearchResponse[DictRead]])
 async def search_items(search: Pagination[DictItemSearch], session: Session = Depends(get_session)):
     # A filter_type declares how each field is matched; available types: l_like, r_like, like, eq, ne, lt, le, gt, ge
     filter_type = DictItemSearchFilter(dict_id='eq', label='like', enable='eq', value='like')
-    total = crud.internal.dict_item.search_total(session, search.search, filter_type.dict())
-    items: List[DictRead] = crud.internal.dict_item.search(session, search, filter_type.dict())
+    total = crud.internal.dict_item.search_total(session, search.search, filter_type.model_dump())
+    items: List[DictRead] = crud.internal.dict_item.search(session, search, filter_type.model_dump())
     # Re-validate into the read model here; otherwise returning the ORM objects touches foreign-key and relationship fields and can drag in far too much data
     item_list = [DictRead.from_orm(item) for item in items]
     return ApiResponse(
@@ -159,7 +189,9 @@ async def search_items(search: Pagination[DictItemSearch], session: Session = De
 ```
 
 ## Permission control
+
 Simplified permission control is implemented with casbin, along these lines (a sketch of the enforcement dependency follows the list):
+
 1. Endpoints that skip token verification are listed in APISettings.NO_VERIFY_URL in settings
 2. Endpoints that need permission control are registered in casbin, and access to them is verified with casbin
 3. The frontend uses the permission tag to decide what to render
@@ -167,13 +199,17 @@ async def search_items(search: Pagination[DictItemSearch], session: Session = De
 5. Page-level control only means the backend returns the menu list for the frontend to render; the backend does not enforce permissions per page
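+
+The `Authority` dependency used below is the enforcement point. A minimal sketch of how such a
+dependency can wrap a casbin enforcer (the enforcer setup and `get_current_username` are
+illustrative stand-ins, not this project's exact code):
+
+```python
+import casbin
+from fastapi import Depends, HTTPException
+
+# Illustrative: the project loads its casbin model/policy from its own settings.
+enforcer = casbin.Enforcer('model.conf', 'policy.csv')
+
+
+def get_current_username() -> str:
+    # Stand-in for the project's token-based user resolution.
+    return 'admin'
+
+
+class Authority:
+    def __init__(self, policy: str):
+        # policy is a "resource:action" pair, e.g. 'role:update'
+        self.policy = policy
+
+    def __call__(self, username: str = Depends(get_current_username)):
+        resource, action = self.policy.split(':')
+        if not enforcer.enforce(username, resource, action):
+            raise HTTPException(status_code=403, detail='Permission denied')
+```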
 
 ### Frontend
+
 v-permission declares the permission tag: the button is rendered only when the user holds the permission, and the backend checks the same permission again on its side.
+
 ```js
 <el-button v-permission="'user:update'" @click="handleEdit(scope.row)">编辑</el-button>
 ```
+
 ### Backend
+
 ```python
 @router.put('/roles', summary="更新角色", response_model=ApiResponse[Role],
             dependencies=[Depends(Authority('role:update'))])
@@ -192,21 +228,36 @@ async def update_roles(role_info: RoleUpdate, session: Session = Depends(get_ses
 ```
 
 ## Project screenshots
+
 ### Login
+
 ![Login page](./images/login.png)
+
 ### User management
+
 ![User management page](./images/user.png)
 ![Adding a user](./images/user_add.png)
+
 ### Role management
+
 ![Role management page](./images/role.png)
 ![Adding a role](./images/role_add.png)
+
 ### Menu management
+
 ![Menu management page](./images/menu.png)
 ![Adding a menu](./images/menu_add.png)
+
 ### Data dictionary management
+
 ![Data dictionary management](./images/dictonary.png)
 ![Data dictionary item management](./images/dictonary_items.png)
+### Task management
+
+![Task management](./images/job_manage.png)
+![Playbook management](./images/job_playbook.png)
 
 ### Reference project:
+
 * https://github.com/xingxingzaixian/FastAPI-MySQL-Tortoise-Casbin
\ No newline at end of file
diff --git a/images/job_manage.png b/images/job_manage.png
new file mode 100644
index 0000000..fd8b026
Binary files /dev/null and b/images/job_manage.png differ
diff --git a/images/job_playbook.png b/images/job_playbook.png
new file mode 100644
index 0000000..ab38f7d
Binary files /dev/null and b/images/job_playbook.png differ
diff --git a/rpyc_scheduler/config.py b/rpyc_scheduler/config.py
index 610e2eb..196c310 100644
--- a/rpyc_scheduler/config.py
+++ b/rpyc_scheduler/config.py
@@ -1,11 +1,13 @@
-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings
+from pydantic import MySQLDsn, RedisDsn
 
 
 class RpcConfig(BaseSettings):
     # job store and executors for apscheduler
-    apscheduler_job_store = 'mysql+pymysql://root:123456@192.168.137.129/devops'
-    redis = {'host': '192.168.137.129', 'password':'seraphim','port': 6379, 'health_check_interval': 30}
-    rpc_port = 18861
+    apscheduler_job_store: MySQLDsn = 'mysql+pymysql://root:123456@192.168.137.129/devops'
+    redis: dict = {'host': '192.168.137.129', 'password': 'seraphim', 'port': 6379, 'health_check_interval': 30}
+    rpc_port: int = 18861
+
 
 rpc_config = RpcConfig()
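One consequence of the typed settings above: pydantic v2 DSN fields such as `MySQLDsn` validate into URL objects rather than plain strings, which is why `rpyc_scheduler/models.py` below passes `str(rpc_config.apscheduler_job_store)` to `create_engine`. A minimal standalone sketch of that behavior (assumes pymysql is installed, as this project uses it):

```python
from pydantic import MySQLDsn, TypeAdapter
from sqlalchemy import create_engine

# Validation turns the string into a URL object, not a str.
dsn = TypeAdapter(MySQLDsn).validate_python(
    'mysql+pymysql://root:123456@192.168.137.129/devops')

# SQLAlchemy expects a string URL, so the DSN object is cast explicitly.
engine = create_engine(str(dsn), pool_pre_ping=True)
```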
diff --git a/rpyc_scheduler/jobstore.py b/rpyc_scheduler/jobstore.py
new file mode 100644
index 0000000..d227850
--- /dev/null
+++ b/rpyc_scheduler/jobstore.py
@@ -0,0 +1,58 @@
+from typing import List
+from loguru import logger
+from apscheduler.util import maybe_ref
+from apscheduler.job import Job
+from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+from sqlalchemy import (
+    create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select, and_, text, Integer)
+
+try:
+    import cPickle as pickle
+except ImportError:  # pragma: nocover
+    import pickle
+
+
+class CustomJobStore(SQLAlchemyJobStore):
+
+    def get_multi_jobs(self, uid=None):
+        """
+        Query multiple jobs, optionally narrowed to a single job id
+        """
+        jobs = []
+        selectable = select(self.jobs_t.c.id, self.jobs_t.c.job_state). \
+            order_by(self.jobs_t.c.next_run_time)
+        selectable = selectable.where(self.jobs_t.c.id == uid) if uid else selectable
+        logger.debug(selectable)
+        failed_job_ids = set()
+        with self.engine.begin() as connection:
+            for row in connection.execute(selectable):
+                try:
+                    jobs.append(self._reconstitute_job(row.job_state))
+                except BaseException:
+                    self._logger.exception('Unable to restore job "%s" -- removing it', row.id)
+                    failed_job_ids.add(row.id)
+
+            # Remove all the jobs we failed to restore
+            if failed_job_ids:
+                delete = self.jobs_t.delete().where(self.jobs_t.c.id.in_(failed_job_ids))
+                connection.execute(delete)
+        logger.debug(jobs)
+        return jobs
+
+    def get_user_jobs(self, uid, job_name, jobstore=None) -> list[Job]:
+        """
+        Fetch a user's job list by uid and job_name
+        :param uid: user id
+        :param job_name: substring to match against the job name
+        :param jobstore: None
+        """
+        user_jobs: List[Job] = []
+        jobs = self.get_multi_jobs(uid)
+        logger.debug(jobs)
+        for job in jobs:
+            if job_name is None:
+                user_jobs.append(job)
+            elif (job.name.find(job_name)) >= 0:
+                user_jobs.append(job)
+        logger.debug(user_jobs)
+        return user_jobs
diff --git a/rpyc_scheduler/models.py b/rpyc_scheduler/models.py
index d1e0a11..e82a86c 100644
--- a/rpyc_scheduler/models.py
+++ b/rpyc_scheduler/models.py
@@ -3,8 +3,26 @@
 from sqlmodel import SQLModel, Field, Column, Integer, create_engine, Session
 from sqlalchemy.dialects import mysql
 from config import rpc_config
+from typing import Union
 
-
-engine = create_engine(rpc_config.apscheduler_job_store, pool_size=5, max_overflow=10, pool_timeout=30,
+engine = create_engine(str(rpc_config.apscheduler_job_store), pool_size=5, max_overflow=10, pool_timeout=30,
                        pool_pre_ping=True)
 
+
+# SQLModel.metadata.create_all(engine)
+
+
+class InventoryHost(SQLModel):
+    """
+    Parameters describing a host asset
+    """
+    name: str
+    ansible_host: str
+    ansible_port: int = 22
+    ansible_user: str
+    ansible_password: Union[str, None] = None
+    ansible_ssh_private_key: Union[str, None] = None
+
+
+class AnsibleInventory(SQLModel):
+    pass
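`InventoryHost` is consumed by `hosts_to_inventory()` in rpyc_scheduler/utils.py, further down in this diff, to build the dict inventory that ansible-runner accepts. A usage sketch, assuming it is run from the rpyc_scheduler directory (the host values are made up):

```python
from pathlib import Path
from models import InventoryHost
from utils import hosts_to_inventory

host = InventoryHost(name='web-1', ansible_host='192.168.137.10',
                     ansible_user='root', ansible_password='secret')
# Password-only hosts need no key file; the directory is only written to for private keys.
inventory = hosts_to_inventory([host], Path('/tmp/ansible/demo'))
# {'all': {'hosts': {'web-1': {'ansible_host': '192.168.137.10', 'ansible_port': 22,
#                              'ansible_user': 'root', 'ansible_password': 'secret'}}}}
```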
diff --git a/rpyc_scheduler/scheduler-server.py b/rpyc_scheduler/scheduler-server.py
index f4f408a..871205f 100644
--- a/rpyc_scheduler/scheduler-server.py
+++ b/rpyc_scheduler/scheduler-server.py
@@ -5,14 +5,18 @@
 """
 import rpyc
+from typing import List
+
+from apscheduler.triggers.cron import CronTrigger
+from apscheduler.triggers.date import DateTrigger
 from sqlmodel import text
-from tasks import run_command_with_channel
+from tasks import *
 from datetime import datetime
 from loguru import logger
 from config import rpc_config
 from rpyc.utils.server import ThreadedServer
 from apscheduler.job import Job
-from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+# from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
 from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
 from apscheduler.events import (
     EVENT_JOB_EXECUTED,
@@ -21,8 +25,8 @@
     EVENT_JOB_SUBMITTED,
     EVENT_JOB_REMOVED
 )
-
-from scheduler import CustomScheduler
+from jobstore import CustomJobStore
+from scheduler import CustomBackgroundScheduler
 from models import engine
 
 
@@ -32,20 +36,44 @@ def print_text(*args, **kwargs):
 
 class SchedulerService(rpyc.Service):
-    def exposed_add_job(self, func, *args, **kwargs):
-        return scheduler.add_job(func, *args, **kwargs)
+    def exposed_add_job(self, func, **kwargs):
+        trigger = kwargs.pop('trigger', None)
+        trigger_args = kwargs.pop('trigger_args', None)
+        if trigger == 'cron':
+            cron = trigger_args['cron']
+            values = cron.split()
+            trigger = CronTrigger(minute=values[0], hour=values[1], day=values[2], month=values[3],
+                                  day_of_week=values[4], start_date=trigger_args['start_date'],
+                                  end_date=trigger_args['end_date'])
+            # the CronTrigger is already built from the crontab fields above, so pass it through as-is
+            return scheduler.add_job(func, trigger, **kwargs)
+        elif trigger == 'date':
+            return scheduler.add_job(func, DateTrigger(
+                run_date=trigger_args['run_date'] if trigger_args is not None else None), **kwargs)
 
     def exposed_modify_job(self, job_id, jobstore=None, **changes):
+        logger.debug(changes)
+        trigger = changes.pop('trigger', 'date')
+        trigger_args = changes.pop('trigger_args', None)
+        if trigger == 'cron':
+            cron = trigger_args['cron']
+            values = cron.split()
+            changes['trigger'] = CronTrigger(minute=values[0], hour=values[1], day=values[2], month=values[3],
+                                             day_of_week=values[4], start_date=trigger_args['start_date'],
+                                             end_date=trigger_args['end_date'])
+        else:
+            run_date = trigger_args['run_date']
+            changes['trigger'] = DateTrigger(run_date=run_date, timezone=None)
+        logger.debug(changes)
         return scheduler.modify_job(job_id, jobstore, **changes)
 
-    def exposed_pause_job(self, job_id, jobstore=None):
-        return scheduler.pause_job(job_id, jobstore)
+    def exposed_pause_job(self, job_id):
+        return scheduler.pause_job(job_id)
 
-    def exposed_resume_job(self, job_id, jobstore=None):
-        return scheduler.resume_job(job_id, jobstore)
+    def exposed_resume_job(self, job_id):
+        return scheduler.resume_job(job_id)
 
-    def exposed_remove_job(self, job_id, jobstore=None):
-        return scheduler.remove_job(job_id, jobstore)
+    def exposed_remove_job(self, job_id):
+        return scheduler.remove_job(job_id)
 
     def exposed_get_jobs(self, jobstore=None):
         return scheduler.get_jobs(jobstore)
@@ -53,40 +81,28 @@ def exposed_get_jobs(self, jobstore=None):
     def exposed_get_job(self, job_id, jobstore=None):
         return scheduler.get_job(job_id, jobstore)
 
-    def exposed_get_user_jobs(self, uid, job_name, jobstore=None) -> list[Job]:
+    def exposed_get_multi_jobs(self, job_ids, jobstore=None) -> list[Job]:
+        """
+        Fetch a list of jobs by their job ids
+        :param job_ids:
+        :param jobstore: None
+        """
+        logger.debug(f'get_multi_jobs:{job_ids}')
+        jobs = []
+        for job_id in job_ids:
+            job = self.exposed_get_job(job_id, jobstore)
+            jobs.append(job)
+        return jobs
+
+    def exposed_get_user_jobs(self, uid=None, job_name=None, jobstore=None) -> list[Job]:
         """
         Fetch a user's job list by uid and job_name
         :param uid: user id
         :param job_name: substring to match against the job name
         :param jobstore: None
         """
-        logger.debug(f'get_user_jobs:{job_name}')
-        sql = text("select job_id from user_job where user_id=:uid ")
-        user_jobs = []
-        with engine.connect() as conn:
-            results = conn.execute(sql, {'uid': uid})
-            for job_id in results.fetchall():
-                job = self.exposed_get_job(job_id[0], jobstore)
-                if job_name is None:
-                    user_jobs.append(job)
-                elif (job.name.find(job_name)) >= 0:
-                    user_jobs.append(job)
-        return user_jobs
-
-    def exposed_switch_job(self, job_id, jobstore=None):
-        """
-        Toggle a job between paused and running
-        """
-        job = scheduler.get_job(job_id, jobstore)
-        if job.next_run_time is None:
-            now = datetime.now(job.trigger.timezone)
-            next_fire_time = job.trigger.get_next_fire_time(None, now)
-            if next_fire_time:
-                scheduler.resume_job(job_id, jobstore)
-            else:
-                raise ValueError('无法指定下次运行时间,请确认任务时间配置')
-        else:
-            scheduler.pause_job(job_id, jobstore)
+        logger.debug('get user jobs')
+        return scheduler.get_user_jobs(uid, job_name, jobstore)
 
 
 def event_listener(event):
@@ -102,14 +118,14 @@ def event_listener(event):
 if __name__ == '__main__':
     job_store = {
-        'default': SQLAlchemyJobStore(url=rpc_config.apscheduler_job_store)
+        
'default': CustomJobStore(url=str(rpc_config.apscheduler_job_store)) } apscheduler_excutors = { 'default': ThreadPoolExecutor(20), 'processpool': ProcessPoolExecutor(20) } - scheduler = CustomScheduler(jobstores=job_store, - excutors=apscheduler_excutors) + scheduler = CustomBackgroundScheduler(jobstores=job_store, + excutors=apscheduler_excutors) scheduler.add_listener(event_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_SUBMITTED) scheduler.start() diff --git a/rpyc_scheduler/scheduler.py b/rpyc_scheduler/scheduler.py index 8b69e6b..a049337 100644 --- a/rpyc_scheduler/scheduler.py +++ b/rpyc_scheduler/scheduler.py @@ -1,8 +1,8 @@ from __future__ import print_function - +from loguru import logger from datetime import datetime, timedelta import six - +import warnings from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.executors.base import MaxInstancesReachedError, BaseExecutor from apscheduler.util import (timedelta_seconds, TIMEOUT_MAX) @@ -20,7 +20,61 @@ STATE_PAUSED = 2 -class CustomScheduler(BackgroundScheduler): +class CustomBackgroundScheduler(BackgroundScheduler): + + def get_user_jobs(self, uid, job_name, jobstore=None, pending=None): + """ + 分页搜索jobs + + :param str|unicode jobstore: alias of the job store + :param bool pending: **DEPRECATED** + :rtype: list[Job] + + """ + if pending is not None: + warnings.warn('The "pending" option is deprecated -- get_jobs() always returns ' + 'scheduled jobs if the scheduler has been started and pending jobs ' + 'otherwise', DeprecationWarning) + + with self._jobstores_lock: + jobs = [] + if self.state == STATE_STOPPED: + for job, alias, replace_existing in self._pending_jobs: + if jobstore is None or alias == jobstore: + jobs.append(job) + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore is None or alias == jobstore: + jobs.extend(store.get_user_jobs(uid, job_name, jobstore)) + logger.debug(jobs) + return jobs + + def get_jobs(self, jobstore=None, pending=None): + """ + 分页搜索jobs + + :param str|unicode jobstore: alias of the job store + :param bool pending: **DEPRECATED** + :rtype: list[Job] + + """ + if pending is not None: + warnings.warn('The "pending" option is deprecated -- get_jobs() always returns ' + 'scheduled jobs if the scheduler has been started and pending jobs ' + 'otherwise', DeprecationWarning) + + with self._jobstores_lock: + jobs = [] + if self.state == STATE_STOPPED: + for job, alias, replace_existing in self._pending_jobs: + if jobstore is None or alias == jobstore: + jobs.append(job) + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore is None or alias == jobstore: + jobs.extend(store.get_all_jobs()) + return jobs + def _process_jobs(self): """ 修改_process_jobs方法,把remove_job全部替换为pause_job,当任务执行完成后改成暂停状态 diff --git a/rpyc_scheduler/tasks.py b/rpyc_scheduler/tasks.py index a978e9b..54f7248 100644 --- a/rpyc_scheduler/tasks.py +++ b/rpyc_scheduler/tasks.py @@ -1,12 +1,16 @@ +import os.path import subprocess import json -from typing import List, Dict, Any +import ansible_runner +from pathlib import Path +from pydantic import parse_obj_as +from typing import List, Dict, Any, Union from datetime import datetime from loguru import logger from sqlmodel import text -from utils import Channel +from utils import Channel, hosts_to_inventory from config import rpc_config -from models import engine +from models import engine, InventoryHost def local_executor(job_id, host, command): @@ -24,8 +28,77 @@ def 
local_executor(job_id, host, command): return status, (end_time - start_time).total_seconds(), channel.msg -def host_executor(job_id, host, command): - pass +class EventLogs: + """ + ansible runner的event_handler,用于把输出写入redis + """ + + def __init__(self, channel): + self.channel = channel + + def __call__(self, event): + logger.debug(event) + self.channel.send({'msg': event['stdout']}) + return True + + +def ansible_task(job_id, targets, ansible_args): + """ + 通过ansible runner执行ansible任务 + """ + start_time = datetime.now() + private_data_dir: Path = Path(f'/tmp/ansible/{job_id}') + if not private_data_dir.exists(): + private_data_dir.mkdir(parents=True) + logger.debug(f'job id:{job_id},task hosts:{targets},ansible_args:{ansible_args}') + hosts: List[InventoryHost] = [] + # 生成ansible inventory + with engine.connect() as conn: + sql = text( + "select id,name,ansible_host,ansible_port,ansible_user,ansible_password,ansible_ssh_private_key from host where id in :targets").bindparams( + targets=targets) + result = conn.execute(sql).fetchall() + logger.debug(result) + for row in result: + hosts.append( + InventoryHost(id=row[0], name=row[1], ansible_host=row[2], ansible_port=row[3], ansible_user=row[4], + ansible_password=row[5], ansible_ssh_private_key=row[6])) + ansible_inventory = hosts_to_inventory(hosts, private_data_dir) + logger.debug(ansible_inventory) + # playbook获取对应内容并写入文件 + if ansible_args['playbook']: + logger.debug('playbook任务') + project_dir = private_data_dir / 'project' + if not project_dir.exists(): + project_dir.mkdir(parents=True) + with engine.connect() as conn: + sql = text( + "select name,playbook from playbook where id = :playbook").bindparams( + playbook=ansible_args['playbook']) + result = conn.execute(sql).one() + (playbook_name, playbook_content) = result + logger.debug(f'playbook:{playbook_name}') + (project_dir / playbook_name).write_text(playbook_content) + ansible_args['playbook'] = playbook_name + # 执行任务,且日志实时写入redis + with Channel(rpc_config.redis, job_id=job_id) as channel: + runner = ansible_runner.run(private_data_dir=str(private_data_dir), inventory=ansible_inventory, + host_pattern='all', event_handler=EventLogs(channel), + **ansible_args) + run_logs = channel.msg + end_time = datetime.now() + # 日志写入数据库 + with engine.connect() as conn: + logger.debug(runner.stats) + sql = text( + "INSERT INTO job_logs (job_id,start_time,end_time,log,stats) values (:job_id,:start_time,:end_time,:log,:stats)") + conn.execute(sql, + {'job_id': job_id, + 'start_time': start_time, + 'end_time': end_time, + 'log': json.dumps(run_logs), + 'stats': json.dumps(runner.stats)}) + conn.commit() def run_command_with_channel(job_id=None, targets: List[str] = None, command=None): diff --git a/rpyc_scheduler/utils.py b/rpyc_scheduler/utils.py index c197311..5a04035 100644 --- a/rpyc_scheduler/utils.py +++ b/rpyc_scheduler/utils.py @@ -1,6 +1,9 @@ import redis +import os from loguru import logger -from typing import Dict, Any +from pathlib import Path +from typing import Dict, Any, List +from models import InventoryHost class Channel: @@ -37,3 +40,28 @@ def __exit__(self, exc_type, exc_val, exc_tb): def close(self, ): self.conn.close() + + +def hosts_to_inventory(hosts: List[InventoryHost], private_data_dir: Path) -> dict: + """ + 转换hosts为inventory格式的数据 + :params hosts: + :params private_data_dir:ansible-runner的环境目录,其中保存runner执行过程的所有数据 + """ + inventory = {} + logger.debug(hosts) + for host in hosts: + inventory[host.name] = { + "ansible_host": host.ansible_host, + "ansible_port": 
host.ansible_port, + "ansible_user": host.ansible_user, + } + if host.ansible_password: + inventory[host.name]["ansible_password"] = host.ansible_password + if host.ansible_ssh_private_key: + # 私钥保存到本地文件,并指向对应路径 + private_key_file = private_data_dir / f"{host.ansible_host}" + private_key_file.write_text(host.ansible_ssh_private_key) + os.chmod(str(private_key_file), 0o600) + inventory[host.name]["ansible_ssh_private_key_file"] = str(private_key_file) + return {'all': {'hosts': inventory}} diff --git a/server/alembic/env.py b/server/alembic/env.py index b93a82e..62047fe 100644 --- a/server/alembic/env.py +++ b/server/alembic/env.py @@ -22,9 +22,21 @@ from models import * from models.internal import * + target_metadata = SQLModel.metadata +def include_name(name, type_, parent_names): + """ + 排除某些表 + """ + exclude_tables = ['apscheduler_jobs', 'casbin_rule'] + if name in exclude_tables and type_ == 'table': + return False + else: + return True + + # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") @@ -49,6 +61,7 @@ def run_migrations_offline() -> None: target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, + include_name=include_name ) with context.begin_transaction(): @@ -70,7 +83,7 @@ def run_migrations_online() -> None: with connectable.connect() as connection: context.configure( - connection=connection, target_metadata=target_metadata, compare_type=True + connection=connection, target_metadata=target_metadata, compare_type=True, include_name=include_name ) with context.begin_transaction(): diff --git a/server/common/database.py b/server/common/database.py index 90b4865..158c4ba 100644 --- a/server/common/database.py +++ b/server/common/database.py @@ -1,6 +1,7 @@ import redis.asyncio as redis +import rpyc from sqlmodel import create_engine, SQLModel, Session, select -from ..settings import engine +from server.settings import settings, engine def init_db(): @@ -17,6 +18,11 @@ def get_session(): yield session +def get_rpyc(): + with rpyc.connect(**settings.rpyc_config) as conn: + yield conn + + def get_or_create(session: Session, model, **kwargs): """ 检查表中是否存在对象,如果不存在就创建 diff --git a/server/common/utils.py b/server/common/utils.py index 9150880..9c2c2c9 100644 --- a/server/common/utils.py +++ b/server/common/utils.py @@ -5,7 +5,6 @@ from loguru import logger from sqlmodel import SQLModel from ..models.internal.menu import MenusWithChild -from ..crud.internal import job_log T = TypeVar('T', bound=SQLModel) @@ -18,7 +17,7 @@ class Tree(Generic[T]): def __init__(self, tree_list: List[T], model: Type[T]): self.tree_list = [] for tree in tree_list: - self.tree_list.append(model(**tree.dict())) + self.tree_list.append(model(**tree.model_dump())) def get_root_node(self): root_tree = [] diff --git a/server/crud/base.py b/server/crud/base.py index bdc1dfc..14a45f6 100644 --- a/server/crud/base.py +++ b/server/crud/base.py @@ -2,11 +2,13 @@ from copy import deepcopy from loguru import logger from pydantic import BaseModel -from typing import TypeVar, Generic, List, Type, Any, Dict, Optional -from sqlmodel import Session, select, SQLModel, func, desc +from typing import TypeVar, Generic, List, Type, Any, Dict, Optional, Union +from sqlmodel import Session, select, SQLModel, func, desc, join +from sqlalchemy.orm.exc import NoResultFound from ..models.internal import Pagination ModelType = TypeVar('ModelType', bound=SQLModel) +JoinType = TypeVar('JoinType', 
bound=SQLModel) class CRUDBase(Generic[ModelType]): @@ -19,6 +21,12 @@ def get(self, db: Session, id: int): def get_multi(self, db: Session, *, skip: int = 0, limit: int = 100) -> List[ModelType]: return db.exec(select(self.model).offset(skip).limit(limit)).all() + def get_multi_by_ids(self, db: Session, ids: List[int]) -> List[ModelType]: + """ + 通过数组id列表清单,查找符合的数据 + """ + return db.exec(select(self.model).where(self.model.id.in_(ids))).all() + def insert(self, db: Session, obj_in): # obj_in_data = jsonable_encoder(obj_in) # db_obj = self.model(**obj_in_data) @@ -79,8 +87,29 @@ def _make_search(self, sql, search: BaseModel = None, filter_type: Optional[Dict sql = sql.where(getattr(self.model, key) > q[key]) elif filter_type[key] == 'ge': sql = sql.where(getattr(self.model, key) >= q[key]) + elif filter_type[key] == 'in': + sql = sql.where(getattr(self.model, key).in_(q[key])) return sql + def _make_pagination(self, sql, subquery, search: Pagination, order_col: str): + """ + 创建分页,sql为外层,subquery用于查找最近的id + """ + if search.model == 'desc': + subquery = subquery.order_by(desc(getattr(self.model, order_col))) + else: + subquery = subquery.order_by(getattr(self.model, order_col)) + subquery = subquery.offset( + (search.page - 1) * search.page_size).limit(1).scalar_subquery() + # sql查询从subquery找到的order_col开始,并limit限制数量 + if search.model == 'desc': + sql = sql.where(getattr(self.model, order_col) <= subquery).order_by( + desc(getattr(self.model, order_col))).limit( + search.page_size) + else: + sql = sql.where(getattr(self.model, order_col) >= subquery).limit(search.page_size) + return sql, subquery + def search(self, session: Session, search: Pagination, filter_type: Optional[Dict[str, str]] = None, columns: Optional[List] = None, order_col: Optional[str] = 'id'): """ @@ -97,20 +126,12 @@ def search(self, session: Session, search: Pagination, filter_type: Optional[Dic else: sql = select(*columns) sql = self._make_search(sql, search.search, filter_type) + logger.debug(sql) + # subquery查询找到order_col的起始值 subquery = select(getattr(self.model, order_col)) + logger.debug(subquery) subquery = self._make_search(subquery, search.search, filter_type) - if search.model == 'desc': - subquery = subquery.order_by(desc(getattr(self.model, order_col))) - else: - subquery = subquery.order_by(getattr(self.model, order_col)) - subquery = subquery.offset( - (search.page - 1) * search.page_size).limit(1).scalar_subquery() - if search.model == 'desc': - sql = sql.where(getattr(self.model, order_col) <= subquery).order_by( - desc(getattr(self.model, order_col))).limit( - search.page_size) - else: - sql = sql.where(getattr(self.model, order_col) >= subquery).limit(search.page_size) + sql, subquery = self._make_pagination(sql, subquery, search, order_col) logger.debug(sql) results = session.exec(sql).all() return results @@ -126,4 +147,8 @@ def search_total(self, session: Session, q: BaseModel, filter_type: Optional[Dic sql = select(func.count(self.model.id)) sql = self._make_search(sql, q, filter_type) logger.debug(str(sql)) - return session.execute(sql).scalar() + try: + result = session.exec(sql).one() + except NoResultFound: + result = 0 + return result diff --git a/server/crud/internal/__init__.py b/server/crud/internal/__init__.py index 56f1406..9e78b15 100644 --- a/server/crud/internal/__init__.py +++ b/server/crud/internal/__init__.py @@ -2,4 +2,6 @@ from .roles import role from .menu import menu from .dictonary import data_dict, dict_item -from .job import job_log +from .host import host, group +from .job 
import job_logs
+from .playbook import playbook
diff --git a/server/crud/internal/host.py b/server/crud/internal/host.py
new file mode 100644
index 0000000..2d31a2a
--- /dev/null
+++ b/server/crud/internal/host.py
@@ -0,0 +1,69 @@
+from typing import List, Optional, Dict, Union, Type
+
+import sqlalchemy.orm.exc
+from loguru import logger
+from pydantic import BaseModel
+from sqlmodel import select, Session, func
+
+from ...models.internal import Pagination
+from ...models.internal.host import Host, Group, HostGroup
+from ..base import CRUDBase
+
+
+class CRUDHost(CRUDBase[Host]):
+    def _host_search(self, sql, q: BaseModel):
+        """
+        Build the host search statement and return the sql
+        """
+        logger.debug(q)
+        sql = sql.join(HostGroup, self.model.id == HostGroup.host_id)
+        if q.name is not None:
+            sql = sql.where(self.model.name.like('%' + q.name + '%'))
+        if q.ansible_host is not None:
+            sql = sql.where(self.model.ansible_host.like('%' + q.ansible_host + '%'))
+        if q.group_id is not None:
+            sub_query1 = select(Group.id).where(Group.id == q.group_id)
+            # descendant groups carry the selected group in their ancestors path
+            if q.ancestors is not None:
+                sub_query2 = select(Group.id).where(Group.ancestors.like(q.ancestors + ',' + str(q.group_id) + ',%'))
+            else:
+                sub_query2 = select(Group.id).where(Group.ancestors.like(str(q.group_id)))
+            sql = sql.where(HostGroup.group_id.in_(sub_query1.union_all(sub_query2)))
+
+        sql = sql.group_by(self.model.id).order_by(self.model.id)
+        return sql
+
+    def search_total(self, session: Session, q: BaseModel, filter_type: Optional[Dict[str, str]] = None):
+        sql = select(self.model)
+        sql = self._host_search(sql, q)
+        sql = sql.subquery()
+        count_sql = select(func.count(sql.c.id)).select_from(sql)
+        logger.debug(count_sql)
+        try:
+            result = session.exec(count_sql).one()
+        except sqlalchemy.orm.exc.NoResultFound:
+            result = 0
+        logger.debug(result)
+        return result
+
+    def search(self, session: Session, search: Pagination, filter_type: Optional[Dict[str, str]] = None,
+               columns: Optional[List] = None, order_col: Optional[str] = 'id'):
+        """
+        Paged query backing the host management page
+        """
+        sql = select(self.model)
+        sql = self._host_search(sql, search.search)
+        sql = sql.limit(search.page_size).offset((search.page - 1) * search.page_size)
+        logger.debug(sql)
+        results = session.exec(sql).all()
+        logger.debug(results)
+        return results
+
+
+class CRUDGroup(CRUDBase[Group]):
+    def search_groups(self, session: Session) -> List[Group]:
+        sql = select(self.model)
+        return session.exec(sql).all()
+
+
+host = CRUDHost(Host)
+group = CRUDGroup(Group)
diff --git a/server/crud/internal/job.py b/server/crud/internal/job.py
index 67c8254..91fbb97 100644
--- a/server/crud/internal/job.py
+++ b/server/crud/internal/job.py
@@ -1,13 +1,12 @@
 from typing import Union
 from loguru import logger
 from sqlmodel import select, Session
-from ...models.internal.job import JobLog
+from ...models.internal.job import JobLogs
 from ..base import CRUDBase
-from .roles import role
 
 
-class CRUDJobLog(CRUDBase[JobLog]):
+class CRUDJobLogs(CRUDBase[JobLogs]):
     pass
 
 
-job_log = CRUDJobLog(JobLog)
+job_logs = CRUDJobLogs(JobLogs)
diff --git a/server/crud/internal/playbook.py b/server/crud/internal/playbook.py
new file mode 100644
index 0000000..dc6fc61
--- /dev/null
+++ b/server/crud/internal/playbook.py
@@ -0,0 +1,22 @@
+from typing import Union
+from loguru import logger
+from sqlmodel import select, Session
+from ...models.internal.playbook import Playbook
+from ..base import CRUDBase
+
+
+class CRUDPlaybook(CRUDBase[Playbook]):
+
+    def 
query_playbooks(self, session: Session, query: Union[str, None] = None): + """ + 通过名称查询playbook + """ + sql = select(Playbook) + if query: + sql = sql.where(Playbook.name.like(f'%{query}%')) + return session.exec(sql).all() + + +playbook = CRUDPlaybook(Playbook) diff --git a/server/crud/internal/user.py b/server/crud/internal/user.py index 51206a8..199308c 100644 --- a/server/crud/internal/user.py +++ b/server/crud/internal/user.py @@ -19,7 +19,7 @@ def check_name(self, session: Session, name: str): return session.exec(sql).one() def insert(self, session: Session, user_info: UserInfo) -> User: - updated_user = User(**user_info.user.dict()) + updated_user = User(**user_info.user.model_dump()) user_roles = role.get_roles_by_id(session, user_info.roles) updated_user.roles = user_roles return super(CRUDUser, self).insert(session, updated_user) diff --git a/server/main.py b/server/main.py index 420e821..3e57b34 100644 --- a/server/main.py +++ b/server/main.py @@ -1,7 +1,7 @@ from fastapi import FastAPI, Depends from loguru import logger from .common.log import init_logging -from .routers.internal import login, user, menu, roles, dictonary, job +from .routers.internal import login, user, menu, roles, dictonary, job, host, playbook from .common.security import auth_check init_logging() @@ -14,6 +14,8 @@ app.include_router(roles.router, tags=['角色管理']) app.include_router(dictonary.router, tags=['数据字典']) app.include_router(job.router, tags=['任务管理']) +app.include_router(host.router, tags=['主机管理']) +app.include_router(playbook.router, tags=['playbook管理']) @app.on_event("startup") diff --git a/server/models/internal/__init__.py b/server/models/internal/__init__.py index c42a697..d18d34d 100644 --- a/server/models/internal/__init__.py +++ b/server/models/internal/__init__.py @@ -2,7 +2,9 @@ from .menu import Menu from .role import Role, RoleMenu from .dictonary import DataDict, DictItem -from .job import Job, JobLog +from .host import Host, Group +from .job import JobLogs +from .playbook import Playbook from pydantic import BaseModel from typing import TypeVar, Generic, Optional diff --git a/server/models/internal/host.py b/server/models/internal/host.py new file mode 100644 index 0000000..d5d2af0 --- /dev/null +++ b/server/models/internal/host.py @@ -0,0 +1,62 @@ +from typing import List, Union +from sqlmodel import SQLModel, Field, Column, Text, Integer, String, Relationship + + +class HostGroup(SQLModel, table=True): + """ + 通过中间表实现:主机-组的对应关系 + """ + __tablename__ = 'host_group' + host_id: int = Field(foreign_key="host.id", primary_key=True, nullable=False) + group_id: int = Field(foreign_key="group.id", primary_key=True, nullable=False) + + +class Host(SQLModel, table=True): + __tablename__ = 'host' + id: int = Field(sa_column=Column('id', Integer, primary_key=True, autoincrement=True)) + name: str = Field(sa_column=Column(String(50), unique=True, nullable=False, comment='主机名')) + ansible_host: str = Field(sa_column=Column(String(50), nullable=False, comment='主机地址')) + ansible_port: int = Field(sa_column=Column(Integer, default=22, nullable=False, comment='ssh端口')) + ansible_user: str = Field(sa_column=Column(String(50), nullable=True, default=None, comment='ssh用户名')) + ansible_password: str = Field(sa_column=Column(String(50), default=None, comment='ssh密码')) + ansible_ssh_private_key: str = Field( + sa_column=Column('ansible_ssh_private_key', Text, default=None, nullable=True, comment='私钥')) + desc: str = Field(sa_column=Column(String(100), default=None, nullable=True, comment='描述')) + groups: 
List['Group'] = Relationship(back_populates='hosts', link_model=HostGroup) + + +class Group(SQLModel, table=True): + __tablename__ = 'group' + id: int = Field(sa_column=Column('id', Integer, primary_key=True, autoincrement=True)) + name: str = Field(sa_column=Column(String(50), nullable=False, comment='组名')) + parent_id: int = Field(sa_column=Column(Integer, default=None, nullable=True, comment='父ID')) + ancestors: Union[str, None] = Field( + sa_column=Column(String(100), default=None, nullable=True, comment='祖先ID列表')) + hosts: List['Host'] = Relationship(back_populates='groups', link_model=HostGroup) + + +class GroupWithChild(SQLModel): + id: int + name: str + parent_id: Union[int, None] + ancestors: Union[str, None] + children: List['GroupWithChild'] = [] + + +class CreateHost(SQLModel): + id: Union[int, None] = None + name: str + groups: List[int] + ansible_host: str + ansible_port: int + ansible_user: str + ansible_password: Union[str, None] = None + ansible_ssh_private_key: Union[str, None] + desc: Union[str, None] + + +class HostWithIp(SQLModel): + name: Union[str, None] = None + ansible_host: Union[str, None] = None + group_id: Union[int, None] = None + ancestors: Union[str, None] diff --git a/server/models/internal/job.py b/server/models/internal/job.py index 020277d..8a63d80 100644 --- a/server/models/internal/job.py +++ b/server/models/internal/job.py @@ -1,10 +1,9 @@ from enum import Enum from typing import Union, List, Tuple, Dict, Any, Optional -from sqlmodel import SQLModel, Field, Column, Relationship, Integer, Unicode, LargeBinary, JSON +from sqlmodel import SQLModel, Field, Column, Relationship, Integer, String, LargeBinary, JSON, Unicode from sqlalchemy.dialects import mysql from datetime import datetime from pydantic import BaseModel -from .relationships import UserJob class CronJobArgs(BaseModel): @@ -18,53 +17,48 @@ class TriggerEnum(str, Enum): cron = 'cron' -class JobAdd(BaseModel): - id: Optional[str] = None - name: str - targets: List[str] - trigger: TriggerEnum - trigger_args: Union[CronJobArgs, str] = None - command: str - type: str - +class TriggerArgs(BaseModel): + run_date: Optional[str] = Field(default=None, description="date类型触发器设定执行时间,None为立即执行") + cron: Optional[str] = Field(default=None, description="cron类型触发器参数") + start_date: Optional[str] = Field(default=None, description="cron类型触发器开始时间") + end_date: Optional[str] = Field(default=None, description="cron类型触发器结束时间") -class Job(SQLModel, table=True): - """ - 此表同步apschedule中的建表语句,如果没有,则apscheduler会自动创建对应表 - """ - __tablename__ = 'apscheduler_jobs' - id: str = Field(sa_column=Column('id', Unicode(191), primary_key=True)) - next_run_time: float = Field(sa_column=Column('next_run_time', mysql.DOUBLE, index=True)) - job_state: bytes = Field(sa_column=Column('job_state', LargeBinary, nullable=False)) - logs: List["JobLog"] = Relationship(back_populates="job") - user: "User" = Relationship(back_populates="jobs", link_model=UserJob) - -class JobLog(SQLModel, table=True): - __tablename__ = 'job_log' +class JobAdd(BaseModel): + id: Optional[str] = Field(default=None, description="任务ID") + name: str = Field(description="任务名称") + trigger: TriggerEnum = Field(description="触发器类型") + trigger_args: TriggerArgs = Field(description="触发器") + targets: List[int] = Field(description="执行任务的主机") + ansible_args: Dict[str, Any] = Field(default=None, description="ansible任务参数") + + +# class Job(SQLModel, table=True): +# """ +# 此表同步apschedule中的建表语句,如果没有,则apscheduler会自动创建对应表 +# """ +# __tablename__ = 'jobs' +# id: str = 
Field(sa_column=Column('id', autoincrement=True, primary_key=True)) +# name: str = Field(sa_column=Column('name', String(50), nullable=False, unique=True)) +# create_time: float = Field(sa_column=Column('create_time', mysql.DOUBLE, index=True)) +# update_time: float = Field(sa_column=Column('update_time', mysql.DOUBLE, index=True)) +# job_id: str = Field(sa_column=Column('job_id', Unicode(255))) +# job_logs: List["JobLog"] = Relationship(back_populates="job") +# +# +class JobLogs(SQLModel, table=True): + __tablename__ = 'job_logs' id: Optional[int] = Field(sa_column=Column('id', Integer, primary_key=True, autoincrement=True)) - status: int = Field(default=0, sa_column=Column(mysql.TINYINT, comment='执行命令返回状态')) + job_id: Optional[str] = Field(sa_column=Column('job_id', String(191), index=True)) start_time: datetime = Field(default_factory=datetime.now, sa_column_kwargs={'comment': '任务开始时间'}) end_time: datetime = Field(default=datetime.now, sa_column_kwargs={'comment': '任务结束时间'}) - log: JSON = Field(sa_column=Column(JSON, comment='执行日志')) - job_id: Optional[str] = Field(default=None, foreign_key="apscheduler_jobs.id") - job: Optional[Job] = Relationship(back_populates="logs") - - class Config: - arbitrary_types_allowed = True + log: str = Field(sa_column=Column(mysql.TEXT, comment='执行日志')) + stats: str = Field(sa_column=Column(mysql.TEXT, comment='任务返回状态')) class JobSearch(SQLModel): job_name: Optional[str] = None - - -class JobLogs(SQLModel): - id: int - status: int - start_time: datetime - end_time: datetime - log: Any - job_id: str + job_trigger: Optional[str] = None class JobLogSearch(BaseModel): diff --git a/server/models/internal/menu.py b/server/models/internal/menu.py index 5785ed1..e3be050 100644 --- a/server/models/internal/menu.py +++ b/server/models/internal/menu.py @@ -6,13 +6,13 @@ class MenuBase(SQLModel): id: Optional[int] = Field(sa_column=Column('id', Integer, primary_key=True, autoincrement=True)) name: str = Field(sa_column=Column(String(20), nullable=False, comment='菜单名')) - icon: Optional[str] = Field(sa_column=Column(String(50), default=None, comment='Icon图标')) + icon: Optional[str] = Field(default=None, sa_column=Column(String(50), default=None, comment='Icon图标')) path: Optional[str] = Field(sa_column=Column(String(100), default=None, comment='路径')) component: Optional[str] = Field(sa_column=Column(String(50), default=None, comment='组件')) auth: Optional[str] = Field(sa_column=Column(String(50), default=None, comment='授权标识')) type: str = Field(sa_column=Column(String(10), nullable=False, comment='类型')) parent_id: Optional[int] = Field(sa_column=Column(Integer, default=None, comment='父级ID')) - sort: Optional[float] = Field(sa_column=Column(Float, default=None, comment='菜单排序')) + sort: Optional[float] = Field(default=None, sa_column=Column(Float, default=None, comment='菜单排序')) enable: bool = Field(sa_column=Column(Boolean, default=True, comment='启用')) @@ -28,4 +28,4 @@ class MenusWithChild(MenuBase): # 底部导入,且延迟注释 from .role import Role -MenusWithChild.update_forward_refs() +MenusWithChild.model_rebuild() diff --git a/server/models/internal/playbook.py b/server/models/internal/playbook.py new file mode 100644 index 0000000..1f90727 --- /dev/null +++ b/server/models/internal/playbook.py @@ -0,0 +1,14 @@ +from typing import List, Union +from sqlmodel import SQLModel, Field, Column, Integer, String, TEXT + + +class Playbook(SQLModel, table=True): + id: Union[int, None] = Field(default=None, sa_column=Column('id', Integer, primary_key=True, autoincrement=True)) + name: str = 
Field(sa_column=Column(String(50), nullable=False, comment='playbook名称')) + playbook: str = Field(sa_column=Column(TEXT, nullable=False, comment='playbook文件')) + desc: Union[str, None] = Field(default=None, + sa_column=Column(String(255), default=None, nullable=True, comment='描述')) + + +class PlaybookSearch(SQLModel): + name: Union[str, None] = None diff --git a/server/models/internal/relationships.py b/server/models/internal/relationships.py index 37fcf55..af50d9c 100644 --- a/server/models/internal/relationships.py +++ b/server/models/internal/relationships.py @@ -15,10 +15,10 @@ class UserRole(SQLModel, table=True): role_id: int = Field(foreign_key="roles.id", primary_key=True) -class UserJob(SQLModel, table=True): - """ - 通过中间表实现:用户-任务的对应关系 - """ - __tablename__ = 'user_job' - user_id: int = Field(foreign_key="user.id", primary_key=True, nullable=False) - job_id: str = Field(Unicode(191), foreign_key="apscheduler_jobs.id", primary_key=True, nullable=False) +# class UserJob(SQLModel, table=True): +# """ +# 通过中间表实现:用户-任务的对应关系 +# """ +# __tablename__ = 'user_job' +# user_id: int = Field(foreign_key="user.id", primary_key=True, nullable=False) +# job_id: str = Field(Unicode(191), foreign_key="apscheduler_jobs.id", primary_key=True, nullable=False) diff --git a/server/models/internal/user.py b/server/models/internal/user.py index 113a7fe..34ac849 100644 --- a/server/models/internal/user.py +++ b/server/models/internal/user.py @@ -1,7 +1,7 @@ from typing import Optional, List, Union, Literal, TYPE_CHECKING from pydantic import BaseModel from sqlmodel import SQLModel, Field, Relationship, Column, Integer, Boolean, String -from .relationships import UserRole, UserJob +from .relationships import UserRole from .role import Role if TYPE_CHECKING: @@ -18,7 +18,6 @@ class User(SQLModel, table=True): email: Union[str, None] = Field(sa_column=Column(String(20), default=None, comment='邮箱')) password: Optional[str] = Field(sa_column=Column(String(50), comment='密码')) roles: List['Role'] = Relationship(back_populates="users", link_model=UserRole) - jobs: List['Job'] = Relationship(back_populates="user", link_model=UserJob) class UserWithOutPasswd(SQLModel): diff --git a/server/routers/internal/dictonary.py b/server/routers/internal/dictonary.py index 34f4751..476577f 100644 --- a/server/routers/internal/dictonary.py +++ b/server/routers/internal/dictonary.py @@ -14,8 +14,8 @@ @router.post('/dict/item/search', summary="字典列表查询", response_model=ApiResponse[SearchResponse[DictRead]]) async def search_items(search: Pagination[DictItemSearch], session: Session = Depends(get_session)): filter_type = DictItemSearchFilter(dict_id='eq', label='like', enable='eq', value='like') - total = crud.internal.dict_item.search_total(session, search.search, filter_type.dict()) - items: List[DictRead] = crud.internal.dict_item.search(session, search, filter_type.dict()) + total = crud.internal.dict_item.search_total(session, search.search, filter_type.model_dump()) + items: List[DictRead] = crud.internal.dict_item.search(session, search, filter_type.model_dump()) item_list = [DictRead.from_orm(item) for item in items] return ApiResponse( data={ @@ -28,7 +28,7 @@ async def search_items(search: Pagination[DictItemSearch], session: Session = De @router.post('/dict/item', summary="添加字典字段", response_model=ApiResponse[DictRead]) async def add_dict_item(dict_item: DictUpdate, session: Session = Depends(get_session)): - new_item = crud.internal.dict_item.insert(session, DictItem(**dict_item.dict())) + new_item = 
crud.internal.dict_item.insert(session, DictItem(**dict_item.model_dump())) return ApiResponse( data=DictRead.from_orm(new_item) ) @@ -83,8 +83,8 @@ async def add_dict(data_dict: DataDict, session: Session = Depends(get_session)) summary="查询数据字典") async def get_dicts(search: Pagination[DataDictSearch], session: Session = Depends(get_session)): filter_type = DataDictSearch(name='like', code='like') - total = crud.internal.data_dict.search_total(session, search.search, filter_type.dict()) - dicts: List[DataDict] = crud.internal.data_dict.search(session, search, filter_type.dict()) + total = crud.internal.data_dict.search_total(session, search.search, filter_type.model_dump()) + dicts: List[DataDict] = crud.internal.data_dict.search(session, search, filter_type.model_dump()) return ApiResponse( data={ 'total': total, diff --git a/server/routers/internal/host.py b/server/routers/internal/host.py index 2333fbf..940751a 100644 --- a/server/routers/internal/host.py +++ b/server/routers/internal/host.py @@ -1,15 +1,140 @@ -from typing import Optional, List -from fastapi import APIRouter, Depends, status, HTTPException +from typing import Optional, List, Union +from typing_extensions import Annotated +from uuid import uuid4 + +from loguru import logger +from fastapi import APIRouter, Depends, status, HTTPException, Query from sqlmodel import Session -from server.models.internal.host import CreateGroup +from sqlalchemy.exc import IntegrityError +from server.models.internal.host import Host, Group, GroupWithChild, CreateHost, HostWithIp +from server.common.utils import Tree +from server.models.internal import Pagination from ...common.response_code import ApiResponse, SearchResponse -from ...common.database import get_session +from ...common.database import get_session, get_rpyc from ... 
import crud router = APIRouter(prefix='/api') @router.post('/host/group', summary='添加主机分组') -async def create_group(group: CreateGroup, session: Session = Depends(get_session)): - crud.internal.group.insert(session, group) +async def create_group(group: Group, session: Session = Depends(get_session)): + try: + crud.internal.group.insert(session, Group(**group.model_dump(exclude_unset=True))) + except IntegrityError as e: + logger.error(f"add Group Error:{str(e)}") + return ApiResponse( + code=500, + message="组名已存在" + ) + return ApiResponse() + + +@router.put('/host/group', summary='更新主机分组') +async def update_group(group: Group, session: Session = Depends(get_session)): + crud.internal.group.update(session, crud.internal.group.get(session, group.id), + Group(**group.model_dump(exclude_unset=True))) + return ApiResponse() + + +@router.get('/host/group', summary='获取所有组') +async def get_groups(session: Session = Depends(get_session)): + groups: List[Group] = crud.internal.group.search_groups(session) + nest_groups = Tree[GroupWithChild](groups, GroupWithChild).build() + return ApiResponse( + data=nest_groups + ) + + +@router.delete('/host/group/{group_id}', summary='删除组') +async def delete_group(group_id: int, session: Session = Depends(get_session)): + group: Group = crud.internal.group.get(session, group_id) + if len(group.hosts) > 0: + return ApiResponse( + code=500, + message='此分组存在主机,请先迁移主机' + ) + else: + crud.internal.group.delete(session, group_id) + return ApiResponse() + + +@router.get('/host/targets/', summary='获取多个主机信息', response_model=ApiResponse[List[Host]]) +async def get_hosts(ids: Annotated[List[int], Query()] = [], session: Session = Depends(get_session)): + logger.debug(ids) + hosts: List[Host] = crud.internal.host.get_multi_by_ids(session, ids) + hosts_list = [host.model_dump(exclude={'ansible_password', 'ansible_ssh_private_key'}) for host + in hosts] + return ApiResponse( + data=hosts_list + ) + + +@router.post('/host', summary='新增主机') +async def create_host(host: CreateHost, session: Session = Depends(get_session)): + groups = crud.internal.group.get_multi_by_ids(session, host.groups) + crud.internal.host.insert(session, Host(**host.model_dump(exclude_unset=True, exclude={'groups'}), + groups=groups)) + return ApiResponse() + + +@router.put('/host', summary='更新主机') +async def update_host(host: CreateHost, session: Session = Depends(get_session)): + groups = crud.internal.group.get_multi_by_ids(session, host.groups) + db_obj = crud.internal.host.get(session, host.id) + db_obj = crud.internal.host.update(session, db_obj, + Host(**host.model_dump(exclude_unset=True, exclude={'groups'}))) + db_obj.groups = groups + session.add(db_obj) + session.commit() + session.refresh(db_obj) + logger.debug(db_obj) + return ApiResponse() + + +@router.delete('/host/{host_id}', summary='删除主机') +async def delete_host(host_id: int, session: Session = Depends(get_session)): + crud.internal.host.delete(session, host_id) + return ApiResponse() + + +@router.post('/host/search', summary="获取主机列表", response_model=ApiResponse[SearchResponse[Host]]) +async def get_all_user(search: Pagination[HostWithIp], + session: Session = Depends(get_session)): + """ + :param search: Pagination实例,包含搜索的所有参数 偏移页面 + :param session: + :return: + """ + total = crud.internal.host.search_total(session, search.search) + logger.debug(total) + hosts: List[Host] = crud.internal.host.search(session, search) + hosts_list = [host.model_dump(exclude={'ansible_password', 'ansible_ssh_private_key'}) for host + in hosts] + 
logger.debug(hosts_list) + return ApiResponse( + data={ + 'total': total, + 'data': hosts_list + } + ) + + +@router.get('/host/{id}', summary='获取主机信息') +async def get_host(id: int, session: Session = Depends(get_session)): + host: Host = crud.internal.host.get(session, id) + host_groups = [group.id for group in host.groups] + return ApiResponse( + data={**host.model_dump(exclude={'ansible_password', 'ansible_ssh_private_key'}), 'groups': host_groups} + ) + + +@router.get('/host/ping/{host_id}', summary="检查主机可用性") +async def host_ping_check(host_id: int, session: Session = Depends(get_session), rpyc=Depends(get_rpyc)): + host: Host = crud.internal.host.get(session, host_id) + inventory = {'all': {'hosts': {host.name: {**host.model_dump()}}}} + logger.debug(inventory) + job_id = uuid4().hex + job = rpyc.root.add_job('scheduler-server:ansible_task', trigger='date', id=job_id, + kwargs={'job_id': job_id, 'targets': [host.id, ], + 'ansible_args': {'module': 'ping'}}) return ApiResponse() diff --git a/server/routers/internal/job.py b/server/routers/internal/job.py index af19081..cac6976 100644 --- a/server/routers/internal/job.py +++ b/server/routers/internal/job.py @@ -1,19 +1,15 @@ import rpyc -import re import anyio -from datetime import datetime from uuid import uuid4 from typing import List, Any, Dict from sqlmodel import Session, text -from apscheduler.job import Job -from apscheduler.triggers.interval import IntervalTrigger from apscheduler.triggers.date import DateTrigger from apscheduler.triggers.cron import CronTrigger from fastapi import APIRouter, Depends, WebSocket, Request from loguru import logger from server.settings import settings -from server.models.internal.job import JobAdd, JobLog -from server.common.database import get_session, get_redis +from server.models.internal.job import JobAdd +from server.common.database import get_session, get_redis, get_rpyc from server.common.response_code import ApiResponse, SearchResponse from server.common.utils import get_task_logs from server.models.internal import Pagination @@ -21,20 +17,20 @@ from server import crud from server.common.dep import get_uid -router = APIRouter(prefix='/api/jobs') - +# if TYPE_CHECKING: +# from apscheduler.job import Job -def cron_to_dict(cron): - cron_list = re.split(r'\s+', cron) - return {'minute': cron_list[0], 'hour': cron_list[1], 'day': cron_list[2], 'month': cron_list[3], - 'day_of_week': cron_list[4]} +router = APIRouter(prefix='/api/jobs') -@router.get('/switch/{job_id}', summary='切换任务状态') -async def switch_job(job_id: str, ): +@router.get('/switch/{job_id}', summary='任务状态切换') +async def switch_job(job_id: str, status: int): try: conn = rpyc.connect(**settings.rpyc_config) - conn.root.switch_job(job_id) + if status == 1: + conn.root.resume_job(job_id) + else: + conn.root.pause_job(job_id) except ValueError as e: logger.warning(e) return ApiResponse( @@ -44,38 +40,11 @@ async def switch_job(job_id: str, ): return ApiResponse() -@router.get('/resume/{job_id}', summary='恢复任务') -async def resume_job(job_id: str): - try: - conn = rpyc.connect(**settings.rpyc_config) - conn.root.resume_job(job_id) - except Exception as e: - logger.warning(e) - return ApiResponse( - code=500, - message='恢复任务出错,请联系管理员!' 
- ) - return ApiResponse() - - @router.delete('/{job_id}', summary='删除任务', response_model=ApiResponse[str]) async def delete_job(job_id: str, uid: int = Depends(get_uid), session: Session = Depends(get_session)): - sql = text("select job_id from user_job where user_id=:uid") - user_jobs = tuple([job[0] for job in session.execute(sql, {'uid': uid})]) - logger.debug(user_jobs) - if job_id not in user_jobs: - return ApiResponse( - code=500, - message='用户无权限操作此任务!' - ) try: conn = rpyc.connect(**settings.rpyc_config) conn.root.pause_job(job_id) - delete_user_job = text("delete from user_job where job_id=:job_id") - session.execute(delete_user_job, {'job_id': job_id}) - delete_job_logs = text("delete from job_log where job_id=:job_id") - session.execute(delete_job_logs, {'job_id': job_id}) - session.commit() conn.root.remove_job(job_id) except Exception as e: logger.warning(e) @@ -89,17 +58,11 @@ async def delete_job(job_id: str, uid: int = Depends(get_uid), session: Session @router.put('/', summary='修改任务', response_model=ApiResponse[str]) async def modify_job(job: JobAdd, session: Session = Depends(get_session)): logger.debug(job) - if job.trigger == 'cron': - trigger_args = job.trigger_args.dict() - trigger_args.update(cron_to_dict(job.trigger_args.cron)) - del trigger_args['cron'] - trigger = CronTrigger(**trigger_args) - elif job.trigger == 'date': - trigger = DateTrigger(run_date=job.trigger_args) try: conn = rpyc.connect(**settings.rpyc_config) - conn.root.modify_job(job.id, kwargs={'job_id': job.id, 'targets': job.targets, 'command': job.command}, - name=job.name, trigger=trigger) + conn.root.modify_job(job.id, + kwargs={'job_id': job.id, 'targets': job.targets, 'ansible_args': job.ansible_args}, + name=job.name, trigger=job.trigger, trigger_args=job.trigger_args.model_dump()) except Exception as e: logger.warning(e) return ApiResponse( @@ -112,24 +75,17 @@ async def modify_job(job: JobAdd, session: Session = Depends(get_session)): @router.post('/', summary='添加任务', response_model=ApiResponse[str]) async def add_job(job: JobAdd, uid: int = Depends(get_uid), session: Session = Depends(get_session)): logger.debug(job) - # 手动生成job_id,需要传递到内部 + # 手动生成job_id,传入执行函数,方便后期日志写入redis job_id = uuid4().hex - # 只支持cron的date任务,interval间隔任务完全可以用cron替代,没必要单独实现功能 - if job.trigger == 'cron': - trigger_args: Dict[str, Any] = job.trigger_args.dict() - trigger_args.update(cron_to_dict(job.trigger_args.cron)) - del trigger_args['cron'] - elif job.trigger == 'date': - trigger_args = {'run_date': job.trigger_args} + logger.debug(f'user defined job ID:{job_id}') + # 只支持cron和date任务,interval间隔任务完全可以用cron替代,没必要单独实现功能 try: conn = rpyc.connect(**settings.rpyc_config) - job = conn.root.add_job('scheduler-server:run_command_with_channel', trigger=job.trigger.value, - kwargs={'job_id': job_id, - 'targets': job.targets, - 'command': job.command}, id=job_id, name=job.name, **trigger_args) - sql = text("INSERT INTO user_job values (:uid,:job_id)") - session.execute(sql, {'uid': uid, "job_id": job_id}) - session.commit() + job = conn.root.add_job('scheduler-server:ansible_task', trigger=job.trigger, id=job_id, + kwargs={'job_id': job_id, 'targets': job.targets, + 'ansible_args': job.ansible_args}, name=job.name, + trigger_args=job.trigger_args.model_dump()) + logger.debug(job.id) except Exception as e: logger.warning(e) return ApiResponse( @@ -137,16 +93,16 @@ async def add_job(job: JobAdd, uid: int = Depends(get_uid), session: Session = D message=str(e) ) return ApiResponse( - data=job.id + message=f'新建任务成功:{job.name}' ) 
@router.post('/search', summary='获取所有任务', response_model=ApiResponse[SearchResponse[Any]])
-async def show_jobs(search: Pagination[JobSearch], uid: int = Depends(get_uid)):
-    job_name = search.search['job_name']
+async def show_jobs(search: Pagination[JobSearch], uid: int = Depends(get_uid), conn: Any = Depends(get_rpyc)):
     try:
-        conn = rpyc.connect(**settings.rpyc_config)
-        user_jobs: List[Job] = conn.root.get_user_jobs(uid, job_name)
+        user_jobs = conn.root.get_user_jobs(uid=None)
+        logger.debug(user_jobs)
     except Exception as e:
         logger.warning(e)
         return ApiResponse(
@@ -156,7 +112,7 @@ async def show_jobs(search: Pagination[JobSearch], uid: int = Depends(get_uid)):
     if len(user_jobs) == 0:
         return ApiResponse(
             data={
-                'total': len(user_jobs),
+                'total': 0,
                 'data': []
             }
         )
@@ -175,10 +131,11 @@ async def show_jobs(search: Pagination[JobSearch], uid: int = Depends(get_uid)):
             logger.debug('cron')
             for field in job.trigger.fields:
                 trigger_args[field.name] = str(field)
+            # job.kwargs['targets'] comes back as an rpyc netref list and must be copied into a plain Python list
+            # (same for the other netref values); otherwise pydantic would try to validate the rpyc proxy and fail
             info.update({
-                'id': job.id,
-                'name': job.name,
-                'targets': job.kwargs['targets'],
+                'id': str(job.id),
+                'name': str(job.name),
+                'targets': list(job.kwargs['targets']),
                 'trigger': 'cron',
                 'trigger_args': {
                     'cron': f"{trigger_args['minute']} {trigger_args['hour']} {trigger_args['day']} {trigger_args['month']} {trigger_args['day_of_week']}",
                     'start_date': None if job.trigger.start_date is None else job.trigger.start_date.strftime(
                         "%Y-%m-%d %H:%M:%S"),
                     'end_date': None if job.trigger.end_date is None else job.trigger.end_date.strftime(
                         "%Y-%m-%d %H:%M:%S"),
+                    'run_date': None,
                 },
-                'command': job.kwargs['command'],
-                'status': 'running' if job.next_run_time is not None else 'stop'
+                'ansible_args': dict(job.kwargs['ansible_args']),
+                'status': '1' if job.next_run_time is not None else '0'
             })
         elif isinstance(job.trigger, DateTrigger):
             info.update({
-                'id': job.id,
-                'name': job.name,
-                'targets': job.kwargs['targets'],
+                'id': str(job.id),
+                'name': str(job.name),
+                'targets': list(job.kwargs['targets']),
                 'trigger': 'date',
-                'trigger_args': job.trigger.run_date.strftime(
-                    "%Y-%m-%d %H:%M:%S"),
-                'command': job.kwargs['command'],
-                'status': 'running' if job.next_run_time is not None else 'stop'
+                'trigger_args': {
+                    'run_date': job.trigger.run_date.strftime("%Y-%m-%d %H:%M:%S"),
+                    'cron': None,
+                    'start_date': None,
+                    'end_date': None,
+                },
+                'ansible_args': dict(job.kwargs['ansible_args']),
+                'status': '1' if job.next_run_time is not None else '0'
             })
         logger.debug(info)
         job_info_list.append(info)
     logger.debug(job_info_list)
     return ApiResponse(
         data={
-            'total': len(user_jobs),
+            'total': len(job_info_list),
             'data': job_info_list
         }
     )
@@ -214,10 +177,9 @@ async def show_jobs(search: Pagination[JobSearch], uid: int = Depends(get_uid)):
 
 @router.post('/logs', summary='任务日志查询', response_model=ApiResponse[SearchResponse[JobLogs]])
 async def job_logs(page_search: Pagination[JobLogSearch], session: Session = Depends(get_session)):
-    filter_type = JobLogSearch(job_id='eq')
     logger.debug(page_search)
-    total = crud.internal.job_log.search_total(session, page_search.search, filter_type.dict())
-    jobs = crud.internal.job_log.search(session, page_search, filter_type.dict())
+    total = crud.internal.job_logs.search_total(session, page_search.search, filter_type={'job_id': 'eq'})
+    jobs = crud.internal.job_logs.search(session, page_search, filter_type={'job_id': 'eq'})
     logger.debug(jobs)
     return ApiResponse(
         data={
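`show_jobs` illustrates the main gotcha of reading jobs over rpyc: every attribute access returns a netref proxy, so values must be copied into plain Python objects before FastAPI/pydantic can serialize them. A minimal sketch of the pattern, assuming the scheduler service is running and holds at least one job (connection parameters come from `settings.rpyc_config`, as elsewhere in this router):

```python
import rpyc
from server.settings import settings

# Values fetched over rpyc are netref proxies, not local objects; copy each
# one into a plain str/list/dict before handing it to pydantic or FastAPI.
conn = rpyc.connect(**settings.rpyc_config)
job = conn.root.get_user_jobs(uid=None)[0]

job_id = str(job.id)                             # netref str  -> plain str
targets = list(job.kwargs['targets'])            # netref list -> plain list
ansible_args = dict(job.kwargs['ansible_args'])  # netref dict -> plain dict
```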
diff --git a/server/routers/internal/menu.py b/server/routers/internal/menu.py
index d4aa1bb..188921d 100644
--- a/server/routers/internal/menu.py
+++ b/server/routers/internal/menu.py
@@ -31,7 +31,7 @@ async def add_menu(menu: MenuBase, session: Session = Depends(get_session)):
     :param session:
     :return:
     """
-    db_obj = crud.menu.insert(session, Menu(**menu.dict()))
+    db_obj = crud.menu.insert(session, Menu(**menu.model_dump()))
     session.add(db_obj)
     session.commit()
     session.refresh(db_obj)
diff --git a/server/routers/internal/playbook.py b/server/routers/internal/playbook.py
new file mode 100644
index 0000000..13d65dc
--- /dev/null
+++ b/server/routers/internal/playbook.py
@@ -0,0 +1,67 @@
+from typing import List, Union
+from loguru import logger
+from fastapi import APIRouter, Depends
+from sqlmodel import Session
+from server.common.response_code import ApiResponse, SearchResponse
+from server.common.database import get_session
+from server.models.internal.playbook import Playbook, PlaybookSearch
+from server.models.internal import Pagination
+from server import crud
+
+router = APIRouter(prefix='/api')
+
+
+@router.get('/playbook/{playbook_id}', summary='获取playbook详情', response_model=ApiResponse[Playbook])
+async def get_playbook_by_id(playbook_id: int, session: Session = Depends(get_session)):
+    playbook = crud.internal.playbook.get(session, playbook_id)
+    return ApiResponse(data=playbook.model_dump())
+
+
+@router.post('/playbook/search', summary="获取playbook列表", response_model=ApiResponse[SearchResponse[Playbook]])
+async def search_playbooks(search: Pagination[PlaybookSearch],
+                           session: Session = Depends(get_session)):
+    """
+    :param search: Pagination instance carrying all search parameters and the page offset
+    :param session:
+    :return:
+    """
+    total = crud.internal.playbook.search_total(session, search.search, {'name': 'like'})
+    logger.debug(total)
+    playbooks: List[Playbook] = crud.internal.playbook.search(session, search, {'name': 'like'})
+    playbook_list = [playbook.model_dump() for playbook in playbooks]
+    logger.debug(playbook_list)
+    return ApiResponse(
+        data={
+            'total': total,
+            'data': playbook_list
+        }
+    )
+
+
+@router.get('/playbook', summary='获取playbooks列表', response_model=ApiResponse[List[Playbook]])
+async def query_playbooks(query: Union[str, None] = None, session: Session = Depends(get_session)):
+    playbooks: List[Playbook] = crud.internal.playbook.query_playbooks(session, query)
+    playbook_list = [playbook.model_dump(exclude={'playbook'}) for playbook in playbooks]
+    return ApiResponse(data=playbook_list)
+
+
+@router.post('/playbook', summary='创建playbook')
+async def create_playbook(playbook: Playbook, session: Session = Depends(get_session)):
+    logger.debug(playbook)
+    crud.internal.playbook.insert(session, Playbook(**playbook.model_dump(exclude_unset=True)))
+    return ApiResponse()
+
+
+@router.put('/playbook', summary='更新playbook')
+async def update_playbook(playbook: Playbook, session: Session = Depends(get_session)):
+    crud.internal.playbook.update(session, crud.internal.playbook.get(session, playbook.id),
+                                  Playbook(**playbook.model_dump(exclude_unset=True)))
+    return ApiResponse()
+
+
+@router.delete('/playbook/{book_id}', summary='删除playbook')
+async def delete_playbook(book_id: int, session: Session = Depends(get_session)):
+    crud.internal.playbook.delete(session, book_id)
+    return ApiResponse()
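As a quick smoke test of the playbook CRUD endpoints above, a hypothetical client call could look like this; the `name`/`playbook` field names follow the `Playbook` model as it is used in this router, while the URL, auth header, and YAML content are placeholders:

```python
# Hypothetical request against the create endpoint above; adjust host, port
# and auth to your deployment. 'name' and 'playbook' mirror the Playbook model.
import requests

resp = requests.post(
    'http://127.0.0.1:8000/api/playbook',
    json={'name': 'ping-all', 'playbook': '- hosts: all\n  tasks:\n    - ping:'},
    headers={'Authorization': 'Bearer <token>'},  # assuming token auth like the rest of the API
)
print(resp.json())
```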
@@ -44,7 +44,7 @@ async def get_roles(search: Pagination[RoleBase], session: Session = Depends(get
     roles: List[Role] = crud.internal.role.search(session, search, {'name': 'like', 'enable': 'eq'})
     role_with_menus: List[RoleWithMenus] = []
     for role in roles:
-        new_role = RoleWithMenus(**role.dict(), menus=role.menus)
+        new_role = RoleWithMenus(**role.model_dump(), menus=role.menus)
         role_with_menus.append(new_role)
     return ApiResponse(
         data={
@@ -74,7 +74,7 @@ async def add_roles(role_info: RoleInsert, session: Session = Depends(get_sessio
     enable_menus = role_info.menus
     delattr(role_info, 'menus')
     try:
-        db_obj = crud.internal.role.insert(session, Role(**role_info.dict()))
+        db_obj = crud.internal.role.insert(session, Role(**role_info.model_dump()))
         crud.internal.role.update_menus(session, db_obj, enable_menus)
         return ApiResponse(
             data=db_obj
diff --git a/server/settings.py b/server/settings.py
index 651e438..513e7c1 100644
--- a/server/settings.py
+++ b/server/settings.py
@@ -16,7 +16,7 @@ class APISettings(BaseSettings):
     CASBIN_MODEL_PATH: str = "server/model.conf"
 
     # sql数据库信息
-    DATABASE_URI: MySQLDsn = "mysql+pymysql://root:123456@192.168.137.129/devops"
+    DATABASE_URI: MySQLDsn = "mysql+pymysql://root:123456@localhost/devops"
     # 白名单,不需要进行任何验证即可访问
     NO_VERIFY_URL: List = [
         '/',
diff --git a/www/package.json b/www/package.json
index bef6b04..7dfed24 100644
--- a/www/package.json
+++ b/www/package.json
@@ -14,15 +14,16 @@
     "axios": "^1.6.2",
     "core-js": "^3.34.0",
     "echarts": "^5.4.2",
-    "element-plus": "^2.5.6",
+    "element-plus": "^2.7.8",
     "js-base64": "^3.7.5",
     "js-cookie": "^3.0.5",
     "jsonref": "^8.0.8",
-    "pinia": "2.1.7",
-    "vue": "^3.4.19",
+    "pinia": "2.2.0",
+    "v-contextmenu": "^3.2.0",
+    "vue": "^3.4.35",
     "vue-draggable-plus": "^0.3.5",
     "vue-echarts": "^6.6.5",
-    "vue-router": "^4.3.0",
+    "vue-router": "^4.4.2",
     "xterm": "^5.3.0",
     "xterm-addon-attach": "^0.9.0",
     "xterm-addon-fit": "^0.8.0"
diff --git a/www/src/api/host.js b/www/src/api/host.js
new file mode 100644
index 0000000..57213be
--- /dev/null
+++ b/www/src/api/host.js
@@ -0,0 +1,13 @@
+import {GET, POST, PUT, DELETE} from '@/utils/request'
+
+// Host-related API endpoints
+export const PostNewGroup = (group) => POST('/api/host/group', group)
+export const PutGroup = (group) => PUT('/api/host/group', group)
+export const GetAllGroup = () => GET('/api/host/group')
+export const DelGroup = (groupId) => DELETE('/api/host/group/' + groupId)
+export const PostNewHost = (host) => POST('/api/host', host)
+export const PutHost = (host) => PUT('/api/host', host)
+export const DelHost = (hostId) => DELETE('/api/host/' + hostId)
+export const PingHost = (hostId) => GET('/api/host/ping/' + hostId)
+export const GetHostById = (hostId) => GET('/api/host/' + hostId)
+export const GetHostsByIds = (ids) => GET('/api/host/targets/?' + ids)
\ No newline at end of file
diff --git a/www/src/api/jobs.js b/www/src/api/jobs.js
index 0d8fc00..fe43219 100644
--- a/www/src/api/jobs.js
+++ b/www/src/api/jobs.js
@@ -4,5 +4,5 @@ export const PostNewCronJob = (dict) => POST('/api/jobs/', dict)
 export const GetJobList = () => GET('/api/jobs/')
 export const DelJob = (jobId) => DELETE('/api/jobs/' + jobId)
 export const PutCronJob = (job) => PUT('/api/jobs/', job)
-export const SwitchJob = (jobId) => GET('/api/jobs/switch/' + jobId)
+export const SwitchJob = (jobId, status) => GET('/api/jobs/switch/' + jobId, {status: status})
 export const GetLogs = (jobId) => GET('/api/jobs/logs')
\ No newline at end of file
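The reworked `SwitchJob` helper mirrors the `/api/jobs/switch/{job_id}` endpoint above: a status of 1 resumes the job, anything else pauses it. A usage sketch for a toggle handler; the surrounding component state is assumed:

```js
import {SwitchJob} from '@/api/jobs'

// Sketch of a toggle handler; 'job' is assumed to be a row from the job list,
// whose status follows the project convention: '1' = running, '0' = paused.
const toggleJob = (job) => {
    const next = job.status === '1' ? 0 : 1
    SwitchJob(job.id, next).then(() => {
        job.status = String(next)
    })
}
```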
diff --git a/www/src/api/playbook.js b/www/src/api/playbook.js
new file mode 100644
index 0000000..c0e0ed4
--- /dev/null
+++ b/www/src/api/playbook.js
@@ -0,0 +1,8 @@
+import {GET, POST, PUT, DELETE} from '@/utils/request'
+
+// Playbook-related API endpoints
+export const GetPlaybook = (playbookId) => GET('/api/playbook/' + playbookId)
+export const PostNewPlaybook = (playbook) => POST('/api/playbook', playbook)
+export const PutPlaybook = (playbook) => PUT('/api/playbook', playbook)
+export const DelPlaybook = (playbookId) => DELETE('/api/playbook/' + playbookId)
+export const GetPlaybooksByQuery = (query) => GET('/api/playbook', {query})
\ No newline at end of file
diff --git a/www/src/composables/useTerm.js b/www/src/composables/useTerm.js
new file mode 100644
index 0000000..7008017
--- /dev/null
+++ b/www/src/composables/useTerm.js
@@ -0,0 +1,55 @@
+import {ref} from 'vue'
+import {FitAddon} from 'xterm-addon-fit'
+import {Terminal} from 'xterm'
+
+/**
+ * useTerm: shared composable so every view creates its xterm terminal the same way
+ * @description wraps an xterm.js Terminal plus a FitAddon behind a small API
+ * @returns {{term, terminalRef, initTerm}} - initTerm() returns a Promise
+ *
+ * Example usage:
+ * create the refs:
+ *     const {term, terminalRef, initTerm} = useTerm()
+ * initialize manually once the container is mounted:
+ *     await initTerm()
+ * write content:
+ *     term.value.write('terminal output\n')
+ */
+export default function useTerm() {
+    const term = ref(null)
+    const terminalRef = ref(null)
+    const fitAddon = new FitAddon()
+
+    function initTerm() {
+        return new Promise((resolve) => {
+            // small delay so the container element is rendered before the terminal attaches to it
+            setTimeout(() => {
+                term.value = new Terminal({
+                    // note: the old rendererType option was dropped in xterm 5.x (renderers are addons now)
+                    disableStdin: false,
+                    convertEol: true,
+                    cursorStyle: 'block',
+                    scrollback: 9999999,
+                })
+                term.value.open(terminalRef.value)
+                term.value.loadAddon(fitAddon)
+                fitAddon.fit()
+                term.value.focus()
+                resolve()
+            }, 300)
+        })
+    }
+
+    // refit on window resize without clobbering other resize handlers
+    window.addEventListener('resize', () => fitAddon.fit())
+
+    return {
+        term,
+        terminalRef,
+        initTerm
+    }
+}
\ No newline at end of file
diff --git a/www/src/utils/common.js b/www/src/utils/common.js
new file mode 100644
index 0000000..998b9dc
--- /dev/null
+++ b/www/src/utils/common.js
@@ -0,0 +1,53 @@
+// Shared utility helpers
+/**
+ * Find a node by id in a tree and return it
+ * @param nodes
+ * @param id
+ * @returns {*|null}
+ */
+export function findNodeById(nodes, id) {
+    for (let node of nodes) {
+        if (node.id === id) {
+            return node
+        }
+        if (node.children) {
+            let found = findNodeById(node.children, id)
+            if (found) {
+                return found
+            }
+        }
+    }
+    return null
+}
+
+
+/**
+ * Build the hierarchical label for an id, e.g. root / child / grandchild
+ * @param {Array} data - tree data, e.g. [{id: 1, name: 'root', children: [{id: 2, name: 'child', children: [{id: 3, name: 'grandchild'}]}]}]
+ * @param {number} findId - the id to look up
+ * @param {Object} props - key overrides, e.g. {id: 'id', children: 'children', label: 'label', parent_id: 'parent_id'}
+ * @returns {string} - the hierarchical label, e.g. root / child / grandchild
+ */
+export function getHierarchyLabel(data, findId, props) {
+    const defaultProps = {
+        id: 'id',
+        children: 'children',
+        label: 'label',
+        parent_id: 'parent_id'
+    }
+    const id = props && props.hasOwnProperty('id') ? props.id : defaultProps.id
+    const children = props && props.hasOwnProperty('children') ? props.children : defaultProps.children
+    const label = props && props.hasOwnProperty('label') ? props.label : defaultProps.label
+    const parent_id = props && props.hasOwnProperty('parent_id') ? props.parent_id : defaultProps.parent_id
+
+    let selectNode = findNodeById(data, findId)
+    let hierarchyLabel = selectNode[label]
+    let parent = selectNode[parent_id]
+    while (parent !== null) {
+        // look up the current parent id (not the original node's parent), otherwise
+        // trees deeper than two levels would loop forever on the same node
+        let parentItem = findNodeById(data, parent)
+        hierarchyLabel = parentItem[label] + ' / ' + hierarchyLabel
+        parent = parentItem[parent_id]
+    }
+    return hierarchyLabel
+}
\ No newline at end of file
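A usage sketch for `getHierarchyLabel`, using a tree shaped like the JSDoc example above; note the `label` key is overridden to `name` to match that data:

```js
import {getHierarchyLabel} from '@/utils/common'

const tree = [
    {id: 1, name: 'root', parent_id: null, children: [
        {id: 2, name: 'child', parent_id: 1, children: [
            {id: 3, name: 'grandchild', parent_id: 2}
        ]}
    ]}
]

// -> "root / child / grandchild"
console.log(getHierarchyLabel(tree, 3, {label: 'name'}))
```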
diff --git a/www/src/utils/request.js b/www/src/utils/request.js
index c8f8081..87614e7 100644
--- a/www/src/utils/request.js
+++ b/www/src/utils/request.js
@@ -149,7 +149,16 @@ export function DELETE(url, params) {
     })
 }
 
-export function ConfirmDel(txt,func,id) {
+/**
+ * Deletion confirm dialog; pass in the delete function and the id to call it with
+ * @returns {Promise}
+ * @param txt prompt text
+ * @param func function to execute on confirmation
+ * @param id id passed to func
+ */
+export function ConfirmDel(txt, func, id) {
+    return new Promise((resolve, reject) => {
         ElMessageBox.confirm(txt, '警告', {type: 'warning'}).then(() => {
             func(id).then(() => {
                 ElMessage({
@@ -157,7 +166,8 @@
                     message: '删除成功',
                     type: 'success'
                 })
-            })
+                resolve()
+            }).catch(reject)
         }).catch(() => {
             ElMessage({
                 title: 'success',
@@ -165,6 +175,8 @@
                 type: 'warning'
             })
         })
-    }
+    })
+
+}
 
 export default service
\ No newline at end of file
diff --git a/www/src/views/host/AddGroup.vue b/www/src/views/host/AddGroup.vue
new file mode 100644
index 0000000..b95a7e6
--- /dev/null
+++ b/www/src/views/host/AddGroup.vue
@@ -0,0 +1,116 @@
+[new Vue component; file body not preserved in this excerpt]
diff --git a/www/src/views/host/HostDialog.vue b/www/src/views/host/HostDialog.vue
new file mode 100644
index 0000000..51ca19e
--- /dev/null
+++ b/www/src/views/host/HostDialog.vue
@@ -0,0 +1,140 @@
+[new Vue component; file body not preserved in this excerpt]
diff --git a/www/src/views/host/index.vue b/www/src/views/host/index.vue
new file mode 100644
index 0000000..a1a6925
--- /dev/null
+++ b/www/src/views/host/index.vue
@@ -0,0 +1,207 @@
+[new Vue component; file body not preserved in this excerpt]
diff --git a/www/src/views/jobs/JobManage/AddJob.vue b/www/src/views/jobs/JobManage/AddJob.vue
index 10294c9..bfe7c1c 100644
--- a/www/src/views/jobs/JobManage/AddJob.vue
+++ b/www/src/views/jobs/JobManage/AddJob.vue
@@ -1,81 +1,114 @@
@@ -86,91 +119,166 @@
[modified Vue component; hunk bodies not preserved in this excerpt]
diff --git a/www/src/views/jobs/JobManage/AddTargetsDialog.vue b/www/src/views/jobs/JobManage/AddTargetsDialog.vue
new file mode 100644
index 0000000..6f3a07f
--- /dev/null
+++ b/www/src/views/jobs/JobManage/AddTargetsDialog.vue
@@ -0,0 +1,159 @@
+[new Vue component; file body not preserved in this excerpt]
diff --git a/www/src/views/jobs/JobManage/JobLogs.vue b/www/src/views/jobs/JobManage/JobLogs.vue
index d9771f2..2b90f37 100644
--- a/www/src/views/jobs/JobManage/JobLogs.vue
+++ b/www/src/views/jobs/JobManage/JobLogs.vue
@@ -3,14 +3,20 @@
     {{ props.job.id }}
     {{ props.job.name }}
     {{ props.job.trigger }}
-    {{ props.job.command }}
+    模块:{{ props.job.ansible_args.module }},参数:{{ props.job.ansible_args.module_args }}
+    脚本:{{ props.job.ansible_args.playbook }}
@@ -19,21 +25,19 @@
[surrounding template/script markup not preserved in this excerpt]
diff --git a/www/src/views/jobs/playbook/index.vue b/www/src/views/jobs/playbook/index.vue
new file mode 100644
index 0000000..2c0b208
--- /dev/null
+++ b/www/src/views/jobs/playbook/index.vue
@@ -0,0 +1,91 @@
+[new Vue component; file body not preserved in this excerpt]
\ No newline at end of file
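Since `ConfirmDel` now returns a Promise, a view can refresh its data only after the delete has actually succeeded. A usage sketch; `DelJob` is the API helper defined earlier, while `refreshJobs` is a placeholder for whatever reload logic the calling view has:

```js
import {ConfirmDel} from '@/utils/request'
import {DelJob} from '@/api/jobs'

// refreshJobs() stands in for the view's own reload logic
const handleDelete = (job) => {
    ConfirmDel('确认删除该任务?', DelJob, job.id).then(() => {
        refreshJobs()
    })
}
```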