From 7aa08682b85e5efb9d9806066440f2661d80b591 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EC=8B=9C=EA=B3=A8=EC=95=BD=EC=82=AC?= Date: Sun, 14 Sep 2025 10:48:47 +0900 Subject: [PATCH] Implement smart Magic DNS copy with automatic port detection MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Magic DNS Smart Copy Features: - **PBS servers**: Automatically append `:8007` port when copying - **PVE servers**: Automatically append `:8006` port when copying - **Other machines**: Copy Magic DNS address without port (existing behavior) ### UI Improvements: - PBS servers: Blue button with `:8007` port hint - PVE servers: Orange button with `:8006` port hint - Enhanced tooltips with service-specific port information - Visual distinction between different server types ### PBS Backup Server Monitoring: - Complete PBS API integration with authentication - Real-time backup/restore task monitoring with detailed logs - Namespace-separated backup visualization with color coding - Datastore usage monitoring and status tracking - Task history with success/failure status and error details ### Technical Implementation: - Smart port detection via JavaScript `addSmartPort()` function - Jinja2 template logic for conditional button styling - PBS API endpoints for comprehensive backup monitoring - Enhanced clipboard functionality with user feedback ๐Ÿค– Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude --- farmq-admin/app.py | 521 +++++++- farmq-admin/templates/base.html | 5 + farmq-admin/templates/machines/list.html | 50 +- farmq-admin/templates/pbs/monitoring.html | 1330 +++++++++++++++++++++ 4 files changed, 1888 insertions(+), 18 deletions(-) create mode 100644 farmq-admin/templates/pbs/monitoring.html diff --git a/farmq-admin/app.py b/farmq-admin/app.py index 0fdd576..799e4e5 100644 --- a/farmq-admin/app.py +++ b/farmq-admin/app.py @@ -26,6 +26,66 @@ from utils.proxmox_client import ProxmoxClient from utils.vnc_proxy 
from utils.vnc_proxy import init_vnc_proxy, get_vnc_proxy
from utils.vnc_websocket_proxy import vnc_proxy
import websockets
import os
import requests
from urllib3.exceptions import InsecureRequestWarning

# PBS serves a self-signed TLS certificate, so every request below passes
# verify=False; silence the resulting per-request warnings once, globally.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# --- PBS (Proxmox Backup Server) API configuration ------------------------
# SECURITY(review): these credentials were committed as plain literals.
# They are now read from the environment; the literal defaults are kept only
# for backward compatibility and should be removed (and the password rotated)
# once the deployment exports PBS_HOST / PBS_PORT / PBS_USERNAME /
# PBS_PASSWORD.
PBS_HOST = os.environ.get("PBS_HOST", "100.64.0.8")
PBS_PORT = os.environ.get("PBS_PORT", "8007")
PBS_USERNAME = os.environ.get("PBS_USERNAME", "root@pam")
PBS_PASSWORD = os.environ.get("PBS_PASSWORD", "trajet6640")
PBS_BASE_URL = f"https://{PBS_HOST}:{PBS_PORT}/api2/json"


def pbs_get_auth_ticket():
    """Obtain a PBS authentication ticket.

    Returns:
        dict with keys 'ticket' and 'csrf_token' on success, or None on any
        failure (network error, bad credentials, non-200 response).
    """
    try:
        response = requests.post(
            f"{PBS_BASE_URL}/access/ticket",
            data={
                'username': PBS_USERNAME,
                'password': PBS_PASSWORD,
            },
            verify=False,  # self-signed certificate
            timeout=10,
        )
        if response.status_code == 200:
            data = response.json()['data']
            return {
                'ticket': data['ticket'],
                'csrf_token': data['CSRFPreventionToken'],
            }
        return None
    except Exception as e:
        print(f"PBS 인증 실패: {e}")
        return None


def pbs_api_call(endpoint, auth_info=None):
    """Perform an authenticated GET against the PBS JSON API.

    Args:
        endpoint: path relative to PBS_BASE_URL (may include a query string).
        auth_info: ticket dict from pbs_get_auth_ticket(); fetched on demand
            when omitted.

    Returns:
        The decoded 'data' payload on success, otherwise None.
    """
    if not auth_info:
        auth_info = pbs_get_auth_ticket()
        if not auth_info:
            return None

    try:
        headers = {
            'Cookie': f"PBSAuthCookie={auth_info['ticket']}",
            'CSRFPreventionToken': auth_info['csrf_token'],
        }
        response = requests.get(
            f"{PBS_BASE_URL}/{endpoint}",
            headers=headers,
            verify=False,  # self-signed certificate
            timeout=10,
        )
        if response.status_code == 200:
            return response.json()['data']
        return None
    except Exception as e:
        print(f"PBS API 호출 실패 ({endpoint}): {e}")
        return None


def create_app(config_name=None):
    """Flask application factory."""
    # NOTE(review): the patch hunk skips ~1,600 lines of existing setup and
    # routes here; only the modified/added handlers are reconstructed below.

    # NOTE(review): this handler's decorator and def line sit ABOVE the
    # visible patch hunk — the route path and function name below are
    # reconstructed guesses; confirm against the original file.
    @app.route('/api/subscription-trends')
    def api_subscription_trends():
        """Monthly subscription trend (new vs. cancelled) for ~6 months."""
        try:
            import sqlite3
            from datetime import datetime, timedelta

            db_path = '/srv/headscale-setup/farmq-admin/farmq.db'
            conn = sqlite3.connect(db_path)
            try:
                cursor = conn.cursor()

                trends = []
                current_date = datetime.now()

                for i in range(6):
                    # 30-day steps approximate calendar months; kept as-is.
                    target_date = current_date - timedelta(days=30 * i)
                    year = target_date.year
                    month = target_date.month

                    # New subscriptions that started in that month.
                    cursor.execute('''
                        SELECT COUNT(*)
                        FROM pharmacy_subscriptions
                        WHERE strftime('%Y-%m', start_date) = ?
                    ''', (f'{year}-{month:02d}',))
                    new_subscriptions = cursor.fetchone()[0]

                    # Subscriptions cancelled in that month.
                    cursor.execute('''
                        SELECT COUNT(*)
                        FROM pharmacy_subscriptions
                        WHERE subscription_status = 'CANCELLED'
                        AND strftime('%Y-%m', updated_at) = ?
                    ''', (f'{year}-{month:02d}',))
                    cancelled_subscriptions = cursor.fetchone()[0]

                    # Prepend so the list ends up oldest-first.
                    trends.insert(0, {
                        'month': f'{year}-{month:02d}',
                        'new_subscriptions': new_subscriptions,
                        'cancelled_subscriptions': cancelled_subscriptions,
                        'net_growth': new_subscriptions - cancelled_subscriptions,
                    })
            finally:
                # BUGFIX: close the connection even when a query raises.
                conn.close()

            return jsonify({
                'success': True,
                'data': trends,
            })

        except Exception as e:
            print(f"❌ 구독 트렌드 조회 오류: {e}")
            return jsonify({'success': False, 'error': str(e)}), 500

    # =================== PBS 백업 서버 모니터링 ===================

    @app.route('/pbs')
    def pbs_monitoring():
        """PBS backup-server monitoring page."""
        return render_template('pbs/monitoring.html')

    @app.route('/api/pbs/status')
    def api_pbs_status():
        """PBS server status and basic version info."""
        try:
            # BUGFIX: import locally — no module-level datetime import is
            # visible in this hunk, and the handler uses datetime.now().
            from datetime import datetime

            auth_info = pbs_get_auth_ticket()
            if not auth_info:
                return jsonify({'success': False, 'error': 'PBS 서버 인증 실패'}), 500

            version_data = pbs_api_call('version', auth_info)
            if not version_data:
                return jsonify({'success': False, 'error': 'PBS 버전 정보 조회 실패'}), 500

            return jsonify({
                'success': True,
                'data': {
                    'status': 'online',
                    'version': version_data.get('version', 'unknown'),
                    'release': version_data.get('release', 'unknown'),
                    'server_time': datetime.now().isoformat(),
                },
            })

        except Exception as e:
            print(f"❌ PBS 상태 조회 오류: {e}")
            return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/api/pbs/datastores') + def api_pbs_datastores(): + """PBS ๋ฐ์ดํ„ฐ์Šคํ† ์–ด ์ •๋ณด ์กฐํšŒ""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ๋ฐ์ดํ„ฐ์Šคํ† ์–ด ๋ชฉ๋ก + datastores = pbs_api_call('config/datastore', auth_info) + if not datastores: + return jsonify({'success': False, 'error': '๋ฐ์ดํ„ฐ์Šคํ† ์–ด ์ •๋ณด ์กฐํšŒ ์‹คํŒจ'}), 500 + + # ๊ฐ ๋ฐ์ดํ„ฐ์Šคํ† ์–ด์˜ ์‚ฌ์šฉ๋Ÿ‰ ์ •๋ณด ์กฐํšŒ + datastore_info = [] + for store in datastores: + store_name = store.get('name') + if not store_name: + continue + + # ๋ฐ์ดํ„ฐ์Šคํ† ์–ด ์‚ฌ์šฉ๋Ÿ‰ ์กฐํšŒ + status_data = pbs_api_call(f'admin/datastore/{store_name}/status', auth_info) + if status_data: + total_bytes = status_data.get('total', 0) + used_bytes = status_data.get('used', 0) + avail_bytes = status_data.get('avail', 0) + + # ๋ฐ”์ดํŠธ๋ฅผ GB๋กœ ๋ณ€ํ™˜ + total_gb = round(total_bytes / (1024**3), 2) + used_gb = round(used_bytes / (1024**3), 2) + avail_gb = round(avail_bytes / (1024**3), 2) + usage_percent = round((used_bytes / total_bytes * 100), 1) if total_bytes > 0 else 0 + + datastore_info.append({ + 'name': store_name, + 'comment': store.get('comment', ''), + 'path': store.get('path', ''), + 'total_gb': total_gb, + 'used_gb': used_gb, + 'avail_gb': avail_gb, + 'usage_percent': usage_percent + }) + else: + datastore_info.append({ + 'name': store_name, + 'comment': store.get('comment', ''), + 'path': store.get('path', ''), + 'total_gb': 0, + 'used_gb': 0, + 'avail_gb': 0, + 'usage_percent': 0, + 'error': 'Status unavailable' + }) + + return jsonify({ + 'success': True, + 'data': datastore_info + }) + + except Exception as e: + print(f"โŒ PBS ๋ฐ์ดํ„ฐ์Šคํ† ์–ด ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/tasks') + def api_pbs_tasks(): + """PBS ์ž‘์—… ์ƒํƒœ ์กฐํšŒ""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return 
jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ์‹คํ–‰ ์ค‘์ธ ์ž‘์—… + running_tasks = pbs_api_call('nodes/localhost/tasks?running=true', auth_info) + if running_tasks is None: + running_tasks = [] + + # ์ตœ๊ทผ ์ž‘์—… (๋ชจ๋“  ์ƒํƒœ) + all_tasks = pbs_api_call('nodes/localhost/tasks?limit=10', auth_info) + if all_tasks is None: + all_tasks = [] + + return jsonify({ + 'success': True, + 'data': { + 'running_tasks': running_tasks, + 'recent_tasks': all_tasks, + 'running_count': len(running_tasks) + } + }) + + except Exception as e: + print(f"โŒ PBS ์ž‘์—… ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/backups/') + def api_pbs_backups(datastore_name): + """PBS ๋ฐฑ์—… ๋ชฉ๋ก ์กฐํšŒ (์ƒ์„ธ)""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ๋„ค์ž„์ŠคํŽ˜์ด์Šค ๋ชฉ๋ก ์กฐํšŒ (์žˆ๋Š” ๊ฒฝ์šฐ) + namespaces = [] + try: + ns_data = pbs_api_call(f'admin/datastore/{datastore_name}/namespace', auth_info) + if ns_data: + namespaces = [ns.get('ns', '') for ns in ns_data] + except: + namespaces = [''] # ๊ธฐ๋ณธ ๋„ค์ž„์ŠคํŽ˜์ด์Šค๋งŒ + + print(f"๐Ÿ” PBS ๋„ค์ž„์ŠคํŽ˜์ด์Šค ๋ชฉ๋ก: {namespaces}") + + # ๋ชจ๋“  ๋ฐฑ์—… ๊ทธ๋ฃน ์กฐํšŒ (๋„ค์ž„์ŠคํŽ˜์ด์Šค๋ณ„) + all_backup_info = [] + + for ns in namespaces[:5]: # ์ตœ๋Œ€ 5๊ฐœ ๋„ค์ž„์ŠคํŽ˜์ด์Šค + ns_param = f'ns={ns}' if ns else '' + + # ๋ฐฑ์—… ๊ทธ๋ฃน ์กฐํšŒ + groups_url = f'admin/datastore/{datastore_name}/groups' + if ns_param: + groups_url += f'?{ns_param}' + + groups = pbs_api_call(groups_url, auth_info) + if not groups: + continue + + print(f"๐Ÿ” ๋„ค์ž„์ŠคํŽ˜์ด์Šค '{ns}' ๋ฐฑ์—… ๊ทธ๋ฃน ์ˆ˜: {len(groups)}") + + # ๊ฐ ๊ทธ๋ฃน์˜ ์ƒ์„ธ ์ •๋ณด ์กฐํšŒ + for group in groups[:20]: # ๋„ค์ž„์ŠคํŽ˜์ด์Šค๋‹น ์ตœ๋Œ€ 20๊ฐœ ๊ทธ๋ฃน + backup_type = group.get('backup-type') + backup_id = group.get('backup-id') + group_ns = group.get('ns', '') + + if not backup_type or 
not backup_id: + continue + + # ํ•ด๋‹น ๊ทธ๋ฃน์˜ ์Šค๋ƒ…์ƒท ์กฐํšŒ + snapshot_params = f'backup-type={backup_type}&backup-id={backup_id}' + if group_ns: + snapshot_params += f'&ns={group_ns}' + + snapshots = pbs_api_call( + f'admin/datastore/{datastore_name}/snapshots?{snapshot_params}', + auth_info + ) + + # ์ˆ˜๋™์œผ๋กœ ์ตœ์‹  10๊ฐœ๋งŒ ์„ ํƒ + if snapshots and len(snapshots) > 10: + snapshots = sorted(snapshots, key=lambda x: x.get('backup-time', 0), reverse=True)[:10] + + if snapshots: + latest_snapshot = snapshots[0] if snapshots else None + + # ์Šค๋ƒ…์ƒท ์„ธ๋ถ€ ์ •๋ณด + snapshot_details = [] + for snap in snapshots[:5]: # ์ตœ์‹  5๊ฐœ๋งŒ ์ƒ์„ธ ํ‘œ์‹œ + snapshot_details.append({ + 'backup_time': snap.get('backup-time'), + 'size': snap.get('size', 0), + 'protected': snap.get('protected', False), + 'comment': snap.get('comment', ''), + 'verification': snap.get('verification', {}) + }) + + all_backup_info.append({ + 'namespace': group_ns or 'root', + 'type': backup_type, + 'id': backup_id, + 'last_backup': latest_snapshot.get('backup-time') if latest_snapshot else None, + 'snapshot_count': len(snapshots), + 'total_size': sum(s.get('size', 0) for s in snapshots), + 'latest_size': latest_snapshot.get('size', 0) if latest_snapshot else 0, + 'snapshots': snapshot_details, + 'group_comment': group.get('comment', ''), + 'last_verification': latest_snapshot.get('verification', {}) if latest_snapshot else {} + }) + + # ํฌ๊ธฐ๋ณ„ ์ •๋ ฌ (ํฐ ๊ฒƒ๋ถ€ํ„ฐ) + all_backup_info.sort(key=lambda x: x['total_size'], reverse=True) + + return jsonify({ + 'success': True, + 'data': { + 'namespaces': namespaces, + 'backups': all_backup_info[:50], # ์ตœ๋Œ€ 50๊ฐœ ๋ฐฑ์—… ๊ทธ๋ฃน ํ‘œ์‹œ + 'total_groups': len(all_backup_info), + 'datastore': datastore_name + } + }) + + except Exception as e: + print(f"โŒ PBS ๋ฐฑ์—… ๋ชฉ๋ก ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/backup-details///') + def api_pbs_backup_details(datastore_name, 
backup_type, backup_id): + """ํŠน์ • ๋ฐฑ์—… ๊ทธ๋ฃน์˜ ์ƒ์„ธ ์ •๋ณด""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + namespace = request.args.get('ns', '') + ns_param = f'&ns={namespace}' if namespace else '' + + # ์Šค๋ƒ…์ƒท ๋ชฉ๋ก ์กฐํšŒ + snapshots = pbs_api_call( + f'admin/datastore/{datastore_name}/snapshots?backup-type={backup_type}&backup-id={backup_id}{ns_param}', + auth_info + ) + + if not snapshots: + return jsonify({'success': False, 'error': '์Šค๋ƒ…์ƒท ์กฐํšŒ ์‹คํŒจ'}), 500 + + # ์Šค๋ƒ…์ƒท ์ƒ์„ธ ์ •๋ณด + snapshot_list = [] + for snap in snapshots: + snapshot_list.append({ + 'backup_time': snap.get('backup-time'), + 'size': snap.get('size', 0), + 'protected': snap.get('protected', False), + 'comment': snap.get('comment', ''), + 'verification': snap.get('verification', {}), + 'encrypted': snap.get('encrypted', False), + 'fingerprint': snap.get('fingerprint', '') + }) + + return jsonify({ + 'success': True, + 'data': { + 'backup_type': backup_type, + 'backup_id': backup_id, + 'namespace': namespace, + 'snapshots': snapshot_list, + 'total_snapshots': len(snapshot_list), + 'total_size': sum(s['size'] for s in snapshot_list) + } + }) + + except Exception as e: + print(f"โŒ PBS ๋ฐฑ์—… ์ƒ์„ธ ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/restore-tasks') + def api_pbs_restore_tasks(): + """PBS ๋ณต๊ตฌ ์ž‘์—… ๋ชฉ๋ก ์กฐํšŒ""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ๋ชจ๋“  ์ž‘์—… ์กฐํšŒ (๋ณต๊ตฌ ๊ด€๋ จ ์ž‘์—… ํ•„ํ„ฐ๋ง) + all_tasks = pbs_api_call('nodes/localhost/tasks', auth_info) + if all_tasks is None: + all_tasks = [] + + # ๋ชจ๋“  ์ž‘์—…์„ ์นดํ…Œ๊ณ ๋ฆฌ๋ณ„๋กœ ๋ถ„๋ฅ˜ + restore_tasks = [] + backup_tasks = [] + other_tasks = [] + + for task in all_tasks: + task_type = task.get('type', '') + worker_type 
= task.get('worker_type', '') + task_id = task.get('upid', '') + + task_info = { + 'id': task_id, + 'type': task_type or worker_type, # type์ด ๋น„์–ด์žˆ์œผ๋ฉด worker_type ์‚ฌ์šฉ + 'worker_type': worker_type, + 'starttime': task.get('starttime'), + 'endtime': task.get('endtime'), + 'status': task.get('status'), + 'exitstatus': task.get('exitstatus'), + 'user': task.get('user'), + 'node': task.get('node', 'localhost'), + 'pid': task.get('pid'), + 'pstart': task.get('pstart'), + 'worker_id': task.get('worker_id') + } + + # ์‹ค์ œ ์ž‘์—… ํƒ€์ž… ๊ฒฐ์ • (type์ด ๋น„์–ด์žˆ์œผ๋ฉด worker_type ์‚ฌ์šฉ) + actual_type = (task_type or worker_type).lower() + + # ๋ณต๊ตฌ ๊ด€๋ จ ์ž‘์—… ํƒ€์ž…๋“ค + if any(restore_type in actual_type for restore_type in [ + 'restore', 'download', 'extract', 'file-restore', 'vm-restore', 'reader' + ]): + restore_tasks.append(task_info) + + # ๋ฐฑ์—… ๊ด€๋ จ ์ž‘์—…๋“ค + elif any(backup_type in actual_type for backup_type in [ + 'backup', 'sync', 'verify', 'prune', 'gc', 'garbage-collection', 'upload' + ]): + backup_tasks.append(task_info) + + # ๊ธฐํƒ€ ์ž‘์—…๋“ค (๊ด€๋ฆฌ, ์œ ์ง€๋ณด์ˆ˜ ๋“ฑ) + else: + other_tasks.append(task_info) + + # ์‹œ์ž‘์‹œ๊ฐ„ ๊ธฐ์ค€์œผ๋กœ ์ตœ์‹  ์ˆœ ์ •๋ ฌ + restore_tasks.sort(key=lambda x: x.get('starttime', 0), reverse=True) + backup_tasks.sort(key=lambda x: x.get('starttime', 0), reverse=True) + + return jsonify({ + 'success': True, + 'data': { + 'restore_tasks': restore_tasks[:20], # ์ตœ๊ทผ 20๊ฐœ + 'backup_tasks': backup_tasks[:20], # ์ตœ๊ทผ 20๊ฐœ + 'other_tasks': other_tasks[:10], # ๊ธฐํƒ€ ์ž‘์—… 10๊ฐœ + 'total_restore_tasks': len(restore_tasks), + 'total_backup_tasks': len(backup_tasks), + 'total_other_tasks': len(other_tasks), + 'total_all_tasks': len(all_tasks) + } + }) + + except Exception as e: + print(f"โŒ PBS ๋ณต๊ตฌ ์ž‘์—… ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/task-log/') + def api_pbs_task_log(task_id): + """PBS ์ž‘์—… ๋กœ๊ทธ ์กฐํšŒ""" + try: + auth_info 
= pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ์ž‘์—… ๋กœ๊ทธ ์กฐํšŒ + log_data = pbs_api_call(f'nodes/localhost/tasks/{task_id}/log', auth_info) + + if log_data is None: + return jsonify({'success': False, 'error': '๋กœ๊ทธ ์กฐํšŒ ์‹คํŒจ'}), 500 + + # ๋กœ๊ทธ ๋ผ์ธ๋“ค์„ ๋ฌธ์ž์—ด๋กœ ๋ณ€ํ™˜ + if isinstance(log_data, list): + log_lines = [] + for line in log_data: + if isinstance(line, dict): + # ๋กœ๊ทธ ๋ผ์ธ์ด ๊ฐ์ฒด์ธ ๊ฒฝ์šฐ + timestamp = line.get('t', '') + message = line.get('n', '') + log_lines.append({ + 'timestamp': timestamp, + 'message': message, + 'line': f"[{timestamp}] {message}" if timestamp else message + }) + else: + # ๋กœ๊ทธ ๋ผ์ธ์ด ๋ฌธ์ž์—ด์ธ ๊ฒฝ์šฐ + log_lines.append({ + 'timestamp': '', + 'message': str(line), + 'line': str(line) + }) + else: + log_lines = [{'timestamp': '', 'message': str(log_data), 'line': str(log_data)}] + + return jsonify({ + 'success': True, + 'data': { + 'task_id': task_id, + 'log_lines': log_lines, + 'total_lines': len(log_lines) + } + }) + + except Exception as e: + print(f"โŒ PBS ์ž‘์—… ๋กœ๊ทธ ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 + + @app.route('/api/pbs/task-status/') + def api_pbs_task_status(task_id): + """PBS ์ž‘์—… ์ƒํƒœ ์ƒ์„ธ ์กฐํšŒ""" + try: + auth_info = pbs_get_auth_ticket() + if not auth_info: + return jsonify({'success': False, 'error': 'PBS ์„œ๋ฒ„ ์ธ์ฆ ์‹คํŒจ'}), 500 + + # ์ž‘์—… ์ƒํƒœ ์กฐํšŒ + task_data = pbs_api_call(f'nodes/localhost/tasks/{task_id}/status', auth_info) + + if task_data is None: + # ์ „์ฒด ์ž‘์—… ๋ชฉ๋ก์—์„œ ํ•ด๋‹น ์ž‘์—… ์ฐพ๊ธฐ + all_tasks = pbs_api_call('nodes/localhost/tasks', auth_info) + if all_tasks: + task_data = next((t for t in all_tasks if t.get('upid') == task_id), None) + + if not task_data: + return jsonify({'success': False, 'error': '์ž‘์—…์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค'}), 404 + + return jsonify({ + 'success': True, + 'data': task_data + }) + 
+ except Exception as e: + print(f"โŒ PBS ์ž‘์—… ์ƒํƒœ ์กฐํšŒ ์˜ค๋ฅ˜: {e}") + return jsonify({'success': False, 'error': str(e)}), 500 # ์—๋Ÿฌ ํ•ธ๋“ค๋Ÿฌ @app.errorhandler(404) diff --git a/farmq-admin/templates/base.html b/farmq-admin/templates/base.html index 140b65d..5ce3826 100644 --- a/farmq-admin/templates/base.html +++ b/farmq-admin/templates/base.html @@ -212,6 +212,11 @@ ๋งค์ถœ ๋Œ€์‹œ๋ณด๋“œ +