# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Environment-dependent path to Maven home directory
/mavenHomeManager.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="KubernetesApiProvider">{}</component>
</project>
New file |
| | |
| | | # Algorithm System Module |
| | | __version__ = "1.0.0" |
New file |
| | |
"""
Algorithm system server.
"""
| | | import json |
| | | import logging |
| | | import os |
| | | import time |
| | | from typing import Dict, List, Optional, Any, Tuple |
| | | from flask import Flask, request, jsonify |
| | | |
| | | from common.data_models import ( |
| | | TaskData, AGVStatus, TaskAssignment, PlannedPath, |
| | | create_success_response, create_error_response, ResponseCode, to_dict, from_dict |
| | | ) |
| | | from common.utils import load_path_mapping |
| | | from algorithm_system.models.agv_model import AGVModelManager |
| | | from algorithm_system.algorithms.task_allocation import TaskAllocationFactory |
| | | from algorithm_system.algorithms.path_planning import PathPlanningFactory |
| | | from common.api_client import RCSAPIClient |
| | | from algorithm_system.path_monitor import PathMonitorService |
| | | |
try:
    from config.settings import (
        ALGORITHM_SERVER_HOST, ALGORITHM_SERVER_PORT,
        RCS_SERVER_HOST, RCS_SERVER_PORT,
        MONITOR_POLLING_INTERVAL
    )
except ImportError:
    # config.settings is unavailable (e.g. standalone run); fall back to
    # hard-coded defaults so the server can still start.
    ALGORITHM_SERVER_HOST = "10.10.10.239"
    ALGORITHM_SERVER_PORT = 8002
    RCS_SERVER_HOST = "10.10.10.156"
    RCS_SERVER_PORT = 8088
    MONITOR_POLLING_INTERVAL = 5.0
    logging.warning("Could not import settings from config.settings; using defaults")
| | | |
class AlgorithmServer:
    """Flask-based HTTP server for the algorithm system.

    Exposes endpoints for task allocation, single and batch path planning,
    runtime algorithm configuration, and control of the background path
    monitor service.
    """

    def __init__(self, host: str = None, port: int = None, enable_path_monitor: bool = True,
                 monitor_interval: float = None):
        """Initialize the algorithm server.

        Args:
            host: Bind address; defaults to ALGORITHM_SERVER_HOST.
            port: Bind port; defaults to ALGORITHM_SERVER_PORT.
            enable_path_monitor: Whether to create the path monitor service.
            monitor_interval: Monitor poll interval in seconds; defaults to
                MONITOR_POLLING_INTERVAL.
        """
        self.host = host or ALGORITHM_SERVER_HOST
        self.port = port or ALGORITHM_SERVER_PORT
        self.enable_path_monitor = enable_path_monitor
        self.monitor_interval = monitor_interval or MONITOR_POLLING_INTERVAL
        self.logger = logging.getLogger(__name__)

        self.app = Flask(__name__)

        # Static mapping of path point codes to map coordinates.
        self.path_mapping = load_path_mapping()

        self.agv_manager = AGVModelManager(self.path_mapping)

        self.rcs_client = RCSAPIClient()

        # Default algorithms; changeable at runtime via /open/algorithm/config/v1.
        self.task_allocation_algorithm = "LOAD_BALANCED"
        self.path_planning_algorithm = "A_STAR"

        self.path_monitor = None
        if self.enable_path_monitor:
            self.path_monitor = PathMonitorService(
                rcs_host=RCS_SERVER_HOST,
                rcs_port=RCS_SERVER_PORT,
                poll_interval=self.monitor_interval,
                path_algorithm=self.path_planning_algorithm,
                auto_send_paths=True  # push planned paths to RCS automatically
            )
            self.logger.info(
                f"Path monitor service initialized - poll interval: {self.monitor_interval}s, auto-send paths: True")

        self._setup_routes()

        self.logger.info("Algorithm system server initialized")

    def _setup_routes(self):
        """Register all HTTP routes on the Flask app."""

        @self.app.route('/open/task/send/v1', methods=['POST'])
        def task_send_endpoint():
            """Task dispatch endpoint."""
            return self._handle_task_send_request()

        @self.app.route('/open/path/plan/v1', methods=['POST'])
        def path_plan_endpoint():
            """Single path planning endpoint."""
            return self._handle_path_planning_request()

        @self.app.route('/open/path/batch/plan/v1', methods=['POST'])
        def batch_path_plan_endpoint():
            """Batch path planning endpoint."""
            return self._handle_batch_path_planning_request()

        @self.app.route('/open/algorithm/config/v1', methods=['POST'])
        def algorithm_config_endpoint():
            """Algorithm configuration endpoint."""
            return self._handle_algorithm_config_request()

        @self.app.route('/monitor/path/start/v1', methods=['POST'])
        def start_path_monitor_endpoint():
            """Start the path monitor service."""
            return self._handle_start_path_monitor_request()

        @self.app.route('/monitor/path/stop/v1', methods=['POST'])
        def stop_path_monitor_endpoint():
            """Stop the path monitor service."""
            return self._handle_stop_path_monitor_request()

        @self.app.route('/monitor/path/status/v1', methods=['GET'])
        def path_monitor_status_endpoint():
            """Query path monitor service status."""
            return self._handle_path_monitor_status_request()

        @self.app.route('/health', methods=['GET'])
        def health_check():
            """Health check endpoint with a service summary."""
            monitor_status = None
            if self.path_monitor:
                monitor_status = self.path_monitor.get_monitoring_status()

            return jsonify({
                "status": "healthy",
                "service": "algorithm_system",
                "timestamp": time.time(),
                "path_mapping_loaded": len(self.path_mapping) > 0,
                "algorithms": {
                    "task_allocation": self.task_allocation_algorithm,
                    "path_planning": self.path_planning_algorithm
                },
                "agv_count": len(self.agv_manager.get_all_agvs()),
                "path_monitor": monitor_status,
                "available_endpoints": [
                    "POST /open/task/send/v1 - task allocation",
                    "POST /open/path/plan/v1 - single path planning",
                    "POST /open/path/batch/plan/v1 - batch path planning",
                    "POST /open/algorithm/config/v1 - algorithm configuration",
                    "POST /monitor/path/start/v1 - start path monitor service",
                    "POST /monitor/path/stop/v1 - stop path monitor service",
                    "GET /monitor/path/status/v1 - path monitor service status",
                    "GET /health - health check"
                ]
            })

        @self.app.errorhandler(404)
        def not_found(error):
            """404 handler."""
            return jsonify(to_dict(create_error_response(404, "endpoint not found"))), 404

        @self.app.errorhandler(500)
        def internal_error(error):
            """500 handler."""
            self.logger.error(f"Internal server error: {error}")
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, "internal server error"))), 500

    def _parse_agv_status_list(self, raw_list, source: str) -> "List[AGVStatus]":
        """Parse raw AGV status dicts into AGVStatus objects, skipping bad entries.

        Args:
            raw_list: Iterable of raw AGV status dictionaries.
            source: Where the data came from ("request" or "RCS"); logging only.

        Returns:
            The successfully parsed AGVStatus objects.
        """
        parsed = []
        for agv_data in raw_list:
            try:
                parsed.append(from_dict(AGVStatus, agv_data))
            except Exception as e:
                self.logger.warning(f"Failed to parse AGV status from {source}: {agv_data} - {e}")
        return parsed

    def _handle_task_send_request(self) -> Dict[str, Any]:
        """Handle a task dispatch request: parse tasks, gather AGV status, allocate.

        Accepts either a structured payload {"tasks": [...], "agvStatus": [...]}
        or a bare list of task dicts.

        Returns:
            Flask JSON response carrying the list of TaskAssignment dicts.
        """
        try:
            if not request.is_json:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "invalid request format")))

            request_data = request.get_json()
            if not request_data:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "empty request body")))

            if isinstance(request_data, dict) and "tasks" in request_data:
                task_data = request_data.get("tasks", [])
                agv_status_data = request_data.get("agvStatus", [])
                self.logger.info(f"Received structured allocation request - tasks: {len(task_data)}, AGVs: {len(agv_status_data)}")
            elif isinstance(request_data, list):
                task_data = request_data
                agv_status_data = []
                self.logger.info(f"Received task allocation request - tasks: {len(task_data)}")
            else:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "invalid task data format")))

            if not task_data or not isinstance(task_data, list):
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "invalid task data")))

            # Parse tasks, skipping malformed entries.
            tasks = []
            for task_dict in task_data:
                try:
                    tasks.append(TaskData(
                        taskId=task_dict.get("taskId", ""),
                        start=task_dict.get("start", ""),
                        end=task_dict.get("end", ""),
                        type=task_dict.get("type", "1"),
                        priority=task_dict.get("priority", 5)
                    ))
                except Exception as e:
                    self.logger.warning(f"Failed to parse task: {task_dict} - {e}")

            if not tasks:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "no valid task data")))

            # AGV status: prefer what the request supplied, otherwise ask RCS.
            if agv_status_data:
                self.logger.info("Using AGV status supplied in the request")
                agv_status_list = self._parse_agv_status_list(agv_status_data, "request")
            else:
                self.logger.info("Fetching AGV status from RCS")
                agv_status_list = []
                agv_response = self.rcs_client.get_agv_status()
                if agv_response.code == ResponseCode.SUCCESS and agv_response.data:
                    agv_status_list = self._parse_agv_status_list(agv_response.data, "RCS")
                else:
                    error_msg = f"Cannot fetch AGV status: {agv_response.msg if agv_response else 'RCS connection failed'}"
                    self.logger.error(error_msg)

                    # Optional fallback: round-robin allocation without AGV status.
                    use_fallback_allocation = getattr(self, 'use_fallback_allocation', True)
                    if use_fallback_allocation:
                        self.logger.warning("Falling back to round-robin allocation")
                        return self._simple_round_robin_allocation(tasks)
                    return jsonify(to_dict(create_error_response(
                        ResponseCode.SERVER_ERROR,
                        error_msg
                    )))

            if not agv_status_list:
                return jsonify(to_dict(create_error_response(ResponseCode.NO_DATA, "no AGV status data available")))

            self.agv_manager.update_agv_data(agv_status_list)
            self.logger.info(f"Updated {len(agv_status_list)} AGV statuses")

            all_agvs = self.agv_manager.get_all_agvs()
            self.logger.info(f"Algorithm system currently holds {len(all_agvs)} AGV models")

            # Diagnostics: count AGVs that could accept a task right now.
            available_count = 0
            for agv in all_agvs:
                can_accept = agv.can_accept_task(5)
                self.logger.debug(f"AGV {agv.agvId}: status={agv.status}, can_accept_task={can_accept}, "
                                  f"is_overloaded={agv.is_overloaded()}, task_count={agv.current_task_count}")
                if can_accept:
                    available_count += 1

            self.logger.info(f"{available_count} AGVs can accept tasks")

            if available_count == 0:
                self.logger.warning("No AGV can accept tasks; detailed status:")
                for agv in all_agvs:
                    self.logger.warning(f"  AGV {agv.agvId}: status={agv.status}, type={type(agv.status)}, "
                                        f"tasks={agv.current_task_count}/{agv.max_capacity}")

            allocator = TaskAllocationFactory.create_allocator(
                self.task_allocation_algorithm,
                self.agv_manager
            )

            start_time = time.time()
            assignments = allocator.allocate_tasks(tasks)
            allocation_time = (time.time() - start_time) * 1000  # ms

            assignment_data = [to_dict(assignment) for assignment in assignments]

            self.logger.info(f"Task allocation done - {len(assignments)} tasks allocated in {allocation_time:.2f}ms")

            return jsonify(to_dict(create_success_response(assignment_data)))

        except Exception as e:
            self.logger.error(f"Task allocation request failed: {e}", exc_info=True)
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"server error: {str(e)}")))

    def _handle_path_planning_request(self) -> Dict[str, Any]:
        """Handle a single path planning request for one AGV.

        Expects JSON with 'agvId', 'start', 'end' and optional 'constraints'.

        Returns:
            Flask JSON response with the planned path or an error.
        """
        try:
            if not request.is_json:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "invalid request format")))

            request_data = request.get_json()
            if not request_data:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "empty request body")))

            agv_id = request_data.get("agvId")
            start_code = request_data.get("start")
            end_code = request_data.get("end")
            constraints = request_data.get("constraints", [])

            if not agv_id or not start_code or not end_code:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "missing required parameters")))

            self.logger.info(f"Path planning request - AGV: {agv_id}, from {start_code} to {end_code}")

            path_planner = PathPlanningFactory.create_path_planner(
                self.path_planning_algorithm,
                self.path_mapping
            )

            start_time = time.time()
            planned_path = path_planner.plan_path(start_code, end_code, constraints)
            planning_time = (time.time() - start_time) * 1000  # ms

            if planned_path:
                planned_path.agvId = agv_id
                path_data = to_dict(planned_path)
                self.logger.info(f"Path planned - AGV: {agv_id}, length: {len(planned_path.codeList)}, took: {planning_time:.2f}ms")
                return jsonify(to_dict(create_success_response(path_data)))

            self.logger.warning(f"Path planning failed - AGV: {agv_id}, from {start_code} to {end_code}")
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, "unable to plan path")))

        except Exception as e:
            self.logger.error(f"Path planning request failed: {e}", exc_info=True)
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"server error: {str(e)}")))

    def _handle_batch_path_planning_request(self) -> Dict[str, Any]:
        """Handle a batch path planning request over all (or executing) AGVs.

        Returns:
            Flask JSON response with the list of planned paths.
        """
        try:
            request_data = (request.get_json() or {}) if request.is_json else {}

            constraints = request_data.get("constraints", [])
            include_idle_agv = request_data.get("includeIdleAgv", False)
            use_enhanced_planning = request_data.get("useEnhancedPlanning", True)

            # AGV status may come in under either key.
            agv_status_from_request = (
                request_data.get("agvStatusList", []) or
                request_data.get("agvs", [])
            )

            self.logger.info(f"Batch path planning request - include idle AGVs: {include_idle_agv}, "
                             f"AGVs in request: {len(agv_status_from_request)}, "
                             f"enhanced planning: {use_enhanced_planning}")

            if agv_status_from_request:
                self.logger.info("Planning with AGV status supplied in the request")
                agv_status_list = self._parse_agv_status_list(agv_status_from_request, "request")
            else:
                self.logger.info("Fetching AGV status from RCS for path planning")
                agv_response = self.rcs_client.get_agv_status()
                if agv_response.code != ResponseCode.SUCCESS or not agv_response.data:
                    error_msg = f"Cannot fetch AGV status for planning: {agv_response.msg if agv_response else 'RCS connection failed'}"
                    self.logger.error(error_msg)
                    return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, error_msg)))
                agv_status_list = self._parse_agv_status_list(agv_response.data, "RCS")

            if not agv_status_list:
                error_msg = "no AGV status data available for path planning"
                self.logger.error(error_msg)
                return jsonify(to_dict(create_error_response(ResponseCode.NO_DATA, error_msg)))

            self.agv_manager.update_agv_data(agv_status_list)

            # Always use the enhanced planner so the response carries the new fields.
            self.logger.info("Forcing enhanced batch planning to include new fields")
            result = self._enhanced_batch_path_planning(agv_status_list, include_idle_agv, constraints)

            return jsonify(to_dict(create_success_response(result)))

        except Exception as e:
            self.logger.error(f"Batch path planning request failed: {e}", exc_info=True)
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"server error: {str(e)}")))

    def _enhanced_batch_path_planning(self, agv_status_list: "List[AGVStatus]",
                                      include_idle_agv: bool,
                                      constraints: "List[Tuple[int, int, float]]") -> "List[Dict[str, Any]]":
        """Run batch path planning for the given AGVs.

        Args:
            agv_status_list: Current AGV statuses.
            include_idle_agv: Also plan paths for idle AGVs.
            constraints: Space-time constraints as (x, y, time) tuples.

        Returns:
            The list of planned path dicts; planning statistics are logged only.
        """
        # Note: PathPlanningFactory is already imported at module level.
        batch_planner = PathPlanningFactory.create_batch_path_planner(
            algorithm_type=self.path_planning_algorithm,
            path_mapping=self.path_mapping
        )

        result = batch_planner.plan_all_agv_paths(
            agv_status_list=agv_status_list,
            include_idle_agv=include_idle_agv,
            constraints=constraints
        )

        self.logger.info(f"Batch path planning done - total AGVs: {result['totalAgvs']}, "
                         f"executing tasks: {result['executingTasksCount']}, "
                         f"planned paths: {result['plannedPathsCount']}, "
                         f"conflicts detected: {result['conflictsDetected']}, "
                         f"total time: {result['totalPlanningTime']:.2f}ms")

        # Return only the path list, not the surrounding statistics.
        return result.get('plannedPaths', [])

    def _handle_algorithm_config_request(self) -> Dict[str, Any]:
        """Handle an algorithm configuration update request.

        Returns:
            Flask JSON response with the current configuration.
        """
        try:
            if not request.is_json:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "invalid request format")))

            config_data = request.get_json()
            if not config_data:
                return jsonify(to_dict(create_error_response(ResponseCode.PARAM_EMPTY, "empty configuration data")))

            if "task_allocation_algorithm" in config_data:
                self.task_allocation_algorithm = config_data["task_allocation_algorithm"]
                self.logger.info(f"Task allocation algorithm set to: {self.task_allocation_algorithm}")

            if "path_planning_algorithm" in config_data:
                self.path_planning_algorithm = config_data["path_planning_algorithm"]
                self.logger.info(f"Path planning algorithm set to: {self.path_planning_algorithm}")

            current_config = {
                "task_allocation_algorithm": self.task_allocation_algorithm,
                "path_planning_algorithm": self.path_planning_algorithm
            }

            return jsonify(to_dict(create_success_response(current_config)))

        except Exception as e:
            self.logger.error(f"Algorithm configuration request failed: {e}", exc_info=True)
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"server error: {str(e)}")))

    def _simple_round_robin_allocation(self, tasks: "List[TaskData]") -> Dict[str, Any]:
        """Fallback allocation: assign tasks round-robin over a presumed AGV pool.

        Used when AGV status cannot be fetched; AGV IDs are synthesized as
        AGV_10001..AGV_{10000+count}.
        """
        # Determine the AGV count, preferring live RCS data.
        agv_count = 5  # default
        try:
            agv_response = self.rcs_client.get_agv_status()
            if agv_response.code == ResponseCode.SUCCESS and agv_response.data:
                agv_count = len(agv_response.data)
                self.logger.info(f"AGV count from RCS: {agv_count}")
            else:
                # RCS returned no data; use the configured default.
                from config.settings import DEFAULT_AGV_COUNT
                agv_count = DEFAULT_AGV_COUNT
                self.logger.warning(f"Cannot get AGV count from RCS; using default: {agv_count}")
        except Exception as e:
            # RCS unreachable; use the configured default.
            from config.settings import DEFAULT_AGV_COUNT
            agv_count = DEFAULT_AGV_COUNT
            self.logger.error(f"Failed to get AGV count; using default: {agv_count} - {e}")

        assignments = [
            TaskAssignment(taskId=task.taskId, agvId=f"AGV_{10001 + (i % agv_count)}")
            for i, task in enumerate(tasks)
        ]

        assignment_data = [to_dict(assignment) for assignment in assignments]

        self.logger.info(f"Round-robin allocated {len(assignments)} tasks over {agv_count} AGVs")

        return jsonify(to_dict(create_success_response(assignment_data)))

    def _handle_start_path_monitor_request(self) -> Dict[str, Any]:
        """Handle a request to start the path monitor service."""
        try:
            if not self.path_monitor:
                return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, "path monitor service not initialized")))

            if self.path_monitor.is_running:
                return jsonify(to_dict(create_success_response({"message": "path monitor service already running"})))

            self.path_monitor.start_monitoring()
            self.logger.info("Path monitor service started via API")

            return jsonify(to_dict(create_success_response({
                "message": "path monitor service started",
                "monitor_status": self.path_monitor.get_monitoring_status()
            })))

        except Exception as e:
            self.logger.error(f"Failed to start path monitor service: {e}")
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"failed to start path monitor service: {str(e)}")))

    def _handle_stop_path_monitor_request(self) -> Dict[str, Any]:
        """Handle a request to stop the path monitor service."""
        try:
            if not self.path_monitor:
                return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, "path monitor service not initialized")))

            if not self.path_monitor.is_running:
                return jsonify(to_dict(create_success_response({"message": "path monitor service not running"})))

            self.path_monitor.stop_monitoring()
            self.logger.info("Path monitor service stopped via API")

            return jsonify(to_dict(create_success_response({
                "message": "path monitor service stopped",
                "monitor_status": self.path_monitor.get_monitoring_status()
            })))

        except Exception as e:
            self.logger.error(f"Failed to stop path monitor service: {e}")
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"failed to stop path monitor service: {str(e)}")))

    def _handle_path_monitor_status_request(self) -> Dict[str, Any]:
        """Handle a path monitor status query."""
        try:
            if not self.path_monitor:
                return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, "path monitor service not initialized")))

            return jsonify(to_dict(create_success_response({
                "monitor_status": self.path_monitor.get_monitoring_status(),
                "description": "path monitor service status"
            })))

        except Exception as e:
            self.logger.error(f"Failed to get path monitor status: {e}")
            return jsonify(to_dict(create_error_response(ResponseCode.SERVER_ERROR, f"failed to get path monitor status: {str(e)}")))

    def start_server(self):
        """Start the Flask server (blocking); auto-starts the path monitor if enabled."""
        self.logger.info(f"Starting algorithm system server: http://{self.host}:{self.port}")

        try:
            if self.path_monitor and self.enable_path_monitor:
                self.path_monitor.start_monitoring()
                self.logger.info("Path monitor service auto-started")

            self.app.run(
                host=self.host,
                port=self.port,
                debug=False,
                threaded=True
            )
        except Exception as e:
            self.logger.error(f"Failed to start algorithm server: {e}")
            # Make sure the monitor is stopped if startup failed.
            if self.path_monitor and self.path_monitor.is_running:
                self.path_monitor.stop_monitoring()
            raise

    def stop_server(self):
        """Stop the server's background services."""
        self.logger.info("Stopping algorithm server")

        if self.path_monitor and self.path_monitor.is_running:
            self.path_monitor.stop_monitoring()
            self.logger.info("Path monitor service stopped")

    def get_server_status(self) -> Dict[str, Any]:
        """Return a snapshot of server configuration and runtime state."""
        monitor_status = None
        if self.path_monitor:
            monitor_status = self.path_monitor.get_monitoring_status()

        return {
            "host": self.host,
            "port": self.port,
            "path_mapping_loaded": len(self.path_mapping) > 0,
            "agv_count": len(self.agv_manager.get_all_agvs()),
            "algorithms": {
                "task_allocation": self.task_allocation_algorithm,
                "path_planning": self.path_planning_algorithm
            },
            "path_monitor": monitor_status,
            "timestamp": time.time()
        }
New file |
| | |
| | | # Algorithm System Algorithms Module |
| | | __version__ = "1.0.0" |
New file |
| | |
"""
AGV path collision detection and resolution module.
"""
| | | import math |
| | | import logging |
| | | from typing import Dict, List, Tuple, Optional, Set |
| | | from dataclasses import dataclass |
| | | from collections import defaultdict |
| | | |
| | | from common.data_models import PlannedPath, PathCode, AGVActionTypeEnum |
| | | from common.utils import get_coordinate_from_path_id |
| | | |
| | | |
@dataclass
class SpaceTimeNode:
    """Space-time node: one AGV's state at a specific time step and position."""
    agv_id: str
    position: str  # path point code
    coordinates: Tuple[int, int]  # map coordinates of the position
    time_step: int  # discrete time step along the path
    direction: str  # heading at this step
| | | |
| | | |
@dataclass
class Conflict:
    """Description of a detected path conflict between two AGVs."""
    type: str  # conflict kind: "vertex", "edge", or "follow"
    agv1: str
    agv2: str
    time_step: int  # time step at which the conflict occurs
    position1: str  # agv1's position code
    position2: str  # agv2's position code
    description: str  # human-readable summary
| | | |
| | | |
@dataclass
class AGVPriority:
    """Result of evaluating an AGV's yielding priority."""
    agv_id: str
    priority_score: float  # priority score; higher means higher priority
    task_status: str  # current task status
    action_type: str  # current action type
    task_priority: int  # priority of the task being executed
    voltage: int  # battery level
    explanation: str  # explanation of how the priority was derived
| | | |
| | | |
class CollisionDetector:
    """Detects conflicts (vertex, edge, following) between planned AGV paths."""

    def __init__(self, path_mapping: Dict[str, Dict[str, int]],
                 min_distance: float = 3.0, time_buffer: int = 1):
        """Initialize the collision detector.

        Args:
            path_mapping: Mapping of path point codes to coordinates.
            min_distance: Minimum safe distance between two AGVs.
            time_buffer: Time buffer in time steps.
        """
        self.path_mapping = path_mapping
        self.min_distance = min_distance
        self.time_buffer = time_buffer
        self.logger = logging.getLogger(__name__)

    def detect_conflicts(self, planned_paths: List[Dict]) -> "List[Conflict]":
        """Detect all conflicts between the given planned AGV paths.

        Args:
            planned_paths: Planned path dicts ({'agvId', 'codeList', ...}).

        Returns:
            All detected conflicts.
        """
        space_time_table = self._build_space_time_table(planned_paths)

        conflicts = []
        # Vertex conflicts: two AGVs at the same position at the same step.
        conflicts.extend(self._detect_vertex_conflicts(space_time_table))
        # Edge conflicts: two AGVs swapping positions between adjacent steps.
        conflicts.extend(self._detect_edge_conflicts(space_time_table))
        # Following conflicts: two AGVs closer than the minimum safe distance.
        conflicts.extend(self._detect_following_conflicts(space_time_table))

        self.logger.info(f"Detected {len(conflicts)} path conflicts")
        return conflicts

    def _build_space_time_table(self, planned_paths: List[Dict]) -> "Dict[int, List[SpaceTimeNode]]":
        """Build the space-time table: time step -> nodes occupied at that step.

        codeList entries may be raw dicts or PathCode objects.
        """
        table = defaultdict(list)

        for path_data in planned_paths:
            agv_id = path_data.get('agvId', '')
            for step, path_code in enumerate(path_data.get('codeList', [])):
                if isinstance(path_code, dict):
                    position = path_code.get('code', '')
                    direction = path_code.get('direction', '90')
                else:
                    position = path_code.code
                    direction = path_code.direction

                coordinates = get_coordinate_from_path_id(position, self.path_mapping)
                if coordinates:
                    table[step].append(SpaceTimeNode(
                        agv_id=agv_id,
                        position=position,
                        coordinates=coordinates,
                        time_step=step,
                        direction=direction
                    ))

        return table

    def _detect_vertex_conflicts(self, space_time_table: "Dict[int, List[SpaceTimeNode]]") -> "List[Conflict]":
        """Detect vertex conflicts: multiple AGVs at one position at the same step."""
        conflicts = []

        for time_step, nodes in space_time_table.items():
            # Group the step's nodes by position.
            position_groups = defaultdict(list)
            for node in nodes:
                position_groups[node.position].append(node)

            # Any position with more than one AGV yields a conflict per pair.
            for position, agv_nodes in position_groups.items():
                if len(agv_nodes) > 1:
                    for i in range(len(agv_nodes)):
                        for j in range(i + 1, len(agv_nodes)):
                            conflicts.append(Conflict(
                                type="vertex",
                                agv1=agv_nodes[i].agv_id,
                                agv2=agv_nodes[j].agv_id,
                                time_step=time_step,
                                position1=position,
                                position2=position,
                                description=f"AGV {agv_nodes[i].agv_id} and {agv_nodes[j].agv_id} occupy the same position {position} at time {time_step}"
                            ))

        return conflicts

    def _detect_edge_conflicts(self, space_time_table: "Dict[int, List[SpaceTimeNode]]") -> "List[Conflict]":
        """Detect edge conflicts: two AGVs swapping positions between steps t and t+1."""
        conflicts = []

        max_time = max(space_time_table.keys()) if space_time_table else 0

        for time_step in range(max_time):
            current_nodes = space_time_table.get(time_step, [])
            next_nodes = space_time_table.get(time_step + 1, [])

            # Map each AGV to its position at t and t+1.
            current_positions = {node.agv_id: node.position for node in current_nodes}
            next_positions = {node.agv_id: node.position for node in next_nodes}

            for agv1, pos1_current in current_positions.items():
                for agv2, pos2_current in current_positions.items():
                    if agv1 >= agv2:  # check each unordered pair once
                        continue

                    pos1_next = next_positions.get(agv1)
                    pos2_next = next_positions.get(agv2)

                    # A swap means each AGV moves into the other's previous cell.
                    if (pos1_next and pos2_next and
                            pos1_current == pos2_next and pos2_current == pos1_next):
                        conflicts.append(Conflict(
                            type="edge",
                            agv1=agv1,
                            agv2=agv2,
                            time_step=time_step,
                            position1=pos1_current,
                            position2=pos2_current,
                            description=f"AGV {agv1} and {agv2} swap positions {pos1_current}<->{pos2_current} between times {time_step}-{time_step+1}"
                        ))

        return conflicts

    def _detect_following_conflicts(self, space_time_table: "Dict[int, List[SpaceTimeNode]]") -> "List[Conflict]":
        """Detect following conflicts: two AGVs closer than min_distance at a step."""
        conflicts = []

        for time_step, nodes in space_time_table.items():
            for i in range(len(nodes)):
                for j in range(i + 1, len(nodes)):
                    node1, node2 = nodes[i], nodes[j]

                    # Euclidean distance between the two AGVs.
                    distance = math.hypot(
                        node1.coordinates[0] - node2.coordinates[0],
                        node1.coordinates[1] - node2.coordinates[1]
                    )

                    # Zero distance is a vertex conflict, reported separately.
                    if 0 < distance < self.min_distance:
                        conflicts.append(Conflict(
                            type="follow",
                            agv1=node1.agv_id,
                            agv2=node2.agv_id,
                            time_step=time_step,
                            position1=node1.position,
                            position2=node2.position,
                            description=f"AGV {node1.agv_id} and {node2.agv_id} are too close at time {time_step} ({distance:.2f} < {self.min_distance})"
                        ))

        return conflicts
| | | |
| | | |
class CollisionResolver:
    """AGV path collision resolver.

    For each conflict reported by a CollisionDetector the two involved AGVs
    are scored (task status, action type, task priority, battery voltage) and
    a wait step is inserted into the path of the lower-scoring AGV.  The class
    can also normalize paths to strict four-direction (0/90/180/270) movement.
    """

    def __init__(self, path_mapping: Dict[str, Dict[str, int]], detector: "CollisionDetector", agv_manager=None):
        """
        Initialize the collision resolver.

        Args:
            path_mapping: mapping from path-point code to its grid data.
            detector: collision detector instance.
            agv_manager: optional AGV manager used to look up AGV state (voltage).
        """
        self.path_mapping = path_mapping
        self.detector = detector
        self.agv_manager = agv_manager
        self.logger = logging.getLogger(__name__)

    def evaluate_agv_priority(self, agv_id: str, path: Dict, executing_tasks: List[Dict] = None) -> "AGVPriority":
        """
        Score an AGV's right-of-way priority (a higher score yields less).

        Weighted sum of four factors: task status (40%), action type (30%),
        task priority (20%) and battery voltage (10%).

        Args:
            agv_id: AGV identifier.
            path: the AGV's planned path; its first code may carry the action type.
            executing_tasks: currently executing task records, if available.

        Returns:
            AGVPriority: score plus a human-readable explanation.
        """
        # Defaults used when the AGV or its task cannot be resolved.
        task_status = "idle"
        action_type = "unknown"
        task_priority = 1
        voltage = 100
        priority_score = 0.0
        explanation_parts = []

        # Battery voltage from the AGV manager, when one is attached.
        agv_model = None
        if self.agv_manager:
            agv_model = self.agv_manager.get_agv_by_id(agv_id)
            if agv_model:
                voltage = agv_model.voltage

        # The first path code may carry the current action type.
        code_list = path.get('codeList', [])
        if code_list:
            first_code = code_list[0]
            if isinstance(first_code, dict):
                action_type = first_code.get('type', 'unknown')

        # Task status, plus a coarse priority inferred from the task id text.
        if executing_tasks:
            for task in executing_tasks:
                if task.get('agvId') == agv_id:
                    task_status = task.get('status', 'idle')
                    task_id = task.get('taskId', '')
                    if 'HIGH_PRIORITY' in task_id:
                        task_priority = 10
                    elif 'PRIORITY' in task_id:
                        task_priority = 8
                    else:
                        task_priority = 5
                    break

        # Higher score = higher priority = less willing to yield.
        priority_score = 0.0

        # 1. Task status (40% weight).
        if task_status == "executing":
            priority_score += 40.0
            explanation_parts.append("æ§è¡ä»»å¡ä¸­(+40)")
        elif task_status == "assigned":
            priority_score += 20.0
            explanation_parts.append("å·²åéä»»å¡(+20)")
        else:
            priority_score += 5.0
            explanation_parts.append("ç©ºé²ç¶æ(+5)")

        # 2. Action type (30% weight).
        if action_type == AGVActionTypeEnum.TASK.value:
            priority_score += 30.0
            explanation_parts.append("ä»»å¡è¡ä¸º(+30)")
        elif action_type == AGVActionTypeEnum.CHARGING.value:
            priority_score += 25.0
            explanation_parts.append("åçµè¡ä¸º(+25)")
        elif action_type == AGVActionTypeEnum.AVOIDANCE.value:
            priority_score += 5.0
            explanation_parts.append("é¿è®©è¡ä¸º(+5)")
        elif action_type == AGVActionTypeEnum.STANDBY.value:
            priority_score += 10.0
            explanation_parts.append("å¾æºè¡ä¸º(+10)")
        else:
            priority_score += 15.0
            explanation_parts.append("æªç¥è¡ä¸º(+15)")

        # 3. Task priority (20% weight), scaled from the 1-10 range.
        task_priority_score = (task_priority / 10.0) * 20.0
        priority_score += task_priority_score
        explanation_parts.append(f"ä»»å¡ä¼åçº§{task_priority}(+{task_priority_score:.1f})")

        # 4. Battery level (10% weight): low battery raises priority.
        if voltage <= 10:  # must charge
            priority_score += 10.0
            explanation_parts.append("çµéå±é©(+10)")
        elif voltage <= 20:  # may auto-charge
            priority_score += 8.0
            explanation_parts.append("çµéåä½(+8)")
        elif voltage <= 50:
            priority_score += 5.0
            explanation_parts.append("çµéä¸è¬(+5)")
        else:
            priority_score += 2.0
            explanation_parts.append("çµéåè¶³(+2)")

        explanation = f"æ»å{priority_score:.1f}: " + ", ".join(explanation_parts)

        return AGVPriority(
            agv_id=agv_id,
            priority_score=priority_score,
            task_status=task_status,
            action_type=action_type,
            task_priority=task_priority,
            voltage=voltage,
            explanation=explanation
        )

    def resolve_conflicts(self, planned_paths: List[Dict], conflicts: List["Conflict"], executing_tasks: List[Dict] = None) -> List[Dict]:
        """
        Resolve all conflicts, earliest first.

        Args:
            planned_paths: originally planned paths.
            conflicts: conflicts to resolve.
            executing_tasks: currently executing task records.

        Returns:
            List[Dict]: paths with wait steps inserted where needed.
        """
        if not conflicts:
            return planned_paths

        resolved_paths = [path.copy() for path in planned_paths]

        # Resolve earlier conflicts first so inserted waits shift later steps.
        conflicts_sorted = sorted(conflicts, key=lambda c: c.time_step)

        for conflict in conflicts_sorted:
            resolved_paths = self._resolve_single_conflict(resolved_paths, conflict, executing_tasks)

        self.logger.info(f"è§£å³äº {len(conflicts)} ä¸ªè·¯å¾å²çª")
        return resolved_paths

    def _resolve_single_conflict(self, paths: List[Dict], conflict: "Conflict", executing_tasks: List[Dict] = None) -> List[Dict]:
        """
        Dispatch one conflict to the handler for its type.

        Args:
            paths: current path list.
            conflict: the conflict to resolve.
            executing_tasks: currently executing task records.

        Returns:
            List[Dict]: updated path list (unchanged for unknown types).
        """
        if conflict.type == "vertex":
            return self._resolve_vertex_conflict(paths, conflict, executing_tasks)
        elif conflict.type == "edge":
            return self._resolve_edge_conflict(paths, conflict, executing_tasks)
        elif conflict.type == "follow":
            return self._resolve_following_conflict(paths, conflict, executing_tasks)

        return paths

    def _locate_conflict_paths(self, paths: List[Dict], conflict: "Conflict"):
        """Find the two conflicting AGVs' paths inside `paths`.

        Returns:
            tuple: (agv1_path, agv1_index, agv2_path, agv2_index); a path is
            None (index -1) when that AGV is not present.
        """
        agv1_path = agv2_path = None
        agv1_index = agv2_index = -1

        for i, path in enumerate(paths):
            if path.get('agvId') == conflict.agv1:
                agv1_path, agv1_index = path, i
            elif path.get('agvId') == conflict.agv2:
                agv2_path, agv2_index = path, i

        return agv1_path, agv1_index, agv2_path, agv2_index

    def _decide_yielding_agv(self, paths: List[Dict], conflict: "Conflict", executing_tasks: List[Dict] = None):
        """Score both AGVs of a conflict and decide which one must wait.

        Shared by the vertex/edge/follow handlers (previously triplicated).

        Returns:
            Optional[Dict]: decision record with both priorities and the
            waiting AGV's path/index/id, or None when either AGV has no path.
        """
        agv1_path, agv1_index, agv2_path, agv2_index = self._locate_conflict_paths(paths, conflict)
        if not agv1_path or not agv2_path:
            return None

        agv1_priority = self.evaluate_agv_priority(conflict.agv1, agv1_path, executing_tasks)
        agv2_priority = self.evaluate_agv_priority(conflict.agv2, agv2_path, executing_tasks)

        # A tie makes agv1 yield (matches the original <= comparison).
        if agv1_priority.priority_score <= agv2_priority.priority_score:
            waiting_path, waiting_index = agv1_path, agv1_index
            waiting_id, winner_id = conflict.agv1, conflict.agv2
        else:
            waiting_path, waiting_index = agv2_path, agv2_index
            waiting_id, winner_id = conflict.agv2, conflict.agv1

        return {
            'agv1_priority': agv1_priority,
            'agv2_priority': agv2_priority,
            'waiting_path': waiting_path,
            'waiting_index': waiting_index,
            'waiting_id': waiting_id,
            'winner_id': winner_id,
        }

    def _resolve_vertex_conflict(self, paths: List[Dict], conflict: "Conflict", executing_tasks: List[Dict] = None) -> List[Dict]:
        """Resolve a vertex conflict: the lower-priority AGV waits one step."""
        updated_paths = paths.copy()

        decision = self._decide_yielding_agv(updated_paths, conflict, executing_tasks)
        if decision is None:
            return updated_paths

        # Log the full yield decision for traceability.
        self.logger.info(f"é¡¶ç¹å²çªé¿è®©å³ç­ - ä½ç½®: {conflict.position1}, æ¶é´: {conflict.time_step}")
        self.logger.info(f" AGV {conflict.agv1}: {decision['agv1_priority'].explanation}")
        self.logger.info(f" AGV {conflict.agv2}: {decision['agv2_priority'].explanation}")
        self.logger.info(f" å³ç­: AGV {decision['waiting_id']} è®©æ¥ç» AGV {decision['winner_id']}")

        # Insert a wait step just before the conflicting position.
        self._insert_wait_step(decision['waiting_path'], conflict.time_step)
        updated_paths[decision['waiting_index']] = decision['waiting_path']

        return updated_paths

    def _resolve_edge_conflict(self, paths: List[Dict], conflict: "Conflict", executing_tasks: List[Dict] = None) -> List[Dict]:
        """Resolve an edge (position-swap) conflict: the lower-priority AGV waits."""
        updated_paths = paths.copy()

        decision = self._decide_yielding_agv(updated_paths, conflict, executing_tasks)
        if decision is None:
            return updated_paths

        # Log the full yield decision for traceability.
        self.logger.info(f"è¾¹å²çªé¿è®©å³ç­ - ä½ç½®äº¤æ¢: {conflict.position1}<->{conflict.position2}, æ¶é´: {conflict.time_step}")
        self.logger.info(f" AGV {conflict.agv1}: {decision['agv1_priority'].explanation}")
        self.logger.info(f" AGV {conflict.agv2}: {decision['agv2_priority'].explanation}")
        self.logger.info(f" å³ç­: AGV {decision['waiting_id']} è®©æ¥ç» AGV {decision['winner_id']}")

        # Insert a wait step just before the conflicting move.
        self._insert_wait_step(decision['waiting_path'], conflict.time_step)
        updated_paths[decision['waiting_index']] = decision['waiting_path']

        return updated_paths

    def _resolve_following_conflict(self, paths: List[Dict], conflict: "Conflict", executing_tasks: List[Dict] = None) -> List[Dict]:
        """Resolve a following (too-close) conflict: the lower-priority AGV waits."""
        updated_paths = paths.copy()

        decision = self._decide_yielding_agv(updated_paths, conflict, executing_tasks)
        if decision is None:
            return updated_paths

        # Log the full yield decision for traceability.
        self.logger.info(f"è·éå²çªé¿è®©å³ç­ - è·ç¦»è¿è¿, æ¶é´: {conflict.time_step}")
        self.logger.info(f" AGV {conflict.agv1} (ä½ç½®: {conflict.position1}): {decision['agv1_priority'].explanation}")
        self.logger.info(f" AGV {conflict.agv2} (ä½ç½®: {conflict.position2}): {decision['agv2_priority'].explanation}")
        self.logger.info(f" å³ç­: AGV {decision['waiting_id']} è®©æ¥ç» AGV {decision['winner_id']}")

        # Insert a wait step just before the conflicting step.
        self._insert_wait_step(decision['waiting_path'], conflict.time_step)
        updated_paths[decision['waiting_index']] = decision['waiting_path']

        return updated_paths

    def _insert_wait_step(self, path: Dict, time_step: int):
        """
        Insert a wait step (a repeat of the previous position) at `time_step`.

        Modifies `path` in place.  NOTE(review): `codeList` itself is shared
        with the caller's original path dict (only the outer dict is copied
        upstream) - confirm callers expect that.

        Args:
            path: AGV path dict containing 'codeList'.
            time_step: index in codeList before which the wait is inserted.
        """
        code_list = path.get('codeList', [])

        # A wait needs a previous code, so only strictly interior indices work.
        if time_step < len(code_list) and time_step > 0:
            prev_code = code_list[time_step - 1]

            # Duplicate the previous step; keep every field when it is a dict.
            if isinstance(prev_code, dict):
                wait_code = prev_code.copy()
            else:
                wait_code = {
                    'code': prev_code.code,
                    'direction': prev_code.direction
                }

            code_list.insert(time_step, wait_code)
            path['codeList'] = code_list

    def validate_four_direction_movement(self, planned_paths: List[Dict]) -> List[Dict]:
        """
        Validate and correct paths so AGVs only move at 0/90/180/270 degrees.

        Args:
            planned_paths: planned path list.

        Returns:
            List[Dict]: corrected path list.
        """
        validated_paths = []

        for path_data in planned_paths:
            validated_path = self._validate_single_path_directions(path_data)
            validated_paths.append(validated_path)

        return validated_paths

    def _validate_single_path_directions(self, path_data: Dict) -> Dict:
        """
        Validate the movement directions of a single path.

        Args:
            path_data: a single AGV path record.

        Returns:
            Dict: corrected path record (paths shorter than 2 are returned as-is).
        """
        validated_path = path_data.copy()
        code_list = path_data.get('codeList', [])

        if len(code_list) < 2:
            return validated_path

        validated_code_list = []

        for i in range(len(code_list)):
            current_code = code_list[i]

            if isinstance(current_code, dict):
                position = current_code.get('code', '')
                direction = current_code.get('direction', '90')

                # Preserve every original field of the code entry.
                validated_code = current_code.copy()
            else:
                position = current_code.code
                direction = current_code.direction

                # Convert object form to a dict, keeping the basic fields.
                validated_code = {
                    'code': position,
                    'direction': direction
                }

            # Past the first point, derive the direction from the actual move.
            if i > 0:
                prev_code = code_list[i - 1]
                prev_position = prev_code.get('code', '') if isinstance(prev_code, dict) else prev_code.code

                correct_direction = self._calculate_movement_direction(prev_position, position)
                if correct_direction:
                    direction = correct_direction

            # Clamp to one of the four cardinal directions.
            direction = self._normalize_direction(direction)

            # Update the direction while keeping all other fields.
            validated_code['direction'] = direction
            validated_code_list.append(validated_code)

        validated_path['codeList'] = validated_code_list
        return validated_path

    def _calculate_movement_direction(self, from_position: str, to_position: str) -> Optional[str]:
        """
        Compute the movement direction from one position to another.

        Args:
            from_position: start path-point code.
            to_position: target path-point code.

        Returns:
            Optional[str]: direction ("0", "90", "180", "270"), or None when a
            position cannot be resolved to coordinates.
        """
        from_coord = get_coordinate_from_path_id(from_position, self.path_mapping)
        to_coord = get_coordinate_from_path_id(to_position, self.path_mapping)

        if not from_coord or not to_coord:
            return None

        dx = to_coord[0] - from_coord[0]
        dy = to_coord[1] - from_coord[1]

        # Only the four axis-aligned movements are allowed.
        if dx > 0 and dy == 0:
            return "0"    # +x
        elif dx < 0 and dy == 0:
            return "180"  # -x
        elif dx == 0 and dy > 0:
            return "90"   # +y
        elif dx == 0 and dy < 0:
            return "270"  # -y
        else:
            # Diagonal / in-place move: fall back to the default direction.
            return "90"

    def _normalize_direction(self, direction: str) -> str:
        """
        Normalize a direction value to one of "0", "90", "180", "270".

        Args:
            direction: raw direction value.

        Returns:
            str: nearest cardinal direction ("90" for unparseable input).
        """
        try:
            angle = int(direction) % 360
        except (ValueError, TypeError):  # narrowed from a bare except:
            return "90"  # default direction

        # Map the angle onto the nearest cardinal direction.
        if angle <= 45 or angle > 315:
            return "0"
        elif 45 < angle <= 135:
            return "90"
        elif 135 < angle <= 225:
            return "180"
        else:
            return "270"
New file |
| | |
| | | """ |
| | | è·¯å¾è§åç®æ³ |
| | | """ |
| | | import heapq |
| | | import random |
| | | import math |
| | | import time |
| | | import logging |
| | | from typing import Dict, List, Tuple, Optional, Set, Any |
| | | from collections import defaultdict, deque |
| | | |
| | | from common.data_models import PlannedPath, PathCode, AGVStatus, BackpackData |
| | | from common.utils import get_coordinate_from_path_id, calculate_distance, calculate_manhattan_distance, generate_segment_id, generate_navigation_code |
| | | from algorithm_system.algorithms.collision_detection import CollisionDetector, CollisionResolver |
| | | |
| | | |
| | | class ExecutingTaskExtractor: |
| | | """æ§è¡ä¸ä»»å¡æåå¨""" |
| | | |
| | | def __init__(self, path_mapping: Dict[str, Dict[str, int]]): |
| | | """ |
| | | åå§å任塿åå¨ |
| | | |
| | | Args: |
| | | path_mapping: è·¯å¾ç¹æ å°åå
¸ |
| | | """ |
| | | self.path_mapping = path_mapping |
| | | self.logger = logging.getLogger(__name__) |
| | | |
| | | # é¢è®¡ç®åæ æ å° |
| | | self.coord_to_code = {(data['x'], data['y']): code for code, data in path_mapping.items()} |
| | | |
| | | # é¢åç±»ä½ç½®ç±»å |
| | | self.pickup_positions = [pos for pos in path_mapping.keys() if pos.startswith("1")] |
| | | self.charging_positions = [pos for pos in path_mapping.keys() if pos.startswith("2")] |
| | | self.delivery_positions = [pos for pos in path_mapping.keys() if pos.startswith("3")] |
| | | self.standby_positions = [pos for pos in path_mapping.keys() if pos.startswith("4")] |
| | | |
| | | def extract_optimized_executing_tasks(self, agv_status_list: List[AGVStatus]) -> List[Dict]: |
| | | """æºè½æåAGVæ§è¡ä»»å¡""" |
| | | executing_tasks = [] |
| | | |
| | | for agv_status in agv_status_list: |
| | | agv_id = agv_status.agvId |
| | | current_position = agv_status.position |
| | | |
| | | if not agv_status.backpack: |
| | | continue |
| | | |
| | | optimized_task = self._analyze_all_backpack_tasks( |
| | | agv_id, current_position, agv_status.backpack, agv_status |
| | | ) |
| | | |
| | | if optimized_task: |
| | | executing_tasks.append(optimized_task) |
| | | |
| | | self.logger.info(f"æºè½åæ {len(executing_tasks)} 个AGVçèç¯ä»»å¡å¹¶çææä¼è·¯å¾") |
| | | return executing_tasks |
| | | |
    def _analyze_all_backpack_tasks(self, agv_id: str, current_position: str,
                                   backpack: List[BackpackData], agv_status: AGVStatus) -> Optional[Dict]:
        """
        Analyze all backpack tasks of one AGV and determine the optimal execution strategy.

        Args:
            agv_id: AGV identifier.
            current_position: the AGV's current path-point code.
            backpack: backpack slots; each slot may reference a task.
            agv_status: full AGV status (direction/voltage/error are copied through).

        Returns:
            Optional[Dict]: record describing the chosen next action, or None
            when the backpack holds no parseable task or no strategy applies.
        """
        # 1. Extract and classify every valid backpack task.
        loaded_tasks = []    # tasks whose cargo is already on the AGV
        unloaded_tasks = []  # tasks not yet picked up
        all_tasks = []       # every parseable task

        self.logger.debug(f"[AGV {agv_id}] å¼å§åæèç¯ä»»å¡ï¼ä½ç½®: {current_position}")

        for i, bp in enumerate(backpack):
            self.logger.debug(f"[AGV {agv_id}] èç¯ä½ç½®{i+1}: taskId={bp.taskId}, loaded={bp.loaded}, execute={bp.execute}")

            if not bp.taskId:
                self.logger.debug(f"[AGV {agv_id}] èç¯ä½ç½®{i+1}: è·³è¿ï¼æ ä»»å¡IDï¼")
                continue

            # Resolve the task's start/end points and distances.
            task_info = self._parse_task_details(bp.taskId, current_position)
            if not task_info:
                self.logger.warning(f"[AGV {agv_id}] ä»»å¡ {bp.taskId} è§£æå¤±è´¥ï¼è·³è¿")
                continue

            task_data = {
                'backpack_item': bp,
                'task_info': task_info,
                'distance_to_start': task_info.get('distance_to_start', float('inf')),
                'distance_to_end': task_info.get('distance_to_end', float('inf'))
            }

            all_tasks.append(task_data)

            if bp.loaded:
                loaded_tasks.append(task_data)
                self.logger.debug(f"[AGV {agv_id}] ä»»å¡ {bp.taskId} å·²è£è½½ï¼è·ç¦»ç»ç¹: {task_info.get('distance_to_end', 'N/A')}")
            else:
                unloaded_tasks.append(task_data)
                self.logger.debug(f"[AGV {agv_id}] ä»»å¡ {bp.taskId} æªè£è½½ï¼è·ç¦»èµ·ç¹: {task_info.get('distance_to_start', 'N/A')}")

        self.logger.debug(f"[AGV {agv_id}] ä»»å¡åæç»æ: æ»ä»»å¡={len(all_tasks)}, å·²è£è½½={len(loaded_tasks)}, æªè£è½½={len(unloaded_tasks)}")

        # 2. No valid tasks at all: nothing to do.
        if not all_tasks:
            self.logger.debug(f"[AGV {agv_id}] æ²¡æææä»»å¡ï¼è¿åNone")
            return None

        # 3. Decide the optimal next action (pickup vs deliver).
        optimal_strategy = self._determine_next_best_action(
            loaded_tasks, unloaded_tasks, current_position
        )

        if not optimal_strategy:
            self.logger.debug(f"[AGV {agv_id}] å³ç­è¿åNoneï¼æ²¡ææä¼ç­ç¥")
            return None

        self.logger.debug(f"[AGV {agv_id}] å³ç­å®æ: {optimal_strategy['strategy']} - {optimal_strategy['action']}")
        self.logger.debug(f"[AGV {agv_id}] ç®æ ä½ç½®: {optimal_strategy['target_position']}")
        self.logger.debug(f"[AGV {agv_id}] å³ç­åå : {optimal_strategy.get('reason', 'N/A')}")

        # 4. Assemble the task record handed to the planner.
        result = {
            'agvId': agv_id,
            'currentPosition': current_position,
            'strategy': optimal_strategy['strategy'],
            'next_action': optimal_strategy['action'],
            'target_position': optimal_strategy['target_position'],
            'primary_task': optimal_strategy['selected_task'],
            'all_loaded_tasks': loaded_tasks,
            'all_unloaded_tasks': unloaded_tasks,
            'total_tasks_count': len(all_tasks),
            'estimated_efficiency': optimal_strategy.get('efficiency_score', 0),
            'decision_reason': optimal_strategy.get('reason', ''),
            'agvDirection': agv_status.direction,
            'agvVoltage': agv_status.vol,
            'agvError': agv_status.error
        }

        self.logger.debug(f"[AGV {agv_id}] æç»ä»»å¡ä¿¡æ¯æå»ºå®æï¼è¿åç»æ")
        return result
| | | |
    def _determine_next_best_action(self, loaded_tasks: List[Dict],
                                   unloaded_tasks: List[Dict], current_position: str) -> Optional[Dict]:
        """
        Determine the AGV's best next action: deliver a loaded task or pick up a new one.

        Args:
            loaded_tasks: tasks whose cargo is already on the AGV.
            unloaded_tasks: tasks not yet picked up.
            current_position: the AGV's current path-point code.

        Returns:
            Optional[Dict]: strategy record (strategy/action/target/score/reason),
            or None when there is no task at all.
        """
        # Combined-optimization policy:
        # 1. With loaded cargo on board, delivering is the baseline choice.
        if loaded_tasks:
            nearest_delivery = min(loaded_tasks, key=lambda t: t['distance_to_end'])

            # 2. With unloaded tasks as well, compare the two distances.
            if unloaded_tasks:
                nearest_pickup = min(unloaded_tasks, key=lambda t: t['distance_to_start'])

                # Pick up first only when the pickup point is markedly closer
                # (1.5x margin); otherwise deliver the loaded cargo first.
                if nearest_pickup['distance_to_start'] * 1.5 < nearest_delivery['distance_to_end']:
                    return {
                        'strategy': 'comprehensive_optimization',
                        'action': 'pickup',
                        'selected_task': nearest_pickup,
                        'target_position': nearest_pickup['task_info']['start_position'],
                        'efficiency_score': self._calculate_pickup_efficiency(nearest_pickup, current_position),
                        'reason': f"ç»¼åä¼åï¼åè´§ç¹({nearest_pickup['distance_to_start']})æ¯éè´§ç¹({nearest_delivery['distance_to_end']})ææ¾æ´è¿"
                    }
                else:
                    return {
                        'strategy': 'comprehensive_optimization',
                        'action': 'deliver',
                        'selected_task': nearest_delivery,
                        'target_position': nearest_delivery['task_info']['end_position'],
                        'efficiency_score': self._calculate_delivery_efficiency(nearest_delivery, current_position),
                        'reason': f"ç»¼åä¼åï¼ä¼åéè¾¾å·²è£è½½è´§ç©ï¼éè´§è·ç¦»{nearest_delivery['distance_to_end']}"
                    }
            else:
                # Only loaded tasks: deliver directly.
                return {
                    'strategy': 'comprehensive_optimization',
                    'action': 'deliver',
                    'selected_task': nearest_delivery,
                    'target_position': nearest_delivery['task_info']['end_position'],
                    'efficiency_score': self._calculate_delivery_efficiency(nearest_delivery, current_position),
                    'reason': f"ç»¼åä¼åï¼éè¾¾å¯ä¸å·²è£è½½è´§ç©ï¼è·ç¦»{nearest_delivery['distance_to_end']}"
                }

        # 3. Only unloaded tasks: go pick up the nearest one.
        elif unloaded_tasks:
            nearest_pickup = min(unloaded_tasks, key=lambda t: t['distance_to_start'])
            return {
                'strategy': 'comprehensive_optimization',
                'action': 'pickup',
                'selected_task': nearest_pickup,
                'target_position': nearest_pickup['task_info']['start_position'],
                'efficiency_score': self._calculate_pickup_efficiency(nearest_pickup, current_position),
                'reason': f"ç»¼åä¼åï¼åè´§å¯ä¸æªè£è½½ä»»å¡ï¼è·ç¦»{nearest_pickup['distance_to_start']}"
            }

        # 4. No tasks at all.
        return None
| | | |
| | | def _parse_task_details(self, task_id: str, current_position: str) -> Optional[Dict]: |
| | | """ |
| | | è§£æä»»å¡è¯¦ç»ä¿¡æ¯ï¼å
æ¬èµ·ç»ç¹åè·ç¦» |
| | | |
| | | Args: |
| | | task_id: ä»»å¡ID |
| | | current_position: å½åä½ç½® |
| | | |
| | | Returns: |
| | | Optional[Dict]: ä»»å¡è¯¦ç»ä¿¡æ¯ |
| | | """ |
| | | self.logger.debug(f"å¼å§è§£æä»»å¡ {task_id}ï¼å½åä½ç½®: {current_position}") |
| | | |
| | | start_pos, end_pos = self._fast_parse_task_start_end(task_id) |
| | | |
| | | if not start_pos or not end_pos: |
| | | self.logger.warning(f"ä»»å¡ {task_id} è§£æå¤±è´¥: start_pos={start_pos}, end_pos={end_pos}") |
| | | return None |
| | | |
| | | self.logger.debug(f"ä»»å¡ {task_id} è§£ææå: start={start_pos}, end={end_pos}") |
| | | |
| | | # 计ç®è·ç¦» |
| | | current_coord = get_coordinate_from_path_id(current_position, self.path_mapping) |
| | | start_coord = get_coordinate_from_path_id(start_pos, self.path_mapping) |
| | | end_coord = get_coordinate_from_path_id(end_pos, self.path_mapping) |
| | | |
| | | if not all([current_coord, start_coord, end_coord]): |
| | | return None |
| | | |
| | | distance_to_start = calculate_manhattan_distance(current_coord, start_coord) |
| | | distance_to_end = calculate_manhattan_distance(current_coord, end_coord) |
| | | |
| | | return { |
| | | 'task_id': task_id, |
| | | 'start_position': start_pos, |
| | | 'end_position': end_pos, |
| | | 'distance_to_start': distance_to_start, |
| | | 'distance_to_end': distance_to_end, |
| | | 'start_coord': start_coord, |
| | | 'end_coord': end_coord |
| | | } |
| | | |
| | | def _calculate_delivery_efficiency(self, task_data: Dict, current_position: str) -> float: |
| | | """ |
| | | 计ç®éè´§æçåæ° |
| | | |
| | | Args: |
| | | task_data: 任塿°æ® |
| | | current_position: å½åä½ç½® |
| | | |
| | | Returns: |
| | | float: æçåæ° (0-10) |
| | | """ |
| | | distance = task_data['distance_to_end'] |
| | | |
| | | # è·ç¦»è¶è¿æçè¶é« |
| | | distance_score = max(0, 10 - distance / 5) |
| | | |
| | | # å·²è£
è½½ç任塿é¢å¤å¥å± |
| | | loaded_bonus = 3.0 if task_data['backpack_item'].loaded else 0 |
| | | |
| | | return distance_score + loaded_bonus |
| | | |
| | | def _calculate_pickup_efficiency(self, task_data: Dict, current_position: str) -> float: |
| | | """ |
| | | 计ç®åè´§æçåæ° |
| | | |
| | | Args: |
| | | task_data: 任塿°æ® |
| | | current_position: å½åä½ç½® |
| | | |
| | | Returns: |
| | | float: æçåæ° (0-10) |
| | | """ |
| | | distance_to_start = task_data['distance_to_start'] |
| | | |
| | | # è·ç¦»è¶è¿æçè¶é« |
| | | distance_score = max(0, 10 - distance_to_start / 3) |
| | | |
| | | # å¾è¿çåè´§ç¹æé«å¥å± |
| | | if distance_to_start <= 3: |
| | | distance_score += 5.0 |
| | | elif distance_to_start <= 5: |
| | | distance_score += 2.0 |
| | | |
| | | return distance_score |
| | | |
    def _fast_parse_task_start_end(self, task_id: str) -> Tuple[Optional[str], Optional[str]]:
        """
        Parse a task id into its start and end path points.

        Two formats are supported:
        1. Standard format "xxx_start_end": the last two underscore-separated
           parts must both be known path points.
        2. Plain ids: a deterministic-per-process fallback derives start and
           end from the id's hash.  NOTE(review): str `hash()` is salted per
           process (PYTHONHASHSEED), so this fallback is NOT stable across
           restarts - confirm whether callers depend on stability.

        Args:
            task_id: task identifier.

        Returns:
            Tuple[Optional[str], Optional[str]]: (start, end), or (None, None)
            when nothing could be derived.
        """
        self.logger.debug(f"è§£æä»»å¡ID: {task_id}")

        # Method 1: standard underscore-separated format.
        if "_" in task_id:
            parts = task_id.split("_")
            self.logger.debug(f"ä»»å¡IDåå«ä¸å线ï¼åå²ç»æ: {parts}")

            if len(parts) >= 3:
                potential_start = parts[-2]
                potential_end = parts[-1]

                start_valid = potential_start in self.path_mapping
                end_valid = potential_end in self.path_mapping

                self.logger.debug(f"æ åæ ¼å¼è§£æç»æ: start={potential_start}(valid={start_valid}), end={potential_end}(valid={end_valid})")

                if start_valid and end_valid:
                    return potential_start, potential_end

            self.logger.debug(f"ä»»å¡IDåå²åé¨åæ°éä¸è¶³: {len(parts)} < 3")

        # Method 2: hash-based fallback for plain ids.
        self.logger.debug(f"å°è¯ç®åæ°å­IDå¤éè§£ææºå¶")

        # All known positions; at least two are needed (start != end).
        all_positions = list(self.path_mapping.keys())
        if len(all_positions) < 2:
            self.logger.warning(f"è·¯å¾æ å°ä½ç½®ä¸è¶³ï¼æ æ³ä¸ºä»»å¡åéèµ·ç»ç¹")
            return None, None

        # Derive start/end indices from different parts of the id's hash.
        try:
            task_hash = abs(hash(task_id))

            # Start index: low part of the hash.
            start_index = task_hash % len(all_positions)
            start_pos = all_positions[start_index]

            # End index: high part of the hash, nudged so it differs from start.
            end_index = (task_hash // len(all_positions)) % len(all_positions)
            if end_index == start_index:
                end_index = (end_index + 1) % len(all_positions)
            end_pos = all_positions[end_index]

            self.logger.debug(f"ç®åIDå¤éè§£ææå: ä»»å¡{task_id} -> start={start_pos}, end={end_pos}")
            return start_pos, end_pos

        except Exception as e:
            self.logger.error(f"ç®åIDå¤éè§£æå¤±è´¥: {e}")

        self.logger.warning(f"ä»»å¡ID {task_id} ææè§£ææ¹æ³é½å¤±è´¥ï¼è¿å None, None")
        return None, None
| | | |
| | | def _fast_parse_target(self, task_id: str, current_position: str, loaded: bool) -> Optional[str]: |
| | | """ |
| | | è§£æä»»å¡ç®æ ä½ç½® |
| | | |
| | | Args: |
| | | task_id: ä»»å¡ID |
| | | current_position: å½åä½ç½® |
| | | loaded: æ¯å¦å·²è£
è½½ |
| | | |
| | | Returns: |
| | | Optional[str]: ç®æ ä½ç½®ç |
| | | """ |
| | | _, task_end = self._fast_parse_task_start_end(task_id) |
| | | if task_end: |
| | | return task_end |
| | | |
| | | # åºäºhashå¼åè£
è½½ç¶æå¿«ééæ© |
| | | hash_val = hash(task_id) % 1000 |
| | | |
| | | if loaded: |
| | | # å·²è£
è½½ï¼éæ©åºåºåºå |
| | | target_positions = [pos for pos in self.delivery_positions if pos != current_position] |
| | | if not target_positions: |
| | | target_positions = [pos for pos in self.path_mapping.keys() if pos != current_position] |
| | | else: |
| | | # æªè£
è½½ï¼éæ©ä»åºåºå |
| | | target_positions = [pos for pos in self.pickup_positions if pos != current_position] |
| | | if not target_positions: |
| | | target_positions = [pos for pos in self.path_mapping.keys() if pos != current_position] |
| | | |
| | | if target_positions: |
| | | return target_positions[hash_val % len(target_positions)] |
| | | |
| | | return None |
| | | |
    def extract_executing_tasks(self, agv_status_list: List[AGVStatus]) -> List[Dict]:
        """Extract every task currently being executed, in the legacy flat format.

        Delegates to extract_optimized_executing_tasks() and flattens each
        optimized entry into the flat JSON-compatible dict expected by older
        callers.  Entries that fail conversion are logged and skipped.

        Args:
            agv_status_list: current status of all AGVs.

        Returns:
            List[Dict]: one legacy-format task dict per executing AGV.
        """
        self.logger.debug(f"å¼å§è½¬æ¢ä»»å¡ä¸ºæ åæ ¼å¼ï¼AGVæ°é: {len(agv_status_list)}")

        optimized_tasks = self.extract_optimized_executing_tasks(agv_status_list)

        self.logger.debug(f"æåç»ææ°é: {len(optimized_tasks)}")

        legacy_tasks = []
        for i, task in enumerate(optimized_tasks):
            try:
                self.logger.debug(f"转æ¢ä»»å¡ #{i+1}: AGV {task['agvId']}")

                primary_task = task['primary_task']
                task_info = primary_task['task_info']
                backpack_item = primary_task['backpack_item']

                self.logger.debug(f"  - primary_taskåå¨: {primary_task is not None}")
                self.logger.debug(f"  - task_infoåå¨: {task_info is not None}")
                self.logger.debug(f"  - backpack_itemåå¨: {backpack_item is not None}")

                # Derive the pickup/drop state from the planned next action.
                is_pickup_action = task['next_action'] == 'pickup'

                self.logger.debug(f"  - next_action: {task['next_action']}")
                self.logger.debug(f"  - is_pickup_action: {is_pickup_action}")
                self.logger.debug(f"  - target_position: {task['target_position']}")

                # Keep the original flat JSON format for backward compatibility.
                legacy_task = {
                    'agvId': task['agvId'],
                    'taskId': task_info['task_id'],
                    'currentPosition': task['currentPosition'],
                    'backpackIndex': backpack_item.index,
                    'status': 'executing',
                    'agvDirection': task['agvDirection'],
                    'agvVoltage': task['agvVoltage'],
                    'agvError': task['agvError'],
                    'loaded': backpack_item.loaded,
                    'needPickup': is_pickup_action,
                    'targetPosition': task['target_position'],
                    'taskStart': task_info['start_position'],
                    'taskEnd': task_info['end_position']
                }

                self.logger.debug(f"  - ä»»å¡æå»ºæå: {legacy_task['agvId']} -> {legacy_task['targetPosition']}")
                legacy_tasks.append(legacy_task)

            except Exception as e:
                # Best-effort conversion: a malformed entry must not abort the batch.
                self.logger.error(f"转æ¢ä»»å¡ #{i+1} 失败: {e}")
                self.logger.error(f"ä»»å¡å容: {task}")
                continue

        self.logger.info(f"使ç¨ç»¼åä¼åçç¥ï¼ä¸º {len(legacy_tasks)} 个AGVçææ åæ ¼å¼è·¯å¾")
        return legacy_tasks
| | | |
| | | |
class BatchPathPlanner:
    """Batch path planner: plans exactly one path per AGV per round,
    then detects and resolves inter-AGV collisions."""

    def __init__(self, path_mapping: Dict[str, Dict[str, int]],
                 algorithm_type: str = "A_STAR", agv_manager=None):
        """
        Initialize the batch path planner.

        Args:
            path_mapping: path-point mapping dictionary (code -> {'x': .., 'y': ..})
            algorithm_type: base path-planning algorithm type
            agv_manager: used to obtain AGV status information
        """
        self.path_mapping = path_mapping
        self.algorithm_type = algorithm_type
        self.agv_manager = agv_manager
        self.logger = logging.getLogger(__name__)

        # Initialize the optimized sub-components.
        self.task_extractor = ExecutingTaskExtractor(path_mapping)
        self.collision_detector = FastCollisionDetector(min_distance=3.0)
        self.collision_resolver = FastCollisionResolver(path_mapping, self.collision_detector, agv_manager)
        self.base_planner = PathPlanningFactory.create_path_planner(algorithm_type, path_mapping)

    def plan_all_agv_paths(self, agv_status_list: List[AGVStatus],
                          include_idle_agv: bool = False,
                          constraints: List[Tuple[int, int, float]] = None) -> Dict:
        """
        Plan paths for all AGVs (one path per AGV).

        Args:
            agv_status_list: AGV status list
            include_idle_agv: whether idle AGVs also receive charging/standby paths
            constraints: path constraints, (x, y, radius) exclusion circles

        Returns:
            Dict: result containing all planned paths plus timing/conflict stats
        """
        start_time = time.time()

        # 1. Extract executing tasks (backward-compatible flat format).
        executing_tasks = self.task_extractor.extract_executing_tasks(agv_status_list)

        # 2. Path planning for each executing task.
        planned_paths = []
        total_planning_time = 0
        planned_agv_ids = set()

        for task in executing_tasks:
            agv_id = task['agvId']
            current_pos = task['currentPosition']
            target_pos = task['targetPosition']
            task_id = task['taskId']

            self.logger.debug(f"å¼å§è·¯å¾è§å: AGV {agv_id}, ä»»å¡ {task_id}")
            self.logger.debug(f"  - current_pos: {current_pos} ({type(current_pos)})")
            self.logger.debug(f"  - target_pos: {target_pos} ({type(target_pos)})")

            # Skip AGVs that were already planned this round.
            if agv_id in planned_agv_ids:
                self.logger.debug(f"  - è·³è¿AGV {agv_id}ï¼å·²è§å")
                continue

            # Validate position data before planning.
            if not current_pos or not target_pos:
                self.logger.error(f"  - AGV {agv_id} ä½ç½®ä¿¡æ¯æ æ: current={current_pos}, target={target_pos}")
                continue

            # Normalize the path-code format.
            from common.utils import normalize_path_id
            normalized_current = normalize_path_id(current_pos)
            normalized_target = normalize_path_id(target_pos)

            self.logger.debug(f"  - æ ¼å¼æ åå: {current_pos} -> {normalized_current}, {target_pos} -> {normalized_target}")

            # Run the base planner and time it (milliseconds).
            path_start_time = time.time()
            self.logger.debug(f"  - è°ç¨base_planner.plan_path({normalized_current}, {normalized_target})")
            planned_path = self.base_planner.plan_path(normalized_current, normalized_target, constraints)
            path_end_time = time.time()

            planning_time = (path_end_time - path_start_time) * 1000
            total_planning_time += planning_time

            self.logger.debug(f"  - è·¯å¾è§åç»æ: {planned_path is not None}")
            if planned_path:
                self.logger.debug(f"  - è·¯å¾é¿åº¦: {len(planned_path.codeList) if hasattr(planned_path, 'codeList') else 'N/A'}")
            else:
                self.logger.error(f"  - AGV {agv_id} è·¯å¾è§å失败: {current_pos} -> {target_pos}")

            if planned_path:
                # Build the serializable path dict for this AGV.
                seg_id = generate_segment_id(agv_id=agv_id, task_id=task_id, action_type="2")

                path_dict = {
                    'agvId': agv_id,
                    'segId': seg_id,
                    'codeList': []
                }

                # Convert each path point to the enhanced navigation format.
                for i, path_code in enumerate(planned_path.codeList):
                    if isinstance(path_code, PathCode):
                        action_type = "2"  # task movement
                        pos_type = None
                        backpack_level = task.get('backpackIndex', 0)
                        is_target_point = False

                        # Only the last point of the path carries a pickup/drop action.
                        if i == len(planned_path.codeList) - 1:  # last path point
                            is_target_point = True
                            if task.get('loaded', False):
                                # Loaded: this leg goes to the end point to drop off.
                                pos_type = "2"  # drop-off
                                self.logger.debug(f"AGV {agv_id} å¨ç»ç¹ {path_code.code} 设置æ¾è´§å¨ä½")
                            else:
                                # Not loaded: this leg goes to the start point to pick up.
                                pos_type = "1"  # pickup
                                self.logger.debug(f"AGV {agv_id} å¨èµ·ç¹ {path_code.code} 设置åè´§å¨ä½")

                        enhanced_code = generate_navigation_code(
                            code=path_code.code,
                            direction=path_code.direction,
                            action_type=action_type,
                            task_id=task_id,
                            pos_type=pos_type,
                            backpack_level=backpack_level,
                            is_target_point=is_target_point
                        )
                        path_dict['codeList'].append(enhanced_code)
                    else:
                        # Fallback for plain-dict path points.
                        enhanced_code = generate_navigation_code(
                            code=path_code.get('code', ''),
                            direction=path_code.get('direction', '90'),
                            action_type="2",
                            task_id=task_id
                        )
                        path_dict['codeList'].append(enhanced_code)

                planned_paths.append(path_dict)
                planned_agv_ids.add(agv_id)
                self.logger.debug(f"AGV {agv_id} å¿«éè§å: {current_pos} -> {target_pos}")

        # 3. Optionally plan charging/standby paths for idle AGVs.
        if include_idle_agv:
            idle_paths = self._fast_plan_idle_agv_paths(agv_status_list, executing_tasks, constraints, planned_agv_ids)
            planned_paths.extend(idle_paths)

        # 4. De-duplicate: keep only the first path per AGV id.
        final_paths = []
        final_agv_ids = set()
        for path in planned_paths:
            agv_id = path['agvId']
            if agv_id not in final_agv_ids:
                final_paths.append(path)
                final_agv_ids.add(agv_id)
        planned_paths = final_paths

        # 5. Collision detection and resolution.
        conflicts = self.collision_detector.detect_conflicts(planned_paths)

        if conflicts:
            self.logger.info(f"åç° {len(conflicts)} 个碰æï¼å¿«éè§£å³ä¸...")
            planned_paths = self.collision_resolver.resolve_conflicts(planned_paths, conflicts, executing_tasks)
            remaining_conflicts = self.collision_detector.detect_conflicts(planned_paths)
        else:
            remaining_conflicts = []

        end_time = time.time()
        total_time = (end_time - start_time) * 1000

        # 6. Build the result payload.
        result = {
            'totalAgvs': len(agv_status_list),
            'executingTasksCount': len(executing_tasks),
            'plannedPathsCount': len(planned_paths),
            'totalPlanningTime': round(total_time, 2),
            'pathPlanningTime': round(total_planning_time, 2),
            'conflictsDetected': len(conflicts),
            'remainingConflicts': len(remaining_conflicts),
            'algorithm': f"Optimized_{self.algorithm_type}",
            'fourDirectionCompliant': True,
            'collisionFree': len(remaining_conflicts) == 0,
            'plannedPaths': planned_paths,
            'executingTasks': executing_tasks
        }

        self.logger.info(f"ä¼åæ¹éè·¯å¾è§å宿 - æ»AGV: {result['totalAgvs']}, "
                        f"æ§è¡ä¸ä»»å¡: {result['executingTasksCount']}, "
                        f"è§åè·¯å¾: {result['plannedPathsCount']}, "
                        f"æ»èæ¶: {result['totalPlanningTime']:.2f}ms, "
                        f"æ 碰æ: {result['collisionFree']}")

        return result

    def _fast_plan_idle_agv_paths(self, agv_status_list: List[AGVStatus],
                                 executing_tasks: List[Dict],
                                 constraints: List[Tuple[int, int, float]] = None,
                                 planned_agv_ids: Set[str] = None) -> List[Dict]:
        """
        Plan charging/standby paths for idle AGVs.

        NOTE(review): annotation uses Set[str] — confirm typing.Set is imported
        at the top of this module (the import block is outside this view).
        """
        idle_paths = []
        executing_agv_ids = {task['agvId'] for task in executing_tasks}

        if planned_agv_ids is None:
            planned_agv_ids = executing_agv_ids.copy()

        for agv_status in agv_status_list:
            agv_id = agv_status.agvId

            if agv_id in planned_agv_ids:
                continue

            if self._is_agv_idle_fast(agv_status):
                current_pos = agv_status.position
                target_pos = self._get_idle_agv_target_fast(agv_id, current_pos)

                if target_pos and target_pos != current_pos:
                    planned_path = self.base_planner.plan_path(current_pos, target_pos, constraints)

                    if planned_path:
                        # Action "3" = go charge, "4" = go to standby.
                        action_type = "3" if self._is_charging_path(target_pos) else "4"
                        seg_id = generate_segment_id(agv_id=agv_id, target_position=target_pos, action_type=action_type)

                        path_dict = {
                            'agvId': agv_id,
                            'segId': seg_id,
                            'codeList': []
                        }

                        for i, path_code in enumerate(planned_path.codeList):
                            if isinstance(path_code, PathCode):
                                # Idle-AGV paths carry no taskId/posType/lev fields.
                                enhanced_code = generate_navigation_code(
                                    code=path_code.code,
                                    direction=path_code.direction,
                                    action_type=action_type,
                                    is_target_point=False  # idle paths have no target-point action
                                )
                                path_dict['codeList'].append(enhanced_code)

                        idle_paths.append(path_dict)
                        planned_agv_ids.add(agv_id)

        return idle_paths

    def _is_agv_idle_fast(self, agv_status: AGVStatus) -> bool:
        """Return True when the AGV looks idle: status in {0, 1, 2}, no error,
        and no backpack slot that is both loaded and executing."""
        if agv_status.status not in [0, 1, 2] or agv_status.error != 0:
            return False

        # Check the backpack for active work.
        if agv_status.backpack:
            for item in agv_status.backpack[:2]:  # only the first two slots are checked
                if item.loaded and item.execute:
                    return False

        return True

    def _get_idle_agv_target_fast(self, agv_id: str, current_position: str) -> Optional[str]:
        """Pick a deterministic idle target: charging > standby > any other node."""
        hash_val = hash(agv_id) % 1000

        # Selection priority: charging stations first.
        charging_positions = [pos for pos in self.task_extractor.charging_positions if pos != current_position]
        if charging_positions:
            return charging_positions[hash_val % len(charging_positions)]

        standby_positions = [pos for pos in self.task_extractor.standby_positions if pos != current_position]
        if standby_positions:
            return standby_positions[hash_val % len(standby_positions)]

        all_positions = [pos for pos in self.path_mapping.keys() if pos != current_position]
        if all_positions:
            return all_positions[hash_val % len(all_positions)]

        return None

    def _is_charging_path(self, target_position: str) -> bool:
        """
        Heuristically decide whether the target is a charging station.

        NOTE(review): the startswith("2") prefix rule is a site convention
        baked in here — confirm against the actual map-code scheme.
        """
        if not target_position:
            return False

        # Charging-station heuristic: "2"-prefixed code or "charge" in the name.
        return target_position.startswith("2") or "charge" in target_position.lower()
| | | |
| | | |
class PathPlanner:
    """Base class for path planners."""

    def __init__(self, path_mapping: Dict[str, Dict[str, int]]):
        """
        Initialize the planner.

        Args:
            path_mapping: path-point mapping dictionary (code -> coordinates)
        """
        self.path_mapping = path_mapping
        self.logger = logging.getLogger(__name__)

    def plan_path(self, start_code: str, end_code: str,
                 constraints: List[Tuple[int, int, float]] = None) -> Optional[PlannedPath]:
        """
        Plan a path from start to end.

        Args:
            start_code: start path-point ID
            end_code: target path-point ID
            constraints: constraint list

        Returns:
            Optional[PlannedPath]: the planned path, or None when unreachable

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("åç±»å¿é¡»å®ç°æ¤æ¹æ³")

    def is_valid_path_point(self, code: str) -> bool:
        """
        Check whether a path-point ID exists in the mapping.

        Args:
            code: path-point ID

        Returns:
            bool: True when the ID is known
        """
        return code in self.path_mapping
| | | |
| | | |
class AStarPathPlanner(PathPlanner):
    """A* path planner over a 4-connected grid of path points."""

    def __init__(self, path_mapping: Dict[str, Dict[str, int]], turn_cost: float = 0.5):
        """
        Initialize the A* planner.

        Args:
            path_mapping: path-point mapping dictionary (code -> {'x': .., 'y': ..})
            turn_cost: turning penalty.  NOTE(review): stored but never used by
                plan_path as written — confirm whether turn penalties are intended.
        """
        super().__init__(path_mapping)
        self.turn_cost = turn_cost
        self.logger = logging.getLogger(__name__)

        # Four directions: up, right, down, left (strict 4-direction movement).
        self.directions = [(0, -1), (1, 0), (0, 1), (-1, 0)]
        self.direction_angles = ["270", "0", "90", "180"]

        # Precomputed coordinate map (must exist before _build_adjacency_list runs).
        self.coord_to_code = {}
        for code, data in path_mapping.items():
            if isinstance(data, dict) and 'x' in data and 'y' in data:
                self.coord_to_code[(data['x'], data['y'])] = code

        # Precomputed adjacency list.
        self.adjacency = self._build_adjacency_list()

        # Performance counters.  NOTE(review): never updated by plan_path —
        # confirm whether stats are maintained elsewhere.
        self.planning_count = 0
        self.total_planning_time = 0.0

        self.logger.debug(f"A*è§åå¨åå§å宿ï¼é»æ¥è¡¨: {len(self.adjacency)} 个èç¹")

    def _build_adjacency_list(self) -> Dict[str, List[Tuple[str, str]]]:
        """
        Precompute the adjacency list.

        Returns:
            Dict[str, List[Tuple[str, str]]]: {node: [(neighbor node, direction), ...]}
        """
        adjacency = defaultdict(list)

        for code, coord_data in self.path_mapping.items():
            x, y = coord_data['x'], coord_data['y']

            # Probe the four neighboring cells.
            for i, (dx, dy) in enumerate(self.directions):
                next_x, next_y = x + dx, y + dy

                # Look up the neighbor's code, if that cell exists.
                neighbor_coord = (next_x, next_y)
                if neighbor_coord in self.coord_to_code:
                    neighbor_code = self.coord_to_code[neighbor_coord]
                    direction = self.direction_angles[i]
                    adjacency[code].append((neighbor_code, direction))

        return adjacency

    def plan_path(self, start_code: str, end_code: str,
                 constraints: List[Tuple[int, int, float]] = None) -> Optional[PlannedPath]:
        """
        Plan a path with A* using a Manhattan-distance heuristic.

        Args:
            start_code: start path-point ID
            end_code: target path-point ID
            constraints: (x, y, radius) exclusion circles

        Returns:
            Optional[PlannedPath]: the planned path, or None when unreachable
        """
        # Validate both endpoints.
        if not self.is_valid_path_point(start_code) or not self.is_valid_path_point(end_code):
            return None

        if start_code == end_code:
            return PlannedPath(agvId="", codeList=[PathCode(code=start_code, direction="90")])

        # Fetch coordinates (precomputed mapping).
        # NOTE(review): start_coord is unused below; only end_coord feeds the heuristic.
        start_coord = (self.path_mapping[start_code]['x'], self.path_mapping[start_code]['y'])
        end_coord = (self.path_mapping[end_code]['x'], self.path_mapping[end_code]['y'])

        # A* implementation.
        # Plain tuples on the heap to reduce object creation overhead.
        open_set = [(0, start_code, None, [])]  # (f_score, code, parent, path)
        closed_set = set()
        g_scores = {start_code: 0}

        # Precompiled constraint-check function (None when no constraints given).
        constraint_check = self._build_constraint_checker(constraints) if constraints else None

        while open_set:
            f_score, current_code, parent_code, path = heapq.heappop(open_set)

            if current_code in closed_set:
                continue

            closed_set.add(current_code)
            current_path = path + [current_code]

            if current_code == end_code:
                # Rebuild the path with per-step directions.
                return self._build_path_result(current_path)

            # Expand via the precomputed adjacency list.
            for neighbor_code, direction in self.adjacency[current_code]:
                if neighbor_code in closed_set:
                    continue

                # Constraint check: skip nodes inside an exclusion circle.
                if constraint_check and constraint_check(neighbor_code):
                    continue

                # Uniform unit step cost.
                g_cost = g_scores[current_code] + 1

                if neighbor_code not in g_scores or g_cost < g_scores[neighbor_code]:
                    g_scores[neighbor_code] = g_cost

                    # Manhattan-distance heuristic.
                    neighbor_coord = (self.path_mapping[neighbor_code]['x'],
                                    self.path_mapping[neighbor_code]['y'])
                    h_cost = abs(neighbor_coord[0] - end_coord[0]) + abs(neighbor_coord[1] - end_coord[1])
                    f_cost = g_cost + h_cost

                    heapq.heappush(open_set, (f_cost, neighbor_code, current_code, current_path))

        return None

    def _build_constraint_checker(self, constraints: List[Tuple[int, int, float]]):
        """Build a predicate that returns True for nodes inside any constraint circle."""
        def check_constraints(code: str) -> bool:
            coord = (self.path_mapping[code]['x'], self.path_mapping[code]['y'])
            for cx, cy, radius in constraints:
                if (coord[0] - cx) ** 2 + (coord[1] - cy) ** 2 <= radius ** 2:
                    return True
            return False
        return check_constraints

    def _build_path_result(self, path_codes: List[str]) -> PlannedPath:
        """Attach a movement direction to every node of the found path."""
        result_codes = []

        for i, code in enumerate(path_codes):
            direction = "90"  # default direction (also used for the final node)

            if i < len(path_codes) - 1:
                # Look up the direction toward the next node in the adjacency list.
                next_code = path_codes[i + 1]
                for neighbor_code, neighbor_direction in self.adjacency[code]:
                    if neighbor_code == next_code:
                        direction = neighbor_direction
                        break

            result_codes.append(PathCode(code=code, direction=direction))

        return PlannedPath(agvId="", codeList=result_codes)
| | | |
| | | |
# Collision detector
class FastCollisionDetector:
    """Fast collision detector based on per-time-step cell occupancy."""

    def __init__(self, min_distance: float = 3.0):
        # min_distance is kept for interface compatibility; the detection
        # below is exact same-cell occupancy per time step.
        self.min_distance = min_distance
        self.logger = logging.getLogger(__name__)

    def detect_conflicts(self, planned_paths: List[Dict]) -> List[Dict]:
        """
        Detect conflicts between planned paths (time-step based).

        Args:
            planned_paths: list of planned-path dicts ({'agvId', 'codeList', ...})

        Returns:
            List[Dict]: one conflict entry per (time step, position) that is
            occupied by more than one AGV.
        """
        found = []
        if len(planned_paths) < 2:
            return found

        # occupancy[t] -> list of (agv_id, position) present at step t.
        occupancy = defaultdict(list)

        for entry in planned_paths:
            agv = entry['agvId']
            for step, node in enumerate(entry.get('codeList', [])):
                # Extract the position code regardless of node representation.
                if isinstance(node, dict):
                    pos = node.get('code', '')
                elif hasattr(node, 'code'):
                    pos = node.code
                else:
                    pos = str(node)

                if pos:
                    occupancy[step].append((agv, pos))

        # Group by position within each time step and report duplicates.
        for step, holders in occupancy.items():
            if len(holders) < 2:
                continue

            by_pos = defaultdict(list)
            for agv, pos in holders:
                by_pos[pos].append(agv)

            for pos, agvs in by_pos.items():
                if len(agvs) > 1:
                    found.append({
                        'type': 'position_conflict',
                        'time_step': step,
                        'position': pos,
                        'agv_ids': agvs,
                        'severity': 'high'
                    })

        self.logger.debug(f"å¿«éæ£æµå° {len(found)} 个å²çª")
        return found
| | | |
| | | |
# Collision resolver
class FastCollisionResolver:
    """Resolves detected conflicts by delaying all but one of the AGVs involved."""

    def __init__(self, path_mapping: Dict[str, Dict[str, int]],
                collision_detector: FastCollisionDetector, agv_manager=None):
        self.path_mapping = path_mapping
        self.collision_detector = collision_detector
        self.agv_manager = agv_manager
        self.logger = logging.getLogger(__name__)

    def resolve_conflicts(self, planned_paths: List[Dict], conflicts: List[Dict], executing_tasks: List[Dict] = None) -> List[Dict]:
        """
        Resolve conflicts.

        Strategy: for each position conflict (processed in time order), keep the
        first AGV (smallest id) unchanged and delay each subsequent AGV by an
        increasing number of steps.

        Args:
            planned_paths: planned-path list
            conflicts: conflict list from FastCollisionDetector
            executing_tasks: executing-task list (currently unused)

        Returns:
            List[Dict]: path list after conflict resolution
        """
        if not conflicts:
            return planned_paths

        # Index paths by AGV id for O(1) access.
        paths_dict = {path['agvId']: path for path in planned_paths}

        # Process conflicts in time-step order.
        sorted_conflicts = sorted(conflicts, key=lambda x: x.get('time_step', 0))

        for conflict in sorted_conflicts:
            if conflict['type'] == 'position_conflict':
                agv_ids = conflict['agv_ids']
                time_step = conflict['time_step']

                # Simple policy: first AGV keeps its path, the rest get delayed.
                sorted_agv_ids = sorted(agv_ids)

                for i, agv_id in enumerate(sorted_agv_ids[1:], 1):
                    if agv_id in paths_dict:
                        self._add_delay_to_path(paths_dict[agv_id], delay_steps=i)
                        self.logger.debug(f"为AGV {agv_id} æ·»å {i} æ¥å»¶è¿")

        return list(paths_dict.values())

    def _add_delay_to_path(self, path_data: Dict, delay_steps: int):
        """Prepend delay steps to a path by repeating its first position.

        Each inserted entry is an independent dict: the original implementation
        inserted the SAME object delay_steps times, so a later mutation of one
        delay step would silently alter all of them.
        """
        if delay_steps <= 0 or not path_data.get('codeList'):
            return

        # Template built from the first position of the path.
        first_code = path_data['codeList'][0]

        if isinstance(first_code, dict):
            template = dict(first_code)
        else:
            template = {
                'code': first_code.code if hasattr(first_code, 'code') else str(first_code),
                'direction': first_code.direction if hasattr(first_code, 'direction') else "90"
            }

        # Insert one fresh copy per delay step (values identical to before,
        # but no shared mutable object between steps).
        path_data['codeList'][0:0] = [dict(template) for _ in range(delay_steps)]

    def validate_four_direction_movement(self, planned_paths: List[Dict]) -> List[Dict]:
        """Validate the 4-direction movement constraint (currently a no-op pass-through)."""
        return planned_paths
| | | |
| | | |
class PathPlanningFactory:
    """Factory for path-planner instances."""

    # Algorithm names that all map to the A* implementation.
    @staticmethod
    def create_path_planner(algorithm_type: str, path_mapping: Dict[str, Dict[str, int]]) -> PathPlanner:
        """
        Create a path-planner instance.

        Args:
            algorithm_type: algorithm type name (case-insensitive)
            path_mapping: path-point mapping dictionary

        Returns:
            PathPlanner: a planner instance (A* for every known name, and the
            A* default — with a warning — for unknown names)
        """
        recognized = ("A_STAR", "ASTAR", "A*", "DIJKSTRA", "GREEDY")
        if algorithm_type.upper() not in recognized:
            # Unknown type: warn, then fall back to the A* default.
            logging.getLogger(__name__).warning(f"æªç¥ç®æ³ç±»å {algorithm_type}ï¼ä½¿ç¨é»è®¤A*ç®æ³")
        return AStarPathPlanner(path_mapping)

    @staticmethod
    def create_batch_path_planner(algorithm_type: str, path_mapping: Dict[str, Dict[str, int]],
                                agv_manager=None) -> BatchPathPlanner:
        """
        Create a batch path-planner instance.

        Args:
            algorithm_type: base algorithm type
            path_mapping: path-point mapping dictionary
            agv_manager: AGV manager, used to obtain AGV status information

        Returns:
            BatchPathPlanner: a batch planner instance
        """
        return BatchPathPlanner(path_mapping, algorithm_type, agv_manager)
| | | |
# Export the core classes of this module.
__all__ = [
    'ExecutingTaskExtractor',
    'BatchPathPlanner',
    'PathPlanner',
    'AStarPathPlanner',
    'PathPlanningFactory',
    'FastCollisionDetector',
    'FastCollisionResolver'
]
New file |
| | |
| | | """ |
| | | ä»»å¡åé
ç®æ³ |
| | | """ |
| | | import time |
| | | import random |
| | | import logging |
| | | from typing import Dict, List, Tuple, Optional, Set, Any |
| | | from collections import defaultdict |
| | | from abc import ABC, abstractmethod |
| | | |
| | | from common.data_models import TaskData, AGVStatus, TaskAssignment, BackpackData |
| | | from algorithm_system.models.agv_model import AGVModel, AGVModelManager |
| | | from common.utils import get_coordinate_from_path_id, calculate_distance, calculate_manhattan_distance |
| | | from dataclasses import dataclass |
| | | |
| | | |
class TaskAllocation(ABC):
    """Base class for task-allocation algorithms."""

    def __init__(self, agv_manager: AGVModelManager):
        """
        Initialize the allocation algorithm.

        Args:
            agv_manager: AGV model manager
        """
        self.agv_manager = agv_manager
        self.logger = logging.getLogger(__name__)

    @abstractmethod
    def allocate_tasks(self, tasks: List[TaskData]) -> List[TaskAssignment]:
        """
        Allocate tasks to AGVs.

        Args:
            tasks: tasks awaiting allocation

        Returns:
            List[TaskAssignment]: allocation results
        """
        pass

    def find_available_backpack_slot(self, agv_status: AGVStatus) -> Optional[int]:
        """
        Find a free backpack slot on an AGV.

        Args:
            agv_status: AGV status information

        Returns:
            Optional[int]: index of a free slot, or None when every slot is taken
        """
        if not agv_status.backpack:
            # No backpack info at all: fall back to slot 0.
            self.logger.warning(f"AGV {agv_status.agvId} 没æèç¯ä¿¡æ¯ï¼åéå°ä½ç½®0")
            return 0

        # A slot is free when it is empty, not executing, and unassigned.
        for backpack_item in agv_status.backpack:
            if not backpack_item.loaded and not backpack_item.execute and not backpack_item.taskId:
                self.logger.debug(f"AGV {agv_status.agvId} æ¾å°å¯ç¨èç¯ä½ç½®: {backpack_item.index}")
                return backpack_item.index

        # All slots occupied.
        self.logger.debug(f"AGV {agv_status.agvId} 没æå¯ç¨çèç¯ä½ç½®")
        return None

    def get_agv_available_capacity(self, agv_status: AGVStatus) -> int:
        """
        Get the AGV's free backpack capacity.

        Args:
            agv_status: AGV status information

        Returns:
            int: number of free backpack slots
        """
        if not agv_status.backpack:
            return 1  # Assume at least one backpack slot exists.

        available_count = 0
        for backpack_item in agv_status.backpack:
            if not backpack_item.loaded and not backpack_item.execute and not backpack_item.taskId:
                available_count += 1

        return available_count

    def assign_task_with_backpack(self, agv_model, task: TaskData, lev_id: int) -> bool:
        """
        Assign a task to the specified backpack slot of an AGV.

        NOTE(review): lev_id is only logged here, not forwarded to
        agv_model.assign_task — confirm the model derives the slot itself.

        Args:
            agv_model: AGV model
            task: task data
            lev_id: backpack slot index

        Returns:
            bool: whether the assignment succeeded
        """
        try:
            # Delegate to the AGV model's own assign_task method.
            success = agv_model.assign_task(
                task_id=task.taskId,
                priority=task.priority,
                start_code=task.start,
                end_code=task.end
            )

            if success:
                self.logger.info(f"ä»»å¡ {task.taskId} æååéç»AGV {agv_model.agvId} çèç¯ä½ç½® {lev_id}")
                return True
            else:
                self.logger.warning(f"ä»»å¡ {task.taskId} åéç»AGV {agv_model.agvId} 失败")
                return False

        except Exception as e:
            # Assignment must never propagate an exception to the caller.
            self.logger.error(f"åé任塿¶åçå¼å¸¸: {e}")
            return False
| | | |
| | | |
class NearestFirstAllocation(TaskAllocation):
    """Nearest-first allocation: each task goes to the closest AGV that can take it."""

    def allocate_tasks(self, tasks: List[TaskData]) -> List[TaskAssignment]:
        """
        Allocate tasks with a nearest-first strategy.

        Args:
            tasks: tasks awaiting allocation

        Returns:
            List[TaskAssignment]: allocation results (may be empty)
        """
        if not tasks:
            return []

        # Fetch the AGVs that can currently accept work.
        available_agvs = self.agv_manager.get_available_agvs()

        if not available_agvs:
            self.logger.warning("没æå¯ç¨çAGVè¿è¡ä»»å¡åé")
            return []

        # 1. Collect task ids already sitting in any AGV backpack so the same
        #    task is never allocated twice.
        already_assigned_tasks = set()
        all_agvs = self.agv_manager.get_all_agvs()
        for agv in all_agvs:
            if agv.backpack:
                for backpack_item in agv.backpack:
                    if backpack_item.taskId:
                        already_assigned_tasks.add(backpack_item.taskId)
                        self.logger.info(f"ä»»å¡ {backpack_item.taskId} å·²åéç» AGV {agv.agvId}ï¼è·³è¿éå¤åé")

        # 2. Filter out the already-assigned tasks.
        unassigned_tasks = [task for task in tasks if task.taskId not in already_assigned_tasks]

        if not unassigned_tasks:
            self.logger.info("ææä»»å¡é½å·²åéï¼æ ééæ°åé")
            return []

        self.logger.info(f"æ»ä»»å¡æ°: {len(tasks)}, å·²åé: {len(already_assigned_tasks)}, å¾åé: {len(unassigned_tasks)}")

        assignments = []
        path_mapping = self.agv_manager.path_mapping

        # Greedily match each task with its nearest AGV.
        for task in unassigned_tasks:
            if not available_agvs:
                break

            # Coordinates of the task's start point.
            task_start_coord = get_coordinate_from_path_id(task.start, path_mapping)
            if not task_start_coord:
                self.logger.warning(f"æ æ³è·åä»»å¡ {task.taskId} èµ·ç¹ {task.start} çåæ ")
                continue

            # Find the closest AGV by Manhattan distance, penalizing busy ones.
            nearest_agv = None
            min_distance = float('inf')

            for agv in available_agvs:
                if agv.coordinates:
                    distance = calculate_manhattan_distance(agv.coordinates, task_start_coord)

                    # Penalize AGVs that already carry tasks: assume extra time
                    # is needed to finish current work before the new task.
                    if agv.current_task_count > 0:
                        distance += agv.current_task_count * 10

                    if distance < min_distance:
                        min_distance = distance
                        nearest_agv = agv

            if nearest_agv and nearest_agv.can_accept_task(task.priority):
                # Fetch the raw status snapshot to locate a free backpack slot.
                agv_status = None
                for agv_state in self.agv_manager.get_all_agv_status():
                    if agv_state.agvId == nearest_agv.agvId:
                        agv_status = agv_state
                        break

                if agv_status:
                    # Find a free backpack slot.
                    available_lev_id = self.find_available_backpack_slot(agv_status)

                    if available_lev_id is not None:
                        # Assign the task into that slot.
                        success = self.assign_task_with_backpack(nearest_agv, task, available_lev_id)
                        if success:
                            assignments.append(TaskAssignment(
                                taskId=task.taskId,
                                agvId=nearest_agv.agvId,
                                lev_id=available_lev_id
                            ))

                            self.logger.info(f"ä»»å¡ {task.taskId} åéç»æè¿çAGV {nearest_agv.agvId}ï¼èç¯ä½ç½®: {available_lev_id}ï¼è·ç¦»: {min_distance}")

                            # Drop the AGV from the pool once its backpack is full.
                            # NOTE(review): capacity is computed from the
                            # pre-assignment status snapshot minus one — confirm
                            # it stays in sync with the model after assign_task.
                            remaining_capacity = self.get_agv_available_capacity(agv_status) - 1
                            if remaining_capacity <= 0:
                                available_agvs.remove(nearest_agv)
                        else:
                            self.logger.warning(f"ä»»å¡ {task.taskId} åéç»AGV {nearest_agv.agvId} 失败")
                    else:
                        self.logger.warning(f"AGV {nearest_agv.agvId} 没æå¯ç¨çèç¯ä½ç½®")
                        available_agvs.remove(nearest_agv)
                else:
                    self.logger.warning(f"æ æ³è·åAGV {nearest_agv.agvId} çç¶æä¿¡æ¯")

        return assignments
| | | |
| | | |
class LoadBalancedAllocation(TaskAllocation):
    """Load-balanced allocation: score every candidate AGV on load, distance
    and efficiency, and give each task to the best-scoring one."""

    def allocate_tasks(self, tasks: List[TaskData]) -> List[TaskAssignment]:
        """
        Allocate tasks using a load-balancing strategy.

        Args:
            tasks: tasks awaiting allocation

        Returns:
            List[TaskAssignment]: resulting assignments
        """
        if not tasks:
            return []

        # All AGVs participate; per-task eligibility is checked below.
        all_agvs = self.agv_manager.get_all_agvs()

        if not all_agvs:
            self.logger.warning("没有AGV进行任务分配")
            return []

        # 1. Collect task ids already sitting in some AGV's backpack so the
        #    same task is never assigned twice.
        already_assigned_tasks = set()
        for agv in all_agvs:
            if agv.backpack:
                for backpack_item in agv.backpack:
                    if backpack_item.taskId:
                        already_assigned_tasks.add(backpack_item.taskId)
                        self.logger.info(f"任务 {backpack_item.taskId} 已分配给 AGV {agv.agvId}，跳过重复分配")

        # 2. Filter out the already-assigned tasks.
        unassigned_tasks = [task for task in tasks if task.taskId not in already_assigned_tasks]

        if not unassigned_tasks:
            self.logger.info("所有任务都已分配，无需重新分配")
            return []

        self.logger.info(f"总任务数: {len(tasks)}, 已分配: {len(already_assigned_tasks)}, 待分配: {len(unassigned_tasks)}")

        assignments = []
        path_mapping = self.agv_manager.path_mapping

        # Sort tasks by priority, highest first.
        sorted_tasks = sorted(unassigned_tasks, key=lambda t: t.priority, reverse=True)

        # Give each task to the least-loaded suitable AGV.
        for task in sorted_tasks:
            # Resolve the task's start coordinate.
            task_start_coord = get_coordinate_from_path_id(task.start, path_mapping)
            if not task_start_coord:
                self.logger.warning(f"无法获取任务 {task.taskId} 起点 {task.start} 的坐标")
                continue

            # Score eligible AGVs on load and distance.
            agv_scores = []
            for agv in all_agvs:
                if not agv.can_accept_task(task.priority):
                    continue

                # Load score (lower load -> higher score).
                load_score = 1.0 - agv.get_workload_ratio()

                # Distance score (closer -> higher score).
                distance_score = 0.0
                if agv.coordinates and task_start_coord:
                    distance = calculate_manhattan_distance(agv.coordinates, task_start_coord)
                    distance_score = 1.0 / (1.0 + distance / 100.0)  # normalized distance score

                # Efficiency score.
                efficiency_score = agv.calculate_efficiency_score(path_mapping)

                # Weighted combination: load 0.4, distance 0.3, efficiency 0.3.
                total_score = 0.4 * load_score + 0.3 * distance_score + 0.3 * efficiency_score
                agv_scores.append((agv, total_score))

            if not agv_scores:
                # Break down why no AGV could accept the task.
                total_agvs = len(all_agvs)
                busy_count = 0
                low_battery_count = 0
                overloaded_count = 0
                status_invalid_count = 0

                for agv in all_agvs:
                    if agv.is_overloaded():
                        overloaded_count += 1
                    elif str(agv.status) not in ["0", "1", "2"]:
                        status_invalid_count += 1
                    elif agv.need_charging():
                        low_battery_count += 1
                    else:
                        # NOTE(review): AGVs that pass every explicit check land
                        # here ("other reasons") — the probabilistic accept in
                        # can_accept_task is the likely cause; confirm.
                        busy_count += 1

                self.logger.warning(f"没有AGV可以接受任务 {task.taskId} - 详细分析:")
                self.logger.warning(f"  总AGV数: {total_agvs}")
                self.logger.warning(f"  任务满载: {overloaded_count}")
                self.logger.warning(f"  状态异常: {status_invalid_count}")
                self.logger.warning(f"  电量过低: {low_battery_count}")
                self.logger.warning(f"  其他原因: {busy_count}")
                self.logger.warning(f"  任务优先级: {task.priority}")
                continue

            # Pick the highest-scoring AGV.
            agv_scores.sort(key=lambda x: x[1], reverse=True)
            best_agv = agv_scores[0][0]

            # Fetch the raw AGV status to look up a free backpack slot.
            agv_status = self.agv_manager.get_agv_status(best_agv.agvId)

            if agv_status:
                # Find a free backpack slot.
                available_lev_id = self.find_available_backpack_slot(agv_status)

                if available_lev_id is not None:
                    # Bind the task to the chosen backpack slot.
                    success = self.assign_task_with_backpack(best_agv, task, available_lev_id)
                    if success:
                        assignments.append(TaskAssignment(
                            taskId=task.taskId,
                            agvId=best_agv.agvId,
                            lev_id=available_lev_id
                        ))

                        self.logger.info(f"任务 {task.taskId} 分配给负载均衡的AGV {best_agv.agvId}，背篓位置: {available_lev_id}，得分: {agv_scores[0][1]:.3f}")
                    else:
                        self.logger.warning(f"任务 {task.taskId} 分配给AGV {best_agv.agvId} 失败")
                else:
                    self.logger.warning(f"AGV {best_agv.agvId} 没有可用的背篓位置")
            else:
                self.logger.warning(f"无法获取AGV {best_agv.agvId} 的状态信息")

        return assignments
| | | |
| | | |
class PriorityFirstAllocation(TaskAllocation):
    """Priority-first allocation: highest-priority tasks get first pick of
    the available AGVs."""

    def allocate_tasks(self, tasks: List[TaskData]) -> List[TaskAssignment]:
        """
        Allocate tasks using a priority-first strategy.

        Args:
            tasks: tasks awaiting allocation

        Returns:
            List[TaskAssignment]: resulting assignments
        """
        if not tasks:
            return []

        # Only AGVs that currently pass availability checks participate.
        available_agvs = self.agv_manager.get_available_agvs()

        if not available_agvs:
            self.logger.warning("没有可用的AGV进行任务分配")
            return []

        # 1. Collect task ids already sitting in some AGV's backpack so the
        #    same task is never assigned twice.
        already_assigned_tasks = set()
        all_agvs = self.agv_manager.get_all_agvs()
        for agv in all_agvs:
            if agv.backpack:
                for backpack_item in agv.backpack:
                    if backpack_item.taskId:
                        already_assigned_tasks.add(backpack_item.taskId)
                        self.logger.info(f"任务 {backpack_item.taskId} 已分配给 AGV {agv.agvId}，跳过重复分配")

        # 2. Filter out the already-assigned tasks.
        unassigned_tasks = [task for task in tasks if task.taskId not in already_assigned_tasks]

        if not unassigned_tasks:
            self.logger.info("所有任务都已分配，无需重新分配")
            return []

        self.logger.info(f"总任务数: {len(tasks)}, 已分配: {len(already_assigned_tasks)}, 待分配: {len(unassigned_tasks)}")

        # Sort tasks by priority, highest first.
        sorted_tasks = sorted(unassigned_tasks, key=lambda t: t.priority, reverse=True)

        assignments = []
        path_mapping = self.agv_manager.path_mapping

        # Assign high-priority tasks first.
        for task in sorted_tasks:
            if not available_agvs:
                break

            # Resolve the task's start coordinate.
            task_start_coord = get_coordinate_from_path_id(task.start, path_mapping)
            if not task_start_coord:
                self.logger.warning(f"无法获取任务 {task.taskId} 起点 {task.start} 的坐标")
                continue

            # Pick the best AGV for this task.
            best_agv = None
            best_score = -1

            for agv in available_agvs:
                if not agv.can_accept_task(task.priority):
                    continue

                # Composite score; distance dominates for high-priority work.
                distance_score = 0.0
                if agv.coordinates and task_start_coord:
                    distance = calculate_manhattan_distance(agv.coordinates, task_start_coord)
                    distance_score = 1.0 / (1.0 + distance / 50.0)

                efficiency_score = agv.calculate_efficiency_score(path_mapping)
                capacity_score = agv.get_task_capacity() / agv.max_capacity

                # High-priority tasks weigh distance and efficiency heavier.
                total_score = 0.5 * distance_score + 0.3 * efficiency_score + 0.2 * capacity_score

                if total_score > best_score:
                    best_score = total_score
                    best_agv = agv

            if best_agv:
                # Fetch the raw AGV status to look up a free backpack slot.
                agv_status = self.agv_manager.get_agv_status(best_agv.agvId)

                if agv_status:
                    # Find a free backpack slot.
                    available_lev_id = self.find_available_backpack_slot(agv_status)

                    if available_lev_id is not None:
                        # Bind the task to the chosen backpack slot.
                        success = self.assign_task_with_backpack(best_agv, task, available_lev_id)
                        if success:
                            assignments.append(TaskAssignment(
                                taskId=task.taskId,
                                agvId=best_agv.agvId,
                                lev_id=available_lev_id
                            ))

                            self.logger.info(f"高优先级任务 {task.taskId} (优先级: {task.priority}) 分配给AGV {best_agv.agvId}，背篓位置: {available_lev_id}")

                            # Retire the AGV from this round once its last free
                            # slot has been consumed.
                            # NOTE(review): capacity is read from the raw status
                            # fetched *before* the assignment — verify that
                            # assign_task_with_backpack keeps it in sync.
                            remaining_capacity = self.get_agv_available_capacity(agv_status) - 1
                            if remaining_capacity <= 0:
                                available_agvs.remove(best_agv)
                        else:
                            self.logger.warning(f"任务 {task.taskId} 分配给AGV {best_agv.agvId} 失败")
                    else:
                        self.logger.warning(f"AGV {best_agv.agvId} 没有可用的背篓位置")
                        available_agvs.remove(best_agv)
                else:
                    self.logger.warning(f"无法获取AGV {best_agv.agvId} 的状态信息")

        return assignments
| | | |
| | | |
class MultiObjectiveAllocation(TaskAllocation):
    """Multi-objective allocation: weighted distance/load/efficiency scoring
    of every task-AGV pair followed by greedy matching."""

    def __init__(self, agv_manager: AGVModelManager,
                 distance_weight: float = 0.4,
                 load_weight: float = 0.3,
                 efficiency_weight: float = 0.3):
        """
        Initialize the multi-objective allocator.

        Args:
            agv_manager: AGV model manager
            distance_weight: weight of the distance objective
            load_weight: weight of the load objective
            efficiency_weight: weight of the efficiency objective
        """
        super().__init__(agv_manager)
        self.distance_weight = distance_weight
        self.load_weight = load_weight
        self.efficiency_weight = efficiency_weight

    def allocate_tasks(self, tasks: List[TaskData]) -> List[TaskAssignment]:
        """
        Allocate tasks using multi-objective optimization.

        Args:
            tasks: tasks awaiting allocation

        Returns:
            List[TaskAssignment]: resulting assignments
        """
        if not tasks:
            return []

        # All AGVs participate; per-pair eligibility is checked below.
        all_agvs = self.agv_manager.get_all_agvs()

        if not all_agvs:
            self.logger.warning("没有AGV进行任务分配")
            return []

        # 1. Collect task ids already sitting in some AGV's backpack so the
        #    same task is never assigned twice.
        already_assigned_tasks = set()
        for agv in all_agvs:
            if agv.backpack:
                for backpack_item in agv.backpack:
                    if backpack_item.taskId:
                        already_assigned_tasks.add(backpack_item.taskId)
                        self.logger.info(f"任务 {backpack_item.taskId} 已分配给 AGV {agv.agvId}，跳过重复分配")

        # 2. Filter out the already-assigned tasks.
        unassigned_tasks = [task for task in tasks if task.taskId not in already_assigned_tasks]

        if not unassigned_tasks:
            self.logger.info("所有任务都已分配，无需重新分配")
            return []

        self.logger.info(f"总任务数: {len(tasks)}, 已分配: {len(already_assigned_tasks)}, 待分配: {len(unassigned_tasks)}")

        assignments = []
        path_mapping = self.agv_manager.path_mapping

        # Score every task-AGV pair.
        task_agv_scores = {}

        for task in unassigned_tasks:
            task_start_coord = get_coordinate_from_path_id(task.start, path_mapping)
            if not task_start_coord:
                continue

            for agv in all_agvs:
                if not agv.can_accept_task(task.priority):
                    continue

                # Distance score.
                distance_score = 0.0
                if agv.coordinates:
                    distance = calculate_manhattan_distance(agv.coordinates, task_start_coord)
                    distance_score = 1.0 / (1.0 + distance / 100.0)

                # Load score.
                load_score = 1.0 - agv.get_workload_ratio()

                # Efficiency score.
                efficiency_score = agv.calculate_efficiency_score(path_mapping)

                # Weighted combination of the three objectives.
                total_score = (
                    self.distance_weight * distance_score +
                    self.load_weight * load_score +
                    self.efficiency_weight * efficiency_score
                )

                task_agv_scores[(task.taskId, agv.agvId)] = total_score

        # Match greedily on the score table.
        assignments = self._greedy_matching(unassigned_tasks, all_agvs, task_agv_scores)

        return assignments

    def _greedy_matching(self, tasks: List[TaskData], agvs: List[AGVModel],
                        scores: Dict[Tuple[str, str], float]) -> List[TaskAssignment]:
        """
        Greedy task-AGV matching over the precomputed score table.

        Args:
            tasks: task list
            agvs: AGV list
            scores: score per (taskId, agvId) pair

        Returns:
            List[TaskAssignment]: resulting assignments
        """
        assignments = []
        remaining_tasks = [task.taskId for task in tasks]

        # Repeatedly take the globally best-scoring pair until no task is
        # left or a failure path bails out.
        while remaining_tasks:
            # Find the highest-scoring task-AGV pair.
            best_score = -1
            best_task_id = None
            best_agv = None

            for task_id in remaining_tasks:
                for agv in agvs:
                    if agv.is_overloaded():
                        continue

                    # NOTE(review): unscored pairs default to 0.0, which still
                    # beats the initial best_score of -1 — an AGV that failed
                    # can_accept_task during scoring can therefore be picked
                    # here; confirm whether that is intended.
                    score = scores.get((task_id, agv.agvId), 0.0)
                    if score > best_score:
                        best_score = score
                        best_task_id = task_id
                        best_agv = agv

            if best_task_id and best_agv:
                # Fetch the raw AGV status to look up a free backpack slot.
                agv_status = self.agv_manager.get_agv_status(best_agv.agvId)

                if agv_status:
                    # Find a free backpack slot.
                    available_lev_id = self.find_available_backpack_slot(agv_status)

                    if available_lev_id is not None:
                        # Look up the full task object by id.
                        task = next((t for t in tasks if t.taskId == best_task_id), None)
                        if task:
                            # Bind the task to the chosen backpack slot.
                            success = self.assign_task_with_backpack(best_agv, task, available_lev_id)
                            if success:
                                assignments.append(TaskAssignment(
                                    taskId=best_task_id,
                                    agvId=best_agv.agvId,
                                    lev_id=available_lev_id
                                ))

                                remaining_tasks.remove(best_task_id)
                                self.logger.info(f"多目标优化：任务 {best_task_id} 分配给AGV {best_agv.agvId}，背篓位置: {available_lev_id}，得分: {best_score:.3f}")
                            else:
                                self.logger.warning(f"任务 {best_task_id} 分配给AGV {best_agv.agvId} 失败")
                                # NOTE(review): every failure path below aborts
                                # the whole matching loop instead of trying the
                                # next-best pair — remaining tasks are dropped
                                # this round; confirm intended.
                                break
                        else:
                            self.logger.error(f"找不到任务 {best_task_id} 的详细信息")
                            break
                    else:
                        self.logger.debug(f"AGV {best_agv.agvId} 没有可用的背篓位置，跳过")
                        break
                else:
                    self.logger.warning(f"无法获取AGV {best_agv.agvId} 的状态信息")
                    break
            else:
                break

        return assignments
| | | |
| | | |
class TaskAllocationFactory:
    """Factory for constructing task-allocation strategy objects."""

    @staticmethod
    def create_allocator(algorithm_type: str, agv_manager: AGVModelManager) -> TaskAllocation:
        """
        Build the allocation strategy named by ``algorithm_type``.

        Args:
            algorithm_type: strategy key ("NEAREST_FIRST", "LOAD_BALANCED",
                "PRIORITY_FIRST" or "MULTI_OBJECTIVE")
            agv_manager: AGV model manager shared by all strategies

        Returns:
            TaskAllocation: the matching allocator; unknown keys fall back
            to the load-balanced strategy.
        """
        registry = {
            "NEAREST_FIRST": NearestFirstAllocation,
            "LOAD_BALANCED": LoadBalancedAllocation,
            "PRIORITY_FIRST": PriorityFirstAllocation,
            "MULTI_OBJECTIVE": MultiObjectiveAllocation,
        }
        # Default: load-balanced allocation.
        allocator_cls = registry.get(algorithm_type, LoadBalancedAllocation)
        return allocator_cls(agv_manager)
New file |
| | |
# Algorithm System Models Module
# Package marker for algorithm_system.models; the version string is informational only.
__version__ = "1.0.0"
New file |
| | |
| | | """ |
| | | AGV模å - ç¨äºç®æ³ç³»ç»çAGVæ°æ®å»ºæ¨¡ |
| | | """ |
| | | import time |
| | | import logging |
| | | from typing import Dict, List, Optional, Tuple, Set |
| | | from dataclasses import dataclass, field |
| | | from enum import Enum |
| | | |
| | | from common.data_models import AGVStatus, BackpackData |
| | | from common.utils import get_coordinate_from_path_id, calculate_manhattan_distance |
| | | |
| | | |
class AGVTaskStatus(Enum):
    """Lifecycle states of a task from the AGV's point of view."""

    IDLE = "IDLE"            # no task assigned
    ASSIGNED = "ASSIGNED"    # assigned but not yet started
    EXECUTING = "EXECUTING"  # currently being executed
    COMPLETED = "COMPLETED"  # finished
| | | |
| | | |
@dataclass
class TaskAssignment:
    """Per-AGV task-assignment record used internally by the algorithm system."""
    # NOTE(review): a class named TaskAssignment (taskId/agvId/lev_id fields) also
    # exists in common.data_models and is what the allocation module returns —
    # these two types are distinct despite the shared name; confirm intended.
    task_id: str                      # id of the assigned task
    assigned_time: float              # epoch seconds when the task was assigned
    priority: int                     # task priority
    status: AGVTaskStatus = AGVTaskStatus.ASSIGNED  # current lifecycle state
    start_code: Optional[str] = None  # start position code
    end_code: Optional[str] = None    # end position code
    estimated_duration: Optional[float] = None  # estimated completion time, seconds
| | | |
| | | |
class AGVModel:
    """AGV model used for algorithm-side computation (status, battery,
    capacity and scoring)."""

    def __init__(self, agv_id: str, path_mapping: Dict[str, Dict[str, int]]):
        """
        Initialize the AGV model.

        Args:
            agv_id: AGV ID
            path_mapping: path-point id -> coordinate mapping
        """
        self.agvId = agv_id
        self.path_mapping = path_mapping
        self.logger = logging.getLogger(__name__)

        # Status info
        self.status: str = "0"  # status code; "0"/"1"/"2" are workable states (see can_accept_task)
        self.mapCode: str = ""  # current position code
        self.coordinates: Optional[Tuple[int, int]] = None  # current coordinates
        self.backpack: Optional[List[BackpackData]] = None  # backpack slot info

        # Battery / charging
        self.voltage: int = 100    # current battery percentage
        self.autoCharge: int = 20  # auto-charge threshold (%); may become float after mV normalization
        self.lowVol: int = 10      # hard low-battery threshold (%); may become float after mV normalization

        # Tasks
        self.assigned_tasks: List[TaskAssignment] = []
        self.current_task_count: int = 0
        self.max_capacity: int = 5  # maximum concurrent task capacity

        # Performance
        self.efficiency_score: float = 1.0  # efficiency score
        self.last_update_time: float = time.time()

        # Statistics
        self.total_completed_tasks: int = 0
        self.total_distance_traveled: float = 0.0
        self.average_completion_time: float = 0.0

    def update_from_agv_status(self, agv_status: AGVStatus):
        """Refresh this model from a raw AGV status report."""
        self.status = agv_status.status
        # The position field name differs between protocol versions.
        if hasattr(agv_status, 'position'):
            self.mapCode = agv_status.position
        elif hasattr(agv_status, 'mapCode'):
            self.mapCode = agv_status.mapCode
        else:
            self.mapCode = ""

        self.backpack = agv_status.backpack
        self.last_update_time = time.time()

        raw_voltage = getattr(agv_status, 'vol', 100)
        raw_auto_charge = getattr(agv_status, 'autoCharge', 20)
        raw_low_vol = getattr(agv_status, 'lowVol', 10)

        # Battery normalization: values above 100 are assumed to be
        # millivolts and mapped linearly from 3000-5000mV onto 0-100%.
        if raw_voltage > 100:
            normalized_voltage = max(0, min(100, ((raw_voltage - 3000) / 2000) * 100))
            self.logger.debug(f"AGV {self.agvId} 电压标准化: {raw_voltage}mV -> {normalized_voltage:.1f}%")
            self.voltage = int(normalized_voltage)
        else:
            self.voltage = raw_voltage

        # Same normalization for the auto-charge threshold.
        if raw_auto_charge > 100:
            self.autoCharge = max(0, min(100, ((raw_auto_charge - 3000) / 2000) * 100))
            self.logger.debug(f"AGV {self.agvId} 自动充电阈值标准化: {raw_auto_charge}mV -> {self.autoCharge:.1f}%")
        else:
            self.autoCharge = raw_auto_charge

        # Same normalization for the hard low-battery threshold.
        if raw_low_vol > 100:
            self.lowVol = max(0, min(100, ((raw_low_vol - 3000) / 2000) * 100))
            self.logger.debug(f"AGV {self.agvId} 最低电量阈值标准化: {raw_low_vol}mV -> {self.lowVol:.1f}%")
        else:
            self.lowVol = raw_low_vol

        # Derive coordinates from the position code.
        if self.mapCode:
            self.coordinates = get_coordinate_from_path_id(self.mapCode, self.path_mapping)

        # Task count = number of backpack slots currently executing.
        # (fixed: original tested `self.backpack and self.backpack` — redundant duplicate)
        if self.backpack:
            self.current_task_count = len([bp for bp in self.backpack if bp.execute])
        else:
            self.current_task_count = 0

    def assign_task(self, task_id: str, priority: int = 5,
                   start_code: str = "", end_code: str = "") -> bool:
        """
        Assign a task to this AGV.

        Args:
            task_id: task ID
            priority: task priority
            start_code: start position code
            end_code: end position code

        Returns:
            bool: True when the assignment was recorded
        """
        if self.is_overloaded():
            self.logger.warning(f"AGV {self.agvId} 已满载，无法分配更多任务")
            return False

        # Record the assignment.
        task_assignment = TaskAssignment(
            task_id=task_id,
            assigned_time=time.time(),
            priority=priority,
            start_code=start_code,
            end_code=end_code
        )

        self.assigned_tasks.append(task_assignment)
        self.current_task_count += 1

        self.logger.info(f"任务 {task_id} 已分配给AGV {self.agvId}")
        return True

    def complete_task(self, task_id: str) -> bool:
        """
        Mark a task as completed.

        Args:
            task_id: task ID

        Returns:
            bool: True when the task was found and completed
        """
        for task in self.assigned_tasks:
            if task.task_id == task_id:
                task.status = AGVTaskStatus.COMPLETED
                self.current_task_count = max(0, self.current_task_count - 1)
                self.total_completed_tasks += 1

                self.logger.info(f"AGV {self.agvId} 完成任务 {task_id}")
                return True

        return False

    def can_accept_task(self, priority: int) -> bool:
        """
        Decide whether this AGV can take a new task.

        Status codes 0 (idle), 1 (busy) and 2 (charging) may accept work;
        3 (fault) and 4 (maintenance) may not. While the battery is in the
        auto-charge band, mid/low priorities are accepted probabilistically.

        Args:
            priority: task priority

        Returns:
            bool: True when the task may be accepted (non-deterministic for
            mid/low priorities while battery is in the auto-charge band)
        """
        # Capacity check.
        if self.is_overloaded():
            self.logger.debug(f"AGV {self.agvId} 任务已满载({self.current_task_count}/{self.max_capacity})，拒绝新任务")
            return False

        # Status check (tolerates both int and str status values).
        status_value = str(self.status)
        if status_value not in ["0", "1", "2"]:
            self.logger.debug(f"AGV {self.agvId} 状态异常(status={status_value})，拒绝新任务")
            return False

        # Battery check: at or below the hard threshold the AGV must charge.
        if self.need_charging():
            self.logger.debug(f"AGV {self.agvId} 电量过低({self.voltage}% <= {self.lowVol}%)，必须充电，拒绝新任务")
            return False

        # High-priority tasks are accepted even on a low battery.
        if priority >= 9:  # high-priority threshold
            self.logger.debug(f"AGV {self.agvId} 接受高优先级任务(priority={priority})")
            return True

        # In the auto-charge band, acceptance depends on priority.
        if self.can_auto_charge():
            import random  # fixed: single import (was duplicated in two branches)
            if priority >= 5:  # medium-high priority
                self.logger.debug(f"AGV {self.agvId} 接受中高优先级任务(priority={priority}, 电量={self.voltage}%)")
                return True
            elif priority >= 3:  # medium priority: accept with 75% probability
                accept = random.random() < 0.75
                self.logger.debug(f"AGV {self.agvId} 随机决定{'接受' if accept else '拒绝'}中等优先级任务(priority={priority}, 电量={self.voltage}%)")
                return accept
            elif priority >= 1:  # low priority (incl. 1): accept with 50% probability
                accept = random.random() < 0.5
                self.logger.debug(f"AGV {self.agvId} 随机决定{'接受' if accept else '拒绝'}低优先级任务(priority={priority}, 电量={self.voltage}%)")
                return accept
            else:  # lowest priority (0): reject
                self.logger.debug(f"AGV {self.agvId} 电量偏低({self.voltage}%)，拒绝最低优先级任务(priority={priority})")
                return False

        # Healthy battery and status: accept.
        self.logger.debug(f"AGV {self.agvId} 状态良好，可以接受任务(电量={self.voltage}%, priority={priority})")
        return True

    def is_overloaded(self) -> bool:
        """
        Check whether the AGV is at (or above) its task capacity.

        Returns:
            bool: True when no further task can be taken
        """
        return self.current_task_count >= self.max_capacity

    def get_workload_ratio(self) -> float:
        """
        Current workload ratio.

        Returns:
            float: workload ratio in [0.0, 1.0]
        """
        return min(1.0, self.current_task_count / self.max_capacity)

    def get_task_capacity(self) -> int:
        """
        Remaining task capacity.

        Returns:
            int: number of additional tasks this AGV can take
        """
        return max(0, self.max_capacity - self.current_task_count)

    def need_charging(self) -> bool:
        """
        Whether the AGV must charge (battery at or below the hard threshold).

        Returns:
            bool: True when charging is mandatory
        """
        return self.voltage <= self.lowVol

    def can_auto_charge(self) -> bool:
        """
        Whether the battery is in the auto-charge band (above the hard
        threshold but at or below the auto-charge threshold).

        Returns:
            bool: True when opportunistic charging applies
        """
        return self.lowVol < self.voltage <= self.autoCharge

    def is_low_power(self) -> bool:
        """
        Whether the battery is considered low (at or below the auto-charge
        threshold).

        Returns:
            bool: True when battery is low
        """
        return self.voltage <= self.autoCharge

    def get_charging_priority(self) -> int:
        """
        Charging priority (larger means more urgent).

        Returns:
            int: 100 when charging is mandatory, 50 + deficit while in the
            auto-charge band, 0 otherwise
        """
        if self.need_charging():
            return 100  # mandatory charging: top priority
        elif self.can_auto_charge():
            return 50 + (self.autoCharge - self.voltage)  # scale by battery deficit
        else:
            return 0  # no charging needed

    def calculate_efficiency_score(self, path_mapping: Dict[str, Dict[str, int]]) -> float:
        """
        Compute the AGV's efficiency score.

        Args:
            path_mapping: path-point mapping (currently unused by the heuristic)

        Returns:
            float: efficiency score in [0.0, 1.0]
        """
        # Base efficiency.
        base_score = 0.7

        # Bonus for completed tasks (capped at +0.2).
        if self.total_completed_tasks > 0:
            completion_bonus = min(0.2, self.total_completed_tasks * 0.01)
            base_score += completion_bonus

        # Bonus for light load.
        load_ratio = self.get_workload_ratio()
        if load_ratio < 0.8:  # lighter load -> higher efficiency
            load_bonus = (0.8 - load_ratio) * 0.1
            base_score += load_bonus

        # Adjust for AGV status.
        if self.status == "0":  # normal
            base_score += 0.1
        elif self.status in ["3", "4"]:  # abnormal
            base_score -= 0.2

        # Freshness bonus: recently updated AGVs score higher.
        time_since_update = time.time() - self.last_update_time
        if time_since_update < 60:  # updated within the last minute
            time_bonus = (60 - time_since_update) / 600  # at most +0.1
            base_score += time_bonus

        return max(0.0, min(1.0, base_score))

    def estimate_travel_time(self, target_code: str) -> float:
        """
        Estimate travel time to a target position.

        Args:
            target_code: target position code

        Returns:
            float: estimated seconds; inf when either endpoint is unknown
        """
        if not self.coordinates:
            return float('inf')

        target_coord = get_coordinate_from_path_id(target_code, self.path_mapping)
        if not target_coord:
            return float('inf')

        # Manhattan distance between current and target coordinates.
        distance = calculate_manhattan_distance(self.coordinates, target_coord)

        # Assume an average speed of 1 unit/second.
        estimated_time = distance * 1.0

        # Current load slows the AGV down by up to 20%.
        load_factor = 1.0 + (self.get_workload_ratio() * 0.2)

        return estimated_time * load_factor

    def get_task_by_id(self, task_id: str) -> Optional[TaskAssignment]:
        """
        Look up an assignment record by task id.

        Args:
            task_id: task ID

        Returns:
            Optional[TaskAssignment]: the record, or None if not found
        """
        for task in self.assigned_tasks:
            if task.task_id == task_id:
                return task
        return None

    def get_pending_tasks(self) -> List[TaskAssignment]:
        """
        Tasks that are assigned or currently executing.

        Returns:
            List[TaskAssignment]: pending task records
        """
        return [task for task in self.assigned_tasks
                if task.status in [AGVTaskStatus.ASSIGNED, AGVTaskStatus.EXECUTING]]

    def clear_completed_tasks(self):
        """Drop completed task records from the assignment list."""
        self.assigned_tasks = [task for task in self.assigned_tasks
                              if task.status != AGVTaskStatus.COMPLETED]

    def __str__(self) -> str:
        """Compact human-readable summary."""
        return (f"AGV({self.agvId}, status={self.status}, "
                f"pos={self.mapCode}, tasks={self.current_task_count}/{self.max_capacity})")
| | | |
| | | |
| | | class AGVModelManager: |
| | | """AGV模å管çå¨""" |
| | | |
| | | def __init__(self, path_mapping: Dict[str, Dict[str, int]]): |
| | | """ |
| | | åå§åAGV模å管çå¨ |
| | | |
| | | Args: |
| | | path_mapping: è·¯å¾ç¹æ å°åå
¸ |
| | | """ |
| | | self.path_mapping = path_mapping |
| | | self.agv_models: Dict[str, AGVModel] = {} |
| | | self.agv_status_data: Dict[str, AGVStatus] = {} # åå¨åå§AGVç¶ææ°æ® |
| | | self.logger = logging.getLogger(__name__) |
| | | |
| | | def update_agv_data(self, agv_status_list: List[AGVStatus]): |
| | | """ |
| | | æ´æ°AGVæ°æ® |
| | | |
| | | Args: |
| | | agv_status_list: AGVç¶æå表 |
| | | """ |
| | | # è·åå½åæ´æ°çAGV IDå表 |
| | | current_agv_ids = {agv_status.agvId for agv_status in agv_status_list} |
| | | |
| | | # ç§»é¤ä¸ååå¨çAGV模ååç¶ææ°æ® |
| | | removed_agvs = [] |
| | | for agv_id in list(self.agv_models.keys()): |
| | | if agv_id not in current_agv_ids: |
| | | removed_agvs.append(agv_id) |
| | | del self.agv_models[agv_id] |
| | | if agv_id in self.agv_status_data: |
| | | del self.agv_status_data[agv_id] |
| | | |
| | | if removed_agvs: |
| | | self.logger.info(f"ç§»é¤ä¸åå¨çAGV: {removed_agvs}") |
| | | |
| | | # æ´æ°æå建AGV模ååç¶ææ°æ® |
| | | for agv_status in agv_status_list: |
| | | agv_id = agv_status.agvId |
| | | |
| | | # åå¨åå§AGVç¶ææ°æ® |
| | | self.agv_status_data[agv_id] = agv_status |
| | | |
| | | # 妿AGV模åä¸åå¨ï¼å建æ°ç |
| | | if agv_id not in self.agv_models: |
| | | self.agv_models[agv_id] = AGVModel(agv_id, self.path_mapping) |
| | | self.logger.info(f"å建æ°çAGV模å: {agv_id}") |
| | | |
| | | # æ´æ°AGV模å |
| | | self.agv_models[agv_id].update_from_agv_status(agv_status) |
| | | |
| | | self.logger.debug(f"æ´æ°äº {len(agv_status_list)} 个AGV模åï¼å½åæ»æ°: {len(self.agv_models)}") |
| | | |
| | | def get_all_agv_status(self) -> List[AGVStatus]: |
| | | """ |
| | | è·åææAGVçåå§ç¶ææ°æ® |
| | | |
| | | Returns: |
| | | List[AGVStatus]: AGVç¶ææ°æ®å表 |
| | | """ |
| | | return list(self.agv_status_data.values()) |
| | | |
def get_agv_status(self, agv_id: str) -> Optional[AGVStatus]:
    """Return the raw status of one AGV, or None if it is unknown.

    Args:
        agv_id: AGV identifier.

    Returns:
        Optional[AGVStatus]: the cached snapshot, or None.
    """
    try:
        return self.agv_status_data[agv_id]
    except KeyError:
        return None
| | | |
def get_agv_model(self, agv_id: str) -> Optional[AGVModel]:
    """Return the model of one AGV, or None if it is unknown.

    Args:
        agv_id: AGV identifier.

    Returns:
        Optional[AGVModel]: the tracked model, or None.
    """
    return self.agv_models.get(agv_id, None)
| | | |
def get_all_agvs(self) -> List[AGVModel]:
    """Return every tracked AGV model.

    Returns:
        List[AGVModel]: models in dict-insertion order.
    """
    return [model for model in self.agv_models.values()]
| | | |
def get_available_agvs(self) -> List[AGVModel]:
    """Return AGVs that are not overloaded and can still take a task.

    NOTE(review): the argument 5 passed to can_accept_task appears to be a
    capacity/priority threshold defined by AGVModel — confirm its meaning.

    Returns:
        List[AGVModel]: models that pass both availability checks.
    """
    return [
        model for model in self.agv_models.values()
        if not model.is_overloaded() and model.can_accept_task(5)
    ]
| | | |
def get_agvs_by_status(self, status: str) -> List[AGVModel]:
    """Return all AGV models whose status equals the given value.

    Args:
        status: status value to match against each model's ``status``.

    Returns:
        List[AGVModel]: matching models.
    """
    matching = []
    for model in self.agv_models.values():
        if model.status == status:
            matching.append(model)
    return matching
| | | |
def cleanup_old_agvs(self, max_age_seconds: float = 300):
    """Remove AGVs whose models have not been updated recently.

    Fix: the original dropped stale entries only from ``self.agv_models``
    and left their raw snapshots in ``self.agv_status_data``, so the two
    dicts (kept in sync by ``update_agv_data``) drifted apart; both are
    now purged together.

    Args:
        max_age_seconds: maximum allowed time (seconds) since the model's
            last update before it is dropped.
    """
    current_time = time.time()
    old_agvs = [
        agv_id for agv_id, agv in self.agv_models.items()
        if current_time - agv.last_update_time > max_age_seconds
    ]

    for agv_id in old_agvs:
        del self.agv_models[agv_id]
        # Keep the raw status cache consistent with the model table.
        self.agv_status_data.pop(agv_id, None)
        self.logger.info(f"æ¸çé¿æ¶é´æªæ´æ°çAGV: {agv_id}")
| | | |
def get_statistics(self) -> "Dict[str, Any]":
    """Aggregate fleet-level statistics over all tracked AGVs.

    Fix: the return annotation used the builtin ``any`` instead of
    ``typing.Any``; it is now the correct type, quoted so it is never
    evaluated at runtime.

    Returns:
        Dict with total/available AGV counts, assigned-task and capacity
        totals, capacity utilization and the mean efficiency score.
    """
    total_agvs = len(self.agv_models)
    available_agvs = len(self.get_available_agvs())
    total_tasks = sum(agv.current_task_count for agv in self.agv_models.values())
    total_capacity = sum(agv.max_capacity for agv in self.agv_models.values())

    avg_efficiency = 0.0
    if self.agv_models:
        avg_efficiency = sum(agv.calculate_efficiency_score(self.path_mapping)
                             for agv in self.agv_models.values()) / total_agvs

    return {
        "total_agvs": total_agvs,
        "available_agvs": available_agvs,
        "total_assigned_tasks": total_tasks,
        "total_capacity": total_capacity,
        # Guard against division by zero when no AGV reported capacity.
        "capacity_utilization": total_tasks / total_capacity if total_capacity > 0 else 0.0,
        "average_efficiency": avg_efficiency
    }
New file |
| | |
| | | """ |
| | | çæ§RCSç³»ç»ç¶æå¹¶çæè·¯å¾ |
| | | """ |
| | | import time |
| | | import threading |
| | | import logging |
| | | from typing import Dict, List, Optional, Any |
| | | import queue |
| | | |
| | | from common.data_models import AGVStatus, PlannedPath, from_dict |
| | | from common.api_client import RCSAPIClient |
| | | from algorithm_system.algorithms.path_planning import BatchPathPlanner |
| | | from algorithm_system.models.agv_model import AGVModelManager |
| | | from common.utils import load_path_mapping, generate_segment_id |
| | | |
| | | try: |
| | | from config.settings import RCS_SERVER_HOST, RCS_SERVER_PORT, MONITOR_POLLING_INTERVAL |
| | | except ImportError: |
| | | RCS_SERVER_HOST = "10.10.10.156" |
| | | RCS_SERVER_PORT = 8088 |
| | | MONITOR_POLLING_INTERVAL = 5.0 |
| | | logging.warning("æ æ³ä»config.settings导å
¥é
ç½®ï¼ä½¿ç¨é»è®¤å¼") |
| | | |
| | | |
class PathMonitorService:
    """Monitors RCS state and generates collision-free AGV paths.

    A daemon thread polls the RCS server for AGV statuses at a fixed
    interval, plans collision-free paths for task-executing AGVs, and
    (when ``auto_send_paths`` is set) pushes them back to RCS.
    """

    def __init__(self,
                 rcs_host: str = None,
                 rcs_port: int = None,
                 poll_interval: float = None,
                 path_algorithm: str = "A_STAR",
                 auto_send_paths: bool = True):
        # Initialize the monitoring service; None arguments fall back to
        # the configured defaults (config.settings or hard-coded values).
        self.logger = logging.getLogger(__name__)

        self.rcs_host = rcs_host or RCS_SERVER_HOST
        self.rcs_port = rcs_port or RCS_SERVER_PORT
        self.poll_interval = poll_interval or MONITOR_POLLING_INTERVAL
        self.path_algorithm = path_algorithm
        self.auto_send_paths = auto_send_paths

        self.rcs_client = RCSAPIClient(self.rcs_host, self.rcs_port, timeout=10)
        self.path_mapping = load_path_mapping()
        self.agv_manager = AGVModelManager(self.path_mapping)

        self.path_planner = BatchPathPlanner(
            self.path_mapping,
            self.path_algorithm,
            self.agv_manager
        )

        self.is_running = False
        self.monitor_thread = None
        self._stop_event = threading.Event()

        self.stats = {
            'poll_count': 0,              # polling cycles started
            'successful_polls': 0,        # cycles completed without error
            'path_generations': 0,        # cycles that ran path generation
            'last_poll_time': None,       # start time of the latest cycle
            'last_generation_time': None  # completion time of the latest generation
        }

        # NOTE(review): this logs the raw constructor arguments, which are None
        # when defaults were applied; self.poll_interval holds the real value.
        self.logger.info(f"è·¯å¾çæ§æå¡åå§å宿 - 轮询é´é: {poll_interval}s, ç®æ³: {path_algorithm}")

    def start_monitoring(self):
        """Start the monitoring daemon thread (no-op when already running)."""
        if self.is_running:
            self.logger.warning("çæ§æå¡å·²å¨è¿è¡ä¸")
            return

        self.is_running = True
        self._stop_event.clear()

        # Daemon thread so the process can exit without an explicit stop.
        self.monitor_thread = threading.Thread(
            target=self._monitoring_loop,
            name="PathMonitorThread",
            daemon=True
        )
        self.monitor_thread.start()

        self.logger.info("çæ§æå¡å·²å¯å¨")

    def stop_monitoring(self):
        """Signal the monitor thread to stop and wait up to 5s for it to exit."""
        if not self.is_running:
            self.logger.warning("çæ§æå¡æªå¨è¿è¡")
            return

        self.is_running = False
        self._stop_event.set()

        if self.monitor_thread and self.monitor_thread.is_alive():
            self.monitor_thread.join(timeout=5.0)

        self.logger.info("çæ§æå¡å·²åæ¢")

    def _monitoring_loop(self):
        """Main loop: run one monitoring cycle, then sleep poll_interval."""
        self.logger.info("å¼å§çæ§å¾ªç¯...")

        while self.is_running and not self._stop_event.is_set():
            try:
                self._perform_monitoring_cycle()

                # Event.wait() returns False on timeout (keep polling) and
                # True when the stop event was set (leave the loop).
                if not self._stop_event.wait(self.poll_interval):
                    continue
                else:
                    break

            except Exception as e:
                self.logger.error(f"çæ§å¾ªç¯å¼å¸¸: {e}")
                # Back off after an error, but never wait longer than 10s.
                self._stop_event.wait(min(self.poll_interval * 2, 10.0))

        self.logger.info("çæ§å¾ªç¯å·²ç»æ")

    def _perform_monitoring_cycle(self):
        """Execute one monitoring cycle: fetch status, plan, optionally send."""
        start_time = time.time()

        self.stats['poll_count'] += 1
        self.stats['last_poll_time'] = start_time

        try:
            self.logger.info(f"å¼å§è·¯å¾çæ§å¨æ #{self.stats['poll_count']}")

            # 1. Fetch current AGV statuses and derived task states from RCS.
            current_agv_status, current_task_status = self._fetch_rcs_status()

            if not current_agv_status:
                self.logger.debug("æªè·åå°AGVç¶ææ°æ®ï¼è·³è¿è·¯å¾çæ")
                return

            # 2. Plan collision-free paths directly from the current statuses.
            self.logger.info(f"[è·¯å¾çæ] å¼å§ä¸º {len(current_agv_status)} 个AGVçæè·¯å¾")

            generated_paths = self._generate_collision_free_paths(current_agv_status)

            # 3. Push the planned paths to RCS when auto-send is enabled.
            if generated_paths:
                if self.auto_send_paths:
                    self._send_paths_to_rcs(generated_paths)
                else:
                    self.logger.debug(f"è·¯å¾çæå®æä½æªåéå°RCS - AGVæ°é: {len(current_agv_status)}, è·¯å¾æ°: {len(generated_paths)}")

                self.logger.info(f"è·¯å¾çæå®æ - AGVæ°é: {len(current_agv_status)}, è·¯å¾æ°: {len(generated_paths)}, èªå¨åé: {self.auto_send_paths}")
            else:
                self.logger.debug("æªçæä»»ä½è·¯å¾ï¼å¯è½ææAGVé½å¤äºç©ºé²ç¶æï¼")

            # Update success statistics.
            # NOTE(review): path_generations counts every successful cycle,
            # including cycles that produced zero paths — confirm intended.
            self.stats['successful_polls'] += 1
            self.stats['path_generations'] += 1
            self.stats['last_generation_time'] = time.time()

            generation_time = (time.time() - start_time) * 1000
            self.logger.debug(f"çæ§å¨æå®æ - èæ¶: {generation_time:.2f}ms")

        except Exception as e:
            self.logger.error(f"çæ§å¨ææ§è¡å¤±è´¥: {e}")

    def _generate_unique_seg_id(self, agv_id: str, task_id: str = '') -> str:
        """Generate a segId for one navigation send.

        With a task id, the segId is derived from the task; otherwise an
        hour-bucketed timestamp keeps idle segIds distinct over time.
        """
        try:
            if task_id:
                # Task run: derive the id from the task itself.
                return generate_segment_id(agv_id=agv_id, task_id=task_id, action_type="2")
            else:
                # No task: bucket by hour so each hour yields a new segId.
                import time
                time_target = f"IDLE_{int(time.time() / 3600)}"
                return generate_segment_id(agv_id=agv_id, target_position=time_target, action_type="4")

        except Exception as e:
            self.logger.error(f"çæsegId失败: {e}")
            # Fallback: a timestamp-based id built via the common helper.
            import time
            fallback_target = f"FALLBACK_{int(time.time())}"
            return generate_segment_id(agv_id=agv_id, target_position=fallback_target, action_type="1")

    def _fetch_rcs_status(self) -> tuple[List[AGVStatus], Dict]:
        """Fetch current AGV statuses (and per-task states) from RCS.

        Returns:
            tuple: (agv_status_list, task_status) where task_status maps a
            taskId to its AGV id, backpack slot index and loaded/execute
            flags. Both are empty on any failure.
        """
        agv_status_list = []
        task_status = {}

        try:
            # Query with empty filters so RCS returns every AGV and task state.
            self.logger.info(" è½®è¯¢ç®æ : 使ç¨ç©ºåæ°(agvId=None, mapId=None)è·åRCSææAGVç¶æåä»»å¡ç¶æ")
            agv_response = self.rcs_client.get_agv_status(agv_id=None, map_id=None)

            if agv_response.code == 200 and agv_response.data:
                self.logger.info(f"æåè·åAGVç¶æ - æ°é: {len(agv_response.data)}")

                # Counters for the summary printed after the loop.
                total_tasks = 0
                executing_tasks = 0
                loaded_tasks = 0

                self.logger.info("=" * 80)
                self.logger.info("[详ç»AGVç¶æä¿¡æ¯]")
                self.logger.info("=" * 80)

                for i, agv_data in enumerate(agv_response.data, 1):
                    try:
                        # Convert the raw dict into an AGVStatus dataclass.
                        agv_status = from_dict(AGVStatus, agv_data)
                        agv_status_list.append(agv_status)

                        # Basic per-AGV info.
                        self.logger.info(f"[AGV #{i}] {agv_status.agvId}")
                        self.logger.info(f" ç¶æ: {agv_status.status} | ä½ç½®: {agv_status.position} | æ¹å: {agv_status.direction}")
                        self.logger.info(f" çµå: {agv_status.vol} | é误ç : {agv_status.error} | 空èç¯: {agv_status.empty}")

                        # Walk the backpack slots and derive task states.
                        if agv_status.backpack:
                            self.logger.info(f" [èç¯ä¿¡æ¯] ({len(agv_status.backpack)} 个):")
                            for backpack_item in agv_status.backpack:
                                status_text = "æ§è¡ä¸" if backpack_item.execute else ("å·²è£è½½" if backpack_item.loaded else "空é²")
                                task_info = f"ä»»å¡: {backpack_item.taskId}" if backpack_item.taskId else "æ ä»»å¡"

                                self.logger.info(f" [ä½ç½®{backpack_item.index}] {task_info} | "
                                                 f"ç¶æ: {status_text} | å·²è£è½½: {backpack_item.loaded} | æ§è¡ä¸: {backpack_item.execute}")

                                # Tally tasks for the summary.
                                if backpack_item.taskId:
                                    total_tasks += 1
                                    if backpack_item.execute:
                                        executing_tasks += 1
                                    if backpack_item.loaded:
                                        loaded_tasks += 1

                                    # Record the carrying state of this task.
                                    task_status[backpack_item.taskId] = {
                                        'agvId': agv_status.agvId,
                                        'loaded': backpack_item.loaded,
                                        'execute': backpack_item.execute,
                                        'index': backpack_item.index
                                    }
                        else:
                            self.logger.info(f" [èç¯ä¿¡æ¯] æ èç¯ä¿¡æ¯")

                        self.logger.info("-" * 60)
                    except Exception as e:
                        self.logger.warning(f"è§£æAGVç¶ææ°æ®å¤±è´¥: {agv_data} - {e}")

                # Fleet-level summary.
                self.logger.info("[ä»»å¡ç»è®¡æè¦]")
                self.logger.info(f" æ»AGVæ°é: {len(agv_status_list)}")
                self.logger.info(f" æ»ä»»å¡æ°é: {total_tasks}")
                self.logger.info(f" æ§è¡ä¸ä»»å¡: {executing_tasks}")
                self.logger.info(f" å·²è£载任å¡: {loaded_tasks}")
                self.logger.info(f" æªè£载任å¡: {total_tasks - loaded_tasks}")
                self.logger.info("=" * 80)

            else:
                self.logger.warning(f"è·åAGVç¶æå¤±è´¥: {agv_response.msg if agv_response else 'æ ååº'}")

        except Exception as e:
            self.logger.error(f"ä»RCSè·åç¶æå¤±è´¥: {e}")

        return agv_status_list, task_status



    def _generate_collision_free_paths(self, agv_status_list: List[AGVStatus]) -> List[Dict]:
        """Plan collision-free paths for all task-executing AGVs.

        Returns:
            List[Dict]: planner path dicts (agvId/codeList...); empty on error.
        """
        try:
            self.logger.debug(f"å¼å§ä¸º {len(agv_status_list)} 个AGVçæå碰æè·¯å¾")

            # Delegate to the batch planner.
            result = self.path_planner.plan_all_agv_paths(
                agv_status_list=agv_status_list,
                include_idle_agv=False,  # only plan for AGVs executing a task
                constraints=None
            )

            # NOTE(review): the planner result has been seen under both key
            # spellings — tolerate either.
            planned_paths = result.get('plannedPaths', result.get('planned_paths', []))

            # Diagnostics: show which keys the planner actually returned.
            self.logger.debug(f"è·¯å¾è§åå¨è¿åçåæ®µ: {list(result.keys())}")
            self.logger.debug(f"planned_pathsåæ®µåå¨: {'planned_paths' in result}")
            self.logger.debug(f"plannedPathsåæ®µåå¨: {'plannedPaths' in result}")
            self.logger.debug(f"æç»è·åå°çè·¯å¾æ°é: {len(planned_paths)}")

            if planned_paths:
                self.logger.info(f"æåçæ {len(planned_paths)} æ¡å碰æè·¯å¾")

                # Log each AGV's path length.
                for path_info in planned_paths:
                    agv_id = path_info.get('agvId', 'unknown')
                    code_list = path_info.get('codeList', [])
                    self.logger.debug(f"AGV {agv_id} è·¯å¾é¿åº¦: {len(code_list)}")
            else:
                self.logger.info("æªçæä»»ä½è·¯å¾ï¼å¯è½ææAGVé½å¤äºç©ºé²ç¶æï¼")

            return planned_paths

        except Exception as e:
            self.logger.error(f"çæå碰æè·¯å¾å¤±è´¥: {e}")
            return []



    def _send_paths_to_rcs(self, generated_paths: List[Dict]):
        """Send generated paths to RCS, one navigation POST per AGV."""
        try:
            self.logger.info(f"å¼å§æ¹éåé {len(generated_paths)} æ¡è·¯å¾å°RCSç³»ç»")

            # Build the per-AGV navigation payloads.
            agv_paths = []
            import uuid  # NOTE(review): imported but unused in this method
            import time as time_module  # NOTE(review): imported but unused in this method

            for path_info in generated_paths:
                agv_id = path_info.get('agvId')
                code_list = path_info.get('codeList', [])

                if agv_id and code_list:
                    # Use the first taskId found on the path (if any) for the segId.
                    path_task_id = ''
                    for path_code in code_list:
                        if path_code.get('taskId'):
                            path_task_id = path_code.get('taskId')
                            break

                    # Normalize each path point; optional fields only when present.
                    navigation_data = []
                    for path_code in code_list:
                        nav_item = {
                            'code': path_code.get('code', ''),
                            'direction': path_code.get('direction', '90'),
                            'type': path_code.get('type')
                        }

                        if path_code.get('taskId'):
                            nav_item['taskId'] = path_code.get('taskId')

                        if path_code.get('posType'):
                            nav_item['posType'] = path_code.get('posType')

                        if path_code.get('lev') is not None:
                            nav_item['lev'] = path_code.get('lev')

                        navigation_data.append(nav_item)

                    auto_seg_id = self._generate_unique_seg_id(agv_id, path_task_id)

                    agv_path_data = {
                        'agvId': agv_id,
                        'segId': auto_seg_id,
                        'codeList': navigation_data
                    }

                    agv_paths.append(agv_path_data)

            if not agv_paths:
                self.logger.warning("æ²¡æææçè·¯å¾æ°æ®éè¦åé")
                return

            try:
                import requests
                import json

                rcs_url = f"http://{self.rcs_host}:{self.rcs_port}/api/open/algorithm/zkd/navigation/v1"

                self.logger.info(f"ç®æ³ç³»ç»åéè·¯å¾è§åç»æå°RCS:")
                self.logger.info(f" AGVæ°é: {len(agv_paths)}")
                self.logger.info(f" ç®æ URL: {rcs_url}")

                successful_sends = 0

                for agv_path in agv_paths:
                    agv_id = agv_path['agvId']
                    seg_id = agv_path['segId']
                    code_list = agv_path['codeList']

                    # Standard navigation request payload (agvId, segId, codeList).
                    navigation_data = {
                        'agvId': agv_id,
                        'segId': seg_id,
                        'codeList': code_list
                    }

                    # Path summary for the log.
                    start_code = code_list[0].get('code', '') if code_list else ''
                    end_code = code_list[-1].get('code', '') if code_list else ''
                    task_codes = [nc for nc in code_list if nc.get('taskId')]
                    task_id = task_codes[0].get('taskId', '') if task_codes else ''

                    self.logger.info(f" AGV: {agv_id}, ä»»å¡: {task_id}, è·¯å¾: {start_code} -> {end_code} ({len(code_list)}ç¹)")

                    # Full JSON payload for debugging.
                    self.logger.info(f"åéè·¯å¾JSONæ°æ®:")
                    self.logger.info(json.dumps(navigation_data, ensure_ascii=False, indent=2))

                    try:
                        response = requests.post(
                            rcs_url,
                            json=navigation_data,
                            timeout=10,
                            headers={'Content-Type': 'application/json'}
                        )

                        if response.status_code == 200:
                            response_data = response.json()
                            self.logger.info(f" æååéè·¯å¾å°RCS - AGV: {agv_id}, ååº: {response_data.get('msg', 'OK')}")
                            successful_sends += 1
                        else:
                            self.logger.warning(f" åéè·¯å¾å°RCS失败 - AGV: {agv_id}, HTTPç¶æ: {response.status_code}")
                            self.logger.warning(f" ååºå容: {response.text}")

                    except Exception as e:
                        self.logger.warning(f" åéå¯¼èªæä»¤å¼å¸¸ - AGV: {agv_id}, é误: {e}")

                self.logger.info(f" è·¯å¾åé宿 - æå: {successful_sends}/{len(agv_paths)}")

            except Exception as e:
                self.logger.error(f" åéå¯¼èªæä»¤å¼å¸¸: {e}")

        except Exception as e:
            self.logger.error(f"åéè·¯å¾å°RCSå¼å¸¸: {e}")

    def get_monitoring_status(self) -> Dict[str, Any]:
        """Return a snapshot of the service configuration and statistics."""
        return {
            'is_running': self.is_running,
            'rcs_host': self.rcs_host,
            'rcs_port': self.rcs_port,
            'poll_interval': self.poll_interval,
            'path_algorithm': self.path_algorithm,
            'auto_send_paths': self.auto_send_paths,
            'stats': self.stats.copy()  # copy so callers cannot mutate live stats
        }

    def reset_stats(self):
        """Reset all monitoring statistics to their initial values."""
        self.stats = {
            'poll_count': 0,
            'successful_polls': 0,
            'path_generations': 0,
            'last_poll_time': None,
            'last_generation_time': None
        }
        self.logger.info("çæ§ç»è®¡ä¿¡æ¯å·²éç½®")
New file |
| | |
| | | # Common Components Module |
| | | __version__ = "1.0.0" |
New file |
| | |
| | | """ |
| | | ä¸RCSç³»ç»éä¿¡ |
| | | """ |
| | | import requests |
| | | import json |
| | | import logging |
| | | import time |
| | | from typing import Dict, Optional, Any |
| | | from .data_models import ( |
| | | APIResponse, create_error_response, ResponseCode |
| | | ) |
| | | |
| | | try: |
| | | from config.settings import ( |
| | | RCS_SERVER_HOST, RCS_SERVER_PORT, |
| | | REQUEST_TIMEOUT, AGV_STATUS_API_ENDPOINT |
| | | ) |
| | | except ImportError: |
| | | RCS_SERVER_HOST = "10.10.10.156" |
| | | RCS_SERVER_PORT = 8088 |
| | | REQUEST_TIMEOUT = 30 |
| | | AGV_STATUS_API_ENDPOINT = "/api/open/algorithm/getAgv" |
| | | logging.warning("æ æ³ä»config.settings导å
¥é
ç½®ï¼ä½¿ç¨é»è®¤å¼") |
| | | |
| | | |
class APIClient:
    """Base HTTP client that normalizes replies into APIResponse envelopes."""

    def __init__(self, base_url: str, timeout: int = 30):
        """Initialize the API client.

        Args:
            base_url: server root URL; a trailing '/' is stripped.
            timeout: per-request timeout in seconds.
        """
        self.base_url = base_url.rstrip('/')
        self.timeout = timeout
        self.logger = logging.getLogger(__name__)
        self.session = requests.Session()

        # Every request sends and expects JSON.
        self.session.headers.update({
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        })

    def _make_request(self, method: str, endpoint: str, data: Any = None,
                      params: Dict = None) -> APIResponse:
        """Send an HTTP request and wrap the reply in an APIResponse.

        Args:
            method: HTTP verb, e.g. 'GET' or 'POST'.
            endpoint: path appended to ``base_url``.
            data: request body; dict/list go out as JSON, anything else
                is json.dumps()-ed into the raw body.
            params: optional query-string parameters.

        Returns:
            APIResponse: never raises — transport failures are converted
            into SERVER_ERROR responses.
        """
        url = f"{self.base_url}{endpoint}"

        try:
            start_time = time.time()

            request_kwargs = {
                'timeout': self.timeout,
                'params': params
            }

            if data is not None:
                if isinstance(data, (dict, list)):
                    request_kwargs['json'] = data
                else:
                    request_kwargs['data'] = json.dumps(data)

            response = self.session.request(method, url, **request_kwargs)

            end_time = time.time()
            duration = (end_time - start_time) * 1000  # convert to milliseconds

            self.logger.info(f"{method} {url} - {response.status_code} - {duration:.2f}ms")

            # Fall back to the raw text when the body is not valid JSON.
            try:
                response_data = response.json()
            except ValueError:
                response_data = {"text": response.text}

            if response.status_code == 200:
                if isinstance(response_data, dict) and 'code' in response_data:
                    # Server already speaks the code/msg/data envelope.
                    return APIResponse(
                        code=response_data.get('code', ResponseCode.SUCCESS),
                        msg=response_data.get('msg', 'æä½æå'),
                        data=response_data.get('data')
                    )
                else:
                    # Plain payload: wrap it in a success envelope.
                    return APIResponse(
                        code=ResponseCode.SUCCESS,
                        msg='æä½æå',
                        data=response_data
                    )
            else:
                return create_error_response(
                    ResponseCode.SERVER_ERROR,
                    f"HTTP {response.status_code}: {response.text}"
                )

        except requests.exceptions.Timeout:
            self.logger.error(f"请æ±è¶æ¶: {url}")
            return create_error_response(ResponseCode.SERVER_ERROR, "请æ±è¶æ¶")

        except requests.exceptions.ConnectionError:
            self.logger.error(f"è¿æ¥é误: {url}")
            return create_error_response(ResponseCode.SERVER_ERROR, "è¿æ¥é误")

        except Exception as e:
            self.logger.error(f"请æ±å¼å¸¸: {url} - {str(e)}")
            return create_error_response(ResponseCode.SERVER_ERROR, f"请æ±å¼å¸¸: {str(e)}")
| | | |
| | | |
class RCSAPIClient(APIClient):
    """Client for the RCS server's open algorithm API."""

    def __init__(self, rcs_host: str = None, rcs_port: int = None, timeout: int = None):
        """Initialize the RCS API client.

        Args:
            rcs_host: RCS host; defaults to the configured RCS_SERVER_HOST.
            rcs_port: RCS port; defaults to the configured RCS_SERVER_PORT.
            timeout: request timeout; defaults to REQUEST_TIMEOUT.
        """
        host = rcs_host or RCS_SERVER_HOST
        port = rcs_port or RCS_SERVER_PORT
        effective_timeout = timeout or REQUEST_TIMEOUT
        super().__init__(f"http://{host}:{port}", effective_timeout)

    def get_agv_status(self, agv_id: Optional[str] = None,
                       map_id: Optional[str] = None) -> APIResponse:
        """Fetch AGV statuses; omitting both filters returns every AGV.

        Args:
            agv_id: optional AGV id filter.
            map_id: optional map id filter.
        """
        payload = {}
        if agv_id:
            payload['agvId'] = agv_id
        if map_id:
            payload['mapId'] = map_id

        return self._make_request('POST', AGV_STATUS_API_ENDPOINT, data=payload)
New file |
| | |
| | | """ |
| | | æ°æ®æ¨¡åå®ä¹ |
| | | """ |
| | | from dataclasses import dataclass, field |
| | | from typing import List, Optional, Dict, Any |
| | | from enum import Enum |
| | | import json |
| | | |
| | | |
class AGVStatusEnum(Enum):
    """AGV state codes as reported in AGVStatus.status."""
    IDLE = 0         # idle
    BUSY = 1         # busy
    CHARGING = 2     # charging
    ERROR = 3        # fault
    MAINTENANCE = 4  # under maintenance
| | | |
| | | |
class TaskTypeEnum(Enum):
    """Task type codes (string-valued, as used in TaskData.type)."""
    PICKUP = "1"     # pick up
    DELIVERY = "2"   # deliver
    TRANSPORT = "3"  # transport
| | | |
| | | |
class AGVActionTypeEnum(Enum):
    """AGV action types attached to path points (PathCode.type)."""
    AVOIDANCE = "1"  # yield / move out of the way
    TASK = "2"       # execute a task
    CHARGING = "3"   # go charging
    STANDBY = "4"    # go to a standby slot
| | | |
| | | |
@dataclass
class BackpackData:
    """One backpack (carrying) slot on an AGV."""
    index: int                    # slot number on the AGV
    loaded: bool                  # whether cargo is loaded in this slot
    execute: bool                 # whether the slot's task is currently executing
    taskId: Optional[str] = None  # id of the task bound to this slot, if any
| | | |
| | | |
@dataclass
class AGVStatus:
    """Raw status record reported for one AGV."""
    agvId: str       # AGV identifier
    status: int      # state code (presumably AGVStatusEnum values — confirm)
    position: str    # current map point id of the AGV
    empty: int       # number of empty backpack slots
    direction: str   # heading angle of the AGV
    vol: int         # battery level
    error: int       # error code; 0 means normal
    backpack: List[BackpackData] = field(default_factory=list)  # backpack slots
    autoCharge: int = 20  # below this level the AGV may auto-charge but can still take tasks
    lowVol: int = 10      # minimum level; below this the AGV must go charge
| | | |
| | | |
@dataclass
class TaskData:
    """A transport task to be assigned to an AGV."""
    taskId: str    # task id
    start: str     # start point id
    end: str       # end point id
    type: str      # task type (presumably TaskTypeEnum values — confirm)
    priority: int  # priority
| | | |
| | | |
@dataclass
class PathCode:
    """One point of a planned path."""
    code: str        # map point id
    direction: str   # heading at this point
    type: Optional[str] = None     # AGV action type (avoidance/task/charging/standby)
    taskId: Optional[str] = None   # task id; required when executing a task
    posType: Optional[str] = None  # action performed on arrival, e.g. pick up / put down
    lev: Optional[int] = None      # backpack slot index the posType action applies to
| | | |
| | | |
@dataclass
class PlannedPath:
    """A planned path for one AGV."""
    agvId: str                   # AGV identifier
    codeList: List[PathCode]     # ordered path points
    segId: Optional[str] = None  # dedup id for repeated navigation sends
| | | |
| | | |
@dataclass
class TaskAssignment:
    """Result of assigning one task to an AGV."""
    taskId: str      # task id
    agvId: str       # id of the assigned AGV
    lev_id: int = 0  # backpack slot index
| | | |
| | | |
@dataclass
class APIResponse:
    """Standard API envelope — field order is fixed as code, msg, data."""
    code: int  # status code (see ResponseCode)
    msg: str   # human-readable message
    data: Optional[Any] = None  # payload

    def to_ordered_dict(self) -> Dict:
        """Return an OrderedDict that preserves the code/msg/data key order."""
        from collections import OrderedDict
        return OrderedDict([
            ('code', self.code),
            ('msg', self.msg),
            ('data', self.data)
        ])
| | | |
| | | |
class ResponseCode:
    """API status codes used in APIResponse.code."""
    SUCCESS = 200            # operation succeeded
    NO_DATA = 201            # query returned no data
    PARAM_EMPTY = 401        # missing/empty parameter
    PERMISSION_DENIED = 403  # insufficient permission
    DUPLICATE_SUBMIT = 407   # duplicate submission
    SERVER_ERROR = 500       # server error
| | | |
| | | |
def create_success_response(data: Any = None, msg: str = "æä½æå") -> APIResponse:
    """Build a success envelope with an optional payload."""
    return APIResponse(
        code=ResponseCode.SUCCESS,
        msg=msg,
        data=data,
    )
| | | |
def create_error_response(code: int, msg: str) -> APIResponse:
    """Build an error envelope (payload is always None)."""
    return APIResponse(code=code, msg=msg, data=None)
| | | |
def to_dict(obj) -> Dict:
    """Recursively convert a dataclass-like object into a plain dict.

    Objects without a ``__dict__`` are returned unchanged; APIResponse
    uses its own ordered conversion so key order stays code/msg/data.
    """
    if not hasattr(obj, '__dict__'):
        return obj

    if isinstance(obj, APIResponse):
        return obj.to_ordered_dict()

    converted = {}
    for attr, val in obj.__dict__.items():
        if isinstance(val, list):
            converted[attr] = [to_dict(elem) if hasattr(elem, '__dict__') else elem for elem in val]
        elif hasattr(val, '__dict__'):
            converted[attr] = to_dict(val)
        else:
            converted[attr] = val
    return converted
| | | |
| | | |
def from_dict(data_class, data: Dict):
    """Recursively build a dataclass instance from a plain dict.

    Only keys matching annotated fields are consumed; fields absent from
    ``data`` fall back to the dataclass defaults / default_factory.
    Non-dict input is returned unchanged.

    Fix: removed the dead trailer (`field_info.default_factory is not
    field_info.default_factory` is always False and both branches were
    `pass`); it could also raise AttributeError for non-dataclass types
    with missing fields.

    Args:
        data_class: annotated class (typically a dataclass) to build.
        data: source dict (or any other value, returned as-is).
    """
    if not isinstance(data, dict):
        return data

    kwargs = {}
    for field_name, field_type in data_class.__annotations__.items():
        if field_name not in data:
            continue  # let the dataclass default / default_factory apply
        value = data[field_name]
        origin = getattr(field_type, '__origin__', None)

        if origin is list:
            # List field: recurse into each item when the element type is
            # itself an annotated (dataclass-like) type.
            inner_type = field_type.__args__[0]
            if hasattr(inner_type, '__annotations__'):
                kwargs[field_name] = [from_dict(inner_type, item) for item in value]
            else:
                kwargs[field_name] = value
        elif origin is not None:
            # Other generics (Optional/Union, Dict, ...) pass through as-is.
            kwargs[field_name] = value
        elif hasattr(field_type, '__annotations__'):
            # Nested annotated type: recurse.
            kwargs[field_name] = from_dict(field_type, value)
        else:
            kwargs[field_name] = value

    return data_class(**kwargs)
New file |
| | |
| | | import json |
| | | import logging |
| | | import os |
| | | import random |
| | | import time |
| | | import uuid |
| | | import string |
| | | from typing import Dict, List, Tuple, Optional, Any |
| | | from datetime import datetime |
| | | |
| | | |
def setup_logging(log_level: str = "INFO", log_file: Optional[str] = None) -> None:
    """Configure root logging with a console handler and an optional file handler.

    Args:
        log_level: level name such as "INFO" or "DEBUG" (case-insensitive).
        log_file: when given, also log to this file (UTF-8).
    """
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    root = logging.getLogger()
    root.setLevel(getattr(logging, log_level.upper()))

    # Drop pre-existing handlers so repeated calls don't duplicate output.
    while root.handlers:
        root.removeHandler(root.handlers[0])

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    root.addHandler(stream_handler)

    if log_file:
        file_handler = logging.FileHandler(log_file, encoding='utf-8')
        file_handler.setFormatter(formatter)
        root.addHandler(file_handler)
| | | |
| | | |
def load_path_mapping(mapping_file: str = "path_mapping.json") -> Dict[str, Dict[str, int]]:
    """Load the path-id -> coordinate mapping from a JSON file.

    Only the first coordinate listed for each path id is kept. Any error
    (missing file, invalid JSON, ...) yields an empty dict.

    Args:
        mapping_file: JSON file containing "path_id_to_coordinates".

    Returns:
        Dict mapping path id to {"x": int, "y": int}.
    """
    logger = logging.getLogger(__name__)

    try:
        with open(mapping_file, 'r', encoding='utf-8') as handle:
            raw = json.load(handle)

        mapping: Dict[str, Dict[str, int]] = {}
        for pid, coords in raw.get("path_id_to_coordinates", {}).items():
            if coords:
                first = coords[0]
                mapping[pid] = {"x": first["x"], "y": first["y"]}

        logger.info(f"æåå è½½è·¯å¾æ å°ï¼å± {len(mapping)} 个路å¾ç¹")
        return mapping

    except FileNotFoundError:
        logger.error(f"è·¯å¾æ å°æä»¶ä¸åå¨: {mapping_file}")
        return {}
    except json.JSONDecodeError as e:
        logger.error(f"è·¯å¾æ å°æä»¶æ ¼å¼é误: {e}")
        return {}
    except Exception as e:
        logger.error(f"å è½½è·¯å¾æ å°æä»¶å¤±è´¥: {e}")
        return {}
| | | |
| | | |
def get_coordinate_from_path_id(path_id: str, path_mapping: Dict[str, Dict[str, int]]) -> Optional[Tuple[int, int]]:
    """Look up the coordinate of a path-point id.

    Accepts zero-padded 8-digit ids (e.g. '00000206'); if the exact id is
    absent, a second lookup is tried with leading zeros stripped.

    Args:
        path_id: path-point id, possibly zero-padded.
        path_mapping: path id -> {"x", "y"} mapping.

    Returns:
        Optional[Tuple[int, int]]: (x, y), or None when no entry matches.
    """
    logger = logging.getLogger(__name__)

    coord = path_mapping.get(path_id)
    if coord is not None:
        logger.debug(f"è·¯å¾ID {path_id} å¹éæåï¼åæ : ({coord['x']}, {coord['y']})")
        return (coord["x"], coord["y"])

    # Exact match failed: retry with leading zeros stripped.
    try:
        normalized = str(int(path_id))
    except (ValueError, TypeError):
        logger.warning(f"è·¯å¾ID {path_id} 䏿¯ææçæ°åæ ¼å¼")
    else:
        coord = path_mapping.get(normalized)
        if coord is not None:
            logger.debug(f"è·¯å¾ID {path_id} è§èå为 {normalized} åå¹éæåï¼åæ : ({coord['x']}, {coord['y']})")
            return (coord["x"], coord["y"])
        logger.warning(f"è§èååçè·¯å¾ID {normalized}ï¼åå§: {path_id}ï¼å¨è·¯å¾æ å°ä¸æªæ¾å°")

    logger.warning(f"æ æ³æ¾å°è·¯å¾ID {path_id} 对åºçåæ ")
    return None
| | | |
| | | |
def get_path_id_from_coordinate(x: int, y: int, path_mapping: Dict[str, Dict[str, int]]) -> Optional[str]:
    """Reverse lookup: return the first path id located at (x, y), or None."""
    matches = (pid for pid, coord in path_mapping.items()
               if coord["x"] == x and coord["y"] == y)
    return next(matches, None)
| | | |
| | | |
def calculate_distance(pos1: Tuple[int, int], pos2: Tuple[int, int]) -> float:
    """Return the Euclidean distance between two grid points."""
    dx = pos1[0] - pos2[0]
    dy = pos1[1] - pos2[1]
    return (dx * dx + dy * dy) ** 0.5
| | | |
| | | |
def calculate_manhattan_distance(pos1: Tuple[int, int], pos2: Tuple[int, int]) -> int:
    """Return the Manhattan (taxicab) distance between two grid points."""
    return sum(abs(a - b) for a, b in zip(pos1, pos2))
| | | |
| | | |
def generate_random_agv_id() -> str:
    """Generate a random AGV ID of the form 'AGV_<5-digit number>'."""
    suffix = random.randint(10001, 99999)
    return f"AGV_{suffix}"
| | | |
| | | |
def generate_random_task_id() -> str:
    """Generate a task ID of the form 'TASK_<ms-timestamp>_<3-digit suffix>'."""
    millis = int(time.time() * 1000)  # millisecond precision keeps IDs roughly ordered
    return f"TASK_{millis}_{random.randint(100, 999)}"
| | | |
| | | |
def get_random_path_ids(path_mapping: Dict[str, Dict[str, int]], count: int = 2) -> List[str]:
    """Pick `count` distinct random path-point IDs from the mapping.

    Returns every available ID when fewer than `count` exist, and an empty
    list for an empty mapping.
    """
    if not path_mapping:
        return []

    all_ids = list(path_mapping)
    return all_ids if len(all_ids) < count else random.sample(all_ids, count)
| | | |
| | | |
def format_timestamp(timestamp: Optional[float] = None) -> str:
    """Format a Unix timestamp as 'YYYY-MM-DD HH:MM:SS' in local time.

    Defaults to the current time when `timestamp` is None.
    """
    moment = time.time() if timestamp is None else timestamp
    return datetime.fromtimestamp(moment).strftime("%Y-%m-%d %H:%M:%S")
| | | |
| | | |
def normalize_path_id(path_id: str) -> str:
    """Strip leading zeros from a numeric path-point ID.

    Empty or non-numeric inputs are returned unchanged.
    """
    if not path_id:
        return path_id
    try:
        return str(int(path_id))
    except (ValueError, TypeError):
        return path_id
| | | |
| | | |
def validate_path_id(path_id: str) -> bool:
    """Return True when `path_id` parses as an integer in the range 1-1696."""
    try:
        value = int(path_id)
    except (ValueError, TypeError):
        return False
    return 1 <= value <= 1696
| | | |
| | | |
def validate_agv_id(agv_id: str) -> bool:
    """Return True when `agv_id` is a non-blank string of at most 50 chars."""
    if not isinstance(agv_id, str) or not agv_id:
        return False
    return bool(agv_id.strip()) and len(agv_id) <= 50
| | | |
| | | |
def save_json_file(data: Any, file_path: str) -> bool:
    """
    Serialize `data` to `file_path` as pretty-printed UTF-8 JSON.

    Args:
        data: Any JSON-serializable object.
        file_path: Destination file path (overwritten if it exists).

    Returns:
        bool: True on success, False on any failure (the error is logged).
    """
    logger = logging.getLogger(__name__)

    try:
        with open(file_path, 'w', encoding='utf-8') as f:
            # ensure_ascii=False keeps non-ASCII text readable in the file.
            json.dump(data, f, ensure_ascii=False, indent=2)

        logger.info("Data saved to file: %s", file_path)
        return True

    except Exception as e:
        logger.error("Failed to save file: %s - %s", file_path, e)
        return False
| | | |
| | | |
def load_json_file(file_path: str) -> Optional[Any]:
    """
    Load and return JSON data from `file_path`.

    Args:
        file_path: Source file path.

    Returns:
        Optional[Any]: The parsed object, or None when the file is missing,
        contains invalid JSON, or any other error occurs (all cases logged).
    """
    logger = logging.getLogger(__name__)

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        logger.info("Data loaded from file: %s", file_path)
        return data

    except FileNotFoundError:
        logger.warning("File does not exist: %s", file_path)
        return None
    except json.JSONDecodeError as e:
        logger.error("Invalid JSON format: %s - %s", file_path, e)
        return None
    except Exception as e:
        logger.error("Failed to load file: %s - %s", file_path, e)
        return None
| | | |
| | | |
def ensure_directory_exists(directory: str) -> bool:
    """
    Create `directory` (including parents) if it does not already exist.

    Args:
        directory: Directory path to create.

    Returns:
        bool: True when the directory exists afterwards, False on failure
        (the error is logged).
    """
    try:
        os.makedirs(directory, exist_ok=True)
        return True
    except Exception as e:
        logging.getLogger(__name__).error("Failed to create directory: %s - %s", directory, e)
        return False
| | | |
| | | |
def generate_segment_id(agv_id: str, task_id: Optional[str] = None,
                        target_position: Optional[str] = None,
                        action_type: str = "2") -> str:
    """
    Build a deterministic 11-digit navigation-segment ID for deduplication.

    The ID is derived by hashing the AGV ID together with either the task ID
    (task segments) or the target position (non-task segments), so identical
    inputs always yield the same segment ID.

    Args:
        agv_id: AGV identifier.
        task_id: Task identifier, when the segment belongs to a task.
        target_position: Target position, used when no task ID is given.
        action_type: Action type code.

    Returns:
        str: Fixed 11-digit segment ID.
    """
    import hashlib

    # Hash key: task segments use the task ID, others use the target position.
    if task_id:
        hash_input = f"{agv_id}_{task_id}_{action_type}"
    else:
        hash_input = f"{agv_id}_{target_position or 'unknown'}_{action_type}"

    # MD5 is used only as a stable checksum here, not for security.
    digest = hashlib.md5(hash_input.encode()).hexdigest()

    # Fold the first 8 hex chars into a zero-padded 11-digit decimal string.
    return str(int(digest[:8], 16))[-11:].zfill(11)
| | | |
| | | |
def generate_navigation_code(code: str, direction: str, action_type: str = "2",
                             task_id: Optional[str] = None, pos_type: Optional[str] = None,
                             backpack_level: Optional[int] = None, is_target_point: bool = False) -> Dict:
    """Build one navigation path-point code dict.

    `taskId` is attached only for task-type actions (type "2"); `posType`
    and `lev` are attached only on the target point, so intermediate points
    never carry those fields.
    """
    result = {
        'code': code,
        'direction': direction,
        'type': action_type,
    }

    if task_id and action_type == "2":
        result['taskId'] = task_id

    if is_target_point:
        if pos_type:
            result['posType'] = pos_type
        if backpack_level is not None:
            result['lev'] = backpack_level

    return result
New file |
| | |
| | | """ |
| | | ç¯å¢é
置模å - è´è´£å®ä¹å è½½ä»åºç¯å¢é
ç½® |
| | | """ |
| | | import json |
| | | from enum import Enum, auto |
| | | import numpy as np |
| | | from typing import Dict, List, Tuple, Optional, Union |
| | | |
| | | |
class CellType(Enum):
    """Grid cell types for the warehouse environment."""
    EMPTY = auto()      # empty cell
    STATION = auto()    # station
    PATH = auto()       # travel path
    STORAGE = auto()    # storage area
    OBSTACLE = auto()   # obstacle
    LOADING = auto()    # loading area
    UNLOADING = auto()  # unloading area
| | | |
| | | |
class EnvironmentConfig:
    """Warehouse environment configuration.

    Holds the grid layout (numpy array of CellType), station definitions,
    path / storage / loading / unloading / obstacle position sets, and the
    adjacency list used by path planning.
    """

    def __init__(self, width: int, height: int):
        """Initialize an empty environment of the given grid size."""
        self.width = width
        self.height = height
        # Every cell starts empty.
        self.grid = np.full((height, width), CellType.EMPTY)
        # Station positions and their configuration.
        self.stations = {}  # format: {(x, y): {"capacity": 4, "load_position": (x-1, y), "unload_position": (x+1, y)}}
        # Path-point positions.
        self.paths = set()  # format: {(x1, y1), (x2, y2), ...}
        # Storage-area positions.
        self.storage_areas = set()  # format: {(x1, y1), (x2, y2), ...}
        # Loading / unloading positions.
        self.loading_areas = set()  # format: {(x1, y1), (x2, y2), ...}
        self.unloading_areas = set()  # format: {(x1, y1), (x2, y2), ...}
        # Obstacle positions.
        self.obstacles = set()  # format: {(x1, y1), (x2, y2), ...}
        # Grid adjacency list (used for path planning).
        self.adjacency_list = {}  # format: {(x1, y1): {(x2, y2): distance, ...}, ...}

    def set_cell_type(self, x: int, y: int, cell_type: CellType) -> bool:
        """
        Set the type of one grid cell and record it in the matching set.

        NOTE(review): re-typing a cell does not remove it from the set of its
        previous type — confirm callers never change a cell's type in place.

        Args:
            x: x coordinate
            y: y coordinate
            cell_type: cell type to assign

        Returns:
            bool: True when (x, y) is inside the grid, False otherwise.
        """
        if 0 <= x < self.width and 0 <= y < self.height:
            self.grid[y, x] = cell_type

            # Mirror the assignment into the per-type collection.
            pos = (x, y)
            if cell_type == CellType.PATH:
                self.paths.add(pos)
            elif cell_type == CellType.STATION:
                if pos not in self.stations:
                    self.stations[pos] = {
                        "capacity": 4,  # default capacity
                        "load_position": (x-1, y),  # default loading position (left)
                        "unload_position": (x+1, y)  # default unloading position (right)
                    }
            elif cell_type == CellType.STORAGE:
                self.storage_areas.add(pos)
            elif cell_type == CellType.LOADING:
                self.loading_areas.add(pos)
            elif cell_type == CellType.UNLOADING:
                self.unloading_areas.add(pos)
            elif cell_type == CellType.OBSTACLE:
                self.obstacles.add(pos)

            return True
        return False

    def add_station(self, x: int, y: int, capacity: int = 4,
                    load_position: Tuple[int, int] = None,
                    unload_position: Tuple[int, int] = None) -> bool:
        """
        Add a station cell together with its loading/unloading positions.

        Args:
            x: station x coordinate
            y: station y coordinate
            capacity: station capacity
            load_position: loading position (defaults to the cell on the left)
            unload_position: unloading position (defaults to the cell on the right)

        Returns:
            bool: True when (x, y) is inside the grid, False otherwise.
        """
        if 0 <= x < self.width and 0 <= y < self.height:
            # Fill in default loading/unloading positions.
            if load_position is None:
                load_position = (x-1, y)  # left side: bin pickup position
            if unload_position is None:
                unload_position = (x+1, y)  # right side: bin return position

            # Update the grid and station info.
            self.grid[y, x] = CellType.STATION
            self.stations[(x, y)] = {
                "capacity": capacity,
                "load_position": load_position,
                "unload_position": unload_position
            }

            # Make sure the loading/unloading cells are marked as well.
            self.set_cell_type(load_position[0], load_position[1], CellType.LOADING)
            self.set_cell_type(unload_position[0], unload_position[1], CellType.UNLOADING)

            return True
        return False

    def add_path(self, x: int, y: int) -> bool:
        """
        Add a path point.

        Args:
            x: x coordinate
            y: y coordinate

        Returns:
            bool: True when the point was inside the grid and added.
        """
        return self.set_cell_type(x, y, CellType.PATH)

    def add_storage_area(self, x: int, y: int) -> bool:
        """
        Add a storage-area cell.

        Args:
            x: x coordinate
            y: y coordinate

        Returns:
            bool: True when the point was inside the grid and added.
        """
        return self.set_cell_type(x, y, CellType.STORAGE)

    def add_obstacle(self, x: int, y: int) -> bool:
        """
        Add an obstacle cell.

        Args:
            x: x coordinate
            y: y coordinate

        Returns:
            bool: True when the point was inside the grid and added.
        """
        return self.set_cell_type(x, y, CellType.OBSTACLE)

    def is_valid_position(self, x: int, y: int) -> bool:
        """
        Check whether a position is inside the grid and not an obstacle.

        Args:
            x: x coordinate
            y: y coordinate

        Returns:
            bool: True when the position is traversable.
        """
        return (0 <= x < self.width and
                0 <= y < self.height and
                self.grid[y, x] != CellType.OBSTACLE)

    def build_adjacency_list(self):
        """Build the grid adjacency list used for path planning."""
        # Reset any existing adjacency list.
        self.adjacency_list = {}

        # AGVs move in four directions.
        directions = [
            (0, 1),   # up
            (1, 0),   # right
            (0, -1),  # down
            (-1, 0)   # left
        ]

        # Connect each path point to its neighboring path points.
        for x, y in self.paths:
            if (x, y) not in self.adjacency_list:
                self.adjacency_list[(x, y)] = {}

            # Check neighbors in all four directions.
            for dx, dy in directions:
                nx, ny = x + dx, y + dy
                # Only connect to cells that are themselves path points.
                if (nx, ny) in self.paths:
                    # Uniform move cost of 1.0 between adjacent path points.
                    distance = 1.0
                    self.adjacency_list[(x, y)][(nx, ny)] = distance

        # Add station loading/unloading points to the adjacency list.
        for station_pos, station_info in self.stations.items():
            load_pos = station_info["load_position"]
            unload_pos = station_info["unload_position"]

            # Ensure both points have an adjacency entry.
            if load_pos not in self.adjacency_list:
                self.adjacency_list[load_pos] = {}
            if unload_pos not in self.adjacency_list:
                self.adjacency_list[unload_pos] = {}

            # Connect each loading/unloading point to its nearest path point.
            for pos in [load_pos, unload_pos]:
                # Linear scan for the nearest path point (Euclidean distance).
                min_dist = float('inf')
                nearest_path = None

                for path_pos in self.paths:
                    dist = ((pos[0] - path_pos[0]) ** 2 + (pos[1] - path_pos[1]) ** 2) ** 0.5
                    if dist < min_dist:
                        min_dist = dist
                        nearest_path = path_pos

                if nearest_path:
                    self.adjacency_list[pos][nearest_path] = min_dist
                    self.adjacency_list[nearest_path][pos] = min_dist

    def save_to_file(self, filepath: str):
        """
        Save the environment configuration to a JSON file.

        Args:
            filepath: destination file path
        """
        config_data = {
            "width": self.width,
            "height": self.height,
            "stations": {f"{x},{y}": info for (x, y), info in self.stations.items()},
            "paths": [{"x": x, "y": y} for x, y in self.paths],
            "storage_areas": [{"x": x, "y": y} for x, y in self.storage_areas],
            "loading_areas": [{"x": x, "y": y} for x, y in self.loading_areas],
            "unloading_areas": [{"x": x, "y": y} for x, y in self.unloading_areas],
            "obstacles": [{"x": x, "y": y} for x, y in self.obstacles]
        }

        with open(filepath, 'w') as f:
            json.dump(config_data, f, indent=2)

    @classmethod
    def load_from_file(cls, filepath: str) -> 'EnvironmentConfig':
        """
        Load an environment configuration from a JSON file.

        NOTE(review): "loading_areas"/"unloading_areas" are not read back
        directly; they are reconstructed via add_station — confirm the saved
        lists never contain extra entries.

        Args:
            filepath: source file path

        Returns:
            EnvironmentConfig: the loaded environment configuration
        """
        with open(filepath, 'r') as f:
            config_data = json.load(f)

        env_config = cls(config_data["width"], config_data["height"])

        # Load stations (keys are "x,y" strings; positions are 2-element lists).
        for pos_str, info in config_data["stations"].items():
            x, y = map(int, pos_str.split(','))
            load_x, load_y = info["load_position"]
            unload_x, unload_y = info["unload_position"]
            env_config.add_station(x, y, info["capacity"],
                                   (load_x, load_y), (unload_x, unload_y))

        # Load path points.
        for path in config_data["paths"]:
            env_config.add_path(path["x"], path["y"])

        # Load storage areas.
        for area in config_data["storage_areas"]:
            env_config.add_storage_area(area["x"], area["y"])

        # Load obstacles.
        for obstacle in config_data["obstacles"]:
            env_config.add_obstacle(obstacle["x"], obstacle["y"])

        # Build the adjacency list.
        env_config.build_adjacency_list()

        return env_config

    def create_default_environment(self) -> 'EnvironmentConfig':
        """
        Reset this environment and populate the default layout
        (stations and paths per the requirements description).

        Returns:
            EnvironmentConfig: self, fully configured
        """
        # Clear the current configuration.
        self.grid = np.full((self.height, self.width), CellType.EMPTY)
        self.stations = {}
        self.paths = set()
        self.storage_areas = set()
        self.loading_areas = set()
        self.unloading_areas = set()
        self.obstacles = set()

        # Configure 20 stations along one row near the bottom.
        station_y = self.height - 5
        station_spacing = self.width // 25  # spacing between stations

        for i in range(20):
            station_x = (i + 1) * station_spacing
            self.add_station(station_x, station_y)

        # Storage areas: 5 columns of 3x3 clusters.
        storage_start_y = 5
        storage_end_y = station_y - 10

        for col in range(5):
            col_x = (col + 1) * (self.width // 6)

            for row_y in range(storage_start_y, storage_end_y, 5):
                for dx in range(-1, 2):
                    for dy in range(-1, 2):
                        self.add_storage_area(col_x + dx, row_y + dy)

        # Configure paths.
        # Horizontal main paths.
        for x in range(5, self.width - 5):
            # Horizontal path in front of the stations.
            self.add_path(x, station_y - 5)
            # Horizontal paths through the storage region.
            for path_y in range(10, storage_end_y, 10):
                self.add_path(x, path_y)

        # Vertical connecting paths.
        for col in range(7):
            path_x = col * (self.width // 7)
            for y in range(5, station_y):
                self.add_path(path_x, y)

        # Build the adjacency list.
        self.build_adjacency_list()

        return self
| | | |
| | | |
| | | # å建ååå§åé»è®¤ç¯å¢çè¾
å©å½æ° |
def create_default_environment(width=100, height=100) -> EnvironmentConfig:
    """
    Convenience factory: build an EnvironmentConfig of the given size and
    populate it with the default station/path/storage layout.

    Args:
        width: environment width
        height: environment height

    Returns:
        EnvironmentConfig: the populated default environment
    """
    return EnvironmentConfig(width, height).create_default_environment()
New file |
| | |
| | | """ |
| | | å
¨å±è®¾ç½®ååæ°é
ç½® - ä»
ä¿çå®é
使ç¨çé
置项 |
| | | """ |
| | | |
| | | # RCSæå¡å¨é
ç½® |
| | | RCS_SERVER_HOST = "10.10.10.156" |
| | | RCS_SERVER_PORT = 8088 |
| | | |
| | | # ç®æ³æå¡å¨é
ç½® |
| | | ALGORITHM_SERVER_HOST = "10.10.10.239" |
| | | ALGORITHM_SERVER_PORT = 8002 |
| | | |
| | | # è·¯å¾çæ§æå¡é
ç½® |
| | | MONITOR_POLLING_INTERVAL = 5.0 # è·¯å¾çæ§è½®è¯¢é´é |
| | | |
| | | # APIé
ç½® |
| | | AGV_STATUS_API_ENDPOINT = "/api/open/algorithm/getAgv" # AGVç¶ææ¥è¯¢Url |
| | | |
| | | # AGVé
ç½® |
| | | DEFAULT_AGV_COUNT = 50 # é»è®¤AGVæ°é |
| | | |
| | | # ç½ç»é
ç½® |
| | | REQUEST_TIMEOUT = 30 # HTTP请æ±è¶
æ¶æ¶é´ï¼ç§ï¼ |
| | | |
| | | # æ¥å¿é
ç½® |
| | | LOG_LEVEL = "INFO" |
| | | LOG_FILE = "warehouse_system.log" # æ¥å¿æä»¶è·¯å¾ |
New file |
| | |
| | | #!/usr/bin/env python3 |
| | | import sys |
| | | import os |
| | | import signal |
| | | import logging |
| | | from typing import Optional |
| | | |
| | | sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) |
| | | |
| | | from common.utils import setup_logging |
| | | from config.settings import ( |
| | | ALGORITHM_SERVER_HOST, ALGORITHM_SERVER_PORT, LOG_LEVEL, LOG_FILE, |
| | | RCS_SERVER_HOST, RCS_SERVER_PORT, MONITOR_POLLING_INTERVAL |
| | | ) |
| | | from algorithm_system.algorithm_server import AlgorithmServer |
| | | |
| | | |
class AlgorithmApplication:
    """Top-level application wrapper.

    Wires configuration defaults, logging, OS signal handling, and the
    algorithm server lifecycle together.
    """

    def __init__(self, host: str = None, port: int = None, log_level: str = None,
                 enable_path_monitor: bool = True, monitor_interval: float = None,
                 rcs_host: str = None, rcs_port: int = None):
        """Initialize the application; None/falsy args fall back to config.settings values."""
        self.host = host if host else ALGORITHM_SERVER_HOST
        self.port = port if port else ALGORITHM_SERVER_PORT
        self.log_level = log_level if log_level else LOG_LEVEL
        self.enable_path_monitor = enable_path_monitor
        self.monitor_interval = monitor_interval if monitor_interval else MONITOR_POLLING_INTERVAL
        self.rcs_host = rcs_host if rcs_host else RCS_SERVER_HOST
        self.rcs_port = rcs_port if rcs_port else RCS_SERVER_PORT

        setup_logging(self.log_level, LOG_FILE)
        self.logger = logging.getLogger(__name__)

        self.server: Optional[AlgorithmServer] = None

        # Shut down cleanly on Ctrl+C or a service stop request.
        for sig in (signal.SIGINT, signal.SIGTERM):
            signal.signal(sig, self.signal_handler)

    def signal_handler(self, signum, frame):
        """Handle termination signals by shutting the system down and exiting."""
        self.logger.info(f"Received signal {signum}, shutting down algorithm system...")
        self.shutdown()
        sys.exit(0)

    def start_server(self):
        """Create, describe, and run the algorithm server (blocks until it stops)."""
        try:
            self.logger.info("Initializing algorithm system server...")

            # Export the RCS connection settings for downstream components.
            os.environ['RCS_SERVER_HOST'] = self.rcs_host
            os.environ['RCS_SERVER_PORT'] = str(self.rcs_port)

            self.server = AlgorithmServer(self.host, self.port, self.enable_path_monitor, self.monitor_interval)

            status = self.server.get_server_status()
            self.logger.info("Algorithm system server status:")
            self.logger.info(f" - Host: {status['host']}")
            self.logger.info(f" - Port: {status['port']}")
            self.logger.info(f" - Path mapping loaded: {status['path_mapping_loaded']}")
            self.logger.info(f" - AGV count: {status['agv_count']}")
            self.logger.info(f" - Task allocation algorithm: {status['algorithms']['task_allocation']}")
            self.logger.info(f" - Path planning algorithm: {status['algorithms']['path_planning']}")

            self.logger.info("Algorithm system server initialization completed")

            self.server.start_server()

        except KeyboardInterrupt:
            self.logger.info("Received interrupt signal, shutting down algorithm server...")
        except Exception as e:
            self.logger.error(f"Algorithm server startup failed: {e}")
            raise
        finally:
            self.shutdown()

    def shutdown(self):
        """Stop the server if one was started; errors are logged, not raised."""
        self.logger.info("Shutting down algorithm system...")

        if self.server:
            try:
                self.server.stop_server()
            except Exception as e:
                self.logger.error(f"Failed to shutdown server: {e}")

        self.logger.info("Algorithm system shutdown completed")
| | | |
| | | |
def main():
    """CLI entry point: parse arguments, print the startup banner, and run
    the algorithm application until interrupted.

    Fix: removed the `f` prefix from format strings that contain no
    placeholders (ruff F541); the printed/help text is byte-identical.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Algorithm System - Provides task allocation and path planning services")
    parser.add_argument(
        "--host",
        default=ALGORITHM_SERVER_HOST,
        help=f"Algorithm server host address (default: {ALGORITHM_SERVER_HOST})"
    )
    parser.add_argument(
        "--port",
        type=int,
        default=ALGORITHM_SERVER_PORT,
        help=f"Algorithm server port (default: {ALGORITHM_SERVER_PORT})"
    )
    parser.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR"],
        default="DEBUG",
        help="Log level (default: DEBUG)"
    )
    parser.add_argument(
        "--task-algorithm",
        choices=["NEAREST_FIRST", "LOAD_BALANCED", "PRIORITY_FIRST", "MULTI_OBJECTIVE"],
        default="LOAD_BALANCED",
        help="Task allocation algorithm (default: LOAD_BALANCED)"
    )
    parser.add_argument(
        "--path-algorithm",
        choices=["A_STAR", "DIJKSTRA", "GREEDY"],
        default="DIJKSTRA",
        help="Path planning algorithm (default: DIJKSTRA)"
    )
    parser.add_argument(
        "--enable-path-monitor",
        action="store_true",
        default=True,
        help="Enable path monitoring service (default: True)"
    )
    parser.add_argument(
        "--disable-path-monitor",
        action="store_true",
        help="Disable path monitoring service"
    )
    parser.add_argument(
        "--monitor-interval",
        type=float,
        default=MONITOR_POLLING_INTERVAL,
        help=f"Path monitoring polling interval in seconds (default: {MONITOR_POLLING_INTERVAL})"
    )
    parser.add_argument(
        "--rcs-host",
        default=RCS_SERVER_HOST,
        help=f"RCS server host address (default: {RCS_SERVER_HOST})"
    )
    parser.add_argument(
        "--rcs-port",
        type=int,
        default=RCS_SERVER_PORT,
        help=f"RCS server port (default: {RCS_SERVER_PORT})"
    )

    args = parser.parse_args()

    # --enable-path-monitor defaults to True, so monitoring is on unless
    # --disable-path-monitor is passed explicitly.
    enable_path_monitor = args.enable_path_monitor and not args.disable_path_monitor

    # NOTE(review): --task-algorithm / --path-algorithm are parsed and printed
    # but not passed to AlgorithmApplication — confirm the server reads them
    # through its own configuration API.
    app = AlgorithmApplication(
        host=args.host,
        port=args.port,
        log_level=args.log_level,
        enable_path_monitor=enable_path_monitor,
        monitor_interval=args.monitor_interval,
        rcs_host=args.rcs_host,
        rcs_port=args.rcs_port
    )

    try:
        print("=" * 60)
        print("CTU Warehouse Management System - Algorithm System")
        print("=" * 60)
        print(f"Server Address: http://{args.host}:{args.port}")
        print(f"Task Allocation Algorithm: {args.task_algorithm}")
        print(f"Path Planning Algorithm: {args.path_algorithm}")
        print(f"Log Level: {args.log_level}")
        print(f"Path Monitoring Service: {'Enabled' if enable_path_monitor else 'Disabled'}")
        if enable_path_monitor:
            print(f"Monitor Polling Interval: {args.monitor_interval}s")
            print(f"RCS Server Address: {args.rcs_host}:{args.rcs_port}")
        print("=" * 60)
        print("Available APIs:")
        print(f" POST http://{args.host}:{args.port}/open/task/send/v1")
        print(" - Task allocation API")
        print(" - Supports new format: {\"tasks\": [...], \"agvStatus\": [...]}")
        print(" - Compatible with old format: [task1, task2, ...]")
        print(f" POST http://{args.host}:{args.port}/open/path/plan/v1")
        print(" - Path planning API")
        print(f" POST http://{args.host}:{args.port}/open/path/batch/plan/v1")
        print(" - Batch path planning API")
        print(f" POST http://{args.host}:{args.port}/open/algorithm/config/v1")
        print(" - Algorithm configuration API")
        if enable_path_monitor:
            print(f" POST http://{args.host}:{args.port}/monitor/path/start/v1")
            print(" - Start path monitoring service")
            print(f" POST http://{args.host}:{args.port}/monitor/path/stop/v1")
            print(" - Stop path monitoring service")
            print(f" GET http://{args.host}:{args.port}/monitor/path/status/v1")
            print(" - Path monitoring service status query")
        print(f" GET http://{args.host}:{args.port}/health")
        print(" - Health check API")
        print("=" * 60)
        print("Press Ctrl+C to stop server")
        print()

        app.start_server()

    except KeyboardInterrupt:
        print("\nReceived interrupt signal, shutting down...")
    except Exception as e:
        print(f"Startup failed: {e}")
        sys.exit(1)
| | | |
| | | |
| | | if __name__ == "__main__": |
| | | main() |