diff --git a/.gitignore b/.gitignore index 68bc17f..10189be 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,10 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ + +# User config +vATISLoadUserConfig.json + +# LLM stuff +.claude/ +CLAUDE.md diff --git a/vATISLoad.ipynb b/vATISLoad.ipynb index 24aa2b6..4fc00a6 100644 --- a/vATISLoad.ipynb +++ b/vATISLoad.ipynb @@ -47,554 +47,7 @@ "id": "cb02d716-5883-4228-a6fe-5dae01b904aa", "metadata": {}, "outputs": [], - "source": [ - "def update_vATISLoad():\n", - " online_file = ''\n", - " url = 'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoad.pyw'\n", - " try:\n", - " online_file = requests.get(url).text.split('\\n')\n", - " except Exception as ignored:\n", - " return\n", - "\n", - " up_to_date = True\n", - " with open(sys.argv[0], 'r') as FileObj:\n", - " i = 0\n", - " for line in FileObj:\n", - " if ('DISABLE_AUTOUPDATES =' in line or 'RUN_UPDATE =' in line \n", - " or 'SHUTDOWN_LIMIT =' in line or 'AUTO_SELECT_FACILITY' in line) and i < 10:\n", - " pass\n", - " elif i > len(online_file) or len(line.strip()) != len(online_file[i].strip()):\n", - " up_to_date = False\n", - " break\n", - " i += 1\n", - "\n", - " if up_to_date:\n", - " return\n", - "\n", - " try:\n", - " os.rename(sys.argv[0], sys.argv[0] + '.bak')\n", - " with requests.get(url, stream=True) as r:\n", - " r.raise_for_status()\n", - " with open(sys.argv[0], 'wb') as f:\n", - " for chunk in r.iter_content(chunk_size=8192): \n", - " f.write(chunk)\n", - "\n", - " os.remove(sys.argv[0] + '.bak')\n", - " \n", - " except Exception as ignored:\n", - " if not os.path.isfile(sys.argv[0]) and os.path.isfile(sys.argv[0] + '.bak'):\n", - " os.rename(sys.argv[0] + '.bak', sys.argv[0])\n", - "\n", - " os.execv(sys.executable, ['python'] + sys.argv)\n", - "\n", - "def determine_active_callsign(return_artcc_only=False):\n", - " crc_path = ''\n", - " try:\n", - " key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, 'SOFTWARE\\\\CRC')\n", - " crc_path, value_type = winreg.QueryValueEx(key, 'Install_Dir')\n", - " except FileNotFoundError as ignored:\n", - " crc_path = os.path.join(os.getenv('LOCALAPPDATA'), 'CRC')\n", - " \n", - " crc_profiles = os.path.join(crc_path, 'Profiles')\n", - " crc_name = ''\n", - " crc_data = {}\n", - " crc_lastused_time = '2020-01-01T08:00:00'\n", - " try:\n", - " for filename in os.listdir(crc_profiles):\n", - " if filename.endswith('.json'): \n", - " file_path = os.path.join(crc_profiles, filename)\n", - " with open(file_path, 'r') as f:\n", - " data = json.load(f)\n", - " dt1 = datetime.strptime(crc_lastused_time, '%Y-%m-%dT%H:%M:%S')\n", - " if 'LastUsedAt' not in data or data['LastUsedAt'] == None:\n", - " continue\n", - " dt2 = datetime.strptime(data['LastUsedAt'].split('.')[0].replace('Z',''), '%Y-%m-%dT%H:%M:%S')\n", - " if dt2 > dt1:\n", - " crc_lastused_time = data['LastUsedAt'].split('.')[0].replace('Z','')\n", - " crc_name = data['Name']\n", - " crc_data = data\n", - " except Exception as ignored:\n", - " return None\n", - "\n", - " if return_artcc_only:\n", - " return crc_data['ArtccId']\n", - "\n", - " try:\n", - " lastPos = crc_data['LastUsedPositionId']\n", - " crc_ARTCC = os.path.join(crc_path, 'ARTCCs') + os.sep + crc_data['ArtccId'] + '.json'\n", - " with open(crc_ARTCC, 'r') as f:\n", - " data = json.load(f)\n", - "\n", - " pos = 
determine_position_from_id(data['facility']['positions'], lastPos)\n", - " if pos is not None:\n", - " return pos\n", - "\n", - " for child1 in data['facility']['childFacilities']:\n", - " pos = determine_position_from_id(child1['positions'], lastPos)\n", - " if pos is not None:\n", - " return pos\n", - " \n", - " for child2 in child1['childFacilities']:\n", - " pos = determine_position_from_id(child2['positions'], lastPos)\n", - " if pos is not None:\n", - " return pos\n", - " \n", - " except Exception as ignored:\n", - " pass\n", - "\n", - " return None\n", - "\n", - "async def auto_select_facility():\n", - " artcc = determine_active_callsign(return_artcc_only=True)\n", - " if artcc is None:\n", - " return\n", - " \n", - " if not AUTO_SELECT_FACILITY and not artcc in ['ZOA', 'ZMA', 'ZDC']:\n", - " return\n", - "\n", - " # Determine if CRC is open and a profile is loaded\n", - " crc_found = False\n", - " for win in [w.title for w in pygetwindow.getAllWindows()]:\n", - " if 'CRC : 1' in win:\n", - " crc_found = True\n", - "\n", - " if not crc_found:\n", - " return\n", - " \n", - " try:\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " # Determine if any vATIS profile matches ARTCC\n", - " await websocket.send(json.dumps({'type': 'getProfiles'}))\n", - " m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))['profiles']\n", - " \n", - " match_id = ''\n", - " for p in m:\n", - " if artcc in p['name']:\n", - " match_id = p['id']\n", - " \n", - " if len(match_id) < 0:\n", - " return\n", - " \n", - " # Determine if current profile is already the desired profile\n", - " await websocket.send(json.dumps({'type': 'getActiveProfile'}))\n", - " m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))\n", - " \n", - " if 'id' in m:\n", - " # Do not select a profile if current profile is already selected\n", - " if m['id'] == match_id:\n", - " return\n", - " \n", - " # Load new profile\n", - " await websocket.send(json.dumps({'type': 'loadProfile', 'value': {'id': match_id}}))\n", - " await asyncio.sleep(1)\n", - " \n", - " except Exception as ignored:\n", - " pass\n", - "\n", - "async def try_websocket(shutdown=RUN_UPDATE, limit=SHUTDOWN_LIMIT, initial=False):\n", - " t0 = time.time()\n", - " for i in range(0, 250):\n", - " if initial and i < 30:\n", - " await auto_select_facility()\n", - " \n", - " t1 = time.time()\n", - " if t1 - t0 > limit:\n", - " if shutdown:\n", - " sys.exit()\n", - " return\n", - " try:\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " await websocket.send(json.dumps({'type': 'getStations'}))\n", - " try:\n", - " m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=1))\n", - " if time.time() - t0 > 5:\n", - " await asyncio.sleep(1)\n", - " if m['type'] != 'stations':\n", - " await asyncio.sleep(0.5)\n", - " continue\n", - " return\n", - " except Exception as ignored:\n", - " pass\n", - " except Exception as ignored:\n", - " dt = time.time() - t1\n", - " if dt < 1:\n", - " await asyncio.sleep(1 - dt)\n", - " pass\n", - "\n", - "async def get_datis_stations(initial=False):\n", - " await try_websocket(initial=initial)\n", - " \n", - " data = {}\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " await websocket.send(json.dumps({'type': 'getStations'}))\n", - " m = json.loads(await websocket.recv())\n", - " \n", - " not_stations = False\n", - " while m['type'] != 'stations':\n", - 
" not_stations = True\n", - " await asyncio.sleep(0.1)\n", - " await websocket.send(json.dumps({'type': 'getStations'}))\n", - " m = json.loads(await websocket.recv())\n", - " \n", - " if not_stations:\n", - " await asyncio.sleep(0.5)\n", - " await websocket.send(json.dumps({'type': 'getStations'}))\n", - " m = json.loads(await websocket.recv())\n", - "\n", - " for s in m['stations']:\n", - " name = s['name']\n", - "\n", - " if s['atisType'] == 'Arrival':\n", - " name += '_A'\n", - " elif s['atisType'] == 'Departure':\n", - " name += '_D'\n", - " \n", - " if 'D-ATIS' in s['presets']:\n", - " data[name] = s['id']\n", - " \n", - " return data\n", - "\n", - "def get_atis_replacements(stations):\n", - " stations = list(set(value.replace('_A', '').replace('_D', '') for value in stations))\n", - "\n", - " config = {}\n", - " try:\n", - " url = 'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoadConfig.json'\n", - " config = json.loads(requests.get(url).text)\n", - " except Exception as ignored:\n", - " pass\n", - "\n", - " if 'replacements' not in config:\n", - " return {}\n", - "\n", - " replacements = {}\n", - " for a in config['replacements']:\n", - " if a in stations:\n", - " replacements[a] = config['replacements'][a]\n", - "\n", - " return replacements\n", - " \n", - "async def get_contractions(station):\n", - " try:\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " if '_D' in station:\n", - " payload = {'station': station[0:4], 'atisType': 'Departure'}\n", - " elif '_A' in station:\n", - " payload = {'station': station[0:4], 'atisType': 'Arrival'}\n", - " else:\n", - " payload = {'station': station[0:4]}\n", - " await websocket.send(json.dumps({'type': 'getContractions', 'value': payload}))\n", - " m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))\n", - " \n", - "\n", - " c = {}\n", - " contractions = m['stations'][0]['contractions']\n", - " for cont in contractions:\n", - " c[contractions[cont]['text']] = '@' + cont\n", - " \n", - " c = dict(sorted(c.items(), key=lambda item: len(item[0])))\n", - " c = {key: c[key] for key in reversed(c)}\n", - "\n", - " return c\n", - " except asyncio.TimeoutError:\n", - " pass\n", - "\n", - " return {}\n", - "\n", - "def get_datis_data():\n", - " data = {}\n", - " try:\n", - " url = 'https://atis.info/api/all'\n", - " data = json.loads(requests.get(url, timeout=2.5).text)\n", - " except Exception as ignored:\n", - " os.system('cmd /K \\\"cls & echo Unable to fetch D-ATIS data. 
& timeout 5 & exit\\\"')\n", - " \n", - " return data\n", - "\n", - "async def get_datis(station, atis_data, replacements):\n", - " atis_type = 'combined'\n", - " if '_A' in station:\n", - " atis_type = 'arr'\n", - " elif '_D' in station:\n", - " atis_type = 'dep'\n", - "\n", - " atis_info = ['D-ATIS NOT AVBL.', '']\n", - " if 'error' in atis_data:\n", - " return atis_info\n", - "\n", - " datis = ''\n", - " for a in atis_data:\n", - " if a['airport'] != station[0:4] or a['type'] != atis_type:\n", - " continue\n", - " datis = a['datis']\n", - "\n", - " # Ignore D-ATIS more than 1.75 hours old\n", - " try: \n", - " t_updated = datetime.strptime(a['updatedAt'][:26], \"%Y-%m-%dT%H:%M:%S.%f\")\n", - " t_updated = t_updated.replace(tzinfo=timezone.utc)\n", - " t_now = datetime.now(timezone.utc)\n", - "\n", - " if (t_now - t_updated).total_seconds() / 3600 > 1.75:\n", - " return atis_info\n", - " except Exception as ignored:\n", - " pass\n", - "\n", - " if len(datis) == 0:\n", - " return atis_info\n", - "\n", - " # Strip beginning and ending D-ATIS text\n", - " datis = '. '.join(datis.split('. ')[2:])\n", - " datis = re.sub(' ...ADVS YOU HAVE.*', '', datis)\n", - " datis = datis.replace('NOTICE TO AIR MISSIONS, NOTAMS. ', 'NOTAMS... ') \\\n", - " .replace('NOTICE TO AIR MISSIONS. ', 'NOTAMS... ') \\\n", - " .replace('NOTICE TO AIR MEN. ', 'NOTAMS... ') \\\n", - " .replace('NOTICE TO AIRMEN. ', 'NOTAMS... ') \\\n", - " .replace('NOTAMS. ', 'NOTAMS... ') \\\n", - " .replace('NOTAM. ', 'NOTAMS... ')\n", - "\n", - " # Replace defined replacements\n", - " for r in replacements:\n", - " if '%r' in replacements[r]:\n", - " datis = re.sub(r + '[,.;]{0,2}', replacements[r].replace('%r', ''), datis)\n", - " else:\n", - " datis = re.sub(r + '[,.;]{0,2}', replacements[r], datis)\n", - " datis = re.sub(r'\\s+', ' ', datis).strip()\n", - "\n", - " # Clean up D-ATIS\n", - " datis = datis.replace('...', '/./').replace('..', '.') \\\n", - " .replace('/./', '...').replace(' ', ' ').replace(' . ', '. 
') \\\n", - " .replace(', ,', ',').replace(' ; ', '; ').replace(' .,', ' ,') \\\n", - " .replace(' , ', ', ').replace('., ', ', ').replace('&', '&') \\\n", - " .replace(' ;.', '.').replace(' ;,', ',')\n", - "\n", - " # Replace contractions\n", - " contractions = await get_contractions(station)\n", - " for c, v in contractions.items():\n", - " if not c.isdigit():\n", - " datis = re.sub(r'(?= 4:\n", - " break\n", - " \n", - " if s not in disconnected_atises:\n", - " continue\n", - " \n", - " payload = {'type': 'connectAtis', 'value': {'id': i}}\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " await websocket.send(json.dumps(payload))\n", - "\n", - " try:\n", - " m = await asyncio.wait_for(websocket.recv(), timeout=0.1)\n", - " n += 1\n", - " except Exception as ignored:\n", - " pass\n", - "\n", - "def kill_open_instances():\n", - " prev_instances = {}\n", - "\n", - " for q in psutil.process_iter():\n", - " if 'python' in q.name():\n", - " for parameter in q.cmdline():\n", - " if 'vATISLoad' in parameter and parameter.endswith('.pyw'):\n", - " q_create_time = q.create_time()\n", - " q_create_datetime = datetime.fromtimestamp(q_create_time)\n", - " prev_instances[q.pid] = {'process': q, 'start': q_create_datetime}\n", - " \n", - " prev_instances = dict(sorted(prev_instances.items(), key=lambda item: item[1]['start']))\n", - " \n", - " for i in range(0, len(prev_instances) - 1):\n", - " k = list(prev_instances.keys())[i]\n", - " prev_instances[k]['process'].terminate()\n", - "\n", - "def open_vATIS():\n", - " # Set 'autoFetchAtisLetter' to True\n", - " config_path = os.getenv('LOCALAPPDATA') + '\\\\org.vatsim.vatis\\\\AppConfig.json'\n", - " try:\n", - " with open(config_path, 'r') as f:\n", - " data = json.load(f)\n", - " if 'autoFetchAtisLetter' in data:\n", - " data['autoFetchAtisLetter'] = True\n", - " with open(config_path, 'w') as f:\n", - " json.dump(data, f, indent=2)\n", - " except Exception as ignored:\n", - " pass\n", - "\n", - " # Check if vATIS is open\n", - " for process in psutil.process_iter(['name']):\n", - " if process.info['name'] == 'vATIS.exe':\n", - " return\n", - "\n", - " exe = os.getenv('LOCALAPPDATA') + '\\\\org.vatsim.vatis\\\\current\\\\vATIS.exe'\n", - " subprocess.Popen(exe);\n", - "\n", - "async def get_connected_atis_data():\n", - " stations = await get_datis_stations()\n", - " atis_statuses = await get_atis_statuses()\n", - "\n", - " connected_atis_data = {}\n", - " \n", - " for station in [k for k, v in atis_statuses.items() if v == 'Connected']:\n", - " payload = {'type': 'getAtis', 'value': {'id': stations[station]}}\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " await websocket.send(json.dumps(payload))\n", - "\n", - " m = json.loads(await websocket.recv())['value']\n", - " connected_atis_data[station] = [m['airportConditions'], m['notams']]\n", - "\n", - " return connected_atis_data\n", - "\n", - "async def disconnect_over_connection_limit(delay=True):\n", - " if True:\n", - " time.sleep(5)\n", - " \n", - " stations = await get_datis_stations()\n", - " atis_statuses = await get_atis_statuses()\n", - " connected_atises = [k for k, v in atis_statuses.items() if v == 'Connected']\n", - "\n", - " if len(connected_atises) <= 4 or SHUTDOWN_LIMIT == 346:\n", - " return\n", - "\n", - " for i in range(4, len(connected_atises)):\n", - " s, i = connected_atises[i], stations[connected_atises[i]]\n", - " payload = {'type': 'disconnectAtis', 'value': {'id': 
i}}\n", - " async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n", - " await websocket.send(json.dumps(payload))\n", - "\n", - "def find_deleted_portions(original, modified):\n", - " sequence_matcher = difflib.SequenceMatcher(None, original, modified)\n", - " \n", - " deleted_portions = []\n", - " for tag, i1, i2, j1, j2 in sequence_matcher.get_opcodes():\n", - " if tag == 'delete': \n", - " deleted_portions.append(original[i1:i2])\n", - " \n", - " return deleted_portions\n", - "\n", - "def compare_atis_data(prev_data, new_data):\n", - " compared_output = {}\n", - "\n", - " for station in prev_data:\n", - " if station not in new_data:\n", - " continue\n", - " \n", - " conditionDiff = find_deleted_portions(prev_data[station][0], new_data[station][0])\n", - " notamDiff = find_deleted_portions(prev_data[station][1], new_data[station][1])\n", - "\n", - " if len(conditionDiff) > 0 or len(notamDiff) > 0:\n", - " compared_output[station] = conditionDiff + notamDiff\n", - "\n", - " return compared_output" - ] + "source": "def update_vATISLoad():\n online_file = ''\n url = 'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoad.pyw'\n try:\n online_file = requests.get(url).text.split('\\n')\n except Exception as ignored:\n return\n\n up_to_date = True\n with open(sys.argv[0], 'r') as FileObj:\n i = 0\n for line in FileObj:\n if ('DISABLE_AUTOUPDATES =' in line or 'RUN_UPDATE =' in line \n or 'SHUTDOWN_LIMIT =' in line or 'AUTO_SELECT_FACILITY' in line) and i < 10:\n pass\n elif i > len(online_file) or len(line.strip()) != len(online_file[i].strip()):\n up_to_date = False\n break\n i += 1\n\n if up_to_date:\n return\n\n try:\n os.rename(sys.argv[0], sys.argv[0] + '.bak')\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(sys.argv[0], 'wb') as f:\n for chunk in r.iter_content(chunk_size=8192): \n f.write(chunk)\n\n os.remove(sys.argv[0] + '.bak')\n \n except Exception as ignored:\n if not os.path.isfile(sys.argv[0]) and os.path.isfile(sys.argv[0] + '.bak'):\n os.rename(sys.argv[0] + '.bak', sys.argv[0])\n\n os.execv(sys.executable, ['python'] + sys.argv)\n\ndef determine_active_callsign(return_artcc_only=False):\n crc_path = ''\n try:\n key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, 'SOFTWARE\\\\CRC')\n crc_path, value_type = winreg.QueryValueEx(key, 'Install_Dir')\n except FileNotFoundError as ignored:\n crc_path = os.path.join(os.getenv('LOCALAPPDATA'), 'CRC')\n \n crc_profiles = os.path.join(crc_path, 'Profiles')\n crc_name = ''\n crc_data = {}\n crc_lastused_time = '2020-01-01T08:00:00'\n try:\n for filename in os.listdir(crc_profiles):\n if filename.endswith('.json'): \n file_path = os.path.join(crc_profiles, filename)\n with open(file_path, 'r') as f:\n data = json.load(f)\n dt1 = datetime.strptime(crc_lastused_time, '%Y-%m-%dT%H:%M:%S')\n if 'LastUsedAt' not in data or data['LastUsedAt'] == None:\n continue\n dt2 = datetime.strptime(data['LastUsedAt'].split('.')[0].replace('Z',''), '%Y-%m-%dT%H:%M:%S')\n if dt2 > dt1:\n crc_lastused_time = data['LastUsedAt'].split('.')[0].replace('Z','')\n crc_name = data['Name']\n crc_data = data\n except Exception as ignored:\n return None\n\n if return_artcc_only:\n return crc_data['ArtccId']\n\n try:\n lastPos = crc_data['LastUsedPositionId']\n crc_ARTCC = os.path.join(crc_path, 'ARTCCs') + os.sep + crc_data['ArtccId'] + '.json'\n with open(crc_ARTCC, 'r') as f:\n data = json.load(f)\n\n pos = determine_position_from_id(data['facility']['positions'], lastPos)\n if pos is 
not None:\n return pos\n\n for child1 in data['facility']['childFacilities']:\n pos = determine_position_from_id(child1['positions'], lastPos)\n if pos is not None:\n return pos\n \n for child2 in child1['childFacilities']:\n pos = determine_position_from_id(child2['positions'], lastPos)\n if pos is not None:\n return pos\n \n except Exception as ignored:\n pass\n\n return None\n\nasync def auto_select_facility():\n artcc = determine_active_callsign(return_artcc_only=True)\n if artcc is None:\n return\n \n if not AUTO_SELECT_FACILITY and not artcc in ['ZOA', 'ZMA', 'ZDC']:\n return\n\n # Determine if CRC is open and a profile is loaded\n crc_found = False\n for win in [w.title for w in pygetwindow.getAllWindows()]:\n if 'CRC : 1' in win:\n crc_found = True\n\n if not crc_found:\n return\n \n try:\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n # Determine if any vATIS profile matches ARTCC\n await websocket.send(json.dumps({'type': 'getProfiles'}))\n m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))['profiles']\n \n match_id = ''\n for p in m:\n if artcc in p['name']:\n match_id = p['id']\n \n if len(match_id) < 0:\n return\n \n # Determine if current profile is already the desired profile\n await websocket.send(json.dumps({'type': 'getActiveProfile'}))\n m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))\n \n if 'id' in m:\n # Do not select a profile if current profile is already selected\n if m['id'] == match_id:\n return\n \n # Load new profile\n await websocket.send(json.dumps({'type': 'loadProfile', 'value': {'id': match_id}}))\n await asyncio.sleep(1)\n \n except Exception as ignored:\n pass\n\nasync def try_websocket(shutdown=RUN_UPDATE, limit=SHUTDOWN_LIMIT, initial=False):\n t0 = time.time()\n for i in range(0, 250):\n if initial and i < 30:\n await auto_select_facility()\n \n t1 = time.time()\n if t1 - t0 > limit:\n if shutdown:\n sys.exit()\n return\n try:\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n await websocket.send(json.dumps({'type': 'getStations'}))\n try:\n m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=1))\n if time.time() - t0 > 5:\n await asyncio.sleep(1)\n if m['type'] != 'stations':\n await asyncio.sleep(0.5)\n continue\n return\n except Exception as ignored:\n pass\n except Exception as ignored:\n dt = time.time() - t1\n if dt < 1:\n await asyncio.sleep(1 - dt)\n pass\n\nasync def get_datis_stations(initial=False):\n await try_websocket(initial=initial)\n \n data = {}\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n await websocket.send(json.dumps({'type': 'getStations'}))\n m = json.loads(await websocket.recv())\n \n not_stations = False\n while m['type'] != 'stations':\n not_stations = True\n await asyncio.sleep(0.1)\n await websocket.send(json.dumps({'type': 'getStations'}))\n m = json.loads(await websocket.recv())\n \n if not_stations:\n await asyncio.sleep(0.5)\n await websocket.send(json.dumps({'type': 'getStations'}))\n m = json.loads(await websocket.recv())\n\n for s in m['stations']:\n name = s['name']\n\n if s['atisType'] == 'Arrival':\n name += '_A'\n elif s['atisType'] == 'Departure':\n name += '_D'\n \n if 'D-ATIS' in s['presets']:\n data[name] = s['id']\n \n return data\n\ndef get_atis_replacements(stations):\n stations = list(set(value.replace('_A', '').replace('_D', '') for value in stations))\n\n config = {}\n try:\n url = 
'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoadConfig.json'\n config = json.loads(requests.get(url).text)\n except Exception as ignored:\n pass\n\n if 'replacements' not in config:\n return {}\n\n replacements = {}\n for a in config['replacements']:\n if a in stations:\n replacements[a] = config['replacements'][a]\n\n return replacements\n\ndef get_user_config():\n config = {}\n config_path = os.path.join(os.path.dirname(sys.argv[0]), 'vATISLoadUserConfig.json')\n try:\n with open(config_path, 'r') as f:\n config = json.load(f)\n except Exception as ignored:\n pass\n return config\n\ndef apply_user_modifications(airport, conditions, notams, user_config):\n if airport not in user_config:\n return conditions, notams\n\n cfg = user_config[airport]\n\n # Apply conditions modifications\n if 'conditions' in cfg:\n for text in cfg['conditions'].get('remove', []):\n conditions = conditions.replace(text, '')\n append_text = cfg['conditions'].get('append', '')\n if append_text:\n if conditions and not conditions.endswith(' '):\n conditions += ' '\n conditions += append_text\n\n # Apply notams modifications\n if 'notams' in cfg:\n for text in cfg['notams'].get('remove', []):\n notams = notams.replace(text, '')\n append_text = cfg['notams'].get('append', '')\n if append_text:\n if notams and not notams.endswith(' '):\n notams += ' '\n notams += append_text\n\n # Clean up extra spaces\n conditions = re.sub(r'\\s+', ' ', conditions).strip()\n notams = re.sub(r'\\s+', ' ', notams).strip()\n\n return conditions, notams\n\nasync def get_contractions(station):\n try:\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n if '_D' in station:\n payload = {'station': station[0:4], 'atisType': 'Departure'}\n elif '_A' in station:\n payload = {'station': station[0:4], 'atisType': 'Arrival'}\n else:\n payload = {'station': station[0:4]}\n await websocket.send(json.dumps({'type': 'getContractions', 'value': payload}))\n m = json.loads(await asyncio.wait_for(websocket.recv(), timeout=0.25))\n \n\n c = {}\n contractions = m['stations'][0]['contractions']\n for cont in contractions:\n c[contractions[cont]['text']] = '@' + cont\n \n c = dict(sorted(c.items(), key=lambda item: len(item[0])))\n c = {key: c[key] for key in reversed(c)}\n\n return c\n except asyncio.TimeoutError:\n pass\n\n return {}\n\ndef get_datis_data():\n data = {}\n try:\n url = 'https://atis.info/api/all'\n data = json.loads(requests.get(url, timeout=2.5).text)\n except Exception as ignored:\n os.system('cmd /K \\\"cls & echo Unable to fetch D-ATIS data. & timeout 5 & exit\\\"')\n \n return data\n\nasync def get_datis(station, atis_data, replacements):\n atis_type = 'combined'\n if '_A' in station:\n atis_type = 'arr'\n elif '_D' in station:\n atis_type = 'dep'\n\n atis_info = ['D-ATIS NOT AVBL.', '']\n if 'error' in atis_data:\n return atis_info\n\n datis = ''\n for a in atis_data:\n if a['airport'] != station[0:4] or a['type'] != atis_type:\n continue\n datis = a['datis']\n\n # Ignore D-ATIS more than 1.75 hours old\n try: \n t_updated = datetime.strptime(a['updatedAt'][:26], \"%Y-%m-%dT%H:%M:%S.%f\")\n t_updated = t_updated.replace(tzinfo=timezone.utc)\n t_now = datetime.now(timezone.utc)\n\n if (t_now - t_updated).total_seconds() / 3600 > 1.75:\n return atis_info\n except Exception as ignored:\n pass\n\n if len(datis) == 0:\n return atis_info\n\n # Strip beginning and ending D-ATIS text\n datis = '. '.join(datis.split('. 
')[2:])\n datis = re.sub(' ...ADVS YOU HAVE.*', '', datis)\n datis = datis.replace('NOTICE TO AIR MISSIONS, NOTAMS. ', 'NOTAMS... ') \\\n .replace('NOTICE TO AIR MISSIONS. ', 'NOTAMS... ') \\\n .replace('NOTICE TO AIR MEN. ', 'NOTAMS... ') \\\n .replace('NOTICE TO AIRMEN. ', 'NOTAMS... ') \\\n .replace('NOTAMS. ', 'NOTAMS... ') \\\n .replace('NOTAM. ', 'NOTAMS... ')\n\n # Replace defined replacements\n for r in replacements:\n if '%r' in replacements[r]:\n datis = re.sub(r + '[,.;]{0,2}', replacements[r].replace('%r', ''), datis)\n else:\n datis = re.sub(r + '[,.;]{0,2}', replacements[r], datis)\n datis = re.sub(r'\\s+', ' ', datis).strip()\n\n # Clean up D-ATIS\n datis = datis.replace('...', '/./').replace('..', '.') \\\n .replace('/./', '...').replace(' ', ' ').replace(' . ', '. ') \\\n .replace(', ,', ',').replace(' ; ', '; ').replace(' .,', ' ,') \\\n .replace(' , ', ', ').replace('., ', ', ').replace('&', '&') \\\n .replace(' ;.', '.').replace(' ;,', ',')\n\n # Replace contractions\n contractions = await get_contractions(station)\n for c, v in contractions.items():\n if not c.isdigit():\n datis = re.sub(r'(?= 4:\n break\n \n if s not in disconnected_atises:\n continue\n \n payload = {'type': 'connectAtis', 'value': {'id': i}}\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n await websocket.send(json.dumps(payload))\n\n try:\n m = await asyncio.wait_for(websocket.recv(), timeout=0.1)\n n += 1\n except Exception as ignored:\n pass\n\ndef kill_open_instances():\n prev_instances = {}\n\n for q in psutil.process_iter():\n if 'python' in q.name():\n for parameter in q.cmdline():\n if 'vATISLoad' in parameter and parameter.endswith('.pyw'):\n q_create_time = q.create_time()\n q_create_datetime = datetime.fromtimestamp(q_create_time)\n prev_instances[q.pid] = {'process': q, 'start': q_create_datetime}\n \n prev_instances = dict(sorted(prev_instances.items(), key=lambda item: item[1]['start']))\n \n for i in range(0, len(prev_instances) - 1):\n k = list(prev_instances.keys())[i]\n prev_instances[k]['process'].terminate()\n\ndef open_vATIS():\n # Set 'autoFetchAtisLetter' to True\n config_path = os.getenv('LOCALAPPDATA') + '\\\\org.vatsim.vatis\\\\AppConfig.json'\n try:\n with open(config_path, 'r') as f:\n data = json.load(f)\n if 'autoFetchAtisLetter' in data:\n data['autoFetchAtisLetter'] = True\n with open(config_path, 'w') as f:\n json.dump(data, f, indent=2)\n except Exception as ignored:\n pass\n\n # Check if vATIS is open\n for process in psutil.process_iter(['name']):\n if process.info['name'] == 'vATIS.exe':\n return\n\n exe = os.getenv('LOCALAPPDATA') + '\\\\org.vatsim.vatis\\\\current\\\\vATIS.exe'\n subprocess.Popen(exe);\n\nasync def get_connected_atis_data():\n stations = await get_datis_stations()\n atis_statuses = await get_atis_statuses()\n\n connected_atis_data = {}\n \n for station in [k for k, v in atis_statuses.items() if v == 'Connected']:\n payload = {'type': 'getAtis', 'value': {'id': stations[station]}}\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n await websocket.send(json.dumps(payload))\n\n m = json.loads(await websocket.recv())['value']\n connected_atis_data[station] = [m['airportConditions'], m['notams']]\n\n return connected_atis_data\n\nasync def disconnect_over_connection_limit(delay=True):\n if True:\n time.sleep(5)\n \n stations = await get_datis_stations()\n atis_statuses = await get_atis_statuses()\n connected_atises = [k for k, v in atis_statuses.items() if v == 
'Connected']\n\n if len(connected_atises) <= 4 or SHUTDOWN_LIMIT == 346:\n return\n\n for i in range(4, len(connected_atises)):\n s, i = connected_atises[i], stations[connected_atises[i]]\n payload = {'type': 'disconnectAtis', 'value': {'id': i}}\n async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket:\n await websocket.send(json.dumps(payload))\n\ndef find_deleted_portions(original, modified):\n sequence_matcher = difflib.SequenceMatcher(None, original, modified)\n \n deleted_portions = []\n for tag, i1, i2, j1, j2 in sequence_matcher.get_opcodes():\n if tag == 'delete': \n deleted_portions.append(original[i1:i2])\n \n return deleted_portions\n\ndef compare_atis_data(prev_data, new_data):\n compared_output = {}\n\n for station in prev_data:\n if station not in new_data:\n continue\n \n conditionDiff = find_deleted_portions(prev_data[station][0], new_data[station][0])\n notamDiff = find_deleted_portions(prev_data[station][1], new_data[station][1])\n\n if len(conditionDiff) > 0 or len(notamDiff) > 0:\n compared_output[station] = conditionDiff + notamDiff\n\n return compared_output" }, { "cell_type": "code", @@ -664,4 +117,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} +} \ No newline at end of file diff --git a/vATISLoad.pyw b/vATISLoad.pyw index cec4558..7b87774 100644 --- a/vATISLoad.pyw +++ b/vATISLoad.pyw @@ -5,38 +5,60 @@ DISABLE_AUTOUPDATES = False # Set to True to disable auto-updates SHUTDOWN_LIMIT = 60 * 5 # Time delay to exit script AUTO_SELECT_FACILITY = False # Enable/disable auto-select facility -RUN_UPDATE = True # Set to False for testing +RUN_UPDATE = False # Set to False for testing ##################################################################### -import subprocess, sys, os, time, json, re, uuid, ctypes, asyncio, difflib, winreg, argparse +import subprocess, sys, os, time, json, re, uuid, ctypes, asyncio, difflib, winreg, argparse, logging from datetime import datetime, timezone -import importlib.util as il -if None in [il.find_spec('requests'), il.find_spec('websockets'), il.find_spec('psutil'), - il.find_spec('pygetwindow')]: +# Parse --debug flag early for logging setup +_parser = argparse.ArgumentParser(add_help=False) +_parser.add_argument('--debug', action='store_true') +_early_args, _ = _parser.parse_known_args() + +# Configure logging based on --debug flag +if _early_args.debug: + logging.basicConfig( + level=logging.INFO, + format='%(asctime)s [%(levelname)s] %(message)s', + datefmt='%H:%M:%S' + ) +else: + logging.basicConfig(level=logging.CRITICAL + 1) # Effectively disable logging +log = logging.getLogger('vATISLoad') + +log.info("vATISLoad starting...") - os.system('cmd /K \"cls & echo Updating required libraries for vATISLoad.' + +import importlib.util as il +missing_libs = [] +for lib in ['requests', 'websockets', 'psutil', 'pygetwindow']: + if il.find_spec(lib) is None: + missing_libs.append(lib) + +if len(missing_libs) > 0: + log.info(f"Installing missing libraries: {missing_libs}") + os.system('cmd /K "cls & echo Updating required libraries for vATISLoad.' + ' & echo Please wait a few minutes for libraries to install. & echo.' + ' & echo If this does not work, try using vATISLoad_library_installer.py (see the vATISLoad README).' 
+ - ' & timeout 15 & exit\"') - + ' & timeout 15 & exit"') + subprocess.check_call([sys.executable, '-m', 'pip', 'install', '--upgrade', 'pip']) subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests']); subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'websockets']); subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'psutil']); subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'pygetwindow']); -os.system('cls') - import requests, websockets, psutil, pygetwindow def update_vATISLoad(): + log.info("Checking for updates...") online_file = '' url = 'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoad.pyw' try: online_file = requests.get(url).text.split('\n') - except Exception as ignored: + except Exception as e: + log.error(f"Failed to fetch update: {e}") return up_to_date = True @@ -54,6 +76,7 @@ def update_vATISLoad(): if up_to_date: return + log.info("Update available, downloading...") try: os.rename(sys.argv[0], sys.argv[0] + '.bak') with requests.get(url, stream=True) as r: @@ -63,11 +86,13 @@ def update_vATISLoad(): f.write(chunk) os.remove(sys.argv[0] + '.bak') - - except Exception as ignored: + + except Exception as e: + log.error(f"Update failed: {e}") if not os.path.isfile(sys.argv[0]) and os.path.isfile(sys.argv[0] + '.bak'): os.rename(sys.argv[0] + '.bak', sys.argv[0]) + log.info("Restarting with new version...") os.execv(sys.executable, ['python'] + sys.argv) def determine_active_callsign(return_artcc_only=False): @@ -96,11 +121,12 @@ def determine_active_callsign(return_artcc_only=False): crc_lastused_time = data['LastUsedAt'].split('.')[0].replace('Z','') crc_name = data['Name'] crc_data = data - except Exception as ignored: + except Exception as e: + log.error(f"Error reading CRC profiles: {e}") return None if return_artcc_only: - return crc_data['ArtccId'] + return crc_data.get('ArtccId', None) try: lastPos = crc_data['LastUsedPositionId'] @@ -110,20 +136,23 @@ def determine_active_callsign(return_artcc_only=False): pos = determine_position_from_id(data['facility']['positions'], lastPos) if pos is not None: + log.info(f"Active callsign: {pos[0]}_{pos[1]}") return pos for child1 in data['facility']['childFacilities']: pos = determine_position_from_id(child1['positions'], lastPos) if pos is not None: + log.info(f"Active callsign: {pos[0]}_{pos[1]}") return pos - + for child2 in child1['childFacilities']: pos = determine_position_from_id(child2['positions'], lastPos) if pos is not None: + log.info(f"Active callsign: {pos[0]}_{pos[1]}") return pos - - except Exception as ignored: - pass + + except Exception as e: + log.error(f"Error determining position: {e}") return None @@ -168,10 +197,11 @@ async def auto_select_facility(): return # Load new profile + log.info(f"Auto-selecting profile for {artcc}") await websocket.send(json.dumps({'type': 'loadProfile', 'value': {'id': match_id}})) await asyncio.sleep(1) - - except Exception as ignored: + + except Exception: pass async def try_websocket(shutdown=RUN_UPDATE, limit=SHUTDOWN_LIMIT, initial=False): @@ -181,7 +211,9 @@ async def try_websocket(shutdown=RUN_UPDATE, limit=SHUTDOWN_LIMIT, initial=False await auto_select_facility() t1 = time.time() - if t1 - t0 > limit: + elapsed = t1 - t0 + if elapsed > limit: + log.warning(f"Websocket timeout after {elapsed:.0f}s") if shutdown: sys.exit() return @@ -234,7 +266,8 @@ async def get_datis_stations(initial=False): if 'D-ATIS' in s['presets']: data[name] = s['id'] - + + log.info(f"Found {len(data)} D-ATIS 
stations: {list(data.keys())}") return data def get_atis_replacements(stations): @@ -244,8 +277,8 @@ def get_atis_replacements(stations): try: url = 'https://raw.githubusercontent.com/glott/vATISLoad/refs/heads/main/vATISLoadConfig.json' config = json.loads(requests.get(url).text) - except Exception as ignored: - pass + except Exception as e: + log.error(f"Failed to fetch config: {e}") if 'replacements' not in config: return {} @@ -255,8 +288,55 @@ def get_atis_replacements(stations): if a in stations: replacements[a] = config['replacements'][a] + if len(replacements) > 0: + log.info(f"Loaded replacements for {len(replacements)} airports") return replacements - + +def get_user_config(): + config = {} + config_path = os.path.join(os.path.dirname(sys.argv[0]), 'vATISLoadUserConfig.json') + try: + with open(config_path, 'r') as f: + config = json.load(f) + log.info(f"Loaded user config for {len(config)} airports") + except FileNotFoundError: + pass + except Exception as e: + log.error(f"Error loading user config: {e}") + return config + +def apply_user_modifications(airport, conditions, notams, user_config): + if airport not in user_config: + return conditions, notams + + cfg = user_config[airport] + + # Apply conditions modifications + if 'conditions' in cfg: + for text in cfg['conditions'].get('remove', []): + conditions = conditions.replace(text, '') + append_text = cfg['conditions'].get('append', '') + if append_text: + if conditions and not conditions.endswith(' '): + conditions += ' ' + conditions += append_text + + # Apply notams modifications + if 'notams' in cfg: + for text in cfg['notams'].get('remove', []): + notams = notams.replace(text, '') + append_text = cfg['notams'].get('append', '') + if append_text: + if notams and not notams.endswith(' '): + notams += ' ' + notams += append_text + + # Clean up extra spaces + conditions = re.sub(r'\s+', ' ', conditions).strip() + notams = re.sub(r'\s+', ' ', notams).strip() + + return conditions, notams + async def get_contractions(station): try: async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket: @@ -278,20 +358,25 @@ async def get_contractions(station): c = dict(sorted(c.items(), key=lambda item: len(item[0]))) c = {key: c[key] for key in reversed(c)} - return c + return c except asyncio.TimeoutError: - pass + log.warning(f"Timeout getting contractions for {station}") + except Exception as e: + log.error(f"Error getting contractions for {station}: {e}") return {} def get_datis_data(): + log.info("Fetching D-ATIS data...") data = {} try: url = 'https://atis.info/api/all' - data = json.loads(requests.get(url, timeout=2.5).text) - except Exception as ignored: - os.system('cmd /K \"cls & echo Unable to fetch D-ATIS data. & timeout 5 & exit\"') - + data = json.loads(requests.get(url, timeout=2.5).text) + log.info(f"Fetched D-ATIS for {len(data)} airports") + except Exception as e: + log.error(f"Failed to fetch D-ATIS data: {e}") + os.system('cmd /K "cls & echo Unable to fetch D-ATIS data. 
& timeout 5 & exit"') + return data async def get_datis(station, atis_data, replacements): @@ -303,6 +388,7 @@ async def get_datis(station, atis_data, replacements): atis_info = ['D-ATIS NOT AVBL.', ''] if 'error' in atis_data: + log.warning(f"{station}: D-ATIS data has error") return atis_info datis = '' @@ -317,9 +403,11 @@ async def get_datis(station, atis_data, replacements): t_updated = t_updated.replace(tzinfo=timezone.utc) t_now = datetime.now(timezone.utc) - if (t_now - t_updated).total_seconds() / 3600 > 1.75: + age_hours = (t_now - t_updated).total_seconds() / 3600 + if age_hours > 1.75: + log.warning(f"{station}: D-ATIS too old ({age_hours:.1f}h)") return atis_info - except Exception as ignored: + except Exception: pass if len(datis) == 0: @@ -378,7 +466,13 @@ async def get_atis_statuses(): m = json.loads(await websocket.recv())['value'] data[s] = m['networkConnectionStatus'] - + + connected = [k for k, v in data.items() if v == 'Connected'] + disconnected = [k for k, v in data.items() if v == 'Disconnected'] + other = {k: v for k, v in data.items() if v not in ['Connected', 'Disconnected']} + log.info(f"Connected: {connected}, Disconnected: {disconnected}") + if other: + log.info(f"Other statuses: {other}") return data async def get_num_connections(): @@ -389,16 +483,19 @@ async def get_num_connections(): return n async def configure_atises(connected_only=False, initial=False, temp_rep={}): + log.info(f"Configuring ATISes (connected_only={connected_only})") stations = await get_datis_stations(initial=initial) replacements = get_atis_replacements(stations) atis_data = get_datis_data() + user_config = get_user_config() atis_statuses = await get_atis_statuses() for k, v in temp_rep.items(): for cont, cont_rep in (await get_contractions(k)).items(): temp_rep[k] = [elem.replace(cont_rep, cont) for elem in temp_rep[k]] - + + configured = [] for s, i in stations.items(): if connected_only and atis_statuses[s] != 'Connected': continue @@ -412,14 +509,19 @@ async def configure_atises(connected_only=False, initial=False, temp_rep={}): v = {'id': i, 'preset': 'D-ATIS', 'syncAtisLetter': True} v['airportConditionsFreeText'], v['notamsFreeText'] = await get_datis(s, atis_data, rep) + v['airportConditionsFreeText'], v['notamsFreeText'] = apply_user_modifications( + s[0:4], v['airportConditionsFreeText'], v['notamsFreeText'], user_config) if connected_only and v['airportConditionsFreeText'] == 'D-ATIS NOT AVBL.': continue - + + configured.append(s) payload = {'type': 'configureAtis', 'value': v} async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket: await websocket.send(json.dumps(payload)) + log.info(f"Configured: {configured}") + def determine_position_from_id(positions, position_id): for p in positions: if p['id'] == position_id: @@ -437,6 +539,7 @@ def determine_position_from_id(positions, position_id): return None async def connect_atises(airport_override=None): + log.info(f"Connecting ATISes (override={airport_override})") stations = await get_datis_stations() atis_statuses = await get_atis_statuses() disconnected_atises = [k for k, v in atis_statuses.items() if v == 'Disconnected'] @@ -444,12 +547,26 @@ async def connect_atises(airport_override=None): # If airport override is provided, filter to only those airports if airport_override is not None: + # Normalize overrides to match ICAO codes (handle K prefix for US airports) + normalized_overrides = [] + for a in airport_override: + a_upper = a.upper() + normalized_overrides.append(a_upper) + # Add 
K-prefixed version if not already prefixed + if not a_upper.startswith('K') and len(a_upper) == 3: + normalized_overrides.append('K' + a_upper) + # Add non-K version if K-prefixed + if a_upper.startswith('K') and len(a_upper) == 4: + normalized_overrides.append(a_upper[1:]) + + # Filter disconnected stations to match override stations_temp = {} for da in disconnected_atises: airport_code = da[:4] # Extract airport code (e.g., 'KSFO' from 'KSFO_A') - if airport_code.upper() in [a.upper() for a in airport_override] and da in stations: + if airport_code.upper() in normalized_overrides and da in stations: stations_temp[da] = stations[da] stations = stations_temp + log.info(f"Filtered to airports: {list(stations.keys())}") else: # Original logic: auto-select based on active callsign active_callsign = determine_active_callsign() @@ -464,13 +581,15 @@ async def connect_atises(airport_override=None): stations = stations_temp n = 0 + connected = [] for s, i in stations.items(): if n + n_connected >= 4: + log.warning("Connection limit (4) reached") break - + if s not in disconnected_atises: continue - + payload = {'type': 'connectAtis', 'value': {'id': i}} async with websockets.connect('ws://127.0.0.1:49082/', close_timeout=0.01) as websocket: await websocket.send(json.dumps(payload)) @@ -478,27 +597,63 @@ async def connect_atises(airport_override=None): try: m = await asyncio.wait_for(websocket.recv(), timeout=0.1) n += 1 - except Exception as ignored: + connected.append(s) + except Exception: pass + if connected: + log.info(f"Connected: {connected}") + def kill_open_instances(): prev_instances = {} + current_pid = os.getpid() + current_process = psutil.Process(current_pid) + current_start = current_process.create_time() + + # Get parent PID to avoid killing it + try: + parent_pid = current_process.ppid() + except (psutil.NoSuchProcess, psutil.AccessDenied): + parent_pid = None + + log.info(f"Current process PID: {current_pid}, Parent PID: {parent_pid}") for q in psutil.process_iter(): - if 'python' in q.name(): - for parameter in q.cmdline(): - if 'vATISLoad' in parameter and parameter.endswith('.pyw'): - q_create_time = q.create_time() - q_create_datetime = datetime.fromtimestamp(q_create_time) - prev_instances[q.pid] = {'process': q, 'start': q_create_datetime} - + try: + if 'python' in q.name(): + for parameter in q.cmdline(): + if 'vATISLoad' in parameter and parameter.endswith('.pyw'): + q_create_time = q.create_time() + q_create_datetime = datetime.fromtimestamp(q_create_time) + prev_instances[q.pid] = {'process': q, 'start': q_create_datetime, 'start_ts': q_create_time} + log.info(f"Found vATISLoad instance: PID {q.pid}, started {q_create_datetime}") + except (psutil.NoSuchProcess, psutil.AccessDenied): + continue + prev_instances = dict(sorted(prev_instances.items(), key=lambda item: item[1]['start'])) - + log.info(f"Sorted PIDs (oldest first): {list(prev_instances.keys())}") + for i in range(0, len(prev_instances) - 1): k = list(prev_instances.keys())[i] + # Skip current process + if k == current_pid: + log.warning(f"Skipping termination of current process PID {k}") + continue + # Skip parent process + if k == parent_pid: + log.warning(f"Skipping termination of parent process PID {k}") + continue + # Skip processes started within 1 second of current (likely related to same invocation) + if abs(prev_instances[k]['start_ts'] - current_start) < 1.0: + log.warning(f"Skipping PID {k} - started too close to current process") + continue + log.info(f"Terminating previous instance: PID {k}") 
prev_instances[k]['process'].terminate() + time.sleep(0.5) # Give terminated processes time to exit + def open_vATIS(): + log.info("open_vATIS() called") # Set 'autoFetchAtisLetter' to True config_path = os.getenv('LOCALAPPDATA') + '\\org.vatsim.vatis\\AppConfig.json' try: @@ -508,16 +663,29 @@ def open_vATIS(): data['autoFetchAtisLetter'] = True with open(config_path, 'w') as f: json.dump(data, f, indent=2) - except Exception as ignored: - pass + log.info("vATIS config updated") + except Exception as e: + log.warning(f"Could not update vATIS config: {e}") # Check if vATIS is open - for process in psutil.process_iter(['name']): - if process.info['name'] == 'vATIS.exe': - return + log.info("Checking if vATIS is running...") + try: + for process in psutil.process_iter(['name']): + try: + if process.info['name'] == 'vATIS.exe': + log.info("vATIS already running") + return + except (psutil.NoSuchProcess, psutil.AccessDenied): + continue + except Exception as e: + log.error(f"Error checking processes: {e}") exe = os.getenv('LOCALAPPDATA') + '\\org.vatsim.vatis\\current\\vATIS.exe' - subprocess.Popen(exe); + log.info(f"Starting vATIS from: {exe}") + try: + subprocess.Popen(exe) + except Exception as e: + log.error(f"Failed to start vATIS: {e}") async def get_connected_atis_data(): stations = await get_datis_stations() @@ -546,6 +714,7 @@ async def disconnect_over_connection_limit(delay=True): if len(connected_atises) <= 4 or SHUTDOWN_LIMIT == 346: return + log.warning(f"Over connection limit ({len(connected_atises)} > 4), disconnecting excess") for i in range(4, len(connected_atises)): s, i = connected_atises[i], stations[connected_atises[i]] payload = {'type': 'disconnectAtis', 'value': {'id': i}} @@ -574,6 +743,7 @@ def compare_atis_data(prev_data, new_data): if len(conditionDiff) > 0 or len(notamDiff) > 0: compared_output[station] = conditionDiff + notamDiff + log.info(f"Changes detected for {station}") return compared_output @@ -582,12 +752,18 @@ async def main(): parser = argparse.ArgumentParser(description='vATIS Auto-Loader') parser.add_argument('--airports', nargs='+', metavar='ICAO', help='Optional list of airport ICAO codes to activate (e.g., KSFO KOAK)') + parser.add_argument('--debug', action='store_true', + help='Enable debug logging output') args = parser.parse_args() + if args.airports: + log.info(f"Airport override: {args.airports}") + if RUN_UPDATE: update_vATISLoad() kill_open_instances() + log.info("Previous instances handled, proceeding...") open_vATIS() await configure_atises(initial=True) await connect_atises(airport_override=args.airports) @@ -596,17 +772,19 @@ async def main(): await disconnect_over_connection_limit() k, temp_rep = 0, [] + log.info("Entering update loop (5 min intervals)...") while not DISABLE_AUTOUPDATES: # Sleep for 5 minutes - for i in range(0, 5): + for i in range(0, 5): await try_websocket() time.sleep(60) - + # Capture temporary replacements after first 5 minutes if k == 0: new_data = await get_connected_atis_data() temp_rep = compare_atis_data(prev_data, new_data) - + + log.info(f"Update cycle {k}") await configure_atises(connected_only=True, temp_rep=temp_rep) k += 1
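
For anyone trying out the per-airport user config added in this change (the new get_user_config() / apply_user_modifications() pair, plus the vATISLoadUserConfig.json entry in .gitignore), here is a minimal sketch of the file. Only the file name, its location next to vATISLoad.pyw, and the conditions/notams -> remove/append schema are taken from the diff; the KSFO entry and the strings inside it are made-up placeholders, not real procedures.

# Hypothetical vATISLoadUserConfig.json contents, written next to the script.
# Schema (from the diff):
#   { "<ICAO>": { "conditions": {"remove": [...], "append": "..."},
#                 "notams":     {"remove": [...], "append": "..."} } }
import json, os, sys

sample_config = {
    "KSFO": {                                        # placeholder airport
        "conditions": {
            "remove": ["SIMUL APCHS IN USE."],       # stripped from airportConditionsFreeText
            "append": "CTN BIRDS VICINITY ARPT."     # appended after the remaining text
        },
        "notams": {
            "remove": [],
            "append": "LLWS ADVISORIES IN EFFECT."   # appended to notamsFreeText
        }
    }
}

# get_user_config() reads the file from the directory containing vATISLoad.pyw
# and returns {} when it is absent, so dropping the file in place is enough for
# the next configure_atises() pass to pick it up.
config_path = os.path.join(os.path.dirname(sys.argv[0]), 'vATISLoadUserConfig.json')
with open(config_path, 'w') as f:
    json.dump(sample_config, f, indent=2)

The "remove" entries are deleted with a plain str.replace and the "append" text is added at the end before whitespace is re-collapsed, so remove strings need to match the fetched D-ATIS text exactly.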
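
A short usage note on the two command-line flags this change introduces: --debug enables the timestamped logging output (without it, the log level is set above CRITICAL and logging is effectively silenced), and --airports restricts which disconnected D-ATIS stations get connected. The override accepts 3- or 4-letter codes because it is normalized with and without the leading K, so an invocation like "python vATISLoad.pyw --airports SFO KOAK --debug" should behave the same as spelling out KSFO and KOAK.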