Initial commit
network_utils.py · 205 lines · Executable file
@@ -0,0 +1,205 @@
import subprocess
import asyncio
import aiohttp
from aiohttp import BasicAuth
import aiofiles
import os
from datetime import datetime


def log_error(message, log_file=None):
    """Writes errors to the console and, optionally, to a log file."""
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    log_message = f"[{timestamp}] {message}"
    print(log_message)
    if log_file:
        try:
            with open(log_file, 'a', encoding='utf-8') as f:
                f.write(log_message + '\n')
        except Exception:
            pass  # Ignore logging errors


async def upload_single_file_with_retry(session, path, url, auth, max_retries=3, retry_delay=2):
    """Uploads a single file with a retry mechanism."""
    for attempt in range(max_retries):
        try:
            if not os.path.exists(path):
                return False, f"File not found: {path}"

            file_size = os.path.getsize(path)
            if file_size == 0:
                return False, f"File is empty: {path}"

            # Warn about very large files (>100MB)
            if file_size > 100 * 1024 * 1024:
                log_error(f"Warning: very large file ({file_size / 1024 / 1024:.1f}MB): {os.path.basename(path)}")

            async with aiofiles.open(path, 'rb') as f:
                content = await f.read()

            # Timeout per file: 2 minutes
            timeout = aiohttp.ClientTimeout(total=120)
            async with session.post(url, data=content, auth=auth, timeout=timeout) as response:
                if response.status == 200:
                    return True, f"Uploaded successfully: {os.path.basename(path)}"
                else:
                    error_text = await response.text()
                    error_msg = f"Status code {response.status} for {os.path.basename(path)}"
                    if error_text:
                        error_msg += f" - Server: {error_text[:200]}"

                    # Do not retry on 4xx (client) errors
                    if 400 <= response.status < 500:
                        return False, error_msg

                    # Retry on all other errors
                    if attempt < max_retries - 1:
                        log_error(f"Attempt {attempt + 1}/{max_retries} failed: {error_msg}. Retrying in {retry_delay}s...")
                        await asyncio.sleep(retry_delay)
                        continue
                    else:
                        return False, error_msg

        except asyncio.TimeoutError:
            error_msg = f"Timeout while uploading {os.path.basename(path)}"
            if attempt < max_retries - 1:
                log_error(f"Attempt {attempt + 1}/{max_retries} failed: {error_msg}. Retrying in {retry_delay}s...")
                await asyncio.sleep(retry_delay)
                continue
            else:
                return False, error_msg

        except aiohttp.ClientError as e:
            error_msg = f"Network error: {os.path.basename(path)} - {str(e)}"
            if attempt < max_retries - 1:
                log_error(f"Attempt {attempt + 1}/{max_retries} failed: {error_msg}. Retrying in {retry_delay}s...")
                await asyncio.sleep(retry_delay)
                continue
            else:
                return False, error_msg

        except OSError as e:
            return False, f"File error: {os.path.basename(path)} - {str(e)}"

        except Exception as e:
            error_msg = f"Unexpected error: {os.path.basename(path)} - {str(e)}"
            if attempt < max_retries - 1:
                log_error(f"Attempt {attempt + 1}/{max_retries} failed: {error_msg}. Retrying in {retry_delay}s...")
                await asyncio.sleep(retry_delay)
                continue
            else:
                return False, error_msg

    return False, f"Maximum number of retries reached for {os.path.basename(path)}"

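# A minimal, hypothetical example of calling the helper above on its own
# (in this module it is normally driven by upload_multiple_files below);
# the URL and the file path are placeholders:
#
#     async with aiohttp.ClientSession() as session:
#         ok, msg = await upload_single_file_with_retry(
#             session, "/tmp/example.dcm", "http://127.0.0.1:8042/instances", auth=None)
#         print(ok, msg)
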
async def upload_multiple_files(paths, url, username, password, success_callback, progress_callback):
    print("Starting uploads...")
    auth = BasicAuth(login=username, password=password) if username and password else None
    total_files = len(paths)
    print(f"Total number of files: {total_files}")

    # Create the log file
    log_file = os.path.join(os.path.expanduser('~'), 'dicom2pacs_upload.log')
    log_error(f"=== Upload started: {total_files} files ===", log_file)

    if progress_callback:
        progress_callback(0, total_files)

    # Timeout for the whole session: 30 minutes
    timeout = aiohttp.ClientTimeout(total=1800)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        uploaded_count = 0
        failed_files = []

        for idx, path in enumerate(paths, 1):
            log_error(f"Upload {idx}/{total_files}: {os.path.basename(path)}", log_file)
            success, message = await upload_single_file_with_retry(session, path, url, auth)

            if success:
                uploaded_count += 1
                log_error(message, log_file)
                if success_callback is not None:
                    await success_callback(message)
                if progress_callback:
                    progress_callback(1, total_files)
            else:
                failed_files.append((os.path.basename(path), message))
                log_error(f"ERROR: {message}", log_file)
                # Update the progress anyway so the UI does not hang
                if progress_callback:
                    progress_callback(1, total_files)

        # Summary
        summary = f"Upload completed: {uploaded_count}/{total_files} files uploaded successfully"
        log_error(summary, log_file)

        if failed_files:
            log_error(f"Failed files ({len(failed_files)}):", log_file)
            for filename, error in failed_files:
                log_error(f"  - {filename}: {error}", log_file)

        log_error("=== Upload finished ===", log_file)
        print(summary)

        if failed_files:
            print(f"\n⚠️ {len(failed_files)} files could not be uploaded.")
            print(f"Details can be found in the log file: {log_file}")

        return uploaded_count, len(failed_files)

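# A hypothetical sketch of how a caller might drive the uploader; the file
# list, URL and credentials are placeholders. Note that success_callback is
# awaited (so it must be an async callable), while progress_callback is called
# synchronously with an increment and the total:
#
#     async def on_success(message):
#         print(message)
#
#     done = {"count": 0}
#     def on_progress(increment, total):
#         done["count"] += increment
#         print(f"Progress: {done['count']}/{total}")
#
#     uploaded, failed = await upload_multiple_files(
#         ["/tmp/a.dcm", "/tmp/b.dcm"], "http://127.0.0.1:8042/instances",
#         "user", "secret", on_success, on_progress)
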
async def check_server_availability(url):
    """
    Checks whether the server is reachable by pinging the host part of the URL.

    :param url: URL of the server whose availability should be checked
    :return: True if the server is reachable, otherwise False
    """
    if not url:
        print("No URL given")
        return False

    try:
        # Validate the URL
        if "//" not in url:
            print(f"Invalid URL: {url}")
            return False

        # Extract the host part (IP address) from the URL
        base_url = url.split("//")[1].split("/")[0]

        # Strip an optional port from the host part
        ip_address = base_url.split(":")[0]

        if not ip_address:
            print("Could not extract the IP address from the URL")
            return False

        print(f"Trying to ping {ip_address}...")  # Debug print

        # Run the ping command asynchronously via asyncio.create_subprocess_exec.
        # -W is intended as the ping timeout; note that it is interpreted
        # differently per platform (seconds on Linux iputils, milliseconds on
        # macOS/BSD). The asyncio.wait_for below bounds the wait at 10 seconds
        # in either case.
        process = await asyncio.create_subprocess_exec(
            "ping", "-c", "1", "-W", "5000", ip_address,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        try:
            _, _ = await asyncio.wait_for(process.communicate(), timeout=10)
        except asyncio.TimeoutError:
            process.kill()
            await process.wait()
            print(f"Ping timeout for {ip_address}")
            return False

        if process.returncode == 0:
            print(f"Ping successful to {ip_address}")  # Debug print
            return True
        else:
            print(f"Ping failed to {ip_address}")  # Debug print
            return False
    except ValueError as e:
        print(f"Invalid URL format: {e}")
        return False
    except Exception as e:
        print(f"Exception occurred: {str(e)}")  # Debug print
        return False
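A minimal sketch of how a caller might wire these helpers together; the endpoint URL, the credentials and the file paths are placeholders, and both callbacks are simply omitted by passing None:

import asyncio
from network_utils import check_server_availability, upload_multiple_files

async def main():
    url = "http://127.0.0.1:8042/instances"  # placeholder upload endpoint
    if not await check_server_availability(url):
        print("Server not reachable, aborting")
        return
    uploaded, failed = await upload_multiple_files(
        ["/tmp/a.dcm", "/tmp/b.dcm"],  # placeholder file list
        url, "user", "secret", None, None)
    print(f"{uploaded} uploaded, {failed} failed")

asyncio.run(main())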