|
|
|
@@ -5,59 +5,48 @@ import logging
|
|
|
|
|
import numpy as np
|
|
|
|
|
from datetime import datetime, timedelta
|
|
|
|
|
from dotenv import load_dotenv
|
|
|
|
|
import requests
|
|
|
|
|
from collections import defaultdict
|
|
|
|
|
import ipaddress
|
|
|
|
|
import csv
|
|
|
|
|
import sqlite3
|
|
|
|
|
from functools import wraps
|
|
|
|
|
from contextlib import contextmanager
|
|
|
|
|
import time
|
|
|
|
|
import threading
|
|
|
|
|
|
|
|
|
|
# Flask application setup.
app = Flask(__name__, static_folder='static')

# Session-signing key; 'dev' is only a fallback for local development.
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'dev')

# Comma-separated CIDR ranges that are allowed to reach the app.
app.config['ALLOWED_IP_RANGES'] = os.getenv('ALLOWED_IP_RANGES', '192.168.0.0/16,10.0.0.0/8').split(',')

# NOTE(review): VERSION was assigned twice ('1.2.16', then '1.2.17') — a merge
# artifact; only the later value ever took effect, so only it is kept.
app.config['VERSION'] = '1.2.17'

# SQLite database location and connection settings.
app.config['DATABASE'] = 'data/customers.db'
app.config['DATABASE_TIMEOUT'] = 20
app.config['DATABASE_POOL_SIZE'] = 5
|
|
|
|
# Configure root logging once at import time; the module-level logger is used
# by every function in this file.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
# Path to the SQLite database file.
# NOTE(review): duplicates app.config['DATABASE'] — keep the two in sync.
DB_FILE = 'data/customers.db'

# Load environment variables from a local .env file.
load_dotenv()

# Static login password taken from the .env file.
STATIC_PASSWORD = os.getenv('LOGIN_PASSWORD', 'default-password')
|
|
|
|
|
|
|
|
|
|
def isIPInSubnet(ip, subnet):
    """Check whether an IP address lies within a subnet.

    Args:
        ip: Address as a string, e.g. '192.168.1.10'.
        subnet: Network in CIDR notation, e.g. '192.168.0.0/16'.

    Returns:
        True if the address is inside the subnet, False otherwise —
        including when either argument is malformed.
    """
    try:
        # The previous hand-rolled bit arithmetic accepted out-of-range
        # octets and prefixes; the stdlib ipaddress module validates both.
        # strict=False tolerates networks with host bits set (e.g. 10.1.2.3/8).
        return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet, strict=False)
    except (ValueError, TypeError) as e:
        logger.error(f"Fehler bei der IP-Überprüfung: {str(e)}")
        return False
|
|
|
|
|
# Thread-local storage for database connections (sqlite3 connections must not
# be shared across threads).
thread_local = threading.local()
|
|
|
|
|
|
|
|
|
|
def get_db_connection():
    """Create a new database connection with a timeout.

    Returns a fresh sqlite3 connection to DB_FILE; rows are returned as
    sqlite3.Row so columns can be accessed by name.

    NOTE(review): the original body contained a second, thread-local
    implementation AFTER the first `return` — unreachable merge residue —
    which has been removed. Only this variant ever executed.
    """
    conn = sqlite3.connect(DB_FILE, timeout=20)
    conn.row_factory = sqlite3.Row
    return conn
|
|
|
|
|
|
|
|
|
|
@contextmanager
def get_db():
    """Context manager for database connections.

    Yields a connection, commits on successful completion of the `with`
    body, rolls back and re-raises on error, and always closes the
    connection so it is not leaked.
    """
    conn = get_db_connection()
    try:
        yield conn
    except Exception:
        conn.rollback()
        raise
    else:
        # The original committed in `finally`, i.e. even right after a
        # rollback on the error path; commit only when the body succeeded.
        conn.commit()
    finally:
        # get_db_connection() creates a fresh connection per call, so it
        # must be closed here or it leaks. close() on an already-closed
        # connection is a no-op, so callers that close early are safe.
        conn.close()
|
|
|
|
|
|
|
|
|
|
def init_db():
|
|
|
|
|
"""Initialisiert die SQLite-Datenbank mit der notwendigen Tabelle."""
|
|
|
|
|
conn = get_db_connection()
|
|
|
|
|
with get_db() as conn:
|
|
|
|
|
c = conn.cursor()
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
@@ -84,36 +73,45 @@ def init_db():
|
|
|
|
|
)
|
|
|
|
|
''')
|
|
|
|
|
|
|
|
|
|
# Erstelle Indizes für alle Suchfelder
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_nummer ON customers(nummer)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_name ON customers(name)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_strasse ON customers(strasse)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_plz ON customers(plz)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_ort ON customers(ort)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_telefon ON customers(telefon)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_mobil ON customers(mobil)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_email ON customers(email)')
|
|
|
|
|
# Optimierte Indizes für die häufigsten Suchanfragen
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_name_ort ON customers(name, ort)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_fachrichtung ON customers(fachrichtung)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_tag ON customers(tag)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_handy ON customers(handy)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_tele_firma ON customers(tele_firma)')
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_plz ON customers(plz)')
|
|
|
|
|
|
|
|
|
|
# Erstelle einen zusammengesetzten Index für die häufigste Suchkombination
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_name_ort ON customers(name, ort)')
|
|
|
|
|
# Zusammengesetzter Index für die häufigste Suchkombination
|
|
|
|
|
c.execute('CREATE INDEX IF NOT EXISTS idx_customers_search ON customers(name, ort, fachrichtung, tag)')
|
|
|
|
|
|
|
|
|
|
conn.commit()
|
|
|
|
|
logger.info('Datenbank initialisiert')
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f'Fehler bei der Datenbankinitialisierung: {str(e)}')
|
|
|
|
|
raise
|
|
|
|
|
finally:
|
|
|
|
|
conn.close()
|
|
|
|
|
|
|
|
|
|
def isIPInSubnet(ip, subnet):
    # NOTE(review): exact duplicate of the isIPInSubnet defined earlier in this
    # file (merge artifact); being defined later, this copy is the one bound at
    # import time. One of the two should be removed.
    """Check whether an IPv4 address lies within a subnet given in CIDR notation."""
    try:
        # Split the subnet into its network address and prefix length.
        subnet_ip, bits = subnet.split('/')
        ip_parts = [int(x) for x in ip.split('.')]
        subnet_parts = [int(x) for x in subnet_ip.split('.')]

        # Convert both dotted quads into 32-bit integers.
        ip_num = (ip_parts[0] << 24) | (ip_parts[1] << 16) | (ip_parts[2] << 8) | ip_parts[3]
        subnet_num = (subnet_parts[0] << 24) | (subnet_parts[1] << 16) | (subnet_parts[2] << 8) | subnet_parts[3]

        # Build the subnet mask from the prefix length.
        mask = ~((1 << (32 - int(bits))) - 1)

        # The address is in the subnet iff the masked network parts match.
        return (ip_num & mask) == (subnet_num & mask)
    except Exception as e:
        logger.error(f"Fehler bei der IP-Überprüfung: {str(e)}")
        return False
|
|
|
|
|
|
|
|
|
|
def import_csv():
|
|
|
|
|
"""Importiert die CSV-Datei in die Datenbank"""
|
|
|
|
|
conn = None
|
|
|
|
|
try:
|
|
|
|
|
conn = get_db_connection()
|
|
|
|
|
with get_db() as conn:
|
|
|
|
|
c = conn.cursor()
|
|
|
|
|
|
|
|
|
|
# Lösche bestehende Daten
|
|
|
|
@@ -126,18 +124,24 @@ def import_csv():
|
|
|
|
|
df.columns = df.columns.str.strip().str.replace('"', '')
|
|
|
|
|
df = df.apply(lambda x: x.str.strip().str.replace('"', '') if x.dtype == "object" else x)
|
|
|
|
|
|
|
|
|
|
for _, row in df.iterrows():
|
|
|
|
|
c.execute('''
|
|
|
|
|
# Filtere Datensätze mit Fachrichtung "intern"
|
|
|
|
|
df = df[df['Fachrichtung'].str.lower() != 'intern']
|
|
|
|
|
|
|
|
|
|
# Bereite die Daten für den Batch-Insert vor
|
|
|
|
|
data = [(
|
|
|
|
|
row['VorNachname'], row['Nummer'], row['Strasse'], row['PLZ'], row['Ort'],
|
|
|
|
|
row['Tel'], row['Tel'], row['mail'], row['Fachrichtung'], 'medisoft',
|
|
|
|
|
row['Handy'], row['Tele Firma'], row['Kontakt1'], row['Kontakt2'], row['Kontakt3']
|
|
|
|
|
) for _, row in df.iterrows()]
|
|
|
|
|
|
|
|
|
|
# Führe Batch-Insert durch
|
|
|
|
|
c.executemany('''
|
|
|
|
|
INSERT INTO customers (
|
|
|
|
|
name, nummer, strasse, plz, ort, telefon, mobil, email,
|
|
|
|
|
fachrichtung, tag, handy, tele_firma, kontakt1, kontakt2, kontakt3
|
|
|
|
|
)
|
|
|
|
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
|
|
|
''', (
|
|
|
|
|
row['VorNachname'], row['Nummer'], row['Strasse'], row['PLZ'], row['Ort'],
|
|
|
|
|
row['Tel'], row['Tel'], row['mail'], row['Fachrichtung'], 'medisoft',
|
|
|
|
|
row['Handy'], row['Tele Firma'], row['Kontakt1'], row['Kontakt2'], row['Kontakt3']
|
|
|
|
|
))
|
|
|
|
|
''', data)
|
|
|
|
|
else:
|
|
|
|
|
logger.warning("MEDISOFT CSV-Datei nicht gefunden")
|
|
|
|
|
|
|
|
|
@@ -148,33 +152,31 @@ def import_csv():
|
|
|
|
|
df_snk.columns = df_snk.columns.str.strip().str.replace('"', '')
|
|
|
|
|
df_snk = df_snk.apply(lambda x: x.str.strip().str.replace('"', '') if x.dtype == "object" else x)
|
|
|
|
|
|
|
|
|
|
for _, row in df_snk.iterrows():
|
|
|
|
|
c.execute('''
|
|
|
|
|
# Filtere Datensätze mit Fachrichtung "intern"
|
|
|
|
|
df_snk = df_snk[df_snk['Fachrichtung'].str.lower() != 'intern']
|
|
|
|
|
|
|
|
|
|
# Bereite die Daten für den Batch-Insert vor
|
|
|
|
|
data = [(
|
|
|
|
|
row['VorNachname'], row['Nummer'], row['Strasse'], row['PLZ'], row['Ort'],
|
|
|
|
|
row['Tel'], row['Tel'], row['mail'], row['Fachrichtung'], 'mediconsult',
|
|
|
|
|
row['Handy'], row['Tele Firma'], row['Kontakt1'], row['Kontakt2'], row['Kontakt3']
|
|
|
|
|
) for _, row in df_snk.iterrows()]
|
|
|
|
|
|
|
|
|
|
# Führe Batch-Insert durch
|
|
|
|
|
c.executemany('''
|
|
|
|
|
INSERT INTO customers (
|
|
|
|
|
name, nummer, strasse, plz, ort, telefon, mobil, email,
|
|
|
|
|
fachrichtung, tag, handy, tele_firma, kontakt1, kontakt2, kontakt3
|
|
|
|
|
)
|
|
|
|
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
|
|
|
''', (
|
|
|
|
|
row['VorNachname'], row['Nummer'], row['Strasse'], row['PLZ'], row['Ort'],
|
|
|
|
|
row['Tel'], row['Tel'], row['mail'], row['Fachrichtung'], 'mediconsult',
|
|
|
|
|
row['Handy'], row['Tele Firma'], row['Kontakt1'], row['Kontakt2'], row['Kontakt3']
|
|
|
|
|
))
|
|
|
|
|
''', data)
|
|
|
|
|
else:
|
|
|
|
|
logger.warning("MEDICONSULT CSV-Datei nicht gefunden")
|
|
|
|
|
|
|
|
|
|
conn.commit()
|
|
|
|
|
logger.info("CSV-Daten erfolgreich in die Datenbank importiert")
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Fehler beim Importieren der CSV-Datei: {str(e)}")
|
|
|
|
|
raise
|
|
|
|
|
finally:
|
|
|
|
|
if conn:
|
|
|
|
|
conn.close()
|
|
|
|
|
|
|
|
|
|
def clean_dataframe(df):
    """Replace NaN entries with None so the frame serializes cleanly to JSON."""
    sanitized = df.replace({np.nan: None})
    return sanitized
|
|
|
|
|
|
|
|
|
|
@app.route('/login', methods=['GET', 'POST'])
|
|
|
|
|
def login():
|
|
|
|
@@ -233,7 +235,7 @@ def search():
|
|
|
|
|
fachrichtung = request.args.get('fachrichtung', '')
|
|
|
|
|
tag = request.args.get('tag', 'medisoft')
|
|
|
|
|
|
|
|
|
|
conn = get_db_connection()
|
|
|
|
|
with get_db() as conn:
|
|
|
|
|
c = conn.cursor()
|
|
|
|
|
|
|
|
|
|
# Baue die SQL-Abfrage
|
|
|
|
@@ -314,9 +316,8 @@ def search():
|
|
|
|
|
sql_query += " ORDER BY name LIMIT 100"
|
|
|
|
|
|
|
|
|
|
# Führe die Abfrage aus
|
|
|
|
|
cursor = conn.cursor()
|
|
|
|
|
cursor.execute(sql_query, params)
|
|
|
|
|
results = cursor.fetchall()
|
|
|
|
|
c.execute(sql_query, params)
|
|
|
|
|
results = c.fetchall()
|
|
|
|
|
|
|
|
|
|
formatted_results = []
|
|
|
|
|
for row in results:
|
|
|
|
@@ -339,7 +340,6 @@ def search():
|
|
|
|
|
}
|
|
|
|
|
formatted_results.append(customer)
|
|
|
|
|
|
|
|
|
|
conn.close()
|
|
|
|
|
return jsonify(formatted_results)
|
|
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
@@ -350,7 +350,7 @@ def search():
|
|
|
|
|
def get_fachrichtungen():
|
|
|
|
|
try:
|
|
|
|
|
search_term = request.args.get('q', '').lower()
|
|
|
|
|
conn = get_db_connection()
|
|
|
|
|
with get_db() as conn:
|
|
|
|
|
c = conn.cursor()
|
|
|
|
|
|
|
|
|
|
# Hole alle eindeutigen Fachrichtungen, die mit dem Suchbegriff übereinstimmen
|
|
|
|
@@ -365,8 +365,6 @@ def get_fachrichtungen():
|
|
|
|
|
''', (f'%{search_term}%',))
|
|
|
|
|
|
|
|
|
|
fachrichtungen = [row[0] for row in c.fetchall()]
|
|
|
|
|
conn.close()
|
|
|
|
|
|
|
|
|
|
return jsonify(fachrichtungen)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Fehler beim Abrufen der Fachrichtungen: {str(e)}")
|
|
|
|
@@ -376,7 +374,7 @@ def get_fachrichtungen():
|
|
|
|
|
def get_orte():
|
|
|
|
|
try:
|
|
|
|
|
search_term = request.args.get('q', '').lower()
|
|
|
|
|
conn = get_db_connection()
|
|
|
|
|
with get_db() as conn:
|
|
|
|
|
c = conn.cursor()
|
|
|
|
|
|
|
|
|
|
# Hole alle eindeutigen Orte, die mit dem Suchbegriff übereinstimmen
|
|
|
|
@@ -391,8 +389,6 @@ def get_orte():
|
|
|
|
|
''', (f'%{search_term}%',))
|
|
|
|
|
|
|
|
|
|
orte = [row[0] for row in c.fetchall()]
|
|
|
|
|
conn.close()
|
|
|
|
|
|
|
|
|
|
return jsonify(orte)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Fehler beim Abrufen der Orte: {str(e)}")
|
|
|
|
@@ -406,10 +402,10 @@ def init_app(app):
|
|
|
|
|
os.makedirs('data', exist_ok=True)
|
|
|
|
|
|
|
|
|
|
# Lösche die alte Datenbank, falls sie existiert
|
|
|
|
|
if os.path.exists(DB_FILE):
|
|
|
|
|
if os.path.exists(app.config['DATABASE']):
|
|
|
|
|
try:
|
|
|
|
|
os.remove(DB_FILE)
|
|
|
|
|
logger.info(f"Alte Datenbank {DB_FILE} wurde gelöscht")
|
|
|
|
|
os.remove(app.config['DATABASE'])
|
|
|
|
|
logger.info(f"Alte Datenbank {app.config['DATABASE']} wurde gelöscht")
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Fehler beim Löschen der alten Datenbank: {str(e)}")
|
|
|
|
|
|
|
|
|
|