Remove adodbapi from exporter

master
Sebastian Serfling 2025-10-29 10:19:08 +01:00
parent 5086fd930a
commit ab0d40de67
1 changed file with 34 additions and 33 deletions

View File

@ -1,5 +1,4 @@
import os import os
import adodbapi
import pyodbc import pyodbc
import traceback import traceback
from dotenv import load_dotenv from dotenv import load_dotenv
@ -11,6 +10,7 @@ load_dotenv()
# Logdatei vorbereiten # Logdatei vorbereiten
MAIN_DIR = os.getcwd() MAIN_DIR = os.getcwd()
LOG_DIR = os.path.join(MAIN_DIR, "Logs") LOG_DIR = os.path.join(MAIN_DIR, "Logs")
os.makedirs(LOG_DIR, exist_ok=True)
logfile_name = f"MSSQL_exporter_log_{datetime.datetime.now().strftime('%Y-%m-%d')}.txt" logfile_name = f"MSSQL_exporter_log_{datetime.datetime.now().strftime('%Y-%m-%d')}.txt"
logfile_path = os.path.join(LOG_DIR, logfile_name) logfile_path = os.path.join(LOG_DIR, logfile_name)
@ -20,6 +20,7 @@ process = "SDF_to_MSSQL_Export"
# Globaler MSSQL-Cursor für Logging # Globaler MSSQL-Cursor für Logging
mssql_cursor_global = None mssql_cursor_global = None
def write_log(line): def write_log(line):
timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
log_entry = f"{line}\n------------------------------{timestamp}------------------------------" log_entry = f"{line}\n------------------------------{timestamp}------------------------------"
@ -27,6 +28,7 @@ def write_log(line):
with open(logfile_path, "a", encoding="utf-8") as f: with open(logfile_path, "a", encoding="utf-8") as f:
f.write(log_entry + "\n") f.write(log_entry + "\n")
def write_log_summary(table, inserted, skipped, errors, inserted_rows): def write_log_summary(table, inserted, skipped, errors, inserted_rows):
"""Schreibt eine zusammenfassende Logzeile pro Tabelle in die logs-Tabelle.""" """Schreibt eine zusammenfassende Logzeile pro Tabelle in die logs-Tabelle."""
if not mssql_cursor_global: if not mssql_cursor_global:
@ -53,10 +55,10 @@ SDF_LOCAL_PFAD = os.getenv("SDF_LOCAL_PFAD")
SDF_NAME = os.getenv("SDF_NAME") SDF_NAME = os.getenv("SDF_NAME")
sdf_file = os.path.join(SDF_LOCAL_PFAD, SDF_NAME) sdf_file = os.path.join(SDF_LOCAL_PFAD, SDF_NAME)
# Neuer pyodbc-Treiber für SQL Server Compact Edition 4.0
sdf_connection_str = ( sdf_connection_str = (
"Provider=Microsoft.SQLSERVER.CE.OLEDB.3.5;" r"Driver={SQL Server Compact Edition 4.0};"
f"Data Source={sdf_file};" f"Data Source={sdf_file};"
"Persist Security Info=False;"
) )
mssql_connection_str = os.getenv("MSSQL_CONNECTION_STR") mssql_connection_str = os.getenv("MSSQL_CONNECTION_STR")
@ -66,19 +68,12 @@ if not mssql_connection_str:
mail.send_error_email(error_msg, process) mail.send_error_email(error_msg, process)
exit(1) exit(1)
## Neue Variante über .env # Tabellenliste aus .env oder Fallback
tables_env = os.getenv("TABLES", "") tables_env = os.getenv("TABLES", "")
tables = [t.strip() for t in tables_env.split(",") if t.strip()] tables = [t.strip() for t in tables_env.split(",") if t.strip()]
if not tables: if not tables:
write_log("Hinweis: Keine Tabellen in der .env gefunden (Variable TABLES fehlt oder ist leer).") write_log("Hinweis: Keine Tabellen in der .env gefunden (Variable TABLES fehlt oder ist leer).")
## Alte Variante ohne .env
# tables = [
# "Addressee", "ADR", "AxlesArchive", "CardEncoding", "Carrier", "Coeff", "Conveyer",
# "CustomerLDB", "Fields", "GeneralData", "PDR", "Plate", "Product", "RDR",
# "RDR_LDB_Weighing", "Reason", "Supplier", "Tare", "TxWeighing", "Weighing_LDB"
# ]
def get_pk_columns(mssql_cursor, table_name): def get_pk_columns(mssql_cursor, table_name):
pk_query = """ pk_query = """
@ -93,6 +88,7 @@ def get_pk_columns(mssql_cursor, table_name):
mssql_cursor.execute(pk_query, (table_name,)) mssql_cursor.execute(pk_query, (table_name,))
return [row[0] for row in mssql_cursor.fetchall()] return [row[0] for row in mssql_cursor.fetchall()]
def row_summary(columns, row): def row_summary(columns, row):
summary_parts = [] summary_parts = []
for i, col in enumerate(columns): for i, col in enumerate(columns):
@ -101,13 +97,15 @@ def row_summary(columns, row):
summary_parts.append(f"{col}={repr(val)}") summary_parts.append(f"{col}={repr(val)}")
return ", ".join(summary_parts) return ", ".join(summary_parts)
def main(): def main():
global mssql_cursor_global global mssql_cursor_global
try: try:
# Verbindungen herstellen # --- Verbindungen herstellen ---
sdf_conn = adodbapi.connect(sdf_connection_str) sdf_conn = pyodbc.connect(sdf_connection_str)
sdf_cursor = sdf_conn.cursor() sdf_cursor = sdf_conn.cursor()
mssql_conn = pyodbc.connect(mssql_connection_str) mssql_conn = pyodbc.connect(mssql_connection_str)
mssql_cursor = mssql_conn.cursor() mssql_cursor = mssql_conn.cursor()
mssql_cursor_global = mssql_cursor mssql_cursor_global = mssql_cursor
@ -116,11 +114,19 @@ def main():
for table_name in tables: for table_name in tables:
write_log(f"\nVerarbeite Tabelle: {table_name}") write_log(f"\nVerarbeite Tabelle: {table_name}")
try:
sdf_cursor.execute(f"SELECT * FROM [{table_name}]") sdf_cursor.execute(f"SELECT * FROM [{table_name}]")
columns = [col[0] for col in sdf_cursor.description] columns = [col[0] for col in sdf_cursor.description]
write_log(f"Spalten: {columns}") rows = sdf_cursor.fetchall()
except Exception as e:
write_log(f"Fehler beim Lesen aus SDF-Tabelle {table_name}: {e}")
mail.send_error_email(f"Fehler beim Lesen aus {table_name}: {e}", process)
continue
# PK ermitteln write_log(f"{len(rows)} Datensätze in SDF gefunden. Spalten: {columns}")
# Primary Keys ermitteln
pk_columns = get_pk_columns(mssql_cursor, table_name) pk_columns = get_pk_columns(mssql_cursor, table_name)
if pk_columns: if pk_columns:
write_log(f"Primary Key(s): {pk_columns}") write_log(f"Primary Key(s): {pk_columns}")
@ -133,14 +139,8 @@ def main():
write_log("Kein Primary Key definiert.") write_log("Kein Primary Key definiert.")
pk_indices = [] pk_indices = []
# Daten auslesen
sdf_cursor.execute(f"SELECT * FROM [{table_name}]")
rows = sdf_cursor.fetchall()
write_log(f"{len(rows)} Datensätze in SDF gefunden.")
# Insert vorbereiten
placeholders = ", ".join("?" for _ in columns) placeholders = ", ".join("?" for _ in columns)
insert_sql = f"INSERT INTO {table_name} ({', '.join('[' + col + ']' for col in columns)}) VALUES ({placeholders})" insert_sql = f"INSERT INTO [{table_name}] ({', '.join('[' + col + ']' for col in columns)}) VALUES ({placeholders})"
inserted = 0 inserted = 0
skipped = 0 skipped = 0
@ -152,43 +152,43 @@ def main():
if pk_indices: if pk_indices:
pk_values = tuple(row[i] for i in pk_indices) pk_values = tuple(row[i] for i in pk_indices)
pk_clause = " AND ".join(f"[{col}] = ?" for col in pk_columns) pk_clause = " AND ".join(f"[{col}] = ?" for col in pk_columns)
select_sql = f"SELECT COUNT(*) FROM {table_name} WHERE {pk_clause}" select_sql = f"SELECT COUNT(*) FROM [{table_name}] WHERE {pk_clause}"
mssql_cursor.execute(select_sql, pk_values) mssql_cursor.execute(select_sql, pk_values)
count = mssql_cursor.fetchone()[0] count = mssql_cursor.fetchone()[0]
if count > 0: if count > 0:
skipped += 1 skipped += 1
continue continue
mssql_cursor.execute(insert_sql, *row) mssql_cursor.execute(insert_sql, row)
inserted += 1 inserted += 1
inserted_rows.append(row_summary(columns, row)) inserted_rows.append(row_summary(columns, row))
except Exception as ie: except Exception as ie:
errors += 1 errors += 1
error_details = f"Fehler beim Einfügen in Tabelle {table_name}: {ie}\nRow: {row}" err = f"Fehler beim Einfügen in Tabelle {table_name}: {ie}"
write_log(error_details) write_log(err)
mail.send_error_email(error_details, process) mail.send_error_email(err, process)
mssql_conn.commit() mssql_conn.commit()
write_log(f"{inserted} eingefügt, {skipped} übersprungen, {errors} Fehler.") write_log(f"{inserted} eingefügt, {skipped} übersprungen, {errors} Fehler.")
write_log_summary(table_name, inserted, skipped, errors, inserted_rows) write_log_summary(table_name, inserted, skipped, errors, inserted_rows)
report_lines.append(f"Tabelle {table_name}: {inserted} eingefügt, {skipped} übersprungen, {errors} Fehler.") report_lines.append(f"Tabelle {table_name}: {inserted} eingefügt, {skipped} übersprungen, {errors} Fehler.")
# Abschlussbericht # --- Abschlussbericht ---
report_text = "\n".join(report_lines) report_text = "\n".join(report_lines)
write_log("Export-Zusammenfassung:\n" + report_text) write_log("Export-Zusammenfassung:\n" + report_text)
mail.send_report_email(report_text, process) mail.send_report_email(report_text, process)
# Leere .export-Datei erstellen # --- Leere .export-Datei erstellen ---
export_marker_path = os.path.join(MAIN_DIR, ".export") export_marker_path = os.path.join(MAIN_DIR, ".export")
try: try:
with open(export_marker_path, "w") as f: with open(export_marker_path, "w"):
pass pass
write_log(f"Leere .export-Datei erstellt: {export_marker_path}") write_log(f"Leere .export-Datei erstellt: {export_marker_path}")
except Exception as marker_err: except Exception as marker_err:
write_log(f"Fehler beim Erstellen der .export-Datei: {marker_err}") write_log(f"Fehler beim Erstellen der .export-Datei: {marker_err}")
# Abschlusslog in DB # --- Abschlusslog in DB ---
try: try:
if mssql_cursor_global: if mssql_cursor_global:
mssql_cursor_global.execute( mssql_cursor_global.execute(
@ -210,5 +210,6 @@ def main():
write_log(error_details) write_log(error_details)
mail.send_error_email(error_details, process) mail.send_error_email(error_details, process)
if __name__ == "__main__": if __name__ == "__main__":
main() main()