# Source code for LOGS_solutions.CreateExportEntities.CreateExportInventories.InventoryManager

#!/usr/bin/env python3

import csv
import json
import os
from typing import Optional, Set, List
import logging

import pandas as pd
import openpyxl

from LOGS.Auxiliary.Exceptions import LOGSException
from LOGS.Entities import (
    ProjectRequestParameter,
    CustomTypeRequestParameter,
    InventoryItemRequestParameter,
)
from LOGS.LOGS import LOGS
from ...Utils.Exceptions import CsvReadError, ExcelReadError


logging.basicConfig(level=logging.INFO)


class InventoryManager:
    """Create or export inventories of a LOGS instance.

    Inventories can be created from a CSV or Excel (.xlsx) source file, or
    exported from the LOGS instance into a CSV/Excel file (plus one JSON
    file per inventory item, written next to the tabular export).
    """

    # Header expected in source files and written to export files.
    _COLUMNS = ["Custom Type ID", "Name", "Projects"]

    def __init__(
        self,
        logs: "LOGS",
        source_path: Optional[str] = None,
        target_path: Optional[str] = None,
        export_format: Optional[str] = ".csv",
    ) -> None:
        """Initialization.

        :param logs: LOGS object to access the LOGS web API.
        :param source_path: Path of the CSV/Excel file to import inventories
            from (``str`` or path-like), defaults to None.
        :param target_path: Path for the export file; if it has no file
            extension it is treated as a directory and a default file name
            ``inventories_export<export_format>`` is appended. Defaults to None.
        :param export_format: Extension of the export file (".csv" or
            ".xlsx"), defaults to ".csv".
        """
        self.__logs = logs
        self.__source_path = source_path
        self.__target_path = target_path
        if self.__target_path is not None:
            # BUG FIX: the original read `.suffix`, which only exists on
            # pathlib.Path; os.path.splitext works for str and path-like.
            if os.path.splitext(self.__target_path)[1] == "":
                self.__target_path = os.path.join(
                    self.__target_path, f"inventories_export{export_format}"
                )
        self.__export_format = export_format
        self.__source_format = (
            os.path.splitext(self.__source_path)[1] if self.__source_path else None
        )

    def _ensure_header(self, df: pd.DataFrame) -> None:
        """Ensure that the DataFrame has the correct header.

        :param df: DataFrame to check.
        :raises ValueError: If the (stripped) column names differ from the
            expected header.
        """
        cols = [str(c).strip() for c in df.columns]
        if cols != self._COLUMNS:
            raise ValueError(
                f"Header does not match.\nExpected: {self._COLUMNS}\nFound: {cols}"
            )

    def _split_int_list_cell(self, v: object) -> Optional[List[int]]:
        """Parse '2, 1' -> [2,1]. Empty/None/NA -> None.

        Non-numeric fragments are silently skipped.

        :param v: Cell value to parse.
        :return: List of integers or None.
        """
        if v is None or pd.isna(v):
            return None
        s = str(v).strip()
        if s == "" or s.lower() in {"none", "nan", "<na>"}:
            return None
        out: List[int] = []
        for part in s.split(","):
            part = part.strip()
            if not part:
                continue
            try:
                out.append(int(part))
            except ValueError:
                # Ignore non-integer fragments rather than failing the row.
                continue
        return out if out else None

    def check_customtypes(self, customtype_set: Set) -> None:
        """Check that every custom type in the set exists in the LOGS instance.

        :param customtype_set: Set with all custom type ids named in the
            source file.
        :raises ValueError: If a non-empty id is unknown in the LOGS instance.
        """
        known_ids = {
            customtype.id
            for customtype in self.__logs.customTypes(CustomTypeRequestParameter())
        }
        for customtype in customtype_set:
            if customtype in known_ids or customtype is None or customtype == "":
                continue
            message = (
                f"The custom type {customtype} does not exist in this LOGS "
                "instance. The Script is terminated."
            )
            logging.error(message)
            raise ValueError(message)

    def check_projects(self, project_set: Set) -> None:
        """Check that every project in the set exists in the LOGS instance.

        :param project_set: Set with all project ids named in the source file.
        :raises ValueError: If a non-empty id is unknown in the LOGS instance.
        """
        known_ids = {
            project.id
            for project in self.__logs.projects(ProjectRequestParameter())
        }
        for project in project_set:
            if project in known_ids or project is None or project == "":
                continue
            message = (
                f"The project {project} does not exist in this LOGS "
                "instance. The Script is terminated."
            )
            logging.error(message)
            raise ValueError(message)

    def post_process_data(self, inventories_data: pd.DataFrame) -> pd.DataFrame:
        """Normalize empties, parse Projects list, parse Custom Type; drop bad rows.

        Rows missing either "Custom Type ID" or "Name" are dropped, as are
        fully empty rows.

        :param inventories_data: DataFrame read from source file.
        :return: Post-processed DataFrame.
        :raises ValueError: If the header does not match the expected columns.
        """
        logging.debug("Inventories_Data before post processing:\n%s", inventories_data)
        self._ensure_header(inventories_data)
        df = inventories_data.copy()
        # Blank / whitespace-only cells become proper missing values.
        df = df.replace(r"^\s*$", pd.NA, regex=True)
        df["Custom Type ID"] = pd.to_numeric(
            df["Custom Type ID"], errors="coerce"
        ).astype("Int64")
        df["Projects"] = df["Projects"].map(self._split_int_list_cell)
        df = df.dropna(subset=["Custom Type ID", "Name"], how="any")
        df = df.dropna(how="all")
        logging.debug("Inventories_Data after post processing:\n%s", df)
        return df

    def read_file(self) -> pd.DataFrame:
        """Read the inventories from the given source file.

        :return: Post-processed DataFrame with inventories data.
        :raises CsvReadError: If the CSV file cannot be read.
        :raises ExcelReadError: If the Excel file cannot be read.
        :raises ValueError: If the source file extension is unsupported.
        """
        logging.info("Reading inventory data from file: %s", self.__source_path)
        if self.__source_format == ".csv":
            try:
                inventories_data = pd.read_csv(
                    self.__source_path,
                    delimiter=";",
                    dtype=str,
                    keep_default_na=False,
                    quotechar='"',
                    skip_blank_lines=True,
                )
            except Exception as e:
                message = f"Error reading CSV file with the inventories: {e}"
                logging.exception(message)
                raise CsvReadError(message) from e
        elif self.__source_format in [".xlsx"]:
            try:
                inventories_data = pd.read_excel(
                    self.__source_path,
                    dtype=str,
                    keep_default_na=False,
                    engine="openpyxl",
                )
            except Exception as e:
                message = f"Error reading Excel file with the inventories: {e}"
                logging.exception(message)
                raise ExcelReadError(message) from e
        else:
            raise ValueError(
                f"Unsupported source format: {self.__source_format}. Supported formats are: .csv, .xlsx"
            )
        return self.post_process_data(inventories_data)

    def create_inventories(self) -> None:
        """Create inventories in the LOGS instance from the source file.

        Rows whose custom type is a hierarchy root are skipped; creation
        failures are logged but do not abort the run.

        :raises ValueError: If a referenced project or custom type does not
            exist in the LOGS instance.
        """
        inventories_data = self.read_file()
        logging.info("Starting inventory creation process.")
        customtype_set = set(
            inventories_data["Custom Type ID"].dropna().unique().tolist()
        )
        project_set = set(
            inventories_data["Projects"].dropna().explode().unique().tolist()
        )
        self.check_projects(project_set)
        self.check_customtypes(customtype_set)
        customtype_cache = {}
        # inventory_count mirrors the data row number for log messages.
        for inventory_count, (_, inventory_item) in enumerate(
            inventories_data.iterrows(), start=1
        ):
            projects = inventory_item["Projects"]
            customtype_id = inventory_item["Custom Type ID"]
            inventory_customtype = None
            if not pd.isna(customtype_id):
                key = int(customtype_id)
                if key not in customtype_cache:
                    # Fetch each custom type from the API only once.
                    customtype_cache[key] = self.__logs.customTypes(
                        CustomTypeRequestParameter(ids=[key])
                    ).first()
                inventory_customtype = customtype_cache[key]
            # BUG FIX: original dereferenced isHierarchyRoot without a None
            # check and crashed when the custom type could not be resolved.
            if inventory_customtype is not None and inventory_customtype.isHierarchyRoot:
                continue
            logging.info(
                "Custom Type Name: %s",
                inventory_customtype.name if inventory_customtype else "None",
            )
            log_inventory = self.__logs.newInventoryItem(
                customTypeOrId=inventory_customtype
            )
            log_inventory.name = inventory_item["Name"]
            log_inventory.projects = projects
            try:
                self.__logs.create(log_inventory)
                logging.info(
                    "The inventory item in line %s has been created.", inventory_count
                )
            except LOGSException:
                logging.exception(
                    "The inventory item in line %s could not be created.",
                    inventory_count,
                )

    def _export_rows(self):
        """Yield one [custom type id, name, projects] row per exportable item.

        Items whose custom type is a hierarchy root are skipped, mirroring
        the import side.
        """
        for inventory_item in self.__logs.inventoryItems(
            InventoryItemRequestParameter()
        ):
            if inventory_item.customType and inventory_item.customType.isHierarchyRoot:
                continue
            projects_str = ""
            if inventory_item.projects is not None:
                projects_str = ",".join(
                    str(project.id) for project in inventory_item.projects
                )
            yield [
                inventory_item.customType.id if inventory_item.customType else "",
                inventory_item.name,
                projects_str,
            ]

    def export_inventories_json(self) -> None:
        """Export every inventory item (including hierarchy roots) to one
        JSON file per item, in the directory of the target path."""
        target_dir = os.path.dirname(self.__target_path)
        for inventory_item in self.__logs.inventoryItems(
            InventoryItemRequestParameter()
        ):
            json_path = os.path.join(target_dir, f"inventory_{inventory_item.id}.json")
            with open(json_path, "w", encoding="utf-8") as json_file:
                json.dump(
                    inventory_item.toJson(), json_file, ensure_ascii=False, indent=2
                )

    def export_inventories_csv(self) -> None:
        """Export inventories to a ';'-separated, fully quoted CSV file."""
        with open(self.__target_path, "w", newline="", encoding="utf-8") as file:
            writer = csv.writer(
                file, delimiter=";", quotechar='"', quoting=csv.QUOTE_ALL
            )
            writer.writerow(self._COLUMNS)
            writer.writerows(self._export_rows())

    def export_inventories_excel(self) -> None:
        """Export inventories to an Excel (.xlsx) file at the target path."""
        wb = openpyxl.Workbook()
        ws = wb.active
        ws.append(self._COLUMNS)
        for row in self._export_rows():
            ws.append(row)
        wb.save(self.__target_path)

    def export_inventories(self) -> None:
        """Export inventories to a CSV or Excel file at the target path,
        plus one JSON file per inventory item.

        :raises ValueError: If the export format is unsupported.
        """
        logging.info("Starting inventory export process.")
        if self.__export_format == ".csv":
            self.export_inventories_csv()
        elif self.__export_format == ".xlsx":
            self.export_inventories_excel()
        else:
            raise ValueError(
                f"Invalid export format: {self.__export_format}. Supported formats are: .csv, .xlsx"
            )
        self.export_inventories_json()