Skip to content

Commit

Permalink
smaller fixes
Browse files Browse the repository at this point in the history
Issue #153
  • Loading branch information
rsoika committed Jan 26, 2024
1 parent 47145a8 commit 036b1d9
Show file tree
Hide file tree
Showing 5 changed files with 428 additions and 69 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,22 +5,21 @@
import java.util.List;
import java.util.logging.Logger;

import jakarta.inject.Inject;

import org.apache.commons.net.ftp.FTP;
import org.imixs.workflow.FileData;
import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.ItemCollectionComparator;
import org.imixs.workflow.SignalAdapter;
import org.imixs.workflow.engine.DocumentService;
import org.imixs.workflow.engine.WorkflowService;
import org.imixs.workflow.engine.scheduler.SchedulerException;
import org.imixs.workflow.exceptions.AccessDeniedException;
import org.imixs.workflow.exceptions.AdapterException;
import org.imixs.workflow.exceptions.ModelException;
import org.imixs.workflow.exceptions.PluginException;
import org.imixs.workflow.exceptions.ProcessingErrorException;

import jakarta.inject.Inject;

/**
* Der DATEVExportAdapter erstellt einen DATEV Export zu allen im Datev-Export
worktiem Workitem referenzierten Rechnungen. Ein Export besteht aus einer CSV Datei
Expand Down Expand Up @@ -153,7 +152,7 @@ public class DatevExportAdapter implements SignalAdapter {

@Inject
DatevExportService datevExportService;

/**
* This method finds or create the Zahlungsavis and adds a reference
* ($workitemref) to the current invoice.
Expand All @@ -167,7 +166,7 @@ public ItemCollection execute(ItemCollection datevExport, ItemCollection event)

try {
ItemCollection configuration = datevExportService.loadConfiguration(DATEV_CONFIGURATION);
// get the data source based on the report definition....
// get the data source based on the $workitemref....
List<ItemCollection> masterDataSet = buildMasterDataSet(datevExport);

// first we need to extend the Export Workitem
Expand All @@ -178,13 +177,11 @@ public ItemCollection execute(ItemCollection datevExport, ItemCollection event)
// =====================================
// 2nd export buchungsstapel via CSV
// =====================================

datevExportService.buildCSVFile(datevExport, masterDataSet, datevClientID, configuration);

// =====================================
// 3rd create export workitem with attached zip file....
// =====================================

datevExportService.buildDocumentsZipFile(datevExport, masterDataSet, datevClientID, configuration);

// finally copy attachments via FTP...
Expand All @@ -207,7 +204,7 @@ public ItemCollection execute(ItemCollection datevExport, ItemCollection event)
// update and process invoices in new transaction to avoid partial updates...
processDatevExportEntities(datevExport, masterDataSet, event, configuration);

} catch (SchedulerException | AccessDeniedException | ProcessingErrorException | ModelException e) {
} catch (AccessDeniedException | ProcessingErrorException | ModelException e) {
throw new PluginException(DatevExportAdapter.class.getName(), DATEV_EXPORT_ERROR, e.getMessage(), e);
}

Expand Down Expand Up @@ -252,10 +249,10 @@ private void processDatevExportEntities(ItemCollection datevExport, List<ItemCol

// process all invoices...
for (ItemCollection invoice : datevExportEntities) {
if (invoice.getTaskID()!=5900) {
invoice.event(EVENT_INVOICE_COMPLETED);
workflowService.processWorkItem(invoice);
}
if (invoice.getTaskID() != 5900) {
invoice.event(EVENT_INVOICE_COMPLETED);
workflowService.processWorkItem(invoice);
}
}
// write log
logger.info("..." + datevExportEntities.size() + " invoices exported. ");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,10 @@
import org.imixs.workflow.ItemCollection;
import org.imixs.workflow.WorkflowKernel;
import org.imixs.workflow.datev.DatevHelper;
import org.imixs.workflow.datev.imports.DatevService;
import org.imixs.workflow.engine.DocumentService;
import org.imixs.workflow.engine.ReportService;
import org.imixs.workflow.engine.WorkflowService;
import org.imixs.workflow.engine.scheduler.SchedulerException;
import org.imixs.workflow.engine.scheduler.SchedulerService;
import org.imixs.workflow.exceptions.AccessDeniedException;
import org.imixs.workflow.exceptions.ModelException;
import org.imixs.workflow.exceptions.PluginException;
Expand Down Expand Up @@ -188,11 +187,10 @@ public ItemCollection updateExportWorkitem(ItemCollection datevExport,
*
* @param data
* @return
* @throws SchedulerException
* @throws PluginException
*/
public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollection> data, String key,
ItemCollection configuration) throws SchedulerException, PluginException {
ItemCollection configuration) throws PluginException {
int documentCount = 0;
ZipOutputStream datevZip = null;
ByteArrayOutputStream zipOutputStream = null;
Expand All @@ -203,10 +201,10 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio
configuration, datevExport);

// load report configuration....
String reportNameDocuments = configuration.getItemValueString("_report_documents");
String reportNameDocuments = configuration.getItemValueString("report.documents");
ItemCollection documentsReport = reportService.findReport(reportNameDocuments);
if (documentsReport == null) {
throw new SchedulerException(REPORT_ERROR,
throw new PluginException(this.getClass().getClass().getName(), REPORT_ERROR,
"unable to load documents report definition '" + reportNameDocuments
+ "'. Please check the configuration");
}
Expand All @@ -218,8 +216,9 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio

String xslDocuments = documentsReport.getItemValueString("XSL").trim();
if (xslDocuments.isEmpty()) {
throw new SchedulerException(REPORT_ERROR, "Failed to build DATEV zip archive '"
+ documentsReport.getItemValueString("txtname") + " XSL content is missing.");
throw new PluginException(this.getClass().getClass().getName(), REPORT_ERROR,
"Failed to build DATEV zip archive '"
+ documentsReport.getItemValueString("txtname") + " XSL content is missing.");
}

String encoding = documentsReport.getItemValueString("encoding");
Expand Down Expand Up @@ -302,8 +301,10 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio
datevZip.write(byteData);
datevZip.closeEntry();
} catch (IOException | TransformerException | JAXBException e) {
throw new SchedulerException(REPORT_ERROR, "Failed to build DATEV zip archive '"
+ documentsReport.getItemValueString("txtname") + "' : " + e.getMessage(), e);
throw new PluginException(DatevExportService.class.getName(), REPORT_ERROR,
"Failed to build DATEV zip archive '"
+ documentsReport.getItemValueString("txtname") + "' : " + e.getMessage(),
e);
}

DateFormat df = new SimpleDateFormat("yyyy-MM-dd_HHmm");
Expand All @@ -316,7 +317,7 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio
datevExport.addFileData(zipFileData);

} catch (IOException e) {
throw new SchedulerException(REPORT_ERROR,
throw new PluginException(DatevExportService.class.getName(), REPORT_ERROR,
"Failed to create Documents archive '" + documentsReport.getItemValueString("txtname") + "' : "
+ e.getClass().getName() + " -> " + e.getMessage(),
e);
Expand All @@ -330,8 +331,10 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio
zipOutputStream.close();
}
} catch (IOException e) {
throw new SchedulerException(REPORT_ERROR, "Failed to close DATEV archive '"
+ documentsReport.getItemValueString("txtname") + "' : " + e.getMessage(), e);
throw new PluginException(DatevExportService.class.getName(), REPORT_ERROR,
"Failed to close DATEV archive '"
+ documentsReport.getItemValueString("txtname") + "' : " + e.getMessage(),
e);
}

}
Expand All @@ -347,41 +350,36 @@ public void buildDocumentsZipFile(ItemCollection datevExport, List<ItemCollectio
*
* @param data
* @return
* @throws SchedulerException
* @throws PluginException
*/
public void buildCSVFile(ItemCollection datevExport, List<ItemCollection> data, String key,
ItemCollection configuration) throws SchedulerException {
ItemCollection configuration) throws PluginException {
String clientID = datevExport.getItemValueString(ITEM_DATEV_CLIENT_ID);

// load the report for CSV export
String reportNameInvoices = configuration.getItemValueString("report.invoices");

DatevHelper.logMessage(
"... CSV export started (ClientID="
+ clientID + ") ...",
+ clientID + ") Report=" + reportNameInvoices + "...",
configuration, datevExport);

// load the report for CSV export
String reportNameInvoices = configuration.getItemValueString("_report_invoices");

// It is possible, that we have an optional report definition for this client ID
// let's test this
ItemCollection invoiceReport = null;
invoiceReport = reportService.findReport(reportNameInvoices + "_" + clientID);
if (invoiceReport == null) {
// load default
invoiceReport = reportService.findReport(reportNameInvoices);
}
invoiceReport = reportService.findReport(reportNameInvoices);
if (invoiceReport == null) {
throw new SchedulerException(REPORT_ERROR, "unable to load invoice report definition '"
+ reportNameInvoices + "'. Please check the configuration");
throw new PluginException(DatevExportService.class.getName(), REPORT_ERROR,
"unable to load invoice report definition '"
+ reportNameInvoices + "'. Please check the configuration");
}

// link invoices with export workitem and find the earliest rechnugnsdatum
// (_invoicedate)....
// link invoices with export workitem and find the earliest invoice.date ...
LocalDateTime stapelZeitraumStart = null;
LocalDateTime stapelZeitraumEnde = null;
for (ItemCollection invoice : data) {
Date baseDate = invoice.getItemValueDate("_accountingdate");
Date baseDate = invoice.getItemValueDate("accounting.date");
if (baseDate == null) {
// fallback auf invoice date
baseDate = invoice.getItemValueDate("_invoicedate");
baseDate = invoice.getItemValueDate("invoice.date");
}
LocalDateTime invoiceDate = new Date(baseDate.getTime()).toInstant().atZone(ZoneId.systemDefault())
.toLocalDateTime();
Expand All @@ -391,9 +389,6 @@ public void buildCSVFile(ItemCollection datevExport, List<ItemCollection> data,
if (stapelZeitraumEnde == null || invoiceDate.isAfter(stapelZeitraumEnde)) {
stapelZeitraumEnde = invoiceDate;
}

// here we link the origin invoice and not the data export Entity.

}
// update Stapelzeitraum Beginn und Ende
if (stapelZeitraumStart != null) {
Expand Down Expand Up @@ -424,7 +419,7 @@ public void buildCSVFile(ItemCollection datevExport, List<ItemCollection> data,
// xsl transformation based on our tmp_data collection....
filedata = reportService.transformDataSource(invoiceReport, tmp_data, datevFileName);
} catch (JAXBException | TransformerException | IOException e) {
throw new SchedulerException(REPORT_ERROR, "Failed to execute CSV report '"
throw new PluginException(DatevExportService.class.getName(), REPORT_ERROR, "Failed to execute CSV report '"
+ invoiceReport.getItemValueString("txtname") + "' : " + e.getMessage(), e);
}

Expand All @@ -450,7 +445,7 @@ public void buildCSVFile(ItemCollection datevExport, List<ItemCollection> data,
* @throws QueryException
*/
public boolean putFileData(FileData fileData, ItemCollection configuration, String subDirectory,
ItemCollection datevExport, int ftpType) throws SchedulerException {
ItemCollection datevExport, int ftpType) throws PluginException {

boolean result = false;
String ftpServer = configuration.getItemValueString(ITEM_FTP_HOST);
Expand Down Expand Up @@ -500,7 +495,8 @@ public boolean putFileData(FileData fileData, ItemCollection configuration, Stri
ftpClient.setControlEncoding("UTF-8");
ftpClient.connect(ftpServer, Integer.parseInt(ftpPort));
if (ftpClient.login(ftpUser, ftpPassword) == false) {
throw new SchedulerException(ITEM_FTP_ERROR, "FTP file transfer failed: login failed!");
throw new PluginException(DatevExportService.class.getName(), ITEM_FTP_ERROR,
"FTP file transfer failed: login failed!");
}

ftpClient.enterLocalPassiveMode();
Expand Down Expand Up @@ -538,7 +534,7 @@ public boolean putFileData(FileData fileData, ItemCollection configuration, Stri
"...FTP file transfer '" + ftpPathReports + fileData.getName() + "' successfull", configuration,
datevExport);
} else {
throw new SchedulerException(ITEM_FTP_ERROR,
throw new PluginException(DatevExportService.class.getName(), ITEM_FTP_ERROR,
"FTP file transfer failed: unable to write '" + ftpPathReports + fileData.getName() + "'");
}

Expand All @@ -547,7 +543,7 @@ public boolean putFileData(FileData fileData, ItemCollection configuration, Stri
int r = ftpClient.getReplyCode();
logger.severe("FTP ReplyCode=" + r);

throw new SchedulerException(ITEM_FTP_ERROR,
throw new PluginException(DatevExportService.class.getName(), ITEM_FTP_ERROR,
"FTP file transfer failed (replyCode=" + r + ") : " + e.getMessage(), e);
} finally {
// do logout....
Expand All @@ -558,7 +554,8 @@ public boolean putFileData(FileData fileData, ItemCollection configuration, Stri
ftpClient.logout();
ftpClient.disconnect();
} catch (IOException e) {
throw new SchedulerException(ITEM_FTP_ERROR, "FTP file transfer failed: " + e.getMessage(), e);
throw new PluginException(DatevExportService.class.getName(), ITEM_FTP_ERROR,
"FTP file transfer failed: " + e.getMessage(), e);
}
}

Expand Down Expand Up @@ -633,7 +630,7 @@ public ItemCollection buildExportWorkitem(ItemCollection configuration, String m
public ItemCollection loadConfiguration(String name) {
try {
// support deprecated txtname attribute
String sQuery = "(type:\"" + SchedulerService.DOCUMENT_TYPE + "\" AND (name:\"" + name + "\" OR txtname:\""
String sQuery = "(type:\"" + DatevService.DOCUMENT_TYPE + "\" AND (name:\"" + name + "\" OR txtname:\""
+ name + "\" ) )";
Collection<ItemCollection> col = documentService.find(sQuery, 1, 0);
// check if we found a configuration
Expand Down

0 comments on commit 036b1d9

Please sign in to comment.