# coding: utf-8
"""
Provide a report and downloadable CSV according to the German DATEV format.

- Query report showing only the columns that contain data, formatted nicely for
  display to the user.
- CSV download functionality `download_datev_csv` that provides a CSV file with
  all required columns. Used to import the data into the DATEV software.
"""
from __future__ import unicode_literals
import datetime
import json
import zlib
import zipfile

import six
from six import BytesIO
from six import string_types

import frappe
from frappe import _
import pandas as pd

from .datev_constants import DataCategory
from .datev_constants import Transactions
from .datev_constants import DebtorsCreditors
from .datev_constants import AccountNames
from .datev_constants import QUERY_REPORT_COLUMNS


def execute(filters=None):
	"""Entry point for frappe."""
	validate(filters)
	result = get_transactions(filters, as_dict=0)
	columns = QUERY_REPORT_COLUMNS

	return columns, result


def validate(filters):
	"""Make sure all mandatory filters and settings are present."""
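	# A minimal filters dict that passes these checks (values are purely illustrative):
	# {'company': 'ACME GmbH', 'from_date': '2019-01-01', 'to_date': '2019-12-31'}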
	if not filters.get('company'):
		frappe.throw(_('<b>Company</b> is a mandatory filter.'))

	if not filters.get('from_date'):
		frappe.throw(_('<b>From Date</b> is a mandatory filter.'))

	if not filters.get('to_date'):
		frappe.throw(_('<b>To Date</b> is a mandatory filter.'))

	try:
		frappe.get_doc('DATEV Settings', filters.get('company'))
	except frappe.DoesNotExistError:
		frappe.throw(_('Please create <b>DATEV Settings</b> for Company <b>{}</b>.').format(filters.get('company')))


def get_transactions(filters, as_dict=1):
	"""
	Get a list of accounting entries.

	Select GL Entries joined with Account and Party Account in order to get
	the account numbers. Returns a list of accounting entries.

	Arguments:
	filters -- dict of filters to be passed to the sql query
	as_dict -- return rows as list of dicts (1, default) or as tuples (0)
	"""
	gl_entries = frappe.db.sql("""
		SELECT

			/* either debit or credit amount; always positive */
			case gl.debit when 0 then gl.credit else gl.debit end as 'Umsatz (ohne Soll/Haben-Kz)',

			/* 'H' when credit, 'S' when debit */
			case gl.debit when 0 then 'H' else 'S' end as 'Soll/Haben-Kennzeichen',

			/* account number or, if empty, party account number */
			coalesce(acc.account_number, acc_pa.account_number) as 'Kontonummer',

			/* against account number or, if empty, party against account number */
			coalesce(acc_against.account_number, acc_against_pa.account_number) as 'Gegenkonto (ohne BU-Schlüssel)',

			gl.posting_date as 'Belegdatum',
			gl.remarks as 'Buchungstext',
			gl.voucher_type as 'Beleginfo - Art 1',
			gl.voucher_no as 'Beleginfo - Inhalt 1',
			gl.against_voucher_type as 'Beleginfo - Art 2',
			gl.against_voucher as 'Beleginfo - Inhalt 2'

		FROM `tabGL Entry` gl

			/* Statistisches Konto (Debitoren/Kreditoren) */
			left join `tabParty Account` pa
			on gl.against = pa.parent
			and gl.company = pa.company

			/* Kontonummer */
			left join `tabAccount` acc
			on gl.account = acc.name

			/* Gegenkonto-Nummer */
			left join `tabAccount` acc_against
			on gl.against = acc_against.name

			/* Statistische Kontonummer */
			left join `tabAccount` acc_pa
			on pa.account = acc_pa.name

			/* Statistische Gegenkonto-Nummer */
			left join `tabAccount` acc_against_pa
			on pa.account = acc_against_pa.name

		WHERE gl.company = %(company)s
		AND DATE(gl.posting_date) >= %(from_date)s
		AND DATE(gl.posting_date) <= %(to_date)s
		ORDER BY gl.posting_date, gl.voucher_no""", filters, as_dict=as_dict, as_utf8=1)

	return gl_entries


def get_customers(filters):
	"""
	Get a list of Customers.

	Arguments:
	filters -- dict of filters to be passed to the sql query
	"""
	return frappe.db.sql("""
		SELECT

			acc.account_number as 'Konto',
			cus.customer_name as 'Name (Adressatentyp Unternehmen)',
			case cus.customer_type when 'Individual' then 1 when 'Company' then 2 else 0 end as 'Adressatentyp',
			adr.address_line1 as 'Straße',
			adr.pincode as 'Postleitzahl',
			adr.city as 'Ort',
			UPPER(country.code) as 'Land',
			adr.address_line2 as 'Adresszusatz',
			con.email_id as 'E-Mail',
			coalesce(con.mobile_no, con.phone) as 'Telefon',
			cus.website as 'Internet',
			cus.tax_id as 'Steuernummer',
			ccl.credit_limit as 'Kreditlimit (Debitor)'

		FROM `tabParty Account` par

			left join `tabAccount` acc
			on acc.name = par.account

			left join `tabCustomer` cus
			on cus.name = par.parent

			left join `tabAddress` adr
			on adr.name = cus.customer_primary_address

			left join `tabCountry` country
			on country.name = adr.country

			left join `tabContact` con
			on con.name = cus.customer_primary_contact

			left join `tabCustomer Credit Limit` ccl
			on ccl.parent = cus.name
			and ccl.company = par.company

		WHERE par.company = %(company)s
		AND par.parenttype = 'Customer'""", filters, as_dict=1, as_utf8=1)


def get_suppliers(filters):
	"""
	Get a list of Suppliers.

	Arguments:
	filters -- dict of filters to be passed to the sql query
	"""
	return frappe.db.sql("""
		SELECT

			acc.account_number as 'Konto',
			sup.supplier_name as 'Name (Adressatentyp Unternehmen)',
			case sup.supplier_type when 'Individual' then '1' when 'Company' then '2' else '0' end as 'Adressatentyp',
			adr.address_line1 as 'Straße',
			adr.pincode as 'Postleitzahl',
			adr.city as 'Ort',
			UPPER(country.code) as 'Land',
			adr.address_line2 as 'Adresszusatz',
			con.email_id as 'E-Mail',
			coalesce(con.mobile_no, con.phone) as 'Telefon',
			sup.website as 'Internet',
			sup.tax_id as 'Steuernummer',
			case sup.on_hold when 1 then sup.release_date else null end as 'Zahlungssperre bis'

		FROM `tabParty Account` par

			left join `tabAccount` acc
			on acc.name = par.account

			left join `tabSupplier` sup
			on sup.name = par.parent

			left join `tabDynamic Link` dyn_adr
			on dyn_adr.link_name = sup.name
			and dyn_adr.link_doctype = 'Supplier'
			and dyn_adr.parenttype = 'Address'

			left join `tabAddress` adr
			on adr.name = dyn_adr.parent
			and adr.is_primary_address = '1'

			left join `tabCountry` country
			on country.name = adr.country

			left join `tabDynamic Link` dyn_con
			on dyn_con.link_name = sup.name
			and dyn_con.link_doctype = 'Supplier'
			and dyn_con.parenttype = 'Contact'

			left join `tabContact` con
			on con.name = dyn_con.parent
			and con.is_primary_contact = '1'

		WHERE par.company = %(company)s
		AND par.parenttype = 'Supplier'""", filters, as_dict=1, as_utf8=1)


def get_account_names(filters):
	"""Get a list of non-group accounts with their number and name."""
	return frappe.get_list("Account",
		fields=["account_number as Konto", "name as Kontenbeschriftung"],
		filters={"company": filters.get("company"), "is_group": "0"})


def get_datev_csv(data, filters, csv_class):
	"""
	Fill in missing columns and return a CSV in DATEV format.

	For automatic processing, DATEV requires the first line of the CSV file to
	hold metadata such as the length of account numbers or the category of
	the data.

	Arguments:
	data -- array of dictionaries
	filters -- dict
	csv_class -- defines DATA_CATEGORY, FORMAT_NAME and COLUMNS
	"""
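	# csv_class is one of the classes defined in datev_constants, i.e.
	# Transactions, AccountNames or DebtorsCreditors (see download_datev_csv).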
	header = get_header(filters, csv_class)

	empty_df = pd.DataFrame(columns=csv_class.COLUMNS)
	data_df = pd.DataFrame.from_records(data)

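	# Appending the data to an empty DataFrame that already has all DATEV
	# columns fills in any columns that are missing from the query results.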
	result = empty_df.append(data_df, sort=True)

	if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS:
		result['Belegdatum'] = pd.to_datetime(result['Belegdatum'])

	if csv_class.DATA_CATEGORY == DataCategory.ACCOUNT_NAMES:
		result['Sprach-ID'] = 'de-DE'

	header = ';'.join(header).encode('latin_1')
	data = result.to_csv(
		# Reason for str(';'): https://github.com/pandas-dev/pandas/issues/6035
		sep=str(';'),
		# European decimal separator
		decimal=',',
		# Windows "ANSI" encoding
		encoding='latin_1',
		# format date as DDMM
		date_format='%d%m',
		# Windows line terminator
		line_terminator='\r\n',
		# Do not number rows
		index=False,
		# Use all columns defined above
		columns=csv_class.COLUMNS
	)

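	# Under Python 3, to_csv() returns a text str that still has to be encoded;
	# under Python 2 the result is already a byte string.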
	if not six.PY2:
		data = data.encode('latin_1')

	return header + b'\r\n' + data


def get_header(filters, csv_class):
	header = [
		# A = DATEV format
		# DTVF = created by DATEV software,
		# EXTF = created by other software
		'"EXTF"',
		# B = version of the DATEV format
		# 141 = 1.41,
		# 510 = 5.10,
		# 720 = 7.20
		"700",
		# C = Data category (Formatkategorie)
		csv_class.DATA_CATEGORY,
		# D = Format name
		csv_class.FORMAT_NAME,
		# E = Format version (regarding format name)
		csv_class.FORMAT_VERSION,
		# F = Generated on
		datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
		# G = Imported on -- stays empty
		"",
		# H = Origin (SV = other (?), RE = KARE)
		"SV",
		# I = Exported by
		frappe.session.user,
		# J = Imported by -- stays empty
		"",
		# K = Tax consultant number (Beraternummer)
		frappe.get_value("DATEV Settings", filters.get("company"), "consultant_number") or "",
		"",
		# L = Tax client number (Mandantennummer)
		frappe.get_value("DATEV Settings", filters.get("company"), "client_number") or "",
		"",
		# M = Start of the fiscal year (Wirtschaftsjahresbeginn)
		frappe.utils.formatdate(frappe.defaults.get_user_default("year_start_date"), "yyyyMMdd"),
		# N = Length of account numbers (Sachkontenlänge)
		"4",
		# O = Transaction batch start date (YYYYMMDD)
		frappe.utils.formatdate(filters.get('from_date'), "yyyyMMdd"),
		# P = Transaction batch end date (YYYYMMDD)
		frappe.utils.formatdate(filters.get('to_date'), "yyyyMMdd"),
		# Q = Description (for example, "January - February 2019 Transactions")
		"{} - {} {}".format(
			frappe.utils.formatdate(filters.get('from_date'), "MMMM yyyy"),
			frappe.utils.formatdate(filters.get('to_date'), "MMMM yyyy"),
			csv_class.FORMAT_NAME
		),
		# R = Diktatkürzel
		"",
		# S = Buchungstyp
		# 1 = Transaction batch (Buchungsstapel),
		# 2 = Annual financial statement (Jahresabschluss)
		"1" if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS else "",
		# T = Rechnungslegungszweck
		"",
		# U = Festschreibung
		"",
		# V = Kontoführungs-Währungskennzeichen des Geldkontos
		frappe.get_value("Company", filters.get("company"), "default_currency")
	]
	return header


@frappe.whitelist()
def download_datev_csv(filters=None):
	"""
	Provide accounting entries for download in DATEV format.

	Validate the filters, get the data, produce a ZIP file containing the CSV
	files and provide it for download. Can be called like this:

	GET /api/method/erpnext.regional.report.datev.datev.download_datev_csv
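		?filters={"company": "ACME GmbH", "from_date": "2019-01-01", "to_date": "2019-12-31"}

	(The company name and dates above are only placeholders. `filters` may
	arrive as a JSON string, which is parsed below.)
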
	Arguments / Params:
	filters -- dict of filters to be passed to the sql query
	"""
	if isinstance(filters, string_types):
		filters = json.loads(filters)

	validate(filters)

	# The ZIP archive is assembled in memory and then sent as the response
	zip_buffer = BytesIO()
	datev_zip = zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED)

	transactions = get_transactions(filters)
	transactions_csv = get_datev_csv(transactions, filters, csv_class=Transactions)
	datev_zip.writestr('EXTF_Buchungsstapel.csv', transactions_csv)

	account_names = get_account_names(filters)
	account_names_csv = get_datev_csv(account_names, filters, csv_class=AccountNames)
	datev_zip.writestr('EXTF_Kontenbeschriftungen.csv', account_names_csv)

	customers = get_customers(filters)
	customers_csv = get_datev_csv(customers, filters, csv_class=DebtorsCreditors)
	datev_zip.writestr('EXTF_Kunden.csv', customers_csv)

	suppliers = get_suppliers(filters)
	suppliers_csv = get_datev_csv(suppliers, filters, csv_class=DebtorsCreditors)
	datev_zip.writestr('EXTF_Lieferanten.csv', suppliers_csv)

	# close() must be called before reading the buffer, otherwise the ZIP's
	# central directory is not written and the archive is incomplete
	datev_zip.close()

	frappe.response['filecontent'] = zip_buffer.getvalue()
	frappe.response['filename'] = 'DATEV.zip'
	frappe.response['type'] = 'binary'