HSBCConverter/converter.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
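"""Watch WATCH_DIR for new HSBC .qfx exports.

Each new file is parsed with ofxparse, written out as a CSV in the
Converted/ sub-directory, and the original export is then moved into the
Imported/ sub-directory under a unique numbered name.
"""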
import os
import time
from csv import DictWriter
from pathlib import Path

import watchdog.events
import watchdog.observers
from ofxparse import OfxParser

DATE_FORMAT = "%d/%m/%Y"
WATCH_DIR = '/mnt/data/'
PATTERN = '*.qfx'
BACKUP_DIR = 'Imported'
CONVERTED_DIR = 'Converted'


class Handler(watchdog.events.PatternMatchingEventHandler):
    def __init__(self):
        # Only react to .qfx files; ignore directory events.
        super().__init__(patterns=[PATTERN], ignore_directories=True,
                         case_sensitive=False)

    @staticmethod
    def write_csv(statement, out_file):
        print("Writing: " + out_file)
        fields = ['date', 'memo', 'category', 'amount', 'name']
        # newline='' lets the csv module control line endings itself.
        with open(out_file, 'w', newline='') as f:
            f.write("Date,Original Description,Category,Amount,Account Name")
            f.write("\r\n")
            writer = DictWriter(f, fieldnames=fields)
            for line in statement:
                writer.writerow(line)

    @staticmethod
    def get_statement_from_qfx(qfx):
        # Build one CSV row per transaction, skipping pending entries.
        statement = []
        for transaction in qfx.account.statement.transactions:
            if transaction.payee.startswith("PENDING:"):
                continue
            line = {
                'date': transaction.date.strftime(DATE_FORMAT),
                'memo': transaction.memo,
                'category': 'Uncategorised',
                'amount': transaction.amount,
                'name': 'HSBC Everyday Global'
            }
            statement.append(line)
        return statement

    @staticmethod
    def unique_path(directory, name_pattern):
        # Return the first numbered path built from name_pattern that does
        # not already exist in directory, e.g. statement0001.qfx.
        counter = 0
        while True:
            counter += 1
            path = directory / name_pattern.format(counter)
            if not path.exists():
                return path

    def on_created(self, event):
        print('File found: {}'.format(event.src_path))
        # Wait until the file size stops changing, so the file is only
        # parsed once the copy into the watch directory has finished.
        historical_size = -1
        while historical_size != os.path.getsize(event.src_path):
            historical_size = os.path.getsize(event.src_path)
            print('waiting....')
            time.sleep(1)
        print("file copy has now finished")
        # Open in binary mode so ofxparse can handle the file's own encoding.
        with open(event.src_path, 'rb') as file:
            qfx = OfxParser.parse(file, fail_fast=False)
        statement = Handler.get_statement_from_qfx(qfx)
        path = Path(event.src_path).resolve()
        converted_dir = path.parent / CONVERTED_DIR
        if not converted_dir.exists():
            converted_dir.mkdir()
        out_file = str(converted_dir / ('converted' + path.stem + '.csv'))
        Handler.write_csv(statement, out_file)
        # Now move the input file to backup
        archive_file_dir = path.parent / BACKUP_DIR
        if not archive_file_dir.exists():
            archive_file_dir.mkdir()
        archive_file = path.stem + '{:04d}' + path.suffix
        destination = Handler.unique_path(archive_file_dir, archive_file)
        path.replace(destination)


if __name__ == "__main__":
    event_handler = Handler()
    observer = watchdog.observers.Observer()
    observer.schedule(event_handler, path=WATCH_DIR, recursive=False)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
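For a one-off conversion without running the watcher, the two helpers can be called directly. A minimal sketch, assuming the script is importable as converter and that a test export named sample.qfx (a hypothetical filename) sits in the working directory:

from converter import Handler   # assumes the script is importable as converter
from ofxparse import OfxParser

# Parse a single export and write the CSV next to it, bypassing watchdog.
with open('sample.qfx', 'rb') as f:   # sample.qfx: hypothetical test file
    qfx = OfxParser.parse(f, fail_fast=False)
Handler.write_csv(Handler.get_statement_from_qfx(qfx), 'sample.csv')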