1 |
1 |
# passerelle - uniform access to multiple data sources and services
|
2 |
|
# Copyright (C) 2019 Entr'ouvert
|
|
2 |
# Copyright (C) 2020 Entr'ouvert
|
3 |
3 |
#
|
4 |
4 |
# This program is free software: you can redistribute it and/or modify it
|
5 |
5 |
# under the terms of the GNU Affero General Public License as published
|
... | ... | |
36 |
36 |
from passerelle.utils.api import endpoint
|
37 |
37 |
from passerelle.utils.jsonresponse import APIError
|
38 |
38 |
from passerelle.utils.xml import JSONSchemaFromXMLSchema
|
|
39 |
from . import utils
|
39 |
40 |
|
40 |
41 |
logger = logging.getLogger('passerelle.contrib.toulouse_axel')
|
41 |
42 |
|
42 |
43 |
BASE_XSD_PATH = os.path.join(os.path.dirname(__file__), 'xsd')
|
43 |
44 |
|
44 |
45 |
|
45 |
|
# JSON-schema fragment: accepts a real boolean or a string spelling of one
# ("oui"/"non", "true"/"false", "1"/"0", in any letter case) — the same
# spellings that encode_bool() later maps to Axel's 'OUI'/'NON'.
boolean_type = {
    'oneOf': [
        {'type': 'boolean'},
        {
            'type': 'string',
            'pattern': '[Oo][Uu][Ii]|[Nn][Oo][Nn]|[Tt][Rr][Uu][Ee]|[Ff][Aa][Ll][Ss][Ee]|1|0',
        }
    ]
}
# JSON-schema fragment: ISO-8601-like datetime string (YYYY-MM-DDTHH:MM:SS,
# no timezone component).
datetime_type = {
    'type': 'string',
    'pattern': '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
}
# strptime/strftime formats used on the JSON (ISO-like) side ...
json_date_format = '%Y-%m-%d'
json_datetime_format = '%Y-%m-%dT%H:%M:%S'
# ... and on the Axel XML side (French day-first formats).
xml_date_format = '%d/%m/%Y'
xml_datetime_format = '%d/%m/%Y %H:%M:%S'
|
62 |
|
|
63 |
|
|
64 |
46 |
PAYMENT_SCHEMA = {
|
65 |
47 |
'type': 'object',
|
66 |
48 |
'properties': {
|
67 |
|
'transaction_date': copy.deepcopy(datetime_type),
|
|
49 |
'transaction_date': copy.deepcopy(utils.datetime_type),
|
68 |
50 |
'transaction_id': {
|
69 |
51 |
'type': 'string',
|
70 |
52 |
}
|
... | ... | |
73 |
55 |
}
|
74 |
56 |
|
75 |
57 |
|
76 |
|
def indent(tree, space=" ", level=0):
    """Indent an XML tree in-place for pretty-printing.

    Backport of ``xml.etree.ElementTree.indent`` from Python 3.9
    (Lib/xml/etree/ElementTree.py).

    :param tree: ElementTree or Element; reformatted in place.
    :param space: whitespace unit inserted once per nesting level.
    :param level: nesting level of *tree* inside an outer document.
    :raises ValueError: if *level* is negative.
    """
    if isinstance(tree, ET.ElementTree):
        tree = tree.getroot()
    if level < 0:
        # BUG FIX: the placeholder is named, so format() must receive a
        # keyword argument; the previous positional call raised KeyError
        # instead of producing this ValueError message.
        raise ValueError("Initial indentation level must be >= 0, got {level}".format(level=level))
    if not len(tree):
        # Leaf element: nothing to indent.
        return

    # Reduce the memory consumption by reusing indentation strings.
    indentations = ["\n" + level * space]

    def _indent_children(elem, level):
        # Start a new indentation level for the first child.
        child_level = level + 1
        try:
            child_indentation = indentations[child_level]
        except IndexError:
            child_indentation = indentations[level] + space
            indentations.append(child_indentation)

        if not elem.text or not elem.text.strip():
            elem.text = child_indentation

        for child in elem:
            if len(child):
                _indent_children(child, child_level)
            if not child.tail or not child.tail.strip():
                child.tail = child_indentation

        # Dedent after the last child by overwriting the previous indentation.
        if not child.tail.strip():
            child.tail = indentations[level]

    _indent_children(tree, 0)
|
111 |
|
|
112 |
|
|
113 |
|
def encode_bool(obj):
    """Map common boolean spellings to Axel's 'OUI'/'NON' strings.

    True / 'true' / 'oui' / '1' (any case) become 'OUI';
    False / 'false' / 'non' / '0' (any case) become 'NON';
    every other value is returned unchanged.
    """
    lowered = str(obj).lower()
    if obj is True or lowered in ('true', 'oui', '1'):
        return 'OUI'
    if obj is False or lowered in ('false', 'non', '0'):
        return 'NON'
    return obj
|
119 |
|
|
120 |
|
|
121 |
|
def encode_datetime(obj):
    """Re-format a JSON datetime string into the Axel XML datetime format.

    Anything that does not parse with ``json_datetime_format`` is returned
    untouched (best effort: validation happens elsewhere).
    """
    try:
        parsed = datetime.datetime.strptime(obj, json_datetime_format)
        return parsed.strftime(xml_datetime_format)
    except ValueError:
        return obj
|
126 |
|
|
127 |
|
|
128 |
58 |
class AxelSchema(JSONSchemaFromXMLSchema):
|
129 |
59 |
type_map = {
|
130 |
60 |
'{urn:AllAxelTypes}DATEREQUIREDType': 'date',
|
... | ... | |
142 |
72 |
|
143 |
73 |
def encode_date(self, obj):
|
144 |
74 |
try:
|
145 |
|
return datetime.datetime.strptime(obj, json_date_format).strftime(xml_date_format)
|
|
75 |
return datetime.datetime.strptime(obj, utils.json_date_format).strftime(utils.xml_date_format)
|
146 |
76 |
except ValueError:
|
147 |
77 |
return obj
|
148 |
78 |
|
... | ... | |
152 |
82 |
return self.encode_date(obj)
|
153 |
83 |
|
154 |
84 |
def decode_date(self, data):
|
155 |
|
value = datetime.datetime.strptime(data.text, xml_date_format).strftime(json_date_format)
|
|
85 |
value = datetime.datetime.strptime(data.text, utils.xml_date_format).strftime(utils.json_date_format)
|
156 |
86 |
return xmlschema.ElementData(tag=data.tag, text=value, content=data.content, attributes=data.attributes)
|
157 |
87 |
|
158 |
88 |
def decode_date_optional(self, data):
|
... | ... | |
162 |
92 |
|
163 |
93 |
@classmethod
|
164 |
94 |
def schema_bool(cls):
|
165 |
|
return copy.deepcopy(boolean_type)
|
|
95 |
return copy.deepcopy(utils.boolean_type)
|
166 |
96 |
|
167 |
97 |
def encode_bool(self, obj):
|
168 |
|
return encode_bool(obj)
|
|
98 |
return utils.encode_bool(obj)
|
169 |
99 |
|
170 |
100 |
def decode_bool(self, data):
|
171 |
101 |
value = False
|
... | ... | |
236 |
166 |
serialized_request = self.request_converter.encode(request_data)
|
237 |
167 |
except xmlschema.XMLSchemaValidationError as e:
|
238 |
168 |
raise AxelError('invalid request %s' % str(e))
|
239 |
|
indent(serialized_request)
|
|
169 |
utils.indent(serialized_request)
|
240 |
170 |
serialized_request = ET.tostring(serialized_request)
|
241 |
171 |
try:
|
242 |
172 |
self.request_converter.xml_schema.validate(serialized_request)
|
... | ... | |
251 |
181 |
'') # FIXME: What is the user parameter for ?
|
252 |
182 |
|
253 |
183 |
xml_result = ET.fromstring(result.encode('utf-8'))
|
254 |
|
indent(xml_result)
|
|
184 |
utils.indent(xml_result)
|
255 |
185 |
pretty_result = ET.tostring(xml_result)
|
256 |
186 |
if xml_result.find('RESULTAT/STATUS').text != 'OK':
|
257 |
187 |
msg = xml_result.find('RESULTAT/COMMENTAIRES').text
|
... | ... | |
335 |
265 |
form_maj_famille_dui.request_schema['properties']['PORTAIL']['properties']['DUI'])
|
336 |
266 |
|
337 |
267 |
for flag in sorted(UPDATE_FAMILY_FLAGS.keys()):
|
338 |
|
flag_type = copy.deepcopy(boolean_type)
|
|
268 |
flag_type = copy.deepcopy(utils.boolean_type)
|
339 |
269 |
if flag not in UPDATE_FAMILY_REQUIRED_FLAGS:
|
340 |
270 |
flag_type['oneOf'].append({'type': 'null'})
|
341 |
271 |
flag_type['oneOf'].append({'type': 'string', 'enum': ['']})
|
... | ... | |
384 |
314 |
'required': ['ASTHME', 'MEDICAMENTEUSES', 'ALIMENTAIRES', 'AUTRES'],
|
385 |
315 |
}
|
386 |
316 |
for key in ['ASTHME', 'MEDICAMENTEUSES', 'ALIMENTAIRES']:
|
387 |
|
sanitaire_properties['ALLERGIE']['properties'][key] = copy.deepcopy(boolean_type)
|
|
317 |
sanitaire_properties['ALLERGIE']['properties'][key] = copy.deepcopy(utils.boolean_type)
|
388 |
318 |
sanitaire_properties['ALLERGIE']['properties']['AUTRES'] = {
|
389 |
319 |
'oneOf': [
|
390 |
320 |
{'type': 'null'},
|
... | ... | |
576 |
506 |
flags = sorted(self.UPDATE_FAMILY_FLAGS.keys())
|
577 |
507 |
for flag in flags:
|
578 |
508 |
flag_value = post_data.get(flag)
|
579 |
|
flag_value = encode_bool(flag_value)
|
|
509 |
flag_value = utils.encode_bool(flag_value)
|
580 |
510 |
|
581 |
511 |
# no update for the related block
|
582 |
512 |
if flag_value == 'OUI':
|
... | ... | |
754 |
684 |
}
|
755 |
685 |
}
|
756 |
686 |
|
757 |
|
def normalize_invoice(self, invoice, dui, historical=False, vendor_base=None):
    """Build a normalized invoice dict from a raw Axel invoice record.

    :param invoice: decoded Axel invoice payload (dict).
    :param dui: family DUI identifier, used to namespace the invoice id.
    :param historical: True for read-only archived invoices (different
        source field names, never payable online).
    :param vendor_base: optional dict merged (and mutated) with the raw
        invoice to populate the 'vendor' entry.
    """
    vendor = vendor_base or {}
    vendor.update(invoice)

    invoice_id = '%s-%s' % (dui, invoice['IDFACTURE'])
    if historical:
        invoice_id = 'historical-%s' % invoice_id

    data = {
        'id': invoice_id,
        'display_id': str(invoice['IDFACTURE']),
        'label': invoice['LIBELLE'],
        'paid': False,
        'vendor': {'toulouse-axel': vendor},
    }

    if historical:
        # Archived invoice: nothing left to pay, never payable online.
        data['amount'] = 0
        data['total_amount'] = invoice['MONTANT']
        data['created'] = invoice['EMISSION']
        data['pay_limit_date'] = ''
        data['online_payment'] = False
        data['has_pdf'] = invoice['IPDF'] == '1'
    else:
        data['amount'] = invoice['RESTEAPAYER']
        data['total_amount'] = invoice['MONTANTTOTAL']
        data['created'] = invoice['DATEEMISSION']
        data['pay_limit_date'] = invoice['DATEECHEANCE']
        data['has_pdf'] = invoice['EXISTEPDF'] == '1'
        # Payable online only while something is due and the deadline has
        # not passed yet.
        due_date = datetime.datetime.strptime(invoice['DATEECHEANCE'], '%Y-%m-%d').date()
        data['online_payment'] = data['amount'] > 0 and due_date >= datetime.date.today()
    return data
|
790 |
|
|
791 |
687 |
def get_invoices(self, regie_id, dui=None, name_id=None):
|
792 |
688 |
assert name_id or dui
|
793 |
689 |
if name_id:
|
... | ... | |
807 |
703 |
for facture in data.get('FACTURES', []):
|
808 |
704 |
if facture['IDREGIE'] != regie_id:
|
809 |
705 |
continue
|
810 |
|
result.append(self.normalize_invoice(facture, dui))
|
|
706 |
result.append(utils.normalize_invoice(facture, dui))
|
811 |
707 |
return result
|
812 |
708 |
|
813 |
709 |
def get_historical_invoices(self, name_id):
|
... | ... | |
828 |
724 |
for direction in data.get('DIRECTION', []):
|
829 |
725 |
for facture in direction.get('FACTURE', []):
|
830 |
726 |
result.append(
|
831 |
|
self.normalize_invoice(
|
|
727 |
utils.normalize_invoice(
|
832 |
728 |
facture,
|
833 |
729 |
link.dui,
|
834 |
730 |
historical=True,
|
... | ... | |
968 |
864 |
|
969 |
865 |
transaction_amount = invoice['amount']
|
970 |
866 |
transaction_id = data['transaction_id']
|
971 |
|
transaction_date = encode_datetime(data['transaction_date'])
|
|
867 |
transaction_date = utils.encode_datetime(data['transaction_date'])
|
972 |
868 |
post_data = {
|
973 |
869 |
'IDFACTURE': int(invoice_id),
|
974 |
870 |
'IDREGIEENCAISSEMENT': '',
|