0001-wip.patch

Lauréline Guérin, 14 October 2022 09:51


Subject: [PATCH] wip
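
Factor the JSON URL construction (template evaluation and qs_data merging)
out of get_structured_items() into a new get_json_url() helper, and replace
the bare get_structured_items() lookup behind {{ data_source.<slug> }} with
a DataSourceProxy object: it keeps the list behaviour of the structured
items while also exposing the other keys of the JSON response (e.g. "meta")
to templates.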

---
 tests/test_datasource.py |   9 ++-
 wcs/data_sources.py      | 121 ++++++++++++++++++++++++++++-----------
 2 files changed, 97 insertions(+), 33 deletions(-)
diff --git a/tests/test_datasource.py b/tests/test_datasource.py
--- a/tests/test_datasource.py
+++ b/tests/test_datasource.py
@@ -1376,7 +1376,11 @@
             rsps.get(
                 'https://example.invalid/json',
                 json={
-                    "data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}]
+                    "data": [{"id": 0, "text": "zéro"}, {"id": 1, "text": "uné"}, {"id": 2, "text": "deux"}],
+                    "meta": {
+                        "foo": "bar",
+                        "blah": {"a": "b", "c": "d"},
+                    },
                 },
             )
 
@@ -1385,6 +1389,9 @@
                 == 'zéro'
             )
             assert rsps.calls[-1].request.url == 'https://example.invalid/json?t=hello'
+            assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.foo }}') == 'bar'
+            assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah }}') == "{'a': 'b', 'c': 'd'}"
+            assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah.c }}') == 'd'
 
 
 def export_to_indented_xml(data_source, include_id=False):
diff --git a/wcs/data_sources.py b/wcs/data_sources.py
--- a/wcs/data_sources.py
+++ b/wcs/data_sources.py
@@ -441,39 +441,9 @@
         # a 'data' key holding the list of items, each of them being a dict
         # with at least both an "id" and a "text" key.
         geojson = data_source.get('type') == 'geojson'
-        url = data_source.get('value')
+        url = get_json_url(data_source)
         if not url:
             return []
-        url = url.strip()
-        if Template.is_template_string(url):
-            vars = get_publisher().substitutions.get_context_variables(mode='lazy')
-            url = get_variadic_url(url, vars)
-        if data_source.get('qs_data'):  # merge qs_data into url
-            from wcs.workflows import WorkflowStatusItem
-
-            parsed = urllib.parse.urlparse(url)
-            qs = list(urllib.parse.parse_qsl(parsed.query))
-            for key, value in data_source['qs_data'].items():
-                try:
-                    value = WorkflowStatusItem.compute(value, raises=True, record_errors=False)
-                    value = str(value) if value is not None else ''
-                except Exception as e:
-                    get_publisher().record_error(
-                        _(
-                            'Failed to compute value "%(value)s" for "%(query)s" query parameter'
-                            % {'value': value, 'query': key}
-                        ),
-                        context='[DATASOURCE]',
-                        exception=e,
-                        notify=data_source.get('notify_on_errors'),
-                        record=data_source.get('record_on_errors'),
-                    )
-                else:
-                    key = force_str(key)
-                    value = force_str(value)
-                    qs.append((key, value))
-            qs = urllib.parse.urlencode(qs)
-            url = urllib.parse.urlunparse(parsed[:4] + (qs,) + parsed[5:6])
 
         request = get_request()
         if hasattr(request, 'datasources_cache') and url in request.datasources_cache:
@@ -503,6 +473,43 @@
     return []
 
 
+def get_json_url(data_source):
+    url = data_source.get('value')
+    if not url:
+        return None
+    url = url.strip()
+    if Template.is_template_string(url):
+        vars = get_publisher().substitutions.get_context_variables(mode='lazy')
+        url = get_variadic_url(url, vars)
+    if data_source.get('qs_data'):  # merge qs_data into url
+        from wcs.workflows import WorkflowStatusItem
+
+        parsed = urllib.parse.urlparse(url)
+        qs = list(urllib.parse.parse_qsl(parsed.query))
+        for key, value in data_source['qs_data'].items():
+            try:
+                value = WorkflowStatusItem.compute(value, raises=True, record_errors=False)
+                value = str(value) if value is not None else ''
+            except Exception as e:
+                get_publisher().record_error(
+                    _(
+                        'Failed to compute value "%(value)s" for "%(query)s" query parameter'
+                        % {'value': value, 'query': key}
+                    ),
+                    context='[DATASOURCE]',
+                    exception=e,
+                    notify=data_source.get('notify_on_errors'),
+                    record=data_source.get('record_on_errors'),
+                )
+            else:
+                key = force_str(key)
+                value = force_str(value)
+                qs.append((key, value))
+        qs = urllib.parse.urlencode(qs)
+        url = urllib.parse.urlunparse(parsed[:4] + (qs,) + parsed[5:6])
+    return url
+
+
 def get_real(data_source):
     if not data_source:
         return None
@@ -1031,12 +1038,61 @@
 
 class DataSourcesSubstitutionProxy:
     def __getattr__(self, attr):
-        return get_structured_items(NamedDataSource.get_by_slug(attr).extended_data_source)
+        return DataSourceProxy(attr)
+
+    def inspect_keys(self):
+        return []
+
+
+class DataSourceProxy:
+    def __init__(self, name):
+        self.name = name
+        self.data_source = NamedDataSource.get_by_slug(self.name)
+        self._list = get_structured_items(self.data_source.extended_data_source)
+        self._data = Ellipsis
 
     def inspect_keys(self):
+        # XXX ?
         return []
 
+    def get_value(self):
+        return self._list
+
+    def __len__(self):
+        return len(self._list)
+
+    def __str__(self):
+        return str(self._list)
+
+    def __repr__(self):
+        return '<DataSourceProxy, %s>' % self.name
+
+    def __iter__(self):
+        yield from self._list
+
+    def __nonzero__(self):
+        return any(self)
+
+    def __contains__(self, value):
+        return value in list(self)
+
+    def __eq__(self, other):
+        return list(self) == list(other)
+
+    def __getitem__(self, key):
+        return list(self)[key]
+
+    def __getattr__(self, attr):
+        data_source = self.data_source.extended_data_source
+        if data_source.get('type') not in ['json', 'geojson']:
+            raise AttributeError
+        if self._data is Ellipsis:
+            url = get_json_url(data_source)
+            self._data = get_json_from_url(url, data_source)
+        if self._data is None:
+            raise AttributeError
+        return self._data[attr]
+
 
 def has_chrono(publisher):
     return publisher.get_site_option('chrono_url') is not None
-
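
With this patch, {{ data_source.<slug> }} still evaluates like the list of
structured items, while other keys of the JSON response become reachable as
attributes. A minimal sketch of the resulting behaviour, taken from the test
additions above (it assumes a named data source with slug "foobar" whose
endpoint returns the {"data": ..., "meta": ...} payload used in the test):

    from wcs.workflows import WorkflowStatusItem

    # Extra keys of the JSON payload are resolved through
    # DataSourceProxy.__getattr__, which fetches the raw JSON response
    # (via get_json_url() / get_json_from_url()) on first access and
    # caches it in self._data for subsequent lookups:
    assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.foo }}') == 'bar'
    assert WorkflowStatusItem.compute('{{ data_source.foobar.meta.blah.c }}') == 'd'

Note that the raw response is fetched at most once per proxy instance, and
only for 'json'/'geojson' data sources; other types raise AttributeError and
keep the plain list behaviour.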