2475 |
2475 |
)
|
2476 |
2476 |
return sql_dict
|
2477 |
2477 |
|
|
2478 |
@classmethod
def _col2obdata(cls, row, i, field):
    """Decode the database column(s) of *row* that belong to *field*.

    Reading starts at column index *i*; returns an ``(obdata, i)`` tuple
    where ``obdata`` maps field ids (and their ``_display`` /
    ``_structured`` variants, when stored) to decoded values, and ``i``
    is the index of the first column after this field.
    """
    obdata = {}
    sql_type = SQL_TYPE_MAPPING.get(field.key, 'varchar')
    if sql_type is None:
        # field kind with no storage column of its own; consume nothing
        return ({}, i)

    value = row[i]
    if value is not None:
        value = str_encode(value)

        # per-field-kind adjustments of the raw column value
        if field.key == 'ranked-items':
            # stored as (data, rank) pairs; keep ranks as ints when possible
            ranked = {}
            for data, rank in value:
                try:
                    ranked[data] = int(rank)
                except ValueError:
                    ranked[data] = rank
            value = ranked
        elif field.key == 'password':
            value = {fmt: force_str(val) for fmt, val in value}
        elif field.key == 'computed':
            if isinstance(value, dict) and value.get('@type') == 'computed-data':
                value = value.get('data')

        # per-SQL-type adjustments
        if sql_type == 'date':
            value = value.timetuple()
        elif sql_type == 'bytea':
            # NOTE(review): unpickling data read back from the database;
            # assumed trusted since it is written by this application.
            value = pickle_loads(value)
        elif sql_type == 'jsonb' and isinstance(value, dict) and value.get('schema'):
            # block field, adapt date/field values
            for field_id, field_type in value.get('schema').items():
                if field_type not in ('date', 'file'):
                    continue
                for entry in value.get('data') or []:
                    subvalue = entry.get(field_id)
                    if not subvalue:
                        continue
                    if field_type == 'date':
                        entry[field_id] = time.strptime(subvalue, '%Y-%m-%d')
                    else:  # field_type == 'file'
                        # rebuild the upload object without running __init__
                        entry[field_id] = PicklableUpload.__new__(PicklableUpload)
                        entry[field_id].__setstate__(subvalue)

    obdata[field.id] = value
    i += 1

    if field.store_display_value:
        obdata['%s_display' % field.id] = str_encode(row[i])
        i += 1

    if field.store_structured_value:
        raw = row[i]
        if raw is not None:
            structured = pickle_loads(raw)
            # only expose the key when unpickling produced an actual value
            if structured is not None:
                obdata['%s_structured' % field.id] = structured
        i += 1

    return (obdata, i)
|
|
2534 |
|
2478 |
2535 |
@classmethod
|
2479 |
2536 |
def _row2obdata(cls, row, formdef):
|
2480 |
2537 |
obdata = {}
|
... | ... | |
2482 |
2539 |
if formdef.geolocations:
|
2483 |
2540 |
i += len(formdef.geolocations.keys())
|
2484 |
2541 |
for field in formdef.get_all_fields():
|
2485 |
|
sql_type = SQL_TYPE_MAPPING.get(field.key, 'varchar')
|
2486 |
|
if sql_type is None:
|
2487 |
|
continue
|
2488 |
|
value = row[i]
|
2489 |
|
if value is not None:
|
2490 |
|
value = str_encode(value)
|
2491 |
|
if field.key == 'ranked-items':
|
2492 |
|
d = {}
|
2493 |
|
for data, rank in value:
|
2494 |
|
try:
|
2495 |
|
d[data] = int(rank)
|
2496 |
|
except ValueError:
|
2497 |
|
d[data] = rank
|
2498 |
|
value = d
|
2499 |
|
elif field.key == 'password':
|
2500 |
|
d = {}
|
2501 |
|
for fmt, val in value:
|
2502 |
|
d[fmt] = force_str(val)
|
2503 |
|
value = d
|
2504 |
|
elif field.key == 'computed':
|
2505 |
|
if isinstance(value, dict) and value.get('@type') == 'computed-data':
|
2506 |
|
value = value.get('data')
|
2507 |
|
if sql_type == 'date':
|
2508 |
|
value = value.timetuple()
|
2509 |
|
elif sql_type == 'bytea':
|
2510 |
|
value = pickle_loads(value)
|
2511 |
|
elif sql_type == 'jsonb' and isinstance(value, dict) and value.get('schema'):
|
2512 |
|
# block field, adapt date/field values
|
2513 |
|
for field_id, field_type in value.get('schema').items():
|
2514 |
|
if field_type not in ('date', 'file'):
|
2515 |
|
continue
|
2516 |
|
for entry in value.get('data') or []:
|
2517 |
|
subvalue = entry.get(field_id)
|
2518 |
|
if subvalue and field_type == 'date':
|
2519 |
|
entry[field_id] = time.strptime(subvalue, '%Y-%m-%d')
|
2520 |
|
elif subvalue and field_type == 'file':
|
2521 |
|
entry[field_id] = PicklableUpload.__new__(PicklableUpload)
|
2522 |
|
entry[field_id].__setstate__(subvalue)
|
2523 |
|
|
2524 |
|
obdata[field.id] = value
|
2525 |
|
i += 1
|
2526 |
|
if field.store_display_value:
|
2527 |
|
value = str_encode(row[i])
|
2528 |
|
obdata['%s_display' % field.id] = value
|
2529 |
|
i += 1
|
2530 |
|
if field.store_structured_value:
|
2531 |
|
value = row[i]
|
2532 |
|
if value is not None:
|
2533 |
|
obdata['%s_structured' % field.id] = pickle_loads(value)
|
2534 |
|
if obdata['%s_structured' % field.id] is None:
|
2535 |
|
del obdata['%s_structured' % field.id]
|
2536 |
|
i += 1
|
2537 |
|
|
|
2542 |
coldata, i = cls._col2obdata(row, i, field)
|
|
2543 |
obdata.update(coldata)
|
2538 |
2544 |
return obdata
|
2539 |
2545 |
|
2540 |
2546 |
@classmethod
|
2541 |
|
-
|