0003-update-and-cache-metadata-from-URL-and-path-10196.patch
README | ||
---|---|---|
76 | 76 |
MELLON_IDENTITY_PROVIDERS |
77 | 77 |
------------------------- |
78 | 78 | |
79 |
A list of dictionaries, only one key is mandatory in those |
|
80 |
dictionaries `METADATA` it should contain the UTF-8 content of the |
|
81 |
metadata file of the identity provider or if it starts with a slash |
|
82 |
the absolute path toward a metadata file. All other keys are override |
|
83 |
of generic settings. |
|
79 |
A list of dictionaries, they must contain at least one of the keys `METADATA` |
|
80 |
(inline copy of the identity provider metadata), `METADATA_URL` URL of the IdP |
|
81 |
metadata file, or `METADATA_PATH` an absolute path to the IdP metadata file. |
|
82 |
All other keys are overrides of generic settings. |
|
83 | ||
84 |
When using an URL, the URL is automatically cached in the `MEDIA_ROOT` |
|
85 |
directory of your application in the directory named `mellon_metadata_cache`. |
|
86 |
If you restart the application and the URL is unavailable, the file cache will |
|
87 |
be used. The cache will be refreshed every `MELLON_METADATA_CACHE_TIME` seconds. |
|
88 |
If the HTTP retrieval of the metadata URL takes longer than |
|
89 |
`MELLON_METADATA_HTTP_TIMEOUT` seconds, retrieval will be skipped. |
|
90 | ||
91 |
When the cache is already loaded, retrievals are done in the background by a |
|
92 |
thread. |
|
93 | ||
94 |
When using a local absolute path, the metadata is reloaded each time the |
|
95 |
modification time of the file is later than the last time it was loaded. |
|
84 | 96 | |
85 | 97 |
MELLON_PUBLIC_KEYS |
86 | 98 |
------------------ |
... | ... | |
287 | 299 |
individually, if not django-mellon will refuse to link multiple users matching |
288 | 300 |
the rules. |
289 | 301 | |
302 |
MELLON_METADATA_CACHE_TIME |
|
303 |
-------------------------- |
|
304 | ||
305 |
When using METADATA_URL to reference a metadata file, it's the duration in |
|
306 |
seconds between refreshes of the metadata file. Default is 3600 seconds, 1 hour. |
|
307 | ||
308 |
MELLON_METADATA_HTTP_TIMEOUT |
|
309 |
---------------------------- |
|
310 | ||
311 |
Timeout in seconds for HTTP call made to retrieve metadata files. Default is 10 |
|
312 |
seconds. |
|
313 | ||
290 | 314 |
Tests |
291 | 315 |
===== |
292 | 316 |
debian/control | ||
---|---|---|
15 | 15 |
python (>= 2.7), |
16 | 16 |
python-django (>= 1.5), |
17 | 17 |
python-isodate, |
18 |
python-lasso |
|
18 |
python-lasso, |
|
19 |
python-atomicwrites |
|
19 | 20 |
Breaks: python-hobo (<< 0.34.5) |
20 | 21 |
Description: SAML authentication for Django |
21 | 22 | |
... | ... | |
24 | 25 |
Depends: ${misc:Depends}, ${python:Depends}, |
25 | 26 |
python3-django (>= 1.5), |
26 | 27 |
python3-isodate, |
27 |
python3-lasso |
|
28 |
python3-lasso, |
|
29 |
python3-atomicwrites |
|
28 | 30 |
Description: SAML authentication for Django |
mellon/adapters.py | ||
---|---|---|
13 | 13 |
# You should have received a copy of the GNU Affero General Public License |
14 | 14 |
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
15 | 15 | |
16 |
from xml.etree import ElementTree as ET |
|
17 |
import hashlib |
|
16 | 18 |
import logging |
19 |
import os |
|
20 |
import threading |
|
21 |
import time |
|
17 | 22 |
import uuid |
18 |
from xml.etree import ElementTree as ET |
|
19 | 23 | |
20 | 24 |
import lasso |
21 | 25 |
import requests |
22 | 26 |
import requests.exceptions |
27 |
from atomicwrites import atomic_write |
|
23 | 28 | |
24 | 29 |
from django.core.exceptions import PermissionDenied, FieldDoesNotExist |
30 |
from django.core.files.storage import default_storage |
|
25 | 31 |
from django.contrib import auth |
26 | 32 |
from django.contrib.auth.models import Group |
27 | 33 |
from django.utils import six |
28 | 34 |
from django.utils.encoding import force_text |
35 |
from django.utils.six.moves.urllib.parse import urlparse |
|
29 | 36 | |
30 | 37 |
from . import utils, app_settings, models |
31 | 38 | |
32 | 39 |
User = auth.get_user_model() |
33 | 40 | |
41 |
logger = logging.getLogger(__name__) |
|
42 | ||
34 | 43 | |
35 | 44 |
class UserCreationError(Exception): |
36 | 45 |
pass |
... | ... | |
46 | 55 | |
47 | 56 | |
48 | 57 |
class DefaultAdapter(object): |
49 |
def __init__(self, *args, **kwargs): |
|
50 |
self.logger = logging.getLogger(__name__) |
|
51 | ||
52 | 58 |
def get_idp(self, entity_id): |
53 | 59 |
'''Find the first IdP definition matching entity_id''' |
54 | 60 |
for idp in self.get_idps(): |
... | ... | |
63 | 69 | |
64 | 70 |
def get_idps(self): |
65 | 71 |
for i, idp in enumerate(self.get_identity_providers_setting()): |
66 |
if 'METADATA_URL' in idp and 'METADATA' not in idp: |
|
72 |
if self.load_idp(idp, i): |
|
73 |
yield idp |
|
74 | ||
75 |
def load_metadata_path(self, idp, i): |
|
76 |
path = idp['METADATA_PATH'] |
|
77 |
if not os.path.exists(path): |
|
78 |
logger.warning('metadata path %s does not exist', path) |
|
79 |
return |
|
80 |
last_update = idp.get('METADATA_PATH_LAST_UPDATE', 0) |
|
81 |
try: |
|
82 |
mtime = os.stat(path).st_mtime |
|
83 |
except OSError as e: |
|
84 |
logger.warning('metadata path %s : stat() call failed, %s', path, e) |
|
85 |
return |
|
86 |
if last_update == 0 or mtime >= last_update: |
|
87 |
idp['METADATA_PATH_LAST_UPDATE'] = time.time() |
|
88 |
try: |
|
89 |
with open(path) as fd: |
|
90 |
metadata = fd.read() |
|
91 |
except OSError as e: |
|
92 |
logger.warning('metadata path %s : open()/read() call failed, %s', path, e) |
|
93 |
return |
|
94 |
entity_id = self.load_entity_id(metadata, i) |
|
95 |
if not entity_id: |
|
96 |
logger.error('invalid metadata file retrieved from %s', path) |
|
97 |
return |
|
98 |
if 'ENTITY_ID' in idp and idp['ENTITY_ID'] != entity_id: |
|
99 |
logger.error('metadata path %s : entityID changed %r != %r', path, entity_id, idp['ENTITY_ID']) |
|
100 |
del idp['ENTITY_ID'] |
|
101 |
idp['METADATA'] = metadata |
|
102 | ||
103 |
def load_metadata_url(self, idp, i): |
|
104 |
url = idp['METADATA_URL'] |
|
105 |
metadata_cache_time = utils.get_setting(idp, 'METADATA_CACHE_TIME') |
|
106 |
timeout = utils.get_setting(idp, 'METADATA_HTTP_TIMEOUT') |
|
107 | ||
108 |
warning = logger.warning |
|
109 |
if 'METADATA' not in idp: |
|
110 |
# if we have no metadata in cache, we must emit errors |
|
111 |
warning = logger.error |
|
112 | ||
113 |
try: |
|
114 |
hostname = urlparse(url).hostname |
|
115 |
except (ValueError, TypeError) as e: |
|
116 |
warning('invalid METADATA_URL %r: %s', url, e) |
|
117 |
return |
|
118 |
if not hostname: |
|
119 |
warning('no hostname in METADATA_URL %r: %s', url) |
|
120 |
return |
|
121 | ||
122 |
last_update = idp.get('METADATA_URL_LAST_UPDATE', 0) |
|
123 |
now = time.time() |
|
124 | ||
125 |
try: |
|
126 |
url_fingerprint = hashlib.md5(url.encode('ascii')).hexdigest() |
|
127 |
file_cache_key = '%s_%s.xml' % (hostname, url_fingerprint) |
|
128 |
except (UnicodeError, TypeError, ValueError): |
|
129 |
warning('unable to compute file_cache_key') |
|
130 |
return |
|
131 | ||
132 |
cache_directory = default_storage.path('mellon_metadata_cache') |
|
133 |
file_cache_path = os.path.join(cache_directory, file_cache_key) |
|
134 | ||
135 |
if metadata_cache_time: |
|
136 |
# METADATA_CACHE_TIME == 0 disable the file cache |
|
137 |
if not os.path.exists(cache_directory): |
|
138 |
os.makedirs(cache_directory) |
|
139 | ||
140 |
if os.path.exists(file_cache_path) and 'METADATA' not in idp: |
|
141 |
try: |
|
142 |
with open(file_cache_path) as fd: |
|
143 |
idp['METADATA'] = fd.read() |
|
144 |
# use file cache mtime as last_update time, prevent too many loading from different workers |
|
145 |
last_update = max(last_update, os.stat(file_cache_path).st_mtime) |
|
146 |
except OSError: |
|
147 |
warning('metadata url %s : error when loading the file cache %s', url, file_cache_path) |
|
148 | ||
149 |
# fresh cache, skip loading |
|
150 |
if last_update and 'METADATA' in idp and (now - last_update) < metadata_cache_time: |
|
151 |
return |
|
152 | ||
153 |
def __http_get(): |
|
154 |
try: |
|
67 | 155 |
verify_ssl_certificate = utils.get_setting( |
68 | 156 |
idp, 'VERIFY_SSL_CERTIFICATE') |
69 | 157 |
try: |
70 |
response = requests.get(idp['METADATA_URL'], verify=verify_ssl_certificate)
|
|
158 |
response = requests.get(url, verify=verify_ssl_certificate, timeout=timeout)
|
|
71 | 159 |
response.raise_for_status() |
72 | 160 |
except requests.exceptions.RequestException as e: |
73 |
self.logger.error( |
|
74 |
u'retrieval of metadata URL %r failed with error %s for %d-th idp', |
|
75 |
idp['METADATA_URL'], e, i) |
|
76 |
continue |
|
161 |
warning('metadata url %s : HTTP request failed %s', url, e) |
|
162 |
return |
|
163 | ||
164 |
entity_id = self.load_entity_id(response.text, i) |
|
165 |
if not entity_id: |
|
166 |
warning('invalid metadata file retrieved from %s', url) |
|
167 |
return |
|
168 | ||
169 |
if 'ENTITY_ID' in idp and idp['ENTITY_ID'] != entity_id: |
|
170 |
# entityID change is always an error |
|
171 |
logger.error('metadata url %s : entityID changed %r != %r', url, entity_id, idp['ENTITY_ID']) |
|
172 |
del idp['ENTITY_ID'] |
|
173 | ||
77 | 174 |
idp['METADATA'] = response.text |
78 |
elif 'METADATA' in idp: |
|
79 |
if idp['METADATA'].startswith('/'): |
|
80 |
idp['METADATA'] = open(idp['METADATA']).read() |
|
81 |
else: |
|
82 |
self.logger.error(u'missing METADATA or METADATA_URL in %d-th idp', i) |
|
83 |
continue |
|
175 |
idp['METADATA_URL_LAST_UPDATE'] = now |
|
176 |
if metadata_cache_time: |
|
177 |
try: |
|
178 |
with atomic_write(file_cache_path, mode='wb', overwrite=True) as fd: |
|
179 |
fd.write(response.text.encode('utf-8')) |
|
180 |
except OSError as e: |
|
181 |
logger.error('metadata url %s : could not write file cache %s, %s', url, file_cache_path, e) |
|
182 |
idp['METADATA_PATH'] = file_cache_path |
|
183 |
# prevent reloading of the file cache immediately |
|
184 |
idp['METADATA_PATH_LAST_UPDATE'] = time.time() + 1 |
|
185 |
logger.debug('metadata url %s : update throught HTTP', url) |
|
186 |
finally: |
|
187 |
# release thread object |
|
188 |
idp.pop('METADATA_URL_UPDATE_THREAD', None) |
|
189 |
# emit an error if cache is too old |
|
190 |
stale_timeout = 24 * metadata_cache_time |
|
191 |
if last_update and (now - idp['METADATA_URL_LAST_UPDATE']) > stale_timeout: |
|
192 |
logger.error('metadata url %s : not updated since %.1f hours', |
|
193 |
stale_timeout / 3600.0) |
|
194 | ||
195 |
# we have cache, update in background |
|
196 |
if last_update and 'METADATA' in idp: |
|
197 |
t = threading.Thread(target=__http_get) |
|
198 |
t.start() |
|
199 |
# store thread in idp for tests |
|
200 |
idp['METADATA_URL_UPDATE_THREAD'] = t |
|
201 |
# suspend updates for HTTP timeout + 5 seconds |
|
202 |
idp['METADATA_URL_LAST_UPDATE'] = last_update + timeout + 5 |
|
203 |
else: |
|
204 |
# synchronous update |
|
205 |
__http_get() |
|
206 | ||
207 |
def load_metadata(self, idp, i): |
|
208 |
# legacy support |
|
209 |
if 'METADATA' in idp and idp['METADATA'].startswith('/'): |
|
210 |
idp['METADATA_PATH'] = idp['METADATA'] |
|
211 |
del idp['METADATA'] |
|
212 | ||
213 |
if 'METADATA_PATH' in idp: |
|
214 |
self.load_metadata_path(idp, i) |
|
215 | ||
216 |
if 'METADATA_URL' in idp: |
|
217 |
self.load_metadata_url(idp, i) |
|
218 | ||
219 |
if 'METADATA' in idp: |
|
84 | 220 |
if 'ENTITY_ID' not in idp: |
85 |
try: |
|
86 |
doc = ET.fromstring(idp['METADATA']) |
|
87 |
except (TypeError, ET.ParseError): |
|
88 |
self.logger.error(u'METADATA of %d-th idp is invalid', i) |
|
89 |
continue |
|
90 |
if doc.tag != '{%s}EntityDescriptor' % lasso.SAML2_METADATA_HREF: |
|
91 |
self.logger.error(u'METADATA of %d-th idp has no EntityDescriptor root tag', i) |
|
92 |
continue |
|
221 |
entity_id = self.load_entity_id(idp['METADATA'], i) |
|
222 |
if entity_id: |
|
223 |
idp['ENTITY_ID'] = entity_id |
|
93 | 224 | |
94 |
if 'entityID' not in doc.attrib: |
|
95 |
self.logger.error( |
|
96 |
u'METADATA of %d-th idp has no entityID attribute on its root tag', i) |
|
97 |
continue |
|
98 |
idp['ENTITY_ID'] = doc.attrib['entityID'] |
|
99 |
yield idp |
|
225 |
if 'ENTITY_ID' in idp: |
|
226 |
return idp['METADATA'] |
|
227 | ||
228 |
def load_entity_id(self, metadata, i): |
|
229 |
try: |
|
230 |
doc = ET.fromstring(metadata) |
|
231 |
except (TypeError, ET.ParseError): |
|
232 |
logger.error(u'METADATA of %d-th idp is invalid', i) |
|
233 |
return None |
|
234 |
if doc.tag != '{%s}EntityDescriptor' % lasso.SAML2_METADATA_HREF: |
|
235 |
logger.error(u'METADATA of %d-th idp has no EntityDescriptor root tag', i) |
|
236 |
return None |
|
237 | ||
238 |
if 'entityID' not in doc.attrib: |
|
239 |
logger.error( |
|
240 |
u'METADATA of %d-th idp has no entityID attribute on its root tag', i) |
|
241 |
return None |
|
242 |
return doc.attrib['entityID'] |
|
243 | ||
244 |
def load_idp(self, idp, i): |
|
245 |
self.load_metadata(idp, i) |
|
246 |
return 'ENTITY_ID' in idp |
|
100 | 247 | |
101 | 248 |
def authorize(self, idp, saml_attributes): |
102 | 249 |
if not idp: |
... | ... | |
116 | 263 |
username = force_text(username_template).format( |
117 | 264 |
realm=realm, attributes=saml_attributes, idp=idp)[:30] |
118 | 265 |
except ValueError: |
119 |
self.logger.error(u'invalid username template %r', username_template)
|
|
266 |
logger.error(u'invalid username template %r', username_template) |
|
120 | 267 |
except (AttributeError, KeyError, IndexError) as e: |
121 |
self.logger.error(
|
|
268 |
logger.error( |
|
122 | 269 |
u'invalid reference in username template %r: %s', username_template, e) |
123 | 270 |
except Exception: |
124 |
self.logger.exception(u'unknown error when formatting username')
|
|
271 |
logger.exception(u'unknown error when formatting username') |
|
125 | 272 |
else: |
126 | 273 |
return username |
127 | 274 | |
... | ... | |
131 | 278 |
def finish_create_user(self, idp, saml_attributes, user): |
132 | 279 |
username = self.format_username(idp, saml_attributes) |
133 | 280 |
if not username: |
134 |
self.logger.warning('could not build a username, login refused')
|
|
281 |
logger.warning('could not build a username, login refused') |
|
135 | 282 |
raise UserCreationError |
136 | 283 |
user.username = username |
137 | 284 |
user.save() |
... | ... | |
146 | 293 |
if len(name_id) == 1: |
147 | 294 |
name_id = name_id[0] |
148 | 295 |
else: |
149 |
self.logger.warning('more than one value for attribute %r, cannot federate',
|
|
150 |
transient_federation_attribute)
|
|
296 |
logger.warning('more than one value for attribute %r, cannot federate', |
|
297 |
transient_federation_attribute) |
|
151 | 298 |
return None |
152 | 299 |
else: |
153 | 300 |
return None |
... | ... | |
158 | 305 |
user = self.get_users_queryset(idp, saml_attributes).get( |
159 | 306 |
saml_identifiers__name_id=name_id, |
160 | 307 |
saml_identifiers__issuer=issuer) |
161 |
self.logger.info('looked up user %s with name_id %s from issuer %s', |
|
162 |
user, name_id, issuer) |
|
308 |
logger.info('looked up user %s with name_id %s from issuer %s', user, name_id, issuer) |
|
163 | 309 |
return user |
164 | 310 |
except User.DoesNotExist: |
165 | 311 |
pass |
... | ... | |
172 | 318 |
created = False |
173 | 319 |
if not user: |
174 | 320 |
if not utils.get_setting(idp, 'PROVISION'): |
175 |
self.logger.debug('provisionning disabled, login refused')
|
|
321 |
logger.debug('provisionning disabled, login refused') |
|
176 | 322 |
return None |
177 | 323 |
created = True |
178 | 324 |
user = self.create_user(User) |
179 | 325 | |
180 | 326 |
nameid_user = self._link_user(idp, saml_attributes, issuer, name_id, user) |
181 | 327 |
if user != nameid_user: |
182 |
self.logger.info('looked up user %s with name_id %s from issuer %s', |
|
183 |
nameid_user, name_id, issuer) |
|
328 |
logger.info('looked up user %s with name_id %s from issuer %s', nameid_user, name_id, issuer) |
|
184 | 329 |
if created: |
185 | 330 |
user.delete() |
186 | 331 |
return nameid_user |
... | ... | |
191 | 336 |
except UserCreationError: |
192 | 337 |
user.delete() |
193 | 338 |
return None |
194 |
self.logger.info('created new user %s with name_id %s from issuer %s', |
|
195 |
nameid_user, name_id, issuer) |
|
339 |
logger.info('created new user %s with name_id %s from issuer %s', nameid_user, name_id, issuer) |
|
196 | 340 |
return nameid_user |
197 | 341 | |
198 | 342 |
def _lookup_by_attributes(self, idp, saml_attributes, lookup_by_attributes): |
199 | 343 |
if not isinstance(lookup_by_attributes, list): |
200 |
self.logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list', lookup_by_attributes)
|
|
344 |
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list', lookup_by_attributes) |
|
201 | 345 |
return None |
202 | 346 | |
203 | 347 |
users = set() |
204 | 348 |
for line in lookup_by_attributes: |
205 | 349 |
if not isinstance(line, dict): |
206 |
self.logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list of dicts', line)
|
|
350 |
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: it must be a list of dicts', line) |
|
207 | 351 |
continue |
208 | 352 |
user_field = line.get('user_field') |
209 | 353 |
if not hasattr(user_field, 'isalpha'): |
210 |
self.logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: user_field is missing', line)
|
|
354 |
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: user_field is missing', line) |
|
211 | 355 |
continue |
212 | 356 |
try: |
213 | 357 |
User._meta.get_field(user_field) |
214 | 358 |
except FieldDoesNotExist: |
215 |
self.logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r, user field %s does not exist',
|
|
216 |
line, user_field)
|
|
359 |
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r, user field %s does not exist', |
|
360 |
line, user_field) |
|
217 | 361 |
continue |
218 | 362 |
saml_attribute = line.get('saml_attribute') |
219 | 363 |
if not hasattr(saml_attribute, 'isalpha'): |
220 |
self.logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: saml_attribute is missing', line)
|
|
364 |
logger.error('invalid LOOKUP_BY_ATTRIBUTES configuration %r: saml_attribute is missing', line) |
|
221 | 365 |
continue |
222 | 366 |
values = saml_attributes.get(saml_attribute) |
223 | 367 |
if not values: |
224 |
self.logger.error('looking for user by saml attribute %r and user field %r, skipping because empty',
|
|
225 |
saml_attribute, user_field)
|
|
368 |
logger.error('looking for user by saml attribute %r and user field %r, skipping because empty', |
|
369 |
saml_attribute, user_field) |
|
226 | 370 |
continue |
227 | 371 |
ignore_case = line.get('ignore-case', False) |
228 | 372 |
for value in values: |
... | ... | |
232 | 376 |
users_found = self.get_users_queryset(idp, saml_attributes).filter( |
233 | 377 |
saml_identifiers__isnull=True, **{key: value}) |
234 | 378 |
if not users_found: |
235 |
self.logger.debug('looking for users by attribute %r and user field %r with value %r: not found',
|
|
236 |
saml_attribute, user_field, value)
|
|
379 |
logger.debug('looking for users by attribute %r and user field %r with value %r: not found', |
|
380 |
saml_attribute, user_field, value) |
|
237 | 381 |
continue |
238 |
self.logger.info(u'looking for user by attribute %r and user field %r with value %r: found %s',
|
|
239 |
saml_attribute, user_field, value, display_truncated_list(users_found))
|
|
382 |
logger.info(u'looking for user by attribute %r and user field %r with value %r: found %s', |
|
383 |
saml_attribute, user_field, value, display_truncated_list(users_found)) |
|
240 | 384 |
users.update(users_found) |
241 | 385 |
if len(users) == 1: |
242 | 386 |
user = list(users)[0] |
243 |
self.logger.info(u'looking for user by attributes %r: found user %s', |
|
244 |
lookup_by_attributes, user) |
|
387 |
logger.info(u'looking for user by attributes %r: found user %s', lookup_by_attributes, user) |
|
245 | 388 |
return user |
246 | 389 |
elif len(users) > 1: |
247 |
self.logger.warning(u'looking for user by attributes %r: too many users found(%d), failing',
|
|
248 |
lookup_by_attributes, len(users))
|
|
390 |
logger.warning(u'looking for user by attributes %r: too many users found(%d), failing', |
|
391 |
lookup_by_attributes, len(users)) |
|
249 | 392 |
return None |
250 | 393 | |
251 | 394 |
def _link_user(self, idp, saml_attributes, issuer, name_id, user): |
... | ... | |
269 | 412 |
try: |
270 | 413 |
value = force_text(tpl).format(realm=realm, attributes=saml_attributes, idp=idp) |
271 | 414 |
except ValueError: |
272 |
self.logger.warning(u'invalid attribute mapping template %r', tpl)
|
|
415 |
logger.warning(u'invalid attribute mapping template %r', tpl) |
|
273 | 416 |
except (AttributeError, KeyError, IndexError, ValueError) as e: |
274 |
self.logger.warning(
|
|
417 |
logger.warning( |
|
275 | 418 |
u'invalid reference in attribute mapping template %r: %s', tpl, e) |
276 | 419 |
else: |
277 | 420 |
model_field = user._meta.get_field(field) |
... | ... | |
281 | 424 |
old_value = getattr(user, field) |
282 | 425 |
setattr(user, field, value) |
283 | 426 |
attribute_set = True |
284 |
self.logger.info(u'set field %s of user %s to value %r (old value %r)', field, |
|
285 |
user, value, old_value) |
|
427 |
logger.info(u'set field %s of user %s to value %r (old value %r)', field, user, value, old_value) |
|
286 | 428 |
if attribute_set: |
287 | 429 |
user.save() |
288 | 430 | |
... | ... | |
305 | 447 |
user.is_staff = True |
306 | 448 |
user.is_superuser = True |
307 | 449 |
attribute_set = True |
308 |
self.logger.info('flag is_staff and is_superuser added to user %s', user)
|
|
450 |
logger.info('flag is_staff and is_superuser added to user %s', user) |
|
309 | 451 |
break |
310 | 452 |
else: |
311 | 453 |
if user.is_superuser or user.is_staff: |
312 | 454 |
user.is_staff = False |
313 | 455 |
user.is_superuser = False |
314 |
self.logger.info('flag is_staff and is_superuser removed from user %s', user)
|
|
456 |
logger.info('flag is_staff and is_superuser removed from user %s', user) |
|
315 | 457 |
attribute_set = True |
316 | 458 |
if attribute_set: |
317 | 459 |
user.save() |
... | ... | |
334 | 476 |
continue |
335 | 477 |
groups.append(group) |
336 | 478 |
for group in Group.objects.filter(pk__in=[g.pk for g in groups]).exclude(user=user): |
337 |
self.logger.info(
|
|
479 |
logger.info( |
|
338 | 480 |
u'adding group %s (%s) to user %s (%s)', group, group.pk, user, user.pk) |
339 | 481 |
User.groups.through.objects.get_or_create(group=group, user=user) |
340 | 482 |
qs = User.groups.through.objects.exclude( |
341 | 483 |
group__pk__in=[g.pk for g in groups]).filter(user=user) |
342 | 484 |
for rel in qs: |
343 |
self.logger.info(u'removing group %s (%s) from user %s (%s)', rel.group, |
|
344 |
rel.group.pk, rel.user, rel.user.pk) |
|
485 |
logger.info(u'removing group %s (%s) from user %s (%s)', rel.group, rel.group.pk, rel.user, rel.user.pk) |
|
345 | 486 |
qs.delete() |
mellon/app_settings.py | ||
---|---|---|
41 | 41 |
'LOGIN_HINTS': [], |
42 | 42 |
'SIGNATURE_METHOD': 'RSA-SHA256', |
43 | 43 |
'LOOKUP_BY_ATTRIBUTES': [], |
44 |
'METADATA_CACHE_TIME': 3600, |
|
45 |
'METADATA_HTTP_TIMEOUT': 10, |
|
44 | 46 |
} |
45 | 47 | |
46 | 48 |
@property |
mellon/utils.py | ||
---|---|---|
95 | 95 |
key = key[0] |
96 | 96 |
server.setEncryptionPrivateKeyWithPassword(key, password) |
97 | 97 |
for idp in get_idps(): |
98 |
try: |
|
99 |
server.addProviderFromBuffer(lasso.PROVIDER_ROLE_IDP, idp['METADATA']) |
|
100 |
except lasso.Error as e: |
|
101 |
logger.error(u'bad metadata in idp %r', idp['ENTITY_ID']) |
|
102 |
logger.debug(u'lasso error: %s', e) |
|
103 |
continue |
|
98 |
if idp and idp.get('METADATA'): |
|
99 |
try: |
|
100 |
server.addProviderFromBuffer(lasso.PROVIDER_ROLE_IDP, idp['METADATA']) |
|
101 |
except lasso.Error as e: |
|
102 |
logger.error(u'bad metadata in idp %s, %s', idp['ENTITY_ID'], e) |
|
104 | 103 |
cache[root] = server |
105 | 104 |
settings._MELLON_SERVER_CACHE = cache |
106 | 105 |
return settings._MELLON_SERVER_CACHE.get(root) |
mellon/views.py | ||
---|---|---|
169 | 169 |
'''show error message to user after a login failure''' |
170 | 170 |
login = self.profile |
171 | 171 |
idp = utils.get_idp(login.remoteProviderId) |
172 |
if not idp: |
|
173 |
self.log.warning('entity id %r is unknown', login.remoteProviderId) |
|
174 |
return HttpResponseBadRequest( |
|
175 |
'entity id %r is unknown' % login.remoteProviderId) |
|
172 | 176 |
error_url = utils.get_setting(idp, 'ERROR_URL') |
173 | 177 |
error_redirect_after_timeout = utils.get_setting(idp, 'ERROR_REDIRECT_AFTER_TIMEOUT') |
174 | 178 |
if error_url: |
... | ... | |
391 | 395 | |
392 | 396 |
next_url = check_next_url(self.request, request.GET.get(REDIRECT_FIELD_NAME)) |
393 | 397 |
idp = self.get_idp(request) |
394 |
if idp is None:
|
|
398 |
if not idp:
|
|
395 | 399 |
return HttpResponseBadRequest('no idp found') |
396 | 400 |
self.profile = login = utils.create_login(request) |
397 | 401 |
self.log.debug('authenticating to %r', idp['ENTITY_ID']) |
setup.py | ||
---|---|---|
94 | 94 |
'django>=1.5,<2.0', |
95 | 95 |
'requests', |
96 | 96 |
'isodate', |
97 |
'atomicwrites', |
|
97 | 98 |
], |
98 | 99 |
setup_requires=[ |
99 | 100 |
'django>=1.5,<2.0', |
tests/conftest.py | ||
---|---|---|
13 | 13 |
# You should have received a copy of the GNU Affero General Public License |
14 | 14 |
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
15 | 15 | |
16 |
import os |
|
16 | 17 |
import logging |
18 | ||
17 | 19 |
import pytest |
18 | 20 |
import django_webtest |
19 | 21 | |
20 | 22 | |
23 |
@pytest.fixture(autouse=True) |
|
24 |
def settings(settings, tmpdir): |
|
25 |
settings.MEDIA_ROOT = str(tmpdir.mkdir('media')) |
|
26 |
return settings |
|
27 | ||
28 | ||
21 | 29 |
@pytest.fixture |
22 |
def app(request): |
|
30 |
def app(request, settings):
|
|
23 | 31 |
wtm = django_webtest.WebTestMixin() |
24 | 32 |
wtm._patch_settings() |
25 | 33 |
request.addfinalizer(wtm._unpatch_settings) |
... | ... | |
38 | 46 | |
39 | 47 | |
40 | 48 |
@pytest.fixture |
41 |
def private_settings(request): |
|
49 |
def private_settings(request, tmpdir):
|
|
42 | 50 |
import django.conf |
43 | 51 |
from django.conf import UserSettingsHolder |
44 | 52 |
old = django.conf.settings._wrapped |
... | ... | |
57 | 65 |
caplog.handler.stream = py.io.TextIO() |
58 | 66 |
caplog.handler.records = [] |
59 | 67 |
return caplog |
68 | ||
69 | ||
70 |
@pytest.fixture(scope='session') |
|
71 |
def metadata(): |
|
72 |
with open(os.path.join(os.path.dirname(__file__), 'metadata.xml')) as fd: |
|
73 |
yield fd.read() |
|
74 | ||
75 | ||
76 |
@pytest.fixture |
|
77 |
def metadata_path(tmpdir, metadata): |
|
78 |
metadata_path = tmpdir / 'metadata.xml' |
|
79 |
with metadata_path.open('w') as fd: |
|
80 |
fd.write(metadata) |
|
81 |
yield str(metadata_path) |
tests/test_default_adapter.py | ||
---|---|---|
13 | 13 |
# You should have received a copy of the GNU Affero General Public License |
14 | 14 |
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
15 | 15 | |
16 |
import pytest
|
|
16 |
import datetime
|
|
17 | 17 |
import re |
18 | 18 |
import lasso |
19 |
import time |
|
19 | 20 |
from multiprocessing.pool import ThreadPool |
20 | 21 | |
22 |
import pytest |
|
23 | ||
21 | 24 |
from django.contrib import auth |
22 | 25 |
from django.db import connection |
23 | 26 | |
... | ... | |
167 | 170 |
user = SAMLBackend().authenticate(saml_attributes=saml_attributes) |
168 | 171 |
assert user.is_superuser is True |
169 | 172 |
assert user.is_staff is True |
170 |
assert not 'flag is_staff and is_superuser removed' in caplog.text
|
|
173 |
assert 'flag is_staff and is_superuser removed' not in caplog.text
|
|
171 | 174 | |
172 | 175 | |
173 | 176 |
def test_provision_absent_attribute(settings, django_user_model, idp, saml_attributes, caplog): |
... | ... | |
326 | 329 |
{'user_field': 'username', 'saml_attribute': 'saml_at1', 'ignore-case': True}, |
327 | 330 |
] |
328 | 331 |
assert adapter.lookup_user(idp, saml_attributes) == jane |
332 | ||
333 | ||
334 |
@pytest.fixture |
|
335 |
def adapter(): |
|
336 |
return DefaultAdapter() |
|
337 | ||
338 | ||
339 |
def test_load_metadata_simple(adapter, metadata): |
|
340 |
idp = {'METADATA': metadata} |
|
341 |
assert adapter.load_metadata(idp, 0) == metadata |
|
342 | ||
343 | ||
344 |
def test_load_metadata_legacy(adapter, metadata_path, metadata): |
|
345 |
idp = {'METADATA': metadata_path} |
|
346 |
assert adapter.load_metadata(idp, 0) == metadata |
|
347 |
assert idp['METADATA'] == metadata |
|
348 | ||
349 | ||
350 |
def test_load_metadata_path(adapter, metadata_path, metadata, freezer): |
|
351 |
now = time.time() |
|
352 |
idp = {'METADATA_PATH': str(metadata_path)} |
|
353 |
assert adapter.load_metadata(idp, 0) == metadata |
|
354 |
assert idp['METADATA'] == metadata |
|
355 |
assert idp['METADATA_PATH_LAST_UPDATE'] == now |
|
356 | ||
357 | ||
358 |
def test_load_metadata_url(settings, adapter, metadata, httpserver, freezer, caplog): |
|
359 |
now = time.time() |
|
360 |
httpserver.serve_content(content=metadata, headers={'Content-Type': 'application/xml'}) |
|
361 |
idp = {'METADATA_URL': httpserver.url} |
|
362 |
assert adapter.load_metadata(idp, 0) == metadata |
|
363 |
assert idp['METADATA'] == metadata |
|
364 |
assert idp['METADATA_URL_LAST_UPDATE'] == now |
|
365 |
assert 'METADATA_PATH' in idp |
|
366 |
assert idp['METADATA_PATH'].startswith(settings.MEDIA_ROOT) |
|
367 |
with open(idp['METADATA_PATH']) as fd: |
|
368 |
assert fd.read() == metadata |
|
369 |
assert idp['METADATA_PATH_LAST_UPDATE'] == now + 1 |
|
370 |
httpserver.serve_content(content=metadata.replace('idp5', 'idp6'), |
|
371 |
headers={'Content-Type': 'application/xml'}) |
|
372 |
assert adapter.load_metadata(idp, 0) == metadata |
|
373 | ||
374 |
freezer.move_to(datetime.timedelta(seconds=3601)) |
|
375 |
caplog.clear() |
|
376 |
assert adapter.load_metadata(idp, 0) == metadata |
|
377 |
# wait for update thread to finish |
|
378 |
try: |
|
379 |
idp['METADATA_URL_UPDATE_THREAD'].join() |
|
380 |
except KeyError: |
|
381 |
pass |
|
382 |
new_meta = adapter.load_metadata(idp, 0) |
|
383 |
assert new_meta != metadata |
|
384 |
assert new_meta == metadata.replace('idp5', 'idp6') |
|
385 |
assert 'entityID changed' in caplog.records[-1].message |
|
386 |
assert caplog.records[-1].levelname == 'ERROR' |
|
387 |
# test load from file cache |
|
388 |
del idp['METADATA'] |
|
389 |
del idp['METADATA_PATH'] |
|
390 |
del idp['METADATA_PATH_LAST_UPDATE'] |
|
391 |
httpserver.serve_content(content='', headers={'Content-Type': 'application/xml'}) |
|
392 |
assert adapter.load_metadata(idp, 0) == metadata.replace('idp5', 'idp6') |
|
393 | ||
394 | ||
395 |
def test_load_metadata_url_stale_timeout(settings, adapter, metadata, httpserver, freezer, caplog): |
|
396 |
httpserver.serve_content(content=metadata, headers={'Content-Type': 'application/xml'}) |
|
397 |
idp = {'METADATA_URL': httpserver.url} |
|
398 |
assert adapter.load_metadata(idp, 0) == metadata |
|
399 |
httpserver.serve_content(content='', headers={'Content-Type': 'application/xml'}) |
|
400 |
assert adapter.load_metadata(idp, 0) == metadata |
|
401 | ||
402 |
freezer.move_to(datetime.timedelta(seconds=24 * 3600 - 1)) |
|
403 |
assert adapter.load_metadata(idp, 0) == metadata |
|
404 | ||
405 |
# wait for update thread to finish |
|
406 |
try: |
|
407 |
idp['METADATA_URL_UPDATE_THREAD'].join() |
|
408 |
except KeyError: |
|
409 |
pass |
|
410 |
assert caplog.records[-1].levelname == 'WARNING' |
|
411 | ||
412 |
freezer.move_to(datetime.timedelta(seconds=3601)) |
|
413 |
assert adapter.load_metadata(idp, 0) == metadata |
|
414 | ||
415 |
# wait for update thread to finish |
|
416 |
try: |
|
417 |
idp['METADATA_URL_UPDATE_THREAD'].join() |
|
418 |
except KeyError: |
|
419 |
pass |
|
420 |
assert caplog.records[-1].levelname == 'ERROR' |
tests/test_utils.py | ||
---|---|---|
13 | 13 |
# You should have received a copy of the GNU Affero General Public License |
14 | 14 |
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
15 | 15 | |
16 |
import re |
|
17 | 16 |
import datetime |
18 | 17 | |
19 | 18 |
import mock |
20 | 19 |
import lasso |
21 |
import requests.exceptions |
|
22 |
from httmock import HTTMock |
|
23 | 20 | |
24 |
from mellon.utils import create_server, create_metadata, iso8601_to_datetime, flatten_datetime |
|
25 |
import mellon.utils |
|
21 |
from mellon.utils import create_metadata, iso8601_to_datetime, flatten_datetime |
|
26 | 22 |
from xml_utils import assert_xml_constraints |
27 | 23 | |
28 |
from utils import error_500, metadata_response |
|
29 | ||
30 | ||
31 |
def test_create_server_connection_error(mocker, rf, private_settings, caplog):
    """A network failure while fetching METADATA_URL is logged, not raised."""
    mocker.patch(
        'requests.get',
        side_effect=requests.exceptions.ConnectionError('connection error'))
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {'METADATA_URL': 'http://example.com/metadata'},
    ]
    create_server(rf.get('/'))
    assert 'connection error' in caplog.text
|
42 | ||
43 | ||
44 |
def test_create_server_internal_server_error(mocker, rf, private_settings, caplog):
    """An HTTP 500 while fetching METADATA_URL must be logged as a failure.

    Fix: ``assert not X in Y`` replaced by the idiomatic ``X not in Y`` (E713).
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA_URL': 'http://example.com/metadata',
        }
    ]
    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    with HTTMock(error_500):
        create_server(request)
    assert 'failed with error' in caplog.text
|
55 | ||
56 | ||
57 |
def test_create_server_invalid_metadata(mocker, rf, private_settings, caplog):
    """Non-XML inline METADATA must produce exactly one 'is invalid' record.

    Fixes: E713 comparison idiom; regex pattern made a raw string.
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA': 'xxx',
        }
    ]
    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    with HTTMock(error_500):
        create_server(request)
    assert len(caplog.records) == 1
    assert re.search(r'METADATA.*is invalid', caplog.text)
|
69 | ||
70 | ||
71 |
def test_create_server_invalid_metadata_file(mocker, rf, private_settings, caplog):
    """A METADATA path whose content is invalid yields a server with no providers.

    Fixes: E713 comparison idiom; the two nested ``with`` blocks are merged
    into a single statement (same scope, flatter code).
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA': '/xxx',
        }
    ]
    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    with mock.patch('mellon.adapters.open', mock.mock_open(read_data='yyy'), create=True), \
            HTTMock(error_500):
        server = create_server(request)
    assert len(server.providers) == 0
|
83 | ||
84 | ||
85 |
def test_create_server_good_metadata_file(mocker, rf, private_settings, caplog):
    """A METADATA path resolving to valid metadata registers one provider.

    Fix: the original read ``tests/metadata.xml`` via a bare ``open(...)``
    call, leaking the file handle; use a context manager instead.
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA': '/xxx',
        }
    ]
    request = rf.get('/')
    # read the fixture up front so the handle is closed deterministically
    with open('tests/metadata.xml') as fixture:
        metadata_content = fixture.read()
    with mock.patch(
            'mellon.adapters.open', mock.mock_open(read_data=metadata_content),
            create=True):
        server = create_server(request)
    assert 'ERROR' not in caplog.text
    assert len(server.providers) == 1
|
98 | ||
99 | ||
100 |
def test_create_server_good_metadata(mocker, rf, private_settings, caplog):
    """Valid inline METADATA registers one provider without any ERROR log.

    Fixes: file-handle leak on ``open(...).read()``; E713 comparison idiom.
    """
    # read the fixture through a context manager so the handle is closed
    with open('tests/metadata.xml') as fixture:
        metadata_content = fixture.read()
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA': metadata_content,
        }
    ]
    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    server = create_server(request)
    assert 'ERROR' not in caplog.text
    assert len(server.providers) == 1
|
111 | ||
112 | ||
113 |
def test_create_server_invalid_idp_dict(mocker, rf, private_settings, caplog):
    """An idp entry with no METADATA* key at all must log 'missing METADATA'.

    Fixes: E713 comparison idiom; the three-line empty dict literal collapsed.
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [{}]
    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    create_server(request)
    assert 'missing METADATA' in caplog.text
|
122 | ||
123 | ||
124 |
def test_create_server_good_metadata_url(mocker, rf, private_settings, caplog):
    """METADATA_URL served successfully over HTTP registers one provider.

    Fix: ``assert not X in Y`` replaced by the idiomatic ``X not in Y`` (E713).
    """
    private_settings.MELLON_IDENTITY_PROVIDERS = [
        {
            'METADATA_URL': 'http://example.com/metadata',
        }
    ]

    request = rf.get('/')
    # sanity check: nothing has been logged yet
    assert 'failed with error' not in caplog.text
    with HTTMock(metadata_response):
        server = create_server(request)
    assert 'ERROR' not in caplog.text
    assert len(server.providers) == 1
|
137 | ||
138 | 24 | |
139 | 25 |
def test_create_metadata(rf, private_settings, caplog): |
140 | 26 |
ns = { |
tox.ini | ||
---|---|---|
1 | 1 |
[tox] |
2 |
envlist = {coverage-,}py2-{dj18,dj111}-{pg,sqlite},py3-dj111-{pg,sqlite}
|
|
2 |
envlist = coverage-py2-{dj18,dj111}-{pg,sqlite},coverage-py3-dj111-{pg,sqlite}
|
|
3 | 3 |
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/django-mellon/ |
4 | 4 | |
5 | 5 |
[testenv] |
... | ... | |
24 | 24 |
pytest-random |
25 | 25 |
pytest-mock |
26 | 26 |
pytest-django |
27 |
pytest-freezegun |
|
28 |
pytest-localserver |
|
27 | 29 |
pytz |
28 | 30 |
lxml |
29 | 31 |
cssselect |
30 |
- |