1
|
|
2
|
|
3
|
|
4
|
|
5
|
|
6
|
|
7
|
|
8
|
|
9
|
|
10
|
|
11
|
|
12
|
|
13
|
|
14
|
|
15
|
|
16
|
import os, shutil, time, traceback, urllib2, copy
|
17
|
import xmlrpclib
|
18
|
from glob import glob
|
19
|
|
20
|
from twisted.internet import reactor, defer
|
21
|
|
22
|
import config
|
23
|
|
24
|
|
25
|
import redis
|
26
|
import json
|
27
|
import base64
|
28
|
|
29
|
def current_milli_time():
    """Return the current wall-clock time as an integer count of milliseconds.

    Replaces the original ``lambda`` assignment (PEP 8 E731: use a def so
    the callable has a proper name in tracebacks).
    """
    return int(round(time.time() * 1000))
|
30
|
|
31
|
|
32
|
|
33
|
UNITTESTS_MODE = False
|
34
|
if UNITTESTS_MODE:
|
35
|
_ = str
|
36
|
def trace(func):
|
37
|
return func
|
38
|
from twisted.python import log
|
39
|
default_idp_options = {}
|
40
|
available_contexts = {}
|
41
|
available_contexts['URN_PROTECTED_PASSWORD'] = 'urn_psswd'
|
42
|
available_contexts['URN_TIME_SYNC_TOKEN'] = "urn_time_sync"
|
43
|
|
44
|
else:
|
45
|
from page import trace, log
|
46
|
|
47
|
import page
|
48
|
from saml_utils import get_metadata, date_from_string
|
49
|
from saml_crypto import recreate_cert
|
50
|
from saml_utils import available_contexts, default_idp_options
|
51
|
|
52
|
from eolesso.dataproxy import LDAPProxy
|
53
|
from eolesso.ticket import AppTicket
|
54
|
from eolesso.util import *
|
55
|
from eolesso.ticketcache import InvalidSession, TicketCache, SamlMsgCache
|
56
|
from userstore import UserStore
|
57
|
|
58
|
import user_infos
|
59
|
|
60
|
import external_attrs
|
61
|
|
62
|
from oidc_utils import init_oidclient
|
63
|
|
64
|
|
65
|
class SSOSessionManager(object):
|
66
|
|
67
|
def __init__(self):
    """Build the session manager: in-memory caches, optional Redis
    session store, and static configuration.  No parent SSO server is
    assumed until _init_conf() finds one in the configuration."""
    self.address = config.AUTH_SERVER_ADDR
    self._parent = None
    self.parent_url = None
    self.init_caches()
    self.init_db()
    self._init_conf()
|
74
|
|
75
|
def reload_conf(self):
    """Reload the server configuration in place.

    Re-imports the configuration modules via ``reload`` (Python 2), then
    re-runs the static (_init_conf) and dynamic (load_conf) setup steps.

    :returns: always True (XML-RPC friendly acknowledgement).
    """
    reload(user_infos)
    reload(external_attrs)
    reload(config)
    reload(page)
    self._init_conf()
    self.load_conf(reloading=True)
    return True
|
83
|
|
84
|
|
85
|
def init_db(self):
    """Initialise the optional Redis session store.

    When ``config.REDIS_HOST`` is set, sessions and application tickets
    are mirrored into Redis (see the ``_DB*`` helpers); otherwise
    ``self.db_sessions`` stays None and those helpers are no-ops.
    """
    self.db_sessions = None
    self.redis = False
    # `is not None` instead of `!= None` (identity test idiom); the
    # original trailing `if self.db_sessions == None: return` was dead
    # code at the end of the function and has been removed.
    if config.REDIS_HOST is not None:
        self.redis = True
        self.db_sessions = redis.Redis(host=config.REDIS_HOST,
                                       port=config.REDIS_PORT)
|
98
|
|
99
|
|
100
|
|
101
|
def _DBAddAppTicket(self, ticket):
    """Persist an application ticket into the session database.

    No-op when no Redis backend is configured.
    """
    if self.db_sessions is None:
        return
    payload = {
        "ticket": ticket.ticket,
        "session_id": ticket.session_id,
        "service_url": ticket.service_url,
        "cas_url": ticket.cas_url,
        "from_credentials": ticket.from_credentials,
    }
    self.db_sessions.set("ticket_%s" % ticket.ticket, json.dumps(payload))
|
115
|
|
116
|
|
117
|
def _DBInitUserSession(self,sessionID, user_data, username, auth_instant,search_attrs,auth_class,idp_ident):
|
118
|
|
119
|
if self.db_sessions == None:
|
120
|
return
|
121
|
|
122
|
data={}
|
123
|
data["user_data"] = user_data
|
124
|
data["username"] = username
|
125
|
data["auth_instant"]= auth_instant
|
126
|
data["search_attrs"]= search_attrs
|
127
|
data["auth_class"] = auth_class
|
128
|
data["idp_ident"] = idp_ident
|
129
|
|
130
|
self.db_sessions.set("session_%s" % sessionID,json.dumps(data))
|
131
|
|
132
|
|
133
|
def _DBCheckSession(self, sessionID):
    """Check that a session exists, restoring it from Redis if needed.

    :returns: True when the session is known (or when no session
        database is configured at all), False when Redis has no record.
    """
    if self.db_sessions is None:
        # No shared store: the in-memory state is authoritative.
        return True
    key = "session_%s" % sessionID
    if not self.db_sessions.exists(key):
        log.msg("[DBSESSION] Session [session_%s] NOT FOUND " % sessionID)
        return False
    stored = json.loads(self.db_sessions.get(key))
    # Rebuild the in-memory session from the persisted attributes,
    # keeping the original session identifier.
    self.init_user_session(stored["user_data"], stored["username"],
                           stored["auth_instant"], stored["search_attrs"],
                           stored["auth_class"], stored["idp_ident"],
                           sessionID)
    return True
|
153
|
|
154
|
def _DBDeleteSession(self, sessionID):
    """Schedule a persisted session for removal.

    The Redis entry is expired after a short grace period (5 seconds)
    rather than deleted immediately.  No-op without a session database.
    """
    if self.db_sessions is None:
        return
    self.db_sessions.expire("session_%s" % sessionID, 5)
|
160
|
|
161
|
|
162
|
def _DBAppSessionFromTicket(self, app_ticket):
    """Restore an application session from the session database.

    :returns: True when the ticket is already known in memory, or when
        it (and its user session) could be rebuilt from Redis; False
        otherwise.
    """
    if app_ticket in self.app_sessions:
        return True
    if self.db_sessions is None:
        return False
    key = "ticket_%s" % app_ticket
    if not self.db_sessions.exists(key):
        log.msg("[DBSESSION] Ticket [ticket_%s] NOT FOUND" % app_ticket)
        return False
    data = json.loads(self.db_sessions.get(key))
    # The user session must be restored first: the application session
    # refers to it.
    if not self._DBCheckSession(data["session_id"]):
        return False
    self.init_app_session(data["ticket"], data["session_id"],
                          data["service_url"], data["from_credentials"])
    # BUG FIX: the original log line referenced session["username"]
    # although `session` was never defined in this scope (NameError at
    # runtime); only log data we actually have.
    log.msg("[DBSESSION] Ticket %s with session %s récupéré depuis la DB"
            % (data["ticket"], data["session_id"]))
    return True
|
192
|
|
193
|
|
194
|
|
195
|
def init_caches(self):
    """Create every in-memory cache used by the SSO server."""
    self.CACHESESSIONS = {}
    # Login tickets are short lived (3 minutes).
    self.login_sessions = TicketCache(180, self.address, 'LT')
    self.user_sessions = {}
    self.data_cache = {}
    self.calc_cache = {}
    self.app_sessions = {}
    self.user_app_tickets = {}
    self.proxy_granting_sessions = {}
    self.saml_sessions = {}
    # Caches of SAML messages sent / received (replay protection).
    self.saml_sent_msg = SamlMsgCache(500)
    self.saml_rcved_msg = SamlMsgCache(500)
    self.relay_state_cache = SamlMsgCache(1000)
    self.checkers = {}
    self.pending_logout = {}
    # BUG FIX: take a copy of the module-level mapping. The original
    # aliased `available_contexts` directly, so init_securid() removing
    # an entry mutated the shared module-level dict as well.
    self.allowed_contexts = dict(available_contexts)
    self.init_securid()
    self.init_openid()
|
234
|
|
235
|
def init_securid(self):
    """Set up SecurID (OTP) support according to the configuration."""
    if config.USE_SECURID or config.SECURID_PROVIDER:
        self.securid_user_store = UserStore(config.SECURID_USER_DIR,
                                            'securid_users.ini')
    else:
        self.securid_user_store = None
        # pop() instead of del: does not raise KeyError when the
        # context was already removed (e.g. caches re-initialised).
        self.allowed_contexts.pop('URN_TIME_SYNC_TOKEN', None)
    if config.SECURID_PROVIDER:
        # 5-minute cache for pending SecurID authentications.
        self.securid_sessions = TicketCache(300, "", "_")
|
246
|
|
247
|
def init_openid(self):
    """Instantiate one OpenID Connect client per configured provider.

    Each entry of ``self.external_providers`` maps a provider reference
    to a (client, user store, label, about info) tuple, where about info
    is either None or an (url, label) pair.
    """
    self.external_providers = {}
    for ref, infos in config.OPENID_PROVIDERS.items():
        about_info = None
        if infos['about_url'] and infos['about_label']:
            about_info = (infos['about_url'], infos['about_label'])
        store = UserStore(config.OPENID_USER_DIR, '%s_users.ini' % ref)
        self.external_providers[ref] = (init_oidclient(ref), store,
                                        infos['label'], about_info)
|
255
|
|
256
|
def _init_conf(self):
    """Reset the static configuration state and (re)create the LDAP
    data proxy and the optional parent-server XML-RPC proxy."""
    self.filters = {}
    # Applications indexed by scheme -> port -> baseurl; 'sp_ident'
    # holds SAML service-provider specific filters.
    self.apps = {'http':{'all':{}},
                 'https':{'all':{}},
                 'all':{'all':{}},
                 'sp_ident':{}}
    self.sp_meta = {}
    self.req_attributes = {}
    self.opt_attributes = {}
    self.attribute_sets = {}
    self.attribute_sets_ids = {}
    self.associations = {}
    # Static attributes served for every user of this server.
    static_data = {'rne': [config.RNE], 'nom_etab': [config.ETABLISSEMENT]}
    proxy = LDAPProxy(config.LDAP_SERVER, config.LDAP_PORT,
                      config.LDAP_BASE, config.LDAP_LABEL, config.LDAP_INFOS,
                      config.LDAP_READER, config.LDAP_READER_PASSFILE,
                      config.LDAP_LOGIN_OTP, static_data,
                      config.LDAP_MATCH_ATTRIBUTE)

    # Optional parent SSO server reachable over XML-RPC.
    if config.PARENT_URL not in ('', None):
        self.parent_url = config.PARENT_URL
        self._parent = xmlrpclib.ServerProxy('%s/xmlrpc' % self.parent_url)
        log.msg("- %s : %s" % (_("Parent server defined"), self.parent_url))

    self._data_proxy = proxy
|
282
|
|
283
|
def load_conf(self, reloading=False):
    """Load the server configuration.

    :param reloading: True when called from reload_conf(); skips the
        one-time steps (startup log line, OTP template discovery).
    """
    if not reloading:
        log.msg('* {0}'.format(_('loading server configuration')))

    self.user_infos = user_infos
    log.msg("- {0} : {1}".format(_("Calculated Attributes defined"), ", ".join(user_infos.dict_infos.keys())))

    self.external_attrs = external_attrs.external_attrs
    log.msg("- %s : %s" % (_("External federation Attributes defined"), ", ".join(self.external_attrs.keys())))
    self.load_filters()

    self.load_attribute_sets()
    self.init_metadata()

    self.gen_homonymes_infos()

    # Regenerate the javascript list of known establishments.
    update_etab_js(os.path.join(config.SSO_PATH,'interface','scripts','etabs.js'), sso_dir=config.SSO_PATH)
    if not reloading:
        # OTP templates only need to be discovered once at startup.
        if config.USE_SECURID or config.SECURID_PROVIDER:
            from securid_utils import get_otp_templates
            get_otp_templates(load = False)
|
306
|
|
307
|
def check_ticket_issuer(self, ticket_id):
|
308
|
"""vérifie qu'un ticket a été généré par ce serveur
|
309
|
"""
|
310
|
if ticket_id != '':
|
311
|
ticket_address = "-".join(ticket_id.split('-')[1:-1])
|
312
|
if ticket_address == self.address:
|
313
|
return True
|
314
|
return False
|
315
|
|
316
|
def end_session(self, session_id):
    """Terminate a user session.

    Drops every application ticket attached to the session (including
    proxy-granting sessions and SAML session indexes), then clears the
    session from every cache and from the session database.
    Unknown session ids are ignored.
    """
    if session_id in self.user_sessions:
        for ticket in self.user_app_tickets[session_id]:
            # Release any proxy granting ticket attached to this app ticket.
            if ticket.pgt != None:
                del self.proxy_granting_sessions[ticket.pgt]
            # Forget the SAML session index, if this ticket carries one.
            if hasattr(ticket,'idp_session_index'):
                if ticket.idp_session_index in self.saml_sessions:
                    del self.saml_sessions[ticket.idp_session_index]
            del self.app_sessions[ticket.ticket]
        if session_id in self.user_app_tickets:
            del self.user_app_tickets[session_id]
        if session_id in self.data_cache:
            del self.data_cache[session_id]
        if session_id in self.calc_cache:
            del self.calc_cache[session_id]
        if session_id in self.pending_logout:
            del self.pending_logout[session_id]

        # Expire the persisted copy, then the in-memory session itself.
        self._DBDeleteSession(session_id)
        del self.user_sessions[session_id]
        log.msg("%s -- %s" % (session_id, _("Terminating session")))
|
344
|
|
345
|
def gen_saml_id(self, data=None, prefix='_'):
    """Generate a unique SAML message id and cache its context data.

    :param data: optional dict of context data stored with the message;
        a 'date_creation' timestamp is added to it.
    :param prefix: prefix of the generated identifier.
    :returns: the new message id.
    """
    # BUG FIX: the original used a mutable default argument (data={})
    # and then mutated it, leaking state between unrelated calls.
    if data is None:
        data = {}
    msg_id = gen_random_id(prefix)
    # Re-draw until the id is unused in the sent-message cache.
    while self.saml_sent_msg.get_msg(msg_id) is not None:
        msg_id = gen_random_id(prefix)
    data['date_creation'] = time.time()
    self.saml_sent_msg.add(msg_id, data)
    return msg_id
|
352
|
|
353
|
def get_saml_msg(self, msg_id):
    """Return the data cached for a sent SAML message (None if unknown)."""
    return self.saml_sent_msg.get_msg(msg_id)
|
355
|
|
356
|
def update_saml_msg(self, msg_id, data):
    """Merge `data` into the cached entry of an already sent SAML
    message.  The message id must be present in the cache."""
    assert msg_id in self.saml_sent_msg
    self.saml_sent_msg[msg_id].update(data)
|
359
|
|
360
|
def replayed_saml_msg(self, msg_id):
    """Replay detection: record `msg_id` and tell whether it had
    already been received."""
    seen = msg_id in self.saml_rcved_msg
    if not seen:
        self.saml_rcved_msg.add(msg_id)
    return seen
|
365
|
|
366
|
def gen_relay_state(self, data, prefix='RS_'):
    """Generate a unique SAML RelayState token and cache its data.

    :returns: the new relay state identifier.
    """
    msg_id = gen_random_id(prefix)
    # BUG FIX: uniqueness was checked against the *sent message* cache
    # while the id is stored in the relay state cache; check the cache
    # the id actually goes into.
    while self.relay_state_cache.get_msg(msg_id) is not None:
        msg_id = gen_random_id(prefix)
    self.relay_state_cache.add(msg_id, data)
    return msg_id
|
372
|
|
373
|
def get_relay_data(self, relay_state):
    """Return the data cached for a RelayState token (None if unknown)."""
    return self.relay_state_cache.get_msg(relay_state)
|
375
|
|
376
|
@trace
def remove_old_ticket(self, session_id, saml_ident):
    """Delete any existing application ticket of this session that was
    issued for the given partner entity (saml_ident)."""
    for ticket in self.user_app_tickets[session_id]:
        if getattr(ticket, 'saml_ident', '') == saml_ident:
            old_ticket = ticket.ticket

            # Forget any pending logout referencing this ticket.
            if session_id in self.pending_logout:
                if old_ticket in self.pending_logout[session_id][1]:
                    self.pending_logout[session_id][1].remove(old_ticket)

            self.user_app_tickets[session_id].remove(ticket)

            # Drop the associated SAML session index, if any.
            if hasattr(ticket, 'idp_session_index'):
                if ticket.idp_session_index in self.saml_sessions:
                    del(self.saml_sessions[ticket.idp_session_index])

            del(self.app_sessions[old_ticket])
            del(ticket)
            # Stop at the first match: at most one ticket per partner
            # entity, and the list was just mutated during iteration.
            break
|
397
|
|
398
|
def load_filters(self):
    """Load data filters and application definitions.

    ``*.global`` files feed ``self.global_filter``; ``*_apps.ini`` and
    ``applications.ini`` declare applications; every other ``*.ini``
    declares a named attribute filter.  Filters referenced by an
    application but never defined are reported in the log.
    """
    loaded_apps = []
    loaded_filters = []
    needed_filters = set()

    # Global filter: every *.global file merged into one parser.
    glob_cfg = EoleParser()
    for conf_file in glob('%s/*.global' % config.FILTER_DIR):
        glob_cfg.read(conf_file)
    self.global_filter = dict((section, dict(glob_cfg.items(section)))
                              for section in glob_cfg.sections())

    for conf_file in glob('%s/*.ini' % config.FILTER_DIR):
        name = os.path.splitext(os.path.basename(conf_file))[0]
        cfg = EoleParser()
        cfg.read(conf_file)
        if name.endswith('_apps') or name == 'applications':
            # Application declaration file.
            new_apps, new_filters = self.load_applications(cfg)
            loaded_apps.extend(new_apps)
            needed_filters.update(new_filters)
        else:
            # Plain attribute filter.
            self.filters[name] = dict((section, dict(cfg.items(section)))
                                      for section in cfg.sections())
            loaded_filters.append(name)
    log.msg("- %s : %s" % (_("Filters loaded"), ", ".join(loaded_filters)))
    log.msg("- %s : %s" % (_("Applications defined"), ", ".join(loaded_apps)))

    missing_filters = [flt for flt in needed_filters
                       if flt not in self.filters]
    if missing_filters:
        log.msg('')
        log.msg('\t!! %s : %s' % (_('Following filters are needed but missing'), ', '.join(missing_filters)))
        log.msg('')
|
436
|
|
437
|
def load_applications(self, cfg):
    """Load application definitions attached to filters.

    :param cfg: parsed application configuration (EoleParser).
    :returns: (list of loaded application names,
               list of filter names referenced by those applications).
    """
    loaded_apps = []
    filters = []
    for app in cfg.sections():
        try:
            app_infos = dict(cfg.items(app))

            # Optional per-application http proxy ("host:port").
            use_proxy = app_infos.get('proxy', '')
            if ":" in use_proxy:
                host, port = use_proxy.split(':')
                if not check_url(host, port):
                    log.msg('\t! %s - %s : %s !' % (app, _('Warning, http proxy unreachable'), use_proxy))
                    use_proxy = ''

            if 'sp_ident' in app_infos:
                # SAML service provider entry.
                # BUG FIX: the original tested the literal key
                # 'sp_ident' instead of the actual identifier, so
                # overwrites were never reported.
                if app_infos['sp_ident'] in self.apps['sp_ident']:
                    log.msg(_("Warning, filter for service provider overwritten (already defined): {0}").format(str(app_infos['sp_ident'])))
                self.apps['sp_ident'][app_infos['sp_ident']] = (app_infos['filter'], use_proxy)
                loaded_apps.append(app)
            else:
                # CAS application entry, indexed by scheme/port/baseurl.
                try:
                    port = app_infos['port'] = int(app_infos['port'])
                except (KeyError, ValueError, TypeError):
                    # Missing or non-numeric port: match any port.
                    port = app_infos['port'] = 'all'
                scheme = app_infos['scheme']
                if scheme not in ['all', 'both']:
                    if scheme not in self.apps:
                        self.apps[scheme] = {'all':{}}
                    app_stores = [self.apps[scheme]]
                elif scheme == 'both':
                    app_stores = [self.apps['http'], self.apps['https']]
                else:
                    # scheme == 'all': register in every store.
                    app_stores = self.apps.values()

                for store in app_stores:
                    port_store = store.setdefault(port, {})
                    if app_infos['baseurl'] in port_store:
                        stored = False
                        for app_st in port_store[app_infos['baseurl']]:
                            # Same address/type: update in place.
                            if app_st[1] == app_infos['addr'] and app_st[2] == app_infos['typeaddr']:
                                log.msg(_("Warning, application overwritten (already defined): {0}").format(str(app_st)))
                                app_st[3] = app_infos['filter']
                                app_st[4] = use_proxy
                                stored = True
                        if not stored:
                            port_store[app_infos['baseurl']].append([scheme, app_infos['addr'], app_infos['typeaddr'], app_infos['filter'], use_proxy])
                    else:
                        port_store[app_infos['baseurl']] = [[scheme,
                                                            app_infos['addr'],
                                                            app_infos['typeaddr'],
                                                            app_infos['filter'],
                                                            use_proxy]]
                loaded_apps.append(app)
                filters.append(app_infos['filter'])
        except Exception as e:
            # One broken section must not prevent loading the others.
            log.msg("! %s %s : %s !" % (_("Error loading application"), app, e))
    return loaded_apps, filters
|
497
|
|
498
|
def load_attribute_sets(self):
    """Load the attribute sets used for identity federation.

    Reads every ``*.ini`` under ATTR_SET_DIR (except ``associations*``)
    into ``self.attribute_sets`` / ``self.attribute_sets_ids``, then
    loads the federation agreements from ``associations*.ini`` into
    ``self.associations``.  Attribute sets without a valid metadata
    index are assigned the first free numeric index at the end.
    """
    self.check_eole_attr_sets()
    missing_indexes = []
    for conf_file in glob(os.path.join(config.ATTR_SET_DIR, '*.ini')):
        conf_filename = os.path.splitext(os.path.basename(conf_file))[0]
        if not conf_filename.startswith('associations'):
            cfg = EoleParser()
            cfg.read(conf_file)

            attr_set = {'user_attrs':{}, 'branch_attrs':{}, 'optional_attrs':{}}
            attr_set_index = None
            for section in cfg.sections():
                if section == 'metadata':
                    # The index must be a non-negative int not already
                    # used by another set; otherwise it is rejected.
                    try:
                        set_index = cfg.get(section, 'index')
                        assert int(set_index) >= 0
                        assert set_index not in self.attribute_sets_ids.values()
                        attr_set_index = set_index
                    except:
                        log.msg(_('invalid attribute set index for {0}' ).format(conf_filename))
                elif section == 'branch_attrs':
                    attr_set[section] = dict(cfg.items(section))
                elif section == 'optional':
                    for dist_attr, local_attr in cfg.items(section):
                        attr_set['optional_attrs'][dist_attr] = local_attr
                else:
                    # Any other section maps remote attrs to user attrs.
                    for dist_attr, local_attr in cfg.items(section):
                        attr_set['user_attrs'][dist_attr] = local_attr

            if attr_set_index is None:
                self.attribute_sets_ids[conf_filename] = None
                missing_indexes.append(conf_filename)
            else:
                self.attribute_sets_ids[conf_filename] = attr_set_index
            self.attribute_sets[conf_filename] = attr_set

    # Federation agreements; 'default' starts from the built-in options.
    self.associations['default'] = {}
    self.associations['default'].update(default_idp_options)
    for assoc_file in glob('%s/associations*.ini' % config.ATTR_SET_DIR):
        cfg = EoleParser()
        cfg.read(assoc_file)
        for entity_id in cfg.sections():
            assoc = self.associations.get(entity_id, {})

            # attribute_set / filter options must reference an existing file.
            opts_path = {'attribute_set':config.ATTR_SET_DIR, 'filter':config.FILTER_DIR}
            for option, value in cfg.items(entity_id):
                if option in ('attribute_set', 'filter'):
                    data_file = os.path.join(opts_path[option], '%s.ini' % value)
                    if not os.path.isfile(data_file):
                        log.msg('%s : %s %s' % (assoc_file, data_file, _('not found')))
                        continue
                assoc[option] = value
            self.associations[entity_id] = assoc

            # passive and force_auth cannot both be enabled.
            if is_true(assoc.get('passive', '')) and is_true(assoc.get('force_auth', '')):
                log.msg('\t! %s : %s !' % (entity_id, _('Warning, force_auth and passive are mutually exclusive options, force_auth set to false')))
                assoc['force_auth'] = 'false'
    # Report every default option overridden by the configuration.
    for opt, val in self.associations['default'].items():
        if val != default_idp_options.get(opt, None):
            if opt == 'attribute_set':
                log.msg(_('default attribute set redefined : {0}').format(val))
            else:
                log.msg(_('default value for option {0} defined to : {1}').format(opt, val))

    # Assign the first free numeric indexes to sets that had none.
    if missing_indexes:
        for set_id in range(len(self.attribute_sets)):
            set_id = str(set_id)
            if set_id not in self.attribute_sets_ids.values():
                attr_set = missing_indexes.pop(0)
                self.store_set_index(set_id, attr_set)
                self.attribute_sets_ids[attr_set] = set_id
                if len(missing_indexes) == 0:
                    break
|
581
|
|
582
|
def store_set_index(self, index, setname):
    """Record an attribute set index in its definition file.

    :param index: index value written in the [metadata] section.
    :param setname: attribute set name (basename of the ini file).
    :returns: True on success, False when the file cannot be written.
    """
    conf_file = os.path.join(config.ATTR_SET_DIR, "%s.ini" % setname)
    cfg = EoleParser()
    cfg.read(conf_file)
    if not cfg.has_section('metadata'):
        cfg.add_section('metadata')
    cfg.set('metadata', 'index', index)
    try:
        # `with` guarantees the handle is closed even if cfg.write()
        # raises (the original leaked the file object on error).
        with open(conf_file, 'w') as f_conf:
            cfg.write(f_conf)
        log.msg(_('index {0} assigned to attribute set {1}').format(index, setname))
    except IOError:
        log.msg(_('error updating configuration file {0}').format(conf_file))
        return False
    return True
|
599
|
|
600
|
def check_eole_attr_sets(self):
    """Synchronise the attribute sets shipped with EoleSSO.

    Each ``eole/*.ini`` template is copied next to the user sets; when
    a destination file already exists, only its assigned metadata index
    is preserved and the rest is overwritten with the shipped template.
    """
    log.msg('- {0}'.format(_('updating default attribute_sets')))
    for eole_file in glob(os.path.join(config.ATTR_SET_DIR, "eole", "*.ini")):
        dest_file = os.path.join(config.ATTR_SET_DIR, os.path.basename(eole_file))
        if not os.path.isfile(dest_file):
            # First deployment of this set: plain copy.
            shutil.copy(eole_file, dest_file)
        else:
            eole_set = EoleParser()
            eole_set.read(eole_file)
            dest_set = EoleParser()
            dest_set.read(dest_file)

            if dest_set.has_option('metadata', 'index'):
                # BUG FIX: add_section() raises DuplicateSectionError
                # when the shipped template already has [metadata];
                # only add the section when it is missing.
                if not eole_set.has_section('metadata'):
                    eole_set.add_section('metadata')
                # Keep the index already assigned to the local copy.
                current_index = dest_set.get('metadata', 'index')
                eole_set.set('metadata', 'index', current_index)

            # `with` closes the file even when write() raises.
            with open(dest_file, 'w') as f_dest:
                eole_set.write(f_dest)
|
624
|
|
625
|
|
626
|
def init_metadata(self):
    """(Re)load SAML metadata for every partner entity.

    The certificate cache directory is wiped and rebuilt so that
    certificates embedded in the metadata files are re-extracted.
    """
    # The original declared an unused local (associations_updated);
    # removed as dead code.
    cert_dir = os.path.join(config.METADATA_DIR, 'certs')
    if os.path.isdir(cert_dir):
        shutil.rmtree(cert_dir)
    os.makedirs(cert_dir)
    for conf_file in glob('%s/*.xml' % config.METADATA_DIR):
        saml_ident = os.path.splitext(os.path.basename(conf_file))[0]
        self.load_metadata(saml_ident)
|
636
|
|
637
|
@trace
def load_metadata(self, saml_ident):
    """Load (and cache) SAML metadata for a partner entity.

    The signing certificate found in the metadata is extracted into the
    certificate cache directory when not already present.

    :returns: the metadata dict, or {} when the metadata cannot be
        fetched or parsed.
    """
    try:
        sp_meta = get_metadata(saml_ident)
        # Prefer the entityID declared inside the metadata itself.
        if 'entityID' in sp_meta:
            saml_ident = sp_meta['entityID']
            # Python 2: normalise unicode identifiers to the configured
            # byte encoding before building file paths.
            if isinstance(saml_ident, unicode):
                saml_ident = saml_ident.encode(config.encoding)
        f_cert = os.path.join(config.METADATA_DIR, 'certs',
                              '%s.crt' % saml_ident.replace(os.sep, '_'))
        if not os.path.isfile(f_cert):
            # The signing certificate may live at several places in the
            # metadata depending on the entity role.
            cert_data = None
            if 'SignCert' in sp_meta:
                cert_data = sp_meta['SignCert']
            elif 'SignCert' in sp_meta.get('SPSSODescriptor', {}):
                cert_data = sp_meta['SPSSODescriptor']['SignCert']
            elif 'SignCert' in sp_meta.get('IDPSSODescriptor', {}):
                cert_data = sp_meta['IDPSSODescriptor']['SignCert']
            if cert_data:
                cert_buffer = recreate_cert(cert_data)
                # `with` guarantees the certificate file is closed
                # (the original leaked the handle on write error).
                with open(f_cert, 'w') as f:
                    f.write(cert_buffer)
    except Exception as e:
        log.msg("- %s (%s)" % (_("Error fetching SAML metadata for {0} : ").format(saml_ident), str(e)))
        return {}
    if saml_ident not in self.sp_meta:
        # First time we see this entity: log it with its attribute set.
        attr_set = self.associations.get(saml_ident, {}).get('attribute_set', '')
        if attr_set not in ('', 'default'):
            attr_msg = ' (%s : %s)' % (_('attribute set'), attr_set)
        else:
            attr_msg = ''
        log.msg("- %s : %s%s" % (_("Partner entity initialized"), saml_ident, attr_msg))
    self.sp_meta[saml_ident] = sp_meta
    return sp_meta
|
674
|
|
675
|
@trace
def get_metadata(self, saml_ident):
    """Return the cached metadata of an entity, loading it on first use.

    :returns: the metadata dict, or {} when it cannot be loaded.
    """
    if saml_ident in self.sp_meta:
        return self.sp_meta[saml_ident]
    try:
        return self.load_metadata(saml_ident)
    except:
        # load_metadata already logs its own errors; fall back to empty.
        return {}
|
685
|
|
686
|
@trace
def get_attribute_set(self, idp_ident, search_branch=None):
    """Return the attribute set to use for a given identity provider.

    For our own identity (config.IDP_IDENTITY) with a search branch,
    the set is built from the branch's default user attribute and the
    configured search-base attributes; otherwise it comes from the
    federation agreement (falling back to the default agreement).

    :returns: dict with 'user_attrs', 'branch_attrs', 'optional_attrs'.
    """
    attr_set = {'user_attrs':{}, 'branch_attrs':{}, 'optional_attrs':{}}
    if idp_ident == config.IDP_IDENTITY:
        if search_branch:
            # Branch format is "host:ldap_base"; the per-host default
            # user attribute is looked up in the data proxy, with 'uid'
            # as a fallback for unknown hosts / malformed branches.
            try:
                host, base_ldap = search_branch.split(":",1)
                default_userattr = self._data_proxy.ldap_servers[host][3]
            except:
                default_userattr = 'uid'
            attr_set['user_attrs'][default_userattr] = default_userattr
            attr_set['branch_attrs'].update(config.SEARCH_BASE_ATTRS)
    else:
        try:
            # Agreement-specific set, else the default agreement's set.
            attr_set_name = self.associations.get(idp_ident, {}).get('attribute_set', self.associations['default']['attribute_set'])

            for attr_section in self.attribute_sets[attr_set_name]:
                attr_set[attr_section].update(self.attribute_sets[attr_set_name][attr_section])
        except:
            log.msg(_('Unable to determine attribute set for {0}').format(idp_ident))
    return attr_set
|
711
|
|
712
|
def gen_homonymes_infos(self):
    """Generate the javascript file with per-host messages displayed
    when homonym accounts are detected.

    One message per distinct info file; each LDAP host is mapped to the
    message of its configured info file.
    """
    h_dir = config.HOMONYMES_DIR
    h_script = os.path.join(config.SSO_PATH, 'interface', 'scripts', 'homonymes.js')
    script = """var msgs=new Array();\nvar host_infos=new Array();\n"""
    infos_used = {}
    host_infos = []
    index_msg = 1
    for host, infos in self._data_proxy.ldap_infos.items():
        if infos and os.path.isfile(os.path.join(h_dir, infos)):
            if infos not in infos_used:
                # Read each info file only once; double quotes are
                # normalised so the generated js string stays valid.
                # BUG FIX: the original `open(...).read()` leaked the
                # file handle; `with` closes it deterministically.
                with open(os.path.join(h_dir, infos)) as f_msg:
                    msg = f_msg.read().strip().replace('"', "'")
                infos_used[infos] = index_msg
                script += """msgs['msg%s']="%s";\n""" % (str(index_msg), msg)
                index_msg += 1
            host_infos.append("""host_infos['%s']="msg%s";""" % (host, infos_used[infos]))
    if host_infos:
        script += "\n".join(host_infos)
    with open(h_script, 'w') as f_script:
        f_script.write(script)
|
735
|
|
736
|
@trace
def get_default_logout_url(self, sso_session):
    """Return (default logout url, force-redirect flag) for a session.

    Both values come from the federation agreement with the identity
    provider that authenticated the session; (None, False) when the
    session is unknown, local, or has no configured logout url.
    """
    default_url = None
    force_redirect = False
    if sso_session in self.user_sessions:
        idp_ident = self.user_sessions[sso_session][3]
        # Only federated sessions (foreign idp) carry a logout url.
        if idp_ident and (idp_ident != config.IDP_IDENTITY):
            assoc = self.associations.get(idp_ident, {})
            default_url = assoc.get('default_logout_url', None)
            if default_url:
                force_redirect = is_true(assoc.get('force_logout_url', 'false'))
    return default_url, force_redirect
|
749
|
|
750
|
@trace
def get_federation_options(self, idp_ident):
    """Return the options configured for the federation agreement with
    an identity provider.

    Supported options:

    - attribute_set : attribute set used to match the user
    - sign_request : force signing of requests to this idp
    - passive : passive request (negative answer when not authenticated)
    - force_auth : force re-authentication even if already logged in
    - allow_idp : reject every assertion coming from an idp
    - allow_idp_initiated : reject unsolicited assertions from an idp
    - default_service : service address used after a successful login
    - req_context : authentication context required to accept an
      assertion
    - comparison : comparison operator for req_context
      (minimum, maximum, better, exact)
    """
    # BUG FIX: the original did `options = self.associations.get('default', {})`
    # and then update()d it, permanently merging idp-specific options
    # into the shared default agreement.  Work on a copy instead.
    options = dict(self.associations.get('default', {}))
    options.update(self.associations.get(idp_ident, {}))
    return options
|
771
|
|
772
|
@trace
def check_federation_allowed(self, idp_ident, idp_initiated=False):
    """Check that an identity provider is allowed to deliver assertions.

    :param idp_initiated: also require that unsolicited responses (no
        prior request from us) are accepted for this provider.
    """
    defaults = self.associations['default']
    default_allow = defaults.get('allow_idp', 'true')
    default_unsolicited = defaults.get('allow_idp_initiated', 'true')

    if idp_ident in self.associations:
        assoc_data = self.associations.get(idp_ident, {})
        if idp_initiated and not is_true(assoc_data.get('allow_idp_initiated', default_unsolicited)):
            return False
        return is_true(assoc_data.get('allow_idp', default_allow))

    # Unknown provider: fall back to the default agreement.
    if is_true(default_allow) and (not idp_initiated or is_true(default_unsolicited)):
        return True
    return False
|
792
|
|
793
|
@trace
def get_attribute_service_index(self, idp_ident):
    """Return the metadata index of the attribute set bound to an
    identity provider (None when the set has no assigned index)."""
    assoc = self.associations.get(idp_ident, {})
    set_name = assoc.get('attribute_set', 'default')
    return self.attribute_sets_ids.get(set_name, None)
|
797
|
|
798
|
@trace
def init_user_session(self, user_data, username, auth_instant, search_attrs=None,
                      auth_class=available_contexts['URN_PROTECTED_PASSWORD'],
                      idp_ident=None, sessionID=None):
    """Open a new SSO user session and populate the caches.

    :param user_data: attributes of the authenticated user (filtered of
        config.IGNORED_ATTRS below).
    :param username: local login of the user.
    :param auth_instant: authentication timestamp.
    :param search_attrs: attributes used to match a federated user; when
        given, they identify the session instead of the username.
    :param auth_class: authentication context of the session.
    :param idp_ident: identity provider that authenticated the user.
    :param sessionID: reuse an existing identifier (session restored
        from the database) instead of generating a new TGC.
    :returns: the session identifier ('' when none could be generated).
    """
    if sessionID is None:
        sessionID = gen_ticket_id('TGC', self.address)
    if sessionID != "":
        if search_attrs is not None:
            # Federated login: keyed on the matching attributes.
            self.user_sessions[sessionID] = [search_attrs, auth_instant, auth_class, idp_ident]
        else:
            self.user_sessions[sessionID] = [username, auth_instant, auth_class, idp_ident]

        # Strip attributes explicitly excluded by the configuration.
        for ignored_attr in config.IGNORED_ATTRS:
            if ignored_attr in user_data:
                del(user_data[ignored_attr])
        self.data_cache[sessionID] = user_data
        self.calc_cache[sessionID] = {}

        self.user_app_tickets[sessionID] = []
        # Automatic expiration of the session after SESSION_TIMER seconds.
        reactor.callLater(config.SESSION_TIMER, self.end_session, sessionID)

        # Mirror the session into the optional Redis store.
        self._DBInitUserSession(sessionID, user_data, username, auth_instant, search_attrs, auth_class, idp_ident)

    return sessionID
|
822
|
|
823
|
@trace
def authenticate_federated_user(self, attrs, idp_ident, auth_instant, auth_class, current_session=None, search_branch=None, attr_set=None):
    """Authenticate a user from attributes asserted by a federated
    identity provider.

    Provider attributes are first translated to local attributes
    (external_attrs handlers), then matched against the directory
    through the data proxy.

    :returns: a Deferred; on success callb_federation receives the
        lookup result, otherwise it fires with ('', {}).
    """
    user_data = {}
    optional_attrs = {}

    if attr_set is None:
        attr_set = self.get_attribute_set(idp_ident, search_branch)
    # Dead code removed: the original built an unused copy of `attrs`
    # (user_attrs) and an unused `search_base` local.
    idp_attrs = {}

    # Translate attributes handled by an external_attrs plugin;
    # pass-through for everything else.
    for idp_attr, value in attrs.items():
        if idp_attr in self.external_attrs.keys():
            ext_attrs = self.external_attrs[idp_attr].get_local_attrs(copy.copy(value), attr_set)
            idp_attrs.update(ext_attrs)
        else:
            idp_attrs[idp_attr] = value

    if search_branch is None:
        search_branch = self._data_proxy.get_search_branch(idp_attrs, attr_set)
    if search_branch is None:
        log.msg(_("Federation - Unable to determine search branch with following attributes : "), idp_attrs)
    else:
        try:
            search_attrs = {}

            for idp_attr, value in idp_attrs.items():
                if idp_attr in attr_set['user_attrs']:
                    # Only the first value of each mapped attribute is
                    # used for the directory search.
                    if len(value) > 0:
                        search_attrs[attr_set['user_attrs'][idp_attr]] = value[0]
                if idp_attr in attr_set['optional_attrs']:
                    optional_attrs[attr_set['optional_attrs'][idp_attr]] = value

            # Search only when every mapped user attribute was provided.
            if search_attrs and (len(search_attrs) == len(attr_set['user_attrs'])):
                log.msg(_('Federation: searching user matching attributes {0} (search branch: {1})').format(str(search_attrs), search_branch))
                defer_fed_data = self._data_proxy.get_user_data(search_attrs, search_branch)
                return defer_fed_data.addCallbacks(self.callb_federation, self.errb_auth,
                        callbackArgs=[search_attrs, optional_attrs, idp_ident, auth_instant, auth_class, current_session])
        except:
            traceback.print_exc()
            user_data = {}
    return defer.succeed(('', user_data))
|
874
|
|
875
|
def set_urllib_proxy(self, ticket=None):
    """Install (or clear) an HTTP proxy for outgoing urllib2 calls.

    ticket : application ticket whose `use_proxy` attribute selects the
             proxy ('default' -> configured PROXY_SERVER/PROXY_PORT,
             'host:port' -> explicit proxy). None, or a ticket without a
             usable value, clears the proxy configured here.
    """
    urllib_proxy = None
    if ticket:
        proxy_app = getattr(ticket, 'use_proxy', '')
        try:
            if proxy_app == 'default':
                # The assert doubles as the guard: a missing configuration
                # drops into the except branch below.
                assert config.PROXY_SERVER and config.PROXY_PORT
                urllib_proxy = {'https':"http://%s:%s" % (config.PROXY_SERVER, config.PROXY_PORT)}
            elif ":" in proxy_app:
                urllib_proxy = {'https':"http://%s:%s" % tuple(proxy_app.split(':'))}
        except Exception:
            # Bug fix: was a bare `except:`; narrowed so that
            # SystemExit/KeyboardInterrupt are no longer swallowed.
            log.msg(_('Could not determine http proxy for ticket {0} (service: {1})').format(ticket.ticket, ticket.service_url))
    # NOTE(review): ProxyHandler(None) falls back to environment proxy
    # settings rather than forcing a direct connection — confirm intended.
    opener = urllib2.build_opener(urllib2.ProxyHandler(urllib_proxy))
    urllib2.install_opener(opener)
|
894
|
|
895
|
def get_user_log_data(self, user_data, additional_attrs=None):
    """Build a short "attr: value" summary of a user for log output.

    user_data        : dict of attribute name -> list of values
    additional_attrs : extra attribute names to show besides 'uid'/'cn'

    Only attributes present in user_data appear; the first value of each
    attribute is used.
    """
    # Bug fix: the default used to be a mutable `[]`, shared between calls
    # and silently growing if any caller mutated it.
    if additional_attrs is None:
        additional_attrs = []
    log_attrs = []
    display_attrs = ['uid', 'cn']
    display_attrs.extend(additional_attrs)
    for log_attr in display_attrs:
        if log_attr in user_data:
            log_attrs.append("%s: %s" % (log_attr, user_data[log_attr][0]))
    return ", ".join(log_attrs)
|
903
|
|
904
|
@trace
def callb_federation(self, result, search_attrs, optional_attrs, idp_ident, auth_instant, auth_class, current_session):
    """Callback of the federated-user directory lookup.

    result          : (bind_success, user_data) pair from the data proxy
                      (bind_success is currently ignored)
    search_attrs    : local attributes the user was searched with
    optional_attrs  : extra attributes collected from the IdP assertion
    current_session : session id to reuse when it matches search_attrs

    Returns (session_id, user_data); session_id is '' when no local
    account matches the federated identity.
    """
    bind_success, user_data = result
    sessionID = ''
    if current_session:
        # Same federated identity already logged in: reuse the session.
        if self.user_sessions[current_session][0] == search_attrs:
            return current_session, user_data
    # Python 2: keys() is a list; the first search attribute is used to
    # extract the local username from the directory entry.
    first_fed_key = search_attrs.keys()[0]
    if first_fed_key in user_data:
        username = user_data[first_fed_key][0]
        if optional_attrs:
            # Merge directory data over the IdP-provided optional
            # attributes (directory values win on conflicts).
            optional_attrs.update(user_data)
            user_data = optional_attrs
        sessionID = self.init_user_session(user_data, username, auth_instant, search_attrs, auth_class, idp_ident)
        log.msg(_('Federation: user found ({0}). Session ID : {1}').format(self.get_user_log_data(user_data, [first_fed_key]), sessionID))
    else:
        log.msg(_('Federation: user from {0} not found ({1})').format(idp_ident, str(search_attrs)))
    return sessionID, user_data
|
927
|
|
928
|
def authenticate(self, username, password, search_branch='default'):
    """Authenticate a user, first against the parent SSO server when one
    is configured, then against the local data proxy.

    Returns a Deferred firing with (session_id, user_data); session_id is
    '' on failure.
    """
    user_data = {}
    sessionID = ''
    # Python 2: normalize unicode credentials to the configured byte encoding.
    if type(username) == unicode: username = username.encode(config.encoding)
    if type(password) == unicode: password = password.encode(config.encoding)
    if self._parent is not None:
        try:
            sessionID, user_data = self._parent.authenticate(username, password, search_branch)
        except Exception, e:
            log.msg(_('Error calling function {0} on parent server : {1}').format('authenticate', str(e)))
            pass
    if sessionID != '':
        # Session granted by the parent: mirror it locally so later
        # lookups succeed without another round-trip.
        log.msg(_("Session delivered by parent serveur for {0}. Session ID : {1}").format(username, sessionID))
        self.data_cache[sessionID] = user_data
        self.calc_cache[sessionID] = {}
        # Placeholder record: [search_attrs, auth_instant, auth_class, ...]
        self.user_sessions[sessionID] = [None,None,None,None]
        self.user_app_tickets[sessionID] = []
    elif password != '':
        # No parent session: try the local directory (asynchronous).
        defer_auth = self._data_proxy.authenticate(username, password, search_branch)
        return defer_auth.addCallbacks(self.callb_auth, self.errb_auth, callbackArgs=[username])
    return defer.succeed((sessionID, user_data))
|
955
|
|
956
|
@trace
def errb_auth(self, failure):
    """Errback shared by the authentication deferreds: log the failure
    coming from the data proxy and fall back to an empty session."""
    service_name = self._data_proxy.service_name
    log.msg(_("! Error accessing Authentication service ({0}) !").format(service_name))
    log.msg(failure.getTraceback())
    return '', {}
|
961
|
|
962
|
@trace
def callb_auth(self, res_auth, username):
    """Process the result returned by the data proxy's authenticate()."""
    authenticated, user_data = res_auth
    if not authenticated:
        log.msg(_("! Authentication failure : {0} !").format(username))
        return '', {}
    # Password logins always get the password-protected context class.
    auth_instant = time.time()
    auth_class = available_contexts['URN_PROTECTED_PASSWORD']
    sessionID = self.init_user_session(user_data, username, auth_instant, auth_class=auth_class)
    log.msg(_('user authentication verified {0}. Session ID : {1} ({2} sessions)')
            .format(self.get_user_log_data(user_data), sessionID, len(self.user_sessions)))
    return sessionID, user_data
|
982
|
|
983
|
def get_login_ticket(self, data=''):
    """Create a login ticket (pre-authentication session) carrying *data*."""
    login_ticket = self.login_sessions.add_session(data)
    return login_ticket
|
985
|
|
986
|
@trace
def validate_session(self, session_id):
    """Check that *session_id* is an existing, valid SSO session.

    Looks locally first (after refreshing from the session DB), then asks
    the parent server; a session validated by the parent is registered
    locally with placeholder data so app tickets can attach to it.
    """
    # May re-hydrate the session from persistent storage.
    self._DBCheckSession(session_id)
    if self.check_ticket_issuer(session_id) and session_id in self.user_sessions:
        return True
    if self._parent is not None:
        try:
            valid_session = self._parent.validate_session(session_id)
        except Exception, e:
            log.msg(_('Error calling function {0} on parent server : {1}').format('validate_session', str(e)))
            valid_session = False
        if valid_session:
            log.msg("%s : %s" % (session_id, _("User session validated on parent server")))
            # Placeholder record: [search_attrs, auth_instant, auth_class, ...]
            if not session_id in self.user_sessions:
                self.user_sessions[session_id] = [None,None,None,None]
                self.user_app_tickets[session_id] = []
            return valid_session
    log.msg("! %s : %s !" % (_("Invalid session"), session_id))
    return False
|
1008
|
|
1009
|
@trace
|
1010
|
def verify_session_id(self, session_id):
|
1011
|
"""vérifie un session_id"""
|
1012
|
if self.check_ticket_issuer(session_id) and session_id in self.user_sessions:
|
1013
|
return True, session_id
|
1014
|
if self._parent is not None:
|
1015
|
try:
|
1016
|
return self._parent.verify_session_id(session_id)
|
1017
|
except Exception, e:
|
1018
|
log.msg(_('Error calling function {0} on parent server : {1}').format('verify_session', str(e)))
|
1019
|
return False, session_id
|
1020
|
log.msg("! %s : %s !" % (_("Unknown user session"), session_id))
|
1021
|
return False, session_id
|
1022
|
|
1023
|
@trace
def get_user_info(self, session_id, details=False):
    """Convenience accessor for the user tied to *session_id*.

    details=False returns the raw session attributes; details=True
    returns the full computed/filtered attribute dict.
    Raises InvalidSession when the session cannot be resolved.
    """
    if session_id in self.user_sessions:
        if self.check_ticket_issuer(session_id):
            if details:
                # Empty filter id and falsy sections argument: full data,
                # no layout description.
                return self._get_user_details(session_id, "", {})
            else:
                return self.user_sessions[session_id][0]
        else:
            # Known locally but issued elsewhere: ask the parent server;
            # failures fall through to InvalidSession.
            try:
                return self._parent.get_user_info(session_id, details)
            except:
                pass
    # NOTE(review): a session id absent from user_sessions never queries
    # the parent — confirm this asymmetry with validate_session is intended.
    raise InvalidSession(session_id)
|
1039
|
|
1040
|
@trace
|
1041
|
def get_auth_instant(self, app_ticket):
|
1042
|
"""renvoie la date à laquelle l'authentification a eu lieu
|
1043
|
"""
|
1044
|
session_id = app_ticket
|
1045
|
ticket = app_ticket
|
1046
|
if self._DBAppSessionFromTicket(app_ticket) and self.app_sessions[app_ticket] is not None:
|
1047
|
ticket = self.app_sessions[app_ticket]
|
1048
|
session_id = ticket.session_id
|
1049
|
if session_id in self.user_sessions:
|
1050
|
return self.user_sessions[session_id][1]
|
1051
|
else:
|
1052
|
|
1053
|
if self._parent is not None:
|
1054
|
try:
|
1055
|
return self._parent.get_auth_instant(app_ticket)
|
1056
|
except Exception, e:
|
1057
|
log.msg(_('Error calling function {0} on parent server : {1}').format('verify_app_ticket', str(e)))
|
1058
|
pass
|
1059
|
raise InvalidSession(session_id)
|
1060
|
|
1061
|
@trace
def get_auth_class(self, app_ticket):
    """Return the authentication context class of a session.

    app_ticket may be an application ticket id or directly a session id.
    Returns None when the session is unknown both here and on the parent.
    """
    session_id = app_ticket
    ticket = app_ticket
    # Resolve an app ticket to its owning SSO session, if it is one.
    if self._DBAppSessionFromTicket(app_ticket) and self.app_sessions[app_ticket] is not None:
        ticket = self.app_sessions[app_ticket]
        session_id = ticket.session_id
    if session_id in self.user_sessions:
        # Slot 2 of the session record holds the auth class (see set_auth_class).
        return self.user_sessions[session_id][2]
    if self._parent is not None:
        try:
            return self._parent.get_auth_class(app_ticket)
        except Exception, e:
            log.msg(_('Error calling function {0} on parent server : {1}').format('get_auth_class', str(e)))
            return None
    log.msg("! %s : %s !" % (_("Unknown user session"), session_id))
    return None
|
1079
|
|
1080
|
@trace
def set_auth_class(self, session_id, auth_class):
    """Change the authentication context class of an existing session."""
    if session_id not in self.user_sessions:
        return False
    self.user_sessions[session_id][2] = auth_class
    log.msg(_('updating authentication context for session {0} : {1}').format(session_id, auth_class))
    return True
|
1088
|
|
1089
|
def logout(self, session_id):
    """Close the SSO session *session_id*.

    The local session (if any) is ended first; when the session was issued
    by another server, the logout is forwarded to the parent.
    Returns 'ok' on success, the parent's result when forwarded, or ''
    when the parent could not be reached.

    (Bug fix: the docstring used to read "vérifie un session_id" —
    copy-pasted from the validation methods — although this method
    terminates the session.)
    """
    if session_id in self.user_sessions:
        self.end_session(session_id)
    if self.check_ticket_issuer(session_id):
        return 'ok'
    # Session issued elsewhere: forward the logout to the parent server.
    try:
        return self._parent.logout(session_id)
    except Exception as e:
        log.msg(_('Error calling function {0} on parent server : {1}').format('logout', str(e)))
    return ''
|
1103
|
|
1104
|
@trace
def get_app_ticket(self, session_id, appurl, ticket_prefix='ST', from_credentials=False, idp_ident=None):
    """Deliver an application ticket for *appurl* bound to *session_id*.

    ticket_prefix    : ticket id prefix ('ST' by default)
    from_credentials : True when the ticket is issued right after login
    idp_ident        : set when the ticket carries federation information

    Returns the new ticket id, or '' when the session is invalid.
    """
    appSessionID = ''
    if self.validate_session(session_id):
        appSessionID = gen_ticket_id(ticket_prefix, self.address)
        ticket = self.init_app_session(appSessionID, session_id, appurl, from_credentials)
        # Mention the attribute filter in the log line when one applies.
        if ticket.filter != '':
            filter_msg = ' (%s : %s)' % (_("attribute filter"), ticket.filter)
        else:
            filter_msg = ''
        if idp_ident:
            log.msg("%s -- %s" % (session_id, _("federation information stored (identity provider: {0})").format(idp_ident)))
        else:
            log.msg("%s -- %s %s%s" % (session_id, _("Session authorized for service"), ticket.service_url, filter_msg))
        return appSessionID
    else:
        log.msg("%s : %s" % (_("Unknown user session"), session_id))
        log.msg("! %s -- %s %s !" % (session_id, _("Failed to create session for service"), appurl))
        return ''
|
1126
|
|
1127
|
@trace
def init_app_session(self, appSessionID, session_id, appurl, from_credentials):
    """Create an application ticket and arm its expiry timer.

    The ticket is registered in memory, persisted, and attached to the
    owning SSO session; it auto-invalidates after config.APP_TIMER seconds.
    """
    ticket = AppTicket(appSessionID, session_id, appurl, self.address, from_credentials=from_credentials)
    # Attribute filter / proxy settings configured for this service URL.
    id_filter, use_proxy = self._check_filter(ticket.service_url)
    ticket.filter = id_filter
    ticket.use_proxy = use_proxy
    self.app_sessions[appSessionID] = ticket
    self.user_app_tickets[session_id].append(ticket)
    # Persist so the ticket survives a restart.
    self._DBAddAppTicket(ticket)
    ticket.timeout_callb = reactor.callLater(config.APP_TIMER, self.invalidate_app_ticket, appSessionID)
    return ticket
|
1147
|
|
1148
|
@trace
def invalidate_app_ticket(self, appSessionID):
    """Flag an application ticket as unusable (expiry-timer callback)."""
    if not self._DBAppSessionFromTicket(appSessionID):
        return
    self.app_sessions[appSessionID].valid = False
|
1154
|
|
1155
|
@trace
def verify_app_ticket(self, app_ticket, appurl):
    """Validate the ticket presented by an application for *appurl*.

    Returns (verified, app_ticket); delegates to the parent server when
    the ticket is unknown locally.
    """
    verified = False
    ticket = app_ticket
    if self._DBAppSessionFromTicket(app_ticket):
        ticket = self.app_sessions[app_ticket]
        verified = ticket.verif_ticket(app_ticket, appurl)
    else:
        # Unknown here: maybe it was issued by the parent server.
        if self._parent is not None:
            try:
                return self._parent.verify_app_ticket(app_ticket, appurl)
            except Exception, e:
                log.msg(_('Error calling function {0} on parent server : {1}').format('verify_app_ticket', str(e)))
                pass
    if not verified:
        log.msg("! %s %s !" % (_("Session verification failed for service"), get_service_from_url(appurl)))
    return verified, app_ticket
|
1174
|
|
1175
|
@trace
def get_user_details(self, app_ticket, appurl, sections=False, renew=False, keep_valid=False):
    """Validate a ticket and return the user's information.

    sections   : when True, also return a description of the data layout
    renew      : when True, require that the ticket was issued during the
                 authentication itself (fresh credentials)
    keep_valid : when True, the ticket stays valid after the data is read

    Returns (True, details) on success, or (False, error_dict) — the
    error_dict is wrapped in an (error_dict, "") pair when *sections*.
    """
    if config.DEBUG_LOG:
        log.msg('--- %s %s' % (_("Validating session for"), appurl))
    code = 'INVALID_TICKET'
    detail = "%s : %s" % (_("Unknown ticket"), app_ticket)
    if self._DBAppSessionFromTicket(app_ticket):
        if config.DEBUG_LOG:
            log.msg('--- %s' % _("Session OK"))
        ticket = self.app_sessions[app_ticket]
        if not ticket.from_credentials and renew:
            # Caller demanded a fresh login; this ticket was not issued
            # during authentication.
            detail = _("Ticket {0} was not delivered during authentication process").format(app_ticket)
        elif ticket.verif_ticket(app_ticket, appurl, keep_valid):
            if config.DEBUG_LOG:
                log.msg('--- %s' % _("Appurl verified"))
            return True, self._get_user_details(ticket.session_id, ticket.filter, sections, ticket)
        else:
            code = 'INVALID_SERVICE'
            detail = _("Ticket has not been delivered for service {0}").format(appurl)
            log.msg("! %s !" % _("Error reading user data : invalid session"))
    else:
        # Unknown here: maybe it was issued by the parent server.
        if self._parent is not None:
            try:
                return self._parent.get_user_details(app_ticket, appurl, sections, renew)
            except Exception, e:
                log.msg(_('Error calling function {0} on parent server : {1}').format('get_user_details', str(e)))
                pass
    log.msg("! %s %s !" % (_("User data access denied for service"), appurl))
    if sections:
        return False, ({'code':code,'detail':detail},"")
    return False, {'code':code,'detail':detail}
|
1216
|
|
1217
|
@trace
def get_proxy_granting_ticket(self, app_ticket, service, pgturl):
    """Attach a proxy-granting ticket to a validated application ticket.

    Returns the ticket object on success, None otherwise.
    """
    if app_ticket not in self.app_sessions:
        return None
    ticket = self.app_sessions[app_ticket]
    if not ticket.verif_ticket(app_ticket, service, keep_valid=True):
        return None
    ticket.generate_pgt(pgturl)
    self.proxy_granting_sessions[ticket.pgt] = ticket
    return ticket
|
1228
|
|
1229
|
@trace
def get_proxy_ticket(self, pgt, target_service):
    """Issue a proxy ticket (PT) for *target_service* from a PGT."""
    granting_ticket = self.proxy_granting_sessions[pgt]
    proxy_ticket_id = gen_ticket_id('PT', self.address)
    # Shorter id for IMAP targets — presumably because IMAP logins cap
    # the ticket length; TODO confirm.
    if target_service.startswith(('imap://', 'imaps://')):
        proxy_ticket_id = proxy_ticket_id[:60]
    ticket = self.init_app_session(proxy_ticket_id, granting_ticket.session_id, target_service, granting_ticket.from_credentials)
    log.msg("%s -- %s %s" % (granting_ticket.session_id, _("Proxy Session created for service"), target_service))
    ticket.parent = granting_ticket
    return ticket.ticket
|
1241
|
|
1242
|
@trace
def invalidate_proxy(self, app_ticket):
    """Drop the proxy parameters of an app_ticket when the callback-url
    checks failed."""
    if app_ticket not in self.app_sessions:
        return
    ticket = self.app_sessions[app_ticket]
    if ticket.pgt is None:
        return
    del self.proxy_granting_sessions[ticket.pgt]
    ticket.reset_pgt()
|
1251
|
|
1252
|
def get_app_infos(self, appurl):
    """Return the (id_filter, use_proxy) pair configured for a service URL.

    Applications are looked up by scheme, then port, then by the longest
    matching base path; the candidate's host must also match (by IP or by
    domain). Returns None when nothing matches.
    """
    url = urlparse.urlparse(appurl)
    if url.scheme in self.apps:
        store = self.apps[url.scheme]
    else:
        store = self.apps['http']
    if url.port in store:
        apps = store[url.port]
    else:
        apps = store['all']
    # Longest paths first so the most specific base URL wins.
    app_paths = apps.keys()
    app_paths.sort(reverse=True)
    url_path = url.path
    if not url_path.startswith('/'): url_path = '/' + url_path
    for baseurl in app_paths:
        # Bug fix: check_baseurl was unconditionally overwritten with the
        # raw baseurl right after being normalized, so base paths
        # configured without a leading '/' could never match url_path
        # (which always starts with '/').
        if not baseurl.startswith('/'):
            check_baseurl = '/' + baseurl
        else:
            check_baseurl = baseurl
        if url_path.startswith(check_baseurl):
            for app in apps[baseurl]:
                scheme, addr, typeaddr, id_filter, use_proxy = app
                if typeaddr == 'ip':
                    addr_ok = check_hostname_by_ip(url, addr)
                else:
                    addr_ok = check_hostname_by_domain(url, addr)
                if addr_ok:
                    if config.DEBUG_LOG:
                        log.msg(_("Applied filter {0} for url {1}").format(id_filter, get_service_from_url(appurl)))
                    return id_filter, use_proxy
            # Only the most specific matching base path is considered.
            break
    return None
|
1289
|
|
1290
|
@trace
def _check_filter(self, appurl):
    """Return the (id_filter, use_proxy) pair for *appurl*, falling back
    to ("default", "") when no application matches."""
    app_infos = self.get_app_infos(appurl)
    if app_infos:
        return app_infos
    return ("default", "")
|
1294
|
|
1295
|
@trace
def _filter_data(self, infos, id_filter, sections, ticket=None):
    """Filter the user data according to the application's attribute filter.

    infos     : full attribute dict of the user
    id_filter : name of the filter to apply
    sections  : when truthy, also return the section layout of the data
    ticket    : app ticket; SAML tickets skip the global filter

    Returns the filtered dict, or (dict, layout) when *sections*; an
    unknown id_filter passes all data through with an empty layout.
    """
    data = {}
    filter_data = {}
    if id_filter in self.filters:
        # Non-SAML tickets first receive the attributes of the global filter.
        if ticket and not hasattr(ticket, 'saml_ident'):
            for section, glob_attrs in self.global_filter.items():
                for libelle_cas, nom_val in glob_attrs.items():
                    data[nom_val] = infos.get(nom_val, '')
                if section not in filter_data:
                    # Copy: the layout is mutated below, keep the
                    # configured global filter intact.
                    filter_data[section] = copy.copy(glob_attrs)
                else:
                    filter_data[section].update(copy.copy(glob_attrs))
        # Then the application-specific filter, merged per section.
        for section, attrs in self.filters[id_filter].items():
            for libelle_cas, nom_val in attrs.items():
                data[nom_val] = infos.get(nom_val, '')
            if section in filter_data:
                filter_data[section].update(attrs)
            else:
                filter_data[section] = attrs
    else:
        # Unknown filter: pass everything through, no layout information.
        data.update(infos)
        filter_data = ""
    if sections:
        return data, filter_data
    else:
        return data
|
1326
|
|
1327
|
def _add_user_infos(self, infos, session_id, ticket):
    """Enrich *infos* in place with server-level and ticket-level data.

    Adds the establishment identifiers, the SAML attributes carried by
    the ticket (when present), the service URL, and the session's
    authentication class. Returns None; *infos* is mutated.
    """
    infos.update({'rne':[config.RNE],'nom_etab':[config.ETABLISSEMENT]})
    # Federation data stored on the ticket, when present.
    if ticket and hasattr(ticket, 'saml_ident'):
        infos['saml_ident'] = ticket.saml_ident
    if ticket and hasattr(ticket, 'saml_role'):
        infos['saml_role'] = ticket.saml_role
    if ticket and hasattr(ticket, 'uaj'):
        infos['uaj'] = ticket.uaj
    if ticket and hasattr(ticket, 'service_url'):
        infos['service_url'] = ticket.service_url
    try:
        auth_class = self.user_sessions[session_id][2]
        assert auth_class in available_contexts.values()
    except Exception:
        # Bug fix: was a bare `except:`. Unknown sessions or invalid
        # classes deliberately fall back to the password context.
        auth_class = available_contexts['URN_PROTECTED_PASSWORD']
    infos['auth_class'] = auth_class
|
1349
|
|
1350
|
@trace
def _get_user_details(self, session_id, id_filter, sections, ticket=None):
    """Return the known information of the user tied to *session_id*.

    Merges the cached directory data (local or fetched from the parent),
    the server/ticket-level attributes, the computed attributes declared
    in user_infos.dict_infos, then applies the application filter.
    """
    # Base data: local cache, or the parent server for foreign sessions.
    if self.check_ticket_issuer(session_id) and self.data_cache.has_key(session_id):
        infos = self.data_cache[session_id]
    else:
        try:
            infos = self._parent.get_user_info(session_id, True)
        except Exception, e:
            log.msg(_('Error calling function {0} on parent server : {1}').format('get_user_info', str(e)))
            infos = {}
    self._add_user_infos(infos, session_id, ticket)
    # Computed attributes: each entry of dict_infos maps a name to a
    # (callable, use_cache) pair; callables returning a dict contribute
    # several attributes at once (collected in multi_infos).
    multi_infos = {}
    # Python 2: keys() is a list; processed in sorted order so computed
    # attributes can build on previously computed ones.
    calc_names = self.user_infos.dict_infos.keys()
    calc_names.sort()
    calc_infos = None
    for calc_name in calc_names:
        calc_func, use_cache = self.user_infos.dict_infos[calc_name]
        if use_cache and calc_name in self.calc_cache.get(session_id, {}):
            calc_infos = self.calc_cache[session_id][calc_name]
            if config.DEBUG_LOG:
                log.msg(_('{0} --- calculated attributes ({1}) fetched from cache').format(session_id, calc_name))
        else:
            # NOTE(review): stray debug log line ("CLEON") left in place —
            # runtime string, not altered here.
            log.msg("CLEON %s NO CACHE" % (calc_name))
            try:
                # The computation sees the base data plus everything
                # computed so far.
                known_infos = {}
                known_infos.update(infos)
                known_infos.update(multi_infos)
                calc_infos = calc_func(known_infos)
                assert calc_infos is not None, _('calculated attribute returned None')
                if use_cache:
                    self.calc_cache[session_id][calc_name] = calc_infos
            except Exception, e:
                # Best effort: a failing computed attribute keeps the
                # previous calc_infos value (possibly from an earlier name).
                log.msg("""! %s '%s' : %s""" % (_("Error computing data for attribute"), calc_name, str(e)))
                if config.DEBUG_LOG:
                    traceback.print_exc()
            if config.DEBUG_LOG:
                if use_cache:
                    log.msg(_('{0} --- calculated attributes ({1}) processed and stored in cache').format(session_id, calc_name))
                else:
                    log.msg(_('{0} --- calculated attributes processed ({1})').format(session_id, calc_name))
        # A dict result contributes several attributes; anything else is
        # stored under the computed attribute's own name.
        if type(calc_infos) is dict:
            multi_infos.update(calc_infos)
        else:
            infos[calc_name] = calc_infos
    infos.update(multi_infos)
    # Application-specific attribute filtering.
    infos = self._filter_data(infos, id_filter, sections, ticket)
    if config.DEBUG_LOG:
        log.msg('--- %s' % _("User data sent"))
    return infos
|