zephirservice.py

Tom Ricci, 28/03/2024 16:51

# -*- coding: UTF-8 -*-
###########################################################################
# Eole NG - 2007
# Copyright Pole de Competence Eole  (Ministere Education - Academie Dijon)
# Licence CeCill  cf /root/LicenceEole.txt
# eole@ac-dijon.fr
###########################################################################

"""
Twisted services for collecting and publishing data.
"""

import locale, gettext, os, pwd, shutil, random
from pathlib2 import Path
from glob import glob
import cjson
import traceback

# install locales early
from zephir.monitor.agentmanager import ZEPHIRAGENTS_DATADIR
APP = 'zephir-agents'
DIR = os.path.join(ZEPHIRAGENTS_DATADIR, 'i18n')
gettext.install(APP, DIR, unicode=False)

from creole.client import CreoleClient

from twisted.application import internet, service
from twisted.internet import utils, reactor
from twisted.web import resource, server, static, util, xmlrpc
from twisted.python import syslog

from zephir.monitor.agentmanager import config as cfg
from zephir.monitor.agentmanager.util import ensure_dirs, md5file, get_md5files, log
from zephir.monitor.agentmanager.web_resources import ZephirServerResource
from zephir.monitor.agentmanager.clientmanager import ClientManager

try:
    import zephir.zephir_conf.zephir_conf as conf_zeph
    from zephir.lib_zephir import zephir_proxy, convert, zephir_dir, update_sudoers, charset
    from zephir.lib_zephir import log as zeph_log
    registered = 1
except:
    # server not registered on zephir
    registered = 0

creole_client = None

def container_aware_path(path):
    global creole_client
    path_elements = path.split('::')
    path = Path(path_elements[0])
    name = path.name
    if len(path_elements) > 1 and path_elements[1]:
        container = path_elements[1]
        if not creole_client:
            creole_client = CreoleClient()
        container_path = creole_client.get_creole('container_path_{}'.format(container), None)
        if container_path:
            path = Path(container_path).joinpath(path)
        name = name + "::{}".format(container)
    return path, name
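
# Illustrative note (not part of the original code): container_aware_path()
# resolves the optional 'path::container' syntax used by the fichiers_zephir
# lists handled further below. Assuming a relative entry and a container named
# 'web' whose Creole variable 'container_path_web' is '/opt/lxc/web/rootfs'
# (hypothetical values), one would get:
#   container_aware_path('etc/ssh/sshd_config::web')
#   -> (Path('/opt/lxc/web/rootfs/etc/ssh/sshd_config'), 'sshd_config::web')
# Without the '::' suffix the path is returned as-is together with its basename.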

class ZephirService(service.MultiService):
    """Main Twisted service for Zephir apps"""

    def __init__(self, config, root_resource=None, serve_static=False):
        """config will be completed by default values"""
        service.MultiService.__init__(self)
        self.config = cfg.DEFAULT_CONFIG.copy()
        self.config.update(config)
        self.updater = self.publisher = None
        # update the client scripts listed in sudoers
        if registered:
            update_sudoers()
        # parent web server
        if root_resource is None:
            self.root_resource = resource.Resource()
            webserver = internet.TCPServer(self.config['webserver_port'],
                                                server.Site(self.root_resource))
            webserver.setServiceParent(service.IServiceCollection(self))
        else:
            self.root_resource = root_resource
        # serve global static files
        if serve_static:
            self.root_resource.putChild('static',
                                        static.File(self.config['static_web_dir']))


    # subservices factory methods

    def with_updater(self):
        assert self.updater is None
        self.updater = UpdaterService(self.config, self, self.root_resource)
        return self

    def with_publisher(self):
        assert self.publisher is None
        self.publisher = PublisherService(self.config, self, self.root_resource)
        return self

    def with_updater_and_publisher(self):
        assert self.updater is None
        assert self.publisher is None
        self.updater = UpdaterService(self.config, self, self.root_resource)
        self.publisher = PublisherService(self.config, self, self.root_resource,
                                          show_clients_page = False,
                                          live_agents={self.config['host_ref']: self.updater.agents})
        return self



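# Illustrative sketch (assumption, not taken from this project): a Twisted
# .tac file could wire the services together through the factory methods
# defined above, roughly as follows. The module import path and the
# configuration dict passed to ZephirService are assumptions.
#
#   from twisted.application import service
#   from zephir.monitor.agentmanager.zephirservice import ZephirService
#
#   application = service.Application('zephir-agents')
#   zephir = ZephirService({}).with_updater_and_publisher()
#   zephir.setServiceParent(application)
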
class UpdaterService(service.MultiService, xmlrpc.XMLRPC):
    """Schedules measures, data serialisation and upload."""

    def __init__(self, config, parent, root_resource):
        """config should be complete"""
        service.MultiService.__init__(self)
        xmlrpc.XMLRPC.__init__(self)
        self.old_obs = None
        self.config = config
        # updates site.cfg file
        self.update_static_data()
        # start subservices
        loc, enc = locale.getdefaultlocale()
        log.msg(_('default locale: %s encoding: %s') % (loc, enc))
        if enc == 'utf':
            log.msg(_('Warning: locale encoding %s broken in RRD graphs, set e.g: LC_ALL=fr_FR') % enc)
        self.agents = self.load_agents()
        # attach to parent service
        self.setServiceParent(service.IServiceCollection(parent))
        root_resource.putChild('xmlrpc', self)

    def startService(self):
        """initialize zephir services"""
        service.MultiService.startService(self)
        reactor.callLater(2,self.schedule_all)
        # update the log prefix (twisted by default)
        # FIX: keep a reference to the old observer to
        # avoid problems when the service is shut down
        self.old_obs = None
        if len(log.theLogPublisher.observers) >= 1:
            self.old_obs = log.theLogPublisher.observers[0]
        try:
            from zephir.backend import config as conf_zeph
            log_prefix = 'zephir_backend'
        except:
            log_prefix = 'zephiragents'
        new_obs = syslog.SyslogObserver(log_prefix, options=syslog.DEFAULT_OPTIONS, facility=syslog.DEFAULT_FACILITY)
        log.addObserver(new_obs.emit)
        log.removeObserver(self.old_obs)
        if registered != 0:
            # we are registered on zephir => start creating
            # and sending archives
            self.setup_uucp()
            # if a reboot was requested, report that the restart completed
            if os.path.isfile(os.path.join(zephir_dir,'reboot.lck')):
                try:
                    zeph_log('REBOOT',0,'redémarrage du serveur terminé')
                    os.unlink(os.path.join(zephir_dir,'reboot.lck'))
                except:
                    pass

    def stopService(self):
        """stops zephir services"""
        if self.old_obs:
            log.removeObserver(log.theLogPublisher.observers[0])
            log.addObserver(self.old_obs)
        service.MultiService.stopService(self)

    def load_agents(self):
        """Loads all agents from the configuration directory."""
        log.msg(_("Loading agents from %s...") % self.config['config_dir'])
        loaded_agents = {}
        list_agents = glob(os.path.join(self.config['config_dir'], "*.agent"))
        for f in list_agents:
            log.msg(_("  from %s:") % os.path.basename(f))
            h = { 'AGENTS': None }
            execfile(f, globals(), h)
            assert h.has_key('AGENTS')
            for a in h['AGENTS']:
                assert not loaded_agents.has_key(a.name)
                # init agent data and do a first archive
                a.init_data(os.path.join(self.config['state_dir'],
                                         self.config['host_ref'],
                                         a.name))
                a.manager = self
                a.archive()
                loaded_agents[a.name] = a # /!\ overwriting of keys
                log.msg(_("    %s, period %d") % (a.name, a.period))
        log.msg(_("Loaded."))
        return loaded_agents
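
    # Illustrative sketch (assumption): each '<config_dir>/*.agent' file
    # exec'd by load_agents() must define an AGENTS list of agent objects
    # providing at least name, period, init_data(), archive() and
    # scheduled_measure(). A minimal file could look like this (the
    # RRDAgent class and its import path are hypothetical):
    #
    #   from zephir.monitor.agents import RRDAgent
    #   AGENTS = [RRDAgent(name='load', period=60)]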


    # scheduling measures

    def schedule(self, agent_name):
        """Schedules the periodic measures of an agent."""
        assert self.agents.has_key(agent_name)
        if self.agents[agent_name].period > 0:
            timer = internet.TimerService(self.agents[agent_name].period,
                                          self.wakeup_for_measure, agent_name)
            timer.setName(agent_name)
            timer.setServiceParent(service.IServiceCollection(self))


    def wakeup_for_measure(self, agent_name):
        """Callback for scheduled measures."""
        assert self.agents.has_key(agent_name)
        # log.debug("Doing scheduled measure on " + agent_name)
        self.agents[agent_name].scheduled_measure()


    def schedule_all(self):
        """Schedules all loaded agents.
        Starts the periodic measure cycle of each loaded
        agent. The first measure is taken immediately.
        """
        for agent_name in self.agents.keys():
            # load the available actions (standard ones first, then local actions)
            # local actions override the standard ones when both exist
            for action_dir in (os.path.join(self.config['action_dir'],'eole'), self.config['action_dir']):
                f_actions = os.path.join(action_dir, "%s.actions" % agent_name)
                if os.path.isfile(f_actions):
                    actions = {}
                    execfile(f_actions, globals(), actions)
                    for item in actions.keys():
                        if item.startswith('action_'):
                            setattr(self.agents[agent_name], item, actions[item])
            # self.wakeup_for_measure(agent_name) # first measure at launch
            self.schedule(agent_name)
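
    # Illustrative sketch (assumption): schedule_all() exec's the optional
    # '<action_dir>/[eole/]<agent_name>.actions' files and attaches every
    # name starting with 'action_' to the corresponding agent. Such a file
    # could contain, for instance (signature and body are hypothetical,
    # only the 'action_' prefix is required by the code above):
    #
    #   def action_restart(agent):
    #       return 'service restarted'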


    def timer_for_agent_named(self, agent_name):
        assert self.agents.has_key(agent_name)
        return self.getServiceNamed(agent_name)


    # data upload to zephir server

    def setup_uucp(self):
        ensure_dirs(self.config['uucp_dir'])
        self.update_static_data()
        # retrieve the connection delay configured on zephir
        try:
            reload(conf_zeph)
            # remove any stale stats directories,
            # e.g. after a 'manual' unregistration from zephir.

            # on zephir itself: always keep 0 to avoid conflicts with registered servers
            if not os.path.isdir('/var/lib/zephir'):
                for st_dir in os.listdir(self.config['state_dir']):
                    if st_dir != str(conf_zeph.id_serveur):
                        shutil.rmtree(os.path.join(self.config['state_dir'],st_dir))
            # check the connection delay on zephir
            period = convert(zephir_proxy.serveurs.get_timeout(conf_zeph.id_serveur)[1])
        except:
            period = 0

        if period < 30:
            period = self.config['upload_period']
            log.msg(_('Using default period : %s seconds') % period)
        # add a random offset (between 30 seconds and period) at first start
        # (to avoid too many simultaneous connections if the service is relaunched by crontab)
        delay = random.randrange(30,period)
        reactor.callLater(delay,self.wakeup_for_upload)

    def update_static_data(self):
        original = os.path.join(self.config['config_dir'], 'site.cfg')
        if os.path.isfile(original):
            destination = cfg.client_data_dir(self.config, self.config['host_ref'])
            ensure_dirs(destination)
            need_copy = False
            try:
                org_mtime = os.path.getmtime(original)
                dest_mtime = os.path.getmtime(os.path.join(destination, 'site.cfg'))
            except OSError:
                need_copy = True
            if need_copy or (org_mtime > dest_mtime):
                shutil.copy(original, destination)

    def wakeup_for_upload(self, recall=True):
        # re-read the connection delay from zephir
        try:
            reload(conf_zeph)
            period = convert(zephir_proxy.serveurs.get_timeout(conf_zeph.id_serveur)[1])
        except:
            period = 0
        # reschedule this function after the requested delay
        if period < 30:
            period = self.config['upload_period']
            log.msg(_('Using default period : %s seconds') % period)
        # add an offset at first start
        # (to avoid too many simultaneous connections if the service is relaunched by crontab)
        if recall:
            reactor.callLater(period,self.wakeup_for_upload)

        # drop the old archive from the uucp directory
        for agent in self.agents.values():
            agent.archive()
            # agent.reset_max_status()
        self.update_static_data()
        # archive into the uucp directory, grant read rights on the archive
        try:
            assert conf_zeph.id_serveur != 0
            client_dir = os.path.join(self.config['tmp_data_dir'],str(conf_zeph.id_serveur))
        except:
            client_dir = os.path.join(self.config['tmp_data_dir'],self.config['host_ref'])
        try:
            # purge the temporary directory
            if os.path.isdir(client_dir):
                shutil.rmtree(client_dir)
            os.makedirs(client_dir)
        except: # does not exist
            pass
        args = ['-Rf',os.path.abspath(os.path.join(cfg.client_data_dir(self.config, self.config['host_ref']),'site.cfg'))]
        ignore_file = os.path.abspath(os.path.join(self.config['state_dir'],'ignore_list'))
        if os.path.exists(ignore_file):
            args.append(ignore_file)
        # only copy the data of instantiated agents
        # this avoids uploading, for example, rvp stats when that service has been disabled
        for agent_name in self.agents.keys():
            args.append(os.path.abspath(cfg.agent_data_dir(self.config, self.config['host_ref'],agent_name)))
        args.append(os.path.abspath(client_dir))
        res = utils.getProcessOutput('/bin/cp', args = args)
        res.addCallbacks(self._make_archive,
                         lambda x: log.msg(_("/!\ copy failed (%s)\n"
                                             "data: %s")
                                           % (x, self.config['state_dir'])))
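
    # Reading aid (derived from the code above and below): wakeup_for_upload()
    # copies the agent data and site.cfg into tmp_data_dir with /bin/cp, then
    # _make_archive() adds the md5 manifest and the package list and builds a
    # gzipped tar in uucp_dir, and _try_chown() finally hands the archive over
    # to the 'uucp' user and triggers /usr/share/zephir/scripts/zephir_client
    # to send it to the Zephir server.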

    def _check_md5(self):
        def to_bytes(objet):
            """Converts the unicode objects contained in an object to bytes
            """
            if isinstance(objet, tuple):
                l = []
                for item in objet:
                    l.append(to_bytes(item))
                return '({})'.format(', '.join(l))
            if isinstance(objet, list):
                l = []
                for item in objet:
                    l.append(to_bytes(item))
                return '[{}]'.format(', '.join(l))
            if isinstance(objet, dict):
                dico={}
                for cle in objet:
                    dico[to_bytes(cle)] = to_bytes(objet[cle])
                return '{{{}}}'.format(', '.join(['{}: {}'.format(el[0], el[1]) for el in sorted(dico.items())]))
            if isinstance(objet, unicode):
                string =  objet.encode(charset)
                return "'{}'".format(string)
            if isinstance(objet, int):
                return str(objet)
            if isinstance(objet, float):
                return str(objet)
            if objet == None:
                return 'None'
            return objet

        # compute md5 sums for config.eol and the patches
        rep_src = "/usr/share/eole/creole"
        rep_conf = "/etc/eole"
        data = []
        try:
            for src, dst, pattern in get_md5files(cfg.distrib_version):
                if src == 'variables.eol':
                    # special case: variables.eol, the file is regenerated every time
                    orig_eol = os.path.join(rep_conf, 'config.eol')
                    if os.path.isfile(orig_eol):
                        var_eol = os.path.join(rep_src, 'variables.eol')
                        # create a file with variable:value pairs ordered by variable name
                        conf = cjson.decode(file(orig_eol).read(), all_unicode=True)
                        var_names = conf.keys()
                        var_names.sort()
                        with open(var_eol, 'w') as f_var:
                            for key, value in sorted(conf.items()):
                                if key not in ['mode_zephir', '___version___'] and isinstance(value, dict) and 'val' in value:
                                    if type(value['val']) in [list, dict, tuple]:
                                        converted_value = to_bytes(value['val'])
                                    else:
                                        converted_value = value['val']
                                    f_var.write('{}:{}\n'.format(key, converted_value))
                if os.path.isdir(os.path.join(rep_src,src)):
                    fics = os.listdir(os.path.join(rep_src,src))
                    fics = [(os.path.join(src,fic),os.path.join(dst,fic)) for fic in fics]
                else:
                    fics = [(src,dst)]
                for fic, fic_dst in fics:
                    if os.path.isfile(os.path.join(rep_src,fic)):
                        if (pattern is None) or fic.endswith(pattern):
                            md5res = md5file(os.path.join(rep_src,fic))
                            data.append("%s  %s\n" % (md5res, fic_dst))
            if Path('/usr/share/zephir/zephir_conf/fichiers_zephir').is_file():
                with open('/usr/share/zephir/zephir_conf/fichiers_zephir', 'r') as fz_fh:
                    fichiers_zephir = []
                    for l in fz_fh.readlines():
                        if l.startswith('#'):
                            continue
                        if l.startswith('%%'):
                            break
                        fichier_serveur, name = container_aware_path(l.strip())
                        if fichier_serveur.is_file():
                            md5res = md5file(fichier_serveur.as_posix())
                            data.append("{}  {}\n".format(md5res, Path('fichiers_zephir').joinpath(name)))
                        elif fichier_serveur.is_dir():
                            for step in os.walk(fichier_serveur.as_posix()):
                                for sub_fichier_serveur in step[2]:
                                    sub_fichier_serveur_full_path = Path(step[0]).joinpath(sub_fichier_serveur)
                                    md5res = md5file(sub_fichier_serveur_full_path.as_posix())
                                    sub_fichier_serveur = sub_fichier_serveur_full_path.relative_to(fichier_serveur)
                                    data.append("{}  {}\n".format(md5res,
                                                                Path('fichiers_zephir').joinpath(name, sub_fichier_serveur)))
            if Path('/usr/share/zephir/zephir_conf/fichiers_variante').is_file():
                with open('/usr/share/zephir/zephir_conf/fichiers_variante', 'r') as fz_fh:
                    fichiers_zephir = []
                    for l in fz_fh.readlines():
                        if l.startswith('#'):
                            continue
                        if l.startswith('%%'):
                            break
                        fichier_serveur, name = container_aware_path(l.strip())
                        if fichier_serveur.is_file():
                            md5res = md5file(fichier_serveur.as_posix())
                            data.append("{}  {}\n".format(md5res, Path('fichiers_zephir').joinpath('variante', name)))
        except:
            # do not prevent the remaining operations if the md5 computation fails
            log.msg('!! Erreur rencontrée lors du calcul du md5 de config.eol !!')
            traceback.print_exc()
        try:
            assert conf_zeph.id_serveur != 0
            outf = file(os.path.join(self.config['tmp_data_dir'],"config%s.md5" % str(conf_zeph.id_serveur)), "w")
        except:
            outf = file(os.path.join(self.config['tmp_data_dir'],"config%s.md5" % self.config['host_ref']), "w")
        outf.writelines(data)
        outf.close()
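
    # Illustrative note (derived from _check_md5() above): the generated
    # 'config<id>.md5' manifest holds one '<md5sum>  <path>' line per tracked
    # file, e.g. (hypothetical values):
    #   d41d8cd98f00b204e9800998ecf8427e  fichiers_zephir/sshd_config
    # The fichiers_zephir and fichiers_variante lists are read one path per
    # line: lines starting with '#' are skipped, a line starting with '%%'
    # stops the parsing, and a '::container' suffix is resolved through
    # container_aware_path().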

    def _get_packages(self, *args):
        """generates the list of installed packages
        """
        try:
            assert conf_zeph.id_serveur != 0
            cmd_pkg = ("/usr/bin/dpkg-query -W >" + os.path.join(self.config['tmp_data_dir'],"packages%s.list" % str(conf_zeph.id_serveur)))
        except:
            cmd_pkg = ("/usr/bin/dpkg-query -W >" + os.path.join(self.config['tmp_data_dir'],"packages%s.list" % self.config['host_ref']))
        os.system(cmd_pkg)

    def _make_archive(self,*args):
        self._check_md5()
        self._get_packages()
        # compress the data to send
        try:
            assert conf_zeph.id_serveur != 0
            tarball = os.path.join(self.config['uucp_dir'],'site%s.tar' % str(conf_zeph.id_serveur))
        except:
            tarball = os.path.join(self.config['uucp_dir'],'site%s.tar' % self.config['host_ref'])
        tar_cwd = os.path.dirname(os.path.abspath(self.config['tmp_data_dir']))
        tar_dir = os.path.basename(os.path.abspath(self.config['tmp_data_dir']))
        res = utils.getProcessOutput('/bin/tar',
                                     args = ('czf', tarball,
                                             '--exclude', 'private',
                                             '-C', tar_cwd,
                                             tar_dir))
        res.addCallbacks(self._try_chown,
                         lambda x: log.msg(_("/!\ archiving failed (%s)\n"
                                             "data: %s\narchive: %s")
                                           % (str(x), self.config['state_dir'], tarball)),
                         callbackArgs = [tarball])

    def _try_chown(self, tar_output, tarball):
        try:
            uucp_uid, uucp_gid = pwd.getpwnam('uucp')[2:4]
            uid = os.getuid()
            os.chown(tarball, uucp_uid, uucp_gid) # hand the archive over to uucp so that it can read it for upload
        except OSError, e:
            log.msg("/!\ chown error, check authorizations (%s)" % e)
        # upload uucp
        # also chown the deffered_logs file in case it is owned by root
        try:
            uucp_uid, uucp_gid = pwd.getpwnam('uucp')[2:4]
            os.chown('/usr/share/zephir/deffered_logs', uucp_uid, uucp_gid)
        except:
            log.msg("/!\ chown error on deffered_logs")
        os.system('/usr/share/zephir/scripts/zephir_client call &> /dev/null')


    # xmlrpc methods

    def xmlrpc_list_agents(self):
        """@return: the list of loaded agents"""
        return self.agents.keys()
    xmlrpc_list_agents.signature = [['array']]

    def xmlrpc_agents_menu(self):
        """@return: the list of loaded agents and the display structure"""
        try:
            menu = {}
            for name, agent in self.agents.items():
                if agent.section != None:
                    if not menu.has_key(agent.section):
                        menu[agent.section] = []
                    menu[agent.section].append((name, agent.description))
            return menu
        except Exception, e:
            log.msg(e)
    xmlrpc_agents_menu.signature = [['struct']]

    def xmlrpc_status_for_agents(self, agent_name_list = []):
        """
        @return: the statuses of the listed agents in a
        C{{name: status}} dictionary. The status is itself a dictionary
        with the keys C{'level'} and C{'message'}. Only the names of
        agents that are actually loaded will appear among the keys of
        the dictionary.
        """
        result = {}
        if len(agent_name_list) == 0:
            agent_name_list = self.agents.keys()
        for agent_name in agent_name_list:
            if self.agents.has_key(agent_name):
                result[agent_name] = self.agents[agent_name].check_status().to_dict()
        return result
    xmlrpc_status_for_agents.signature = [['string', 'struct']]
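
    # Illustrative note (derived from the docstring above): the XML-RPC call
    # returns a structure of the form (agent name and values are hypothetical):
    #   {'load': {'level': 'OK', 'message': 'charge : 0.42'}}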

    def xmlrpc_reset_max_status_for_agents(self, agent_name_list=[]):
        if len(agent_name_list) == 0:
            agent_name_list = self.agents.keys()
        for agent_name in agent_name_list:
            if self.agents.has_key(agent_name):
                self.agents[agent_name].reset_max_status()
        return "ok"

    def xmlrpc_archive_for_upload(self):
        self.wakeup_for_upload(False)
        return "ok"


class PublisherService(service.MultiService):
    """Serves the web interface for current agent data"""

    def __init__(self, config, parent, root_resource,
                 live_agents=None,
                 show_clients_page=True):
        """config should be complete"""
        service.MultiService.__init__(self)
        self.config = config
        self.show_clients_page = show_clients_page
        self.manager = ClientManager(self.config, live_agents)
        # attach to parent service
        self.setServiceParent(service.IServiceCollection(parent))
        # run webserver
        rsrc = ZephirServerResource(self.config, self.manager)
        root_resource.putChild('agents', rsrc)
        default_page = './agents/'
        if not self.show_clients_page:
            default_page += self.config['host_ref'] + '/'
        root_resource.putChild('', util.Redirect(default_page))

#TODO
# update resources: loading host structures, manager -> agent dict
# connect publisher and updater to zephir service (web server, config...)

# client manager: list of host_refs, {host_ref => agent_manager}
# agent manager: structure, {name => agent_data}