wcs-dynexport : tentative de correction du problème de génération du last-run.log...
[progfou.git] / wcs / wcs-dynexport
CommitLineData
20ae1ad6
P
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3"""
4Outil d'export dynamique de données w.c.s.
5
6Copyright : Agence universitaire de la Francophonie — www.auf.org
7Licence : GNU General Public Licence, version 2
8Auteur : Jean Christophe André
9Date de création : 13 mars 2013
10
11Depends: wcs, python-simplejson, python-magic
12
13URL d'accès :
14- /dynexport => liste des formulaires pour le domaine courant
15- /dynexport/domains.json => liste des domaines disponibles
16- /dynexport/formulaire => liste des options ci-dessous
17- /dynexport/formulaire/fields.json
18- /dynexport/formulaire/field-names.json
19- /dynexport/formulaire/field-names.txt
20- /dynexport/formulaire/data.json
21- /dynexport/formulaire/last-run.log
22- /dynexport/formulaire/liste-dossiers.json
23- /dynexport/formulaire/clear-cache => vide le cache
24- /dynexport/formulaire/data/nom-dossier.json
25- /dynexport/formulaire/data/nom-dossier_attachement-1.xxx
26- /dynexport/formulaire/data/nom-dossier_attachement-2.xxx
27- /dynexport/formulaire/data/nom-dossier_attachement-…
28"""
29import sys
30import os
31import os.path
32import logging
33import time # time, gmtime, strftime, strptime, struct_time
34import simplejson as json
35import magic
36import mimetypes
37import unicodedata
38from cStringIO import StringIO
39from gzip import GzipFile
40from re import match
41
EXPIRE_DELAY = 5 # at most 5 seconds in the web cache
TIME_FORMAT = '%a, %d %b %Y %H:%M:%S GMT' # date format for HTTP headers
#ETABLISSEMENT_FORMAT = r'^(\w+\s-\s.+)\s\(\d+\s-\s(Nord|Sud)\)$'

# w.c.s. installation layout: one sub-directory of WCS_ROOT_DIR per domain
WCS_ROOT_DIR = '/var/lib/wcs'
WCS_DOMAIN_SUFFIX = '.auf.org'
# directory where all generated files (exports, logs) are cached
WCS_CACHE_DIR = '/var/tmp'
WCS_CACHE_DELAY_DEFAULT = 7*24*60*60 # 1 week
WCS_CACHE_DELAY_FORMS = 5*60 # 5 minutes

#--------------------------------------------------------------------------
# global variables
#--------------------------------------------------------------------------

# w.c.s. publisher singleton, lazily initialised by set_wcs_publisher()
pub = None

#--------------------------------------------------------------------------
# processing functions
#--------------------------------------------------------------------------
61
def http_redirect(location, code='302'):
    """Send an HTTP redirection to *location* and stop processing.

    location -- target URL of the redirection
    code     -- HTTP status code to send (default '302')

    Never returns: the response is written on standard output and the
    process exits with status 0.
    """
    headers = {}
    headers['Content-Type'] = 'text/plain; charset=utf-8'
    # fix: use the requested status code instead of a hard-coded '302'
    headers['Status'] = '%s Redirection' % code
    headers['Location'] = location
    # fix: interpolate the target URL into the fallback message (the
    # previous '${location}' placeholder was never substituted)
    data = ("If you see this, it means the automatic redirection has failed.\n"
            "Please go to %s" % location)
    # send the response (text mode: the payload is a character string)
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'w')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)
78
def http_reply_and_exit(data, mime_type='text/html', charset='utf-8'):
    """Send a complete HTTP response on standard output and stop.

    Handles conditional GET (If-Modified-Since), optional gzip
    compression when the client advertises it, and short-lived caching
    headers (EXPIRE_DELAY seconds).  Never returns: the process exits
    with status 0.

    data      -- response body (may be None for an empty body)
    mime_type -- media type for the Content-Type header
    charset   -- charset parameter for the Content-Type header
    """
    if data is None: data = ''
    # time references
    current_time = time.time()
    mtime = time.gmtime(current_time)                # Last-Modified: now
    etime = time.gmtime(current_time + EXPIRE_DELAY) # Expires: now + delay
    # parse the client's If-Modified-Since header, if any
    if os.environ.has_key('HTTP_IF_MODIFIED_SINCE'):
        try:
            itime = time.strptime(os.environ['HTTP_IF_MODIFIED_SINCE'], TIME_FORMAT)
        except ValueError:
            itime = None
    else:
        itime = None
    # prepare the headers and the data
    headers = {}
    headers['Content-Type'] = '%s; charset=%s' % (mime_type, charset)
    headers['Last-Modified'] = time.strftime(TIME_FORMAT, mtime)
    headers['Expires'] = time.strftime(TIME_FORMAT, etime)
    # body is only sent for a fresh GET; otherwise headers alone
    if os.environ['REQUEST_METHOD'] == 'GET' and (not itime or mtime > itime):
        # pick the representation the client asked for (compressed or not)
        if os.environ.get('HTTP_ACCEPT_ENCODING','').split(',').count('gzip') > 0:
            zdata = StringIO()
            GzipFile('', 'w', 9, zdata).write(data)
            data = zdata.getvalue()
            headers['Content-Encoding'] = 'gzip'
            # NOTE(review): 'Vary' normally lists request headers, so the
            # value should probably be 'Accept-Encoding' — confirm.
            headers['Vary'] = 'Content-Encoding'
        headers['Content-Length'] = len(data)
    else:
        # non-GET request or content not modified: send headers only
        # NOTE(review): no '304 Not Modified' Status header is emitted,
        # so this is served as a 200 with an empty body — confirm intended.
        data = None
    # send the response
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'wb')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)
117
118
119def _reduce_to_alnum(s, replacement_char='-'):
120 """réduction d'une chaîne de caractères à de l'alpha-numérique"""
121
122 if type(s) is not unicode:
123 s = unicode(s, 'utf-8')
124 s = unicodedata.normalize('NFKD', s).encode('ASCII', 'ignore')
125 r = ''
126 for c in s:
127 if ('a' <= c.lower() <= 'z') or ('0' <= c <= '9'):
128 r += c
129 elif len(r) > 0 and r[-1] != replacement_char:
130 r += replacement_char
131 else: # r == '' or r[-1] == replacement_char
132 pass
133 return r.strip(replacement_char)
134
135def _make_wcs_cache_name(domain, form, name):
136 return 'wcs-%s-%s-%s' % (domain, form, name)
137
def set_wcs_cache(domain, form, name, data, delay=WCS_CACHE_DELAY_DEFAULT):
    """Store *data* in the cache entry (domain, form, name).

    The file's modification time is pushed *delay* seconds into the
    future and serves as the expiry date (see get_wcs_cache).
    """
    os.umask(0o22)
    path = os.path.join(WCS_CACHE_DIR, _make_wcs_cache_name(domain, form, name))
    cache_file = open(path, 'wb')
    try:
        cache_file.write(data)
    finally:
        cache_file.close()
    # the modification time is used as the expiry date
    now = time.time()
    os.utime(path, (now, now + delay))
20ae1ad6
P
149
def get_wcs_cache(domain, form, name):
    """Return the cached bytes for (domain, form, name), or None.

    An entry whose expiry date (stored as the file's mtime) has passed
    is deleted and reported as absent.
    """
    path = os.path.join(WCS_CACHE_DIR, _make_wcs_cache_name(domain, form, name))
    if not os.path.exists(path):
        return None
    # the modification time is used as the expiry date
    if os.path.getmtime(path) <= time.time():
        os.unlink(path)
        return None
    cache_file = open(path, 'rb')
    try:
        return cache_file.read()
    finally:
        cache_file.close()
161
def clear_wcs_cache(domain, form):
    """Delete every cache entry belonging to (domain, form)."""
    prefix = _make_wcs_cache_name(domain, form, '')
    doomed = [entry for entry in os.listdir(WCS_CACHE_DIR)
              if entry.startswith(prefix)]
    for entry in doomed:
        os.unlink(os.path.join(WCS_CACHE_DIR, entry))
167
f2de318d
P
def set_wcs_publisher(domain):
    """Initialise the global w.c.s. publisher for *domain* (idempotent)."""
    global pub
    if pub is not None:
        return
    from wcs import publisher
    pub = publisher.WcsPublisher.create_publisher()
    pub.app_dir = os.path.join(pub.app_dir, domain)
    pub.set_config()
175
20ae1ad6
P
def get_wcs_domains():
    """List the domain directories under WCS_ROOT_DIR.

    Returns the names ending with WCS_DOMAIN_SUFFIX, or None when the
    root directory cannot be read.
    """
    try:
        entries = os.listdir(WCS_ROOT_DIR)
    except OSError:
        return None
    domains = []
    for entry in entries:
        if entry.endswith(WCS_DOMAIN_SUFFIX) and \
           os.path.isdir(os.path.join(WCS_ROOT_DIR, entry)):
            domains.append(entry)
    return domains
184
def get_wcs_forms(domain):
    """Return the list of form url-names available for *domain*.

    The answer is cached for WCS_CACHE_DELAY_FORMS seconds under the
    pseudo-form 'ALL'.
    """
    cached = get_wcs_cache(domain, 'ALL', 'ALL.json')
    if cached is not None:
        return json.loads(cached, encoding='utf-8')
    set_wcs_publisher(domain)
    from wcs.formdef import FormDef
    forms = [formdef.url_name for _, formdef in FormDef.items()]
    serialized = json.dumps(forms, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, 'ALL', 'ALL.json', serialized, WCS_CACHE_DELAY_FORMS)
    return forms
20ae1ad6
P
196
def get_wcs_form_data(domain, form):
    """Extract all the data of a form into the cache.

    On a cache miss this builds, for the (domain, form) couple:
    - fields.json         : description of the form fields
    - data_<dossier>.json : one JSON file per submitted dossier
    - data-files.json     : attachment file name -> path on disk
    - liste-dossiers.json : sorted list of the dossier JSON file names
    - last-run.log        : execution log of this extraction
    - metadata.json       : summary of the above; its presence in the
                            cache is also the "already extracted" marker

    Returns the metadata dictionary on a cache hit, None otherwise
    (callers then re-read the individual cache entries).
    """
    data = get_wcs_cache(domain, form, 'metadata.json')
    if data is not None:
        # cache hit: the extraction already ran recently, skip everything
        return json.loads(data, encoding='utf-8')
    # metadata dictionary (cached at the very end)
    metadata = {}

    os.umask(0022)
    logname = _make_wcs_cache_name(domain, form, 'last-run.log')
    # NOTE(review): a new FileHandler is attached to the shared
    # 'wcs-dynexport' logger on every cache miss; handlers accumulate for
    # the life of the process and may duplicate last-run.log lines — confirm.
    logger = logging.getLogger('wcs-dynexport')
    logger.setLevel(logging.DEBUG)
    log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    log_handler = logging.FileHandler(os.path.join(WCS_CACHE_DIR, logname))
    log_handler.setLevel(logging.DEBUG)
    log_handler.setFormatter(log_formatter)
    logger.addHandler(log_handler)

    logger.info('Début.')
    log_handler.flush()

    set_wcs_publisher(domain)
    from wcs.formdef import FormDef
    from wcs.fields import TitleField, CommentField, TextField, \
                StringField, ItemField, ItemsField, EmailField, \
                DateField, FileField, BoolField, TableField
    formdef = FormDef.get_by_urlname(form)

    # give each field a unique name
    fields = {}
    field_names = {}
    field_names_duplicates = {}
    for i, field in enumerate(formdef.fields):
        # presentation-only fields carry no data
        if isinstance(field, TitleField) or isinstance(field, CommentField):
            continue
        if field.varname:
            # explicit variable name set by the form designer
            name = field.varname
        else:
            # otherwise derive a name from the label
            name = _reduce_to_alnum(field.label,'_').lower()
        if name in field_names.values(): # duplicate
            # disambiguate with a numeric suffix: name_2, name_3, ...
            field_names_duplicates[name] = field_names_duplicates.get(name, 1) + 1
            name = '%s_%d' % (name, field_names_duplicates[name])
        field_names.update({field.id: name})
        fields.update({field.id: {'index': i, 'name': field_names[field.id], 'label': field.label, 'varname': field.varname and field.varname or ''}})

    data = json.dumps(fields, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'fields.json', data)
    metadata.update({'fields': fields})

    # load the MIME type database once and for all
    #magicmime = magic.Magic(mime=True) => for later…
    magicmime = magic.open(magic.MAGIC_MIME)
    magicmime.load()

    liste_dossiers = []
    liste_attachements = {}
    # NOTE(review): 'object' shadows the builtin of the same name
    for object in formdef.data_class().select():
        if object.user is None:
            logger.warning("Dossier '%s' sans utilisateur associé ?!?"\
                           " On ignore...", object.id)
            continue

        # 'wf-...' statuses map to a workflow status name; anything else
        # (or any lookup failure — bare except kept as-is) becomes None
        try:
            workflow_status = object.status.startswith('wf-') and \
                    object.get_workflow_status().name or None
        except:
            workflow_status = None

        # base record for this dossier; field values are added below
        result = {
            'num_dossier': object.id,
            'wcs_status': object.status,
            'wcs_workflow_status': workflow_status,
            'wcs_user_email': object.user.email,
            'wcs_user_display_name': object.user.display_name,
            #'wcs_last_modified': time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(object.last_modified())),
            'wcs_comments': [],
        }

        # collect the workflow comments, tagged with author and date
        if object.evolution is not None:
            for e in object.evolution:
                if e.comment is not None:
                    try:
                        who = pub.user_class.get(e.who).display_name
                    except:
                        who = 'Inconnu(e)'
                    e_time = time.strftime('%Y-%m-%d %H:%M:%S', e.time)
                    comment = '%s -- %s %s' % (e.comment, who, e_time)
                    result['wcs_comments'].append(comment)

        # attachments found while walking the fields: name -> qfilename
        qfiles = { }
        for field in formdef.fields:
            field_id = str(field.id)
            if not field_id in object.data:
                continue
            if isinstance(field, TitleField) or isinstance(field, CommentField):
                continue
            field_name = fields[field_id]['name']
            data = object.data.get(field_id)
            if data is None:
                result[field_name] = None
                continue
            if isinstance(field, StringField) or isinstance(field, TextField) \
                    or isinstance(field, EmailField) or isinstance(field, ItemField):
                # clean-up of the institution name (drop id and Nord/Sud)
                #m = match(ETABLISSEMENT_FORMAT, data)
                #if m is not None:
                #    data = m.groups()[0]
                result[field_name] = data
            elif isinstance(field, ItemsField) or isinstance(field, TableField):
                result[field_name] = data # list => could be joined on ';'
            elif isinstance(field, BoolField):
                # stored as the string 'True'/'False'
                result[field_name] = (data == 'True')
            elif isinstance(field, DateField):
                if isinstance(data, time.struct_time):
                    result[field_name] = '%04d-%02d-%02d' % (data.tm_year,
                            data.tm_mon, data.tm_mday)
                else:
                    result[field_name] = data
            elif isinstance(field, FileField):
                # derive the exported file extension
                if '.' in data.orig_filename:
                    extension = data.orig_filename.rpartition('.')[2].lower()
                else: # no extension in the original file name
                    # sniff the MIME type from the stored upload
                    p = os.path.join(pub.app_dir, 'uploads', data.qfilename)
                    try:
                        #m = magicmime.from_file(p) => for later…
                        m = magicmime.file(p).split()[0].strip(';')
                        extension = mimetypes.guess_extension(m)
                    except:
                        logger.warning("Type de fichier inconnu pour '%s'.", p)
                        extension = None
                    if extension is not None:
                        # guess_extension() returns '.xxx': drop the dot
                        extension = extension[1:]
                    else:
                        extension = 'unknown'
                result[field_name] = "%s.%s" % (field_name, extension)
                qfiles[field_name] = data.qfilename
            else:
                logger.warning("Type de champ inconnu '%s' pour '%s' (%s).",
                        field.__class__.__name__, field_name, field.label)

        # build the dossier file name: NNNN-NOM-PRENOM-email
        num_dossier = result['num_dossier']
        nom = _reduce_to_alnum(result.get('nom','sans-nom')).upper()
        prenom = _reduce_to_alnum(result.get('prenom','sans-prenom')).upper()
        adel = result.get('adresse_electronique','sans-adel').replace('@','-').lower()

        filename = "%04d-%s-%s-%s" % (num_dossier, nom, prenom, adel)
        liste_dossiers.append(filename + '.json')

        # record the paths of the attached files
        for f in qfiles:
            dst = filename + '_' + result[f]
            src = os.path.join(pub.app_dir, 'uploads', qfiles[f])
            liste_attachements.update({dst: src})
            # rename the attachment referenced inside the dossier
            result[f] = dst

        # generate the dossier JSON file
        data = json.dumps(result, ensure_ascii=False).encode('utf-8')
        set_wcs_cache(domain, form, 'data_%s.json' % filename, data)

        logger.info("Dossier '%s' : %s.",
                filename, result['wcs_workflow_status'])

    data = json.dumps(liste_attachements, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'data-files.json', data)
    metadata.update({'attachements': liste_attachements})

    liste_dossiers.sort()
    data = json.dumps(liste_dossiers, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'liste-dossiers.json', data)
    metadata.update({'dossiers': liste_dossiers})

    logger.info('Fin.')
    log_handler.flush()

    # write metadata last: it doubles as the "extraction done" marker
    data = json.dumps(metadata, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'metadata.json', data)
374
375#if __name__ == '__main__':
376# try:
377# extract_data(formdef, OUTPUT_DIRECTORY)
378# except:
b4f12399 379# logger.exception("Interruption du traitement pour cause d'erreur !")
20ae1ad6
P
380
381#--------------------------------------------------------------------------
382# gestion des requêtes web
383#--------------------------------------------------------------------------
384
385#l = []
386#for k in sorted(os.environ):
387# l.append('%s=%s\n' % (k, os.environ[k]))
388#data = ''.join(l)
389#http_reply_and_exit(data, 'text/plain')
390
# the w.c.s. domain is selected by the HTTP Host header
domain = os.environ.get('HTTP_HOST', '')
if domain not in get_wcs_domains():
    http_reply_and_exit("Domaine '%s' inconnu." % domain, 'text/plain')

path_info = os.environ.get('PATH_INFO', '')

# strip PATH_INFO from REQUEST_URI to recover the script's URL prefix
path_prefix = os.environ.get('REQUEST_URI', '')
if len(path_info) > 0:
    path_prefix = path_prefix[:-len(path_info)]

# /dynexport -> /dynexport/
if path_info == '':
    http_redirect(path_prefix + '/')

if path_info == '/':
    # HTML list of the available forms
    l = sorted(get_wcs_forms(domain))
    l = ['<li><a href="%s/">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des formulaires disponibles&nbsp;:</p>\n'
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    http_reply_and_exit(data, 'text/html')

if path_info == '/index.json':
    # JSON list of the available forms
    l = sorted(get_wcs_forms(domain))
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if path_info == '/domains.json':
    # JSON list of the available domains
    l = get_wcs_domains()
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

# /formulaire -> /formulaire/
if match(r'^/[a-z0-9-]+$', path_info):
    http_redirect(path_prefix + path_info + '/')

# NOTE(review): in the patterns below, the dots in 'index.json' etc. are
# unescaped regex metacharacters ('.' matches any character) — harmless in
# practice but worth confirming before tightening.
if match(r'^/[a-z0-9-]+/$', path_info):
    # HTML menu of the information available for one form
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = [ 'fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json' ]
    l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des informations disponibles&nbsp;:</p>\n'
    action1 = """<p><a href="data/">Export des données</a></p>\n"""
    action2 = """<p><a href="clear-cache">Suppression du cache</a> (pour ré-export)</p>\n"""
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n' + action1 + action2 + '</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/index.json$', path_info):
    # JSON menu of the information available for one form
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = [ 'fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json', 'data', 'clear-cache' ]
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/clear-cache$', path_info):
    # drop every cache entry of the form (forces a fresh export)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    clear_wcs_cache(domain, form)
    http_reply_and_exit('Ok.', 'text/plain')

if match(r'^/[a-z0-9-]+/fields.json$', path_info):
    # full description of the form fields
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names.json$', path_info):
    # mapping field id -> unique field name
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = dict([(k, d[k]['name']) for k in d])
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names.txt$', path_info):
    # plain-text version: one "id:name:label" line per field
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = [(k, d[k]['name'], d[k]['label']) for k in d]
    d = sorted(d, key=lambda x: int(x[0]))
    text = u''.join([u'%s:%s:%s\n' % (x[0], x[1], x[2]) for x in d])
    data = text.encode('utf-8')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/last-run.log$', path_info):
    # log of the last extraction run
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = get_wcs_cache(domain, form, 'last-run.log')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/liste-dossiers.json$', path_info):
    # sorted list of the dossier JSON file names
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

# /formulaire/data -> /formulaire/data/
if match(r'^/[a-z0-9-]+/data$', path_info):
    http_redirect(path_prefix + path_info + '/')

if match(r'^/[a-z0-9-]+/data/$', path_info):
    # HTML list of the exported documents (dossiers + attachments)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    # Python 2: dict.keys() returns a list, so '+' concatenates here
    l = sorted(dossiers + attachements.keys())
    if len(l) > 0:
        l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
        title = '<p>Liste des documents disponibles&nbsp;:</p>\n'
        data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    else:
        data = '<html>\n<p>Aucun document disponible.</p>\n</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/data/index.json$', path_info):
    # JSON list of the exported documents (dossiers + attachments)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    l = sorted(dossiers + attachements.keys())
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/data/[^/]+$', path_info):
    # one document: either a dossier JSON file or an attachment
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    doc = path_info.split('/')[3]
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    if doc in dossiers:
        data = get_wcs_cache(domain, form, 'data_' + doc)
        data = json.loads(data, encoding='utf-8')
        data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
        http_reply_and_exit(data, 'application/json')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    if doc in attachements:
        # attachments are served straight from the w.c.s. uploads directory
        data = open(attachements[doc], 'rb').read()
        mime_type = mimetypes.guess_type(doc)[0]
        if mime_type is None:
            mime_type = 'application/octet-stream'
        http_reply_and_exit(data, mime_type)
    http_reply_and_exit("Document '%s' inconnu." % path_info, 'text/plain')

# fallback: unknown request
http_reply_and_exit("Requête '%s' inconnue." % path_info, 'text/plain')