wcs-dynexport : intégration du basculement vers PostgreSQL (suite)
[progfou.git] / wcs / wcs-dynexport
CommitLineData
20ae1ad6
P
1#!/usr/bin/python
2# -*- coding: utf-8 -*-
3"""
4Outil d'export dynamique de données w.c.s.
5
6Copyright : Agence universitaire de la Francophonie — www.auf.org
7Licence : GNU General Public Licence, version 2
8Auteur : Jean Christophe André
9Date de création : 13 mars 2013
10
11Depends: wcs, python-simplejson, python-magic
12
13URL d'accès :
14- /dynexport => liste des formulaires pour le domaine courant
15- /dynexport/domains.json => liste des domaines disponibles
16- /dynexport/formulaire => liste des options ci-dessous
17- /dynexport/formulaire/fields.json
18- /dynexport/formulaire/field-names.json
19- /dynexport/formulaire/field-names.txt
20- /dynexport/formulaire/data.json
21- /dynexport/formulaire/last-run.log
22- /dynexport/formulaire/liste-dossiers.json
23- /dynexport/formulaire/clear-cache => vide le cache
24- /dynexport/formulaire/data/nom-dossier.json
25- /dynexport/formulaire/data/nom-dossier_attachement-1.xxx
26- /dynexport/formulaire/data/nom-dossier_attachement-2.xxx
27- /dynexport/formulaire/data/nom-dossier_attachement-…
28"""
29import sys
30import os
31import os.path
32import logging
33import time # time, gmtime, strftime, strptime, struct_time
34import simplejson as json
35import magic
36import mimetypes
37import unicodedata
38from cStringIO import StringIO
39from gzip import GzipFile
40from re import match
41
EXPIRE_DELAY = 5 # maximum time (seconds) a response may stay in a web cache
TIME_FORMAT = '%a, %d %b %Y %H:%M:%S GMT' # HTTP date format
#ETABLISSEMENT_FORMAT = r'^(\w+\s-\s.+)\s\(\d+\s-\s(Nord|Sud)\)$'

WCS_ROOT_DIR = '/var/lib/wcs'        # root of the per-domain w.c.s. directories
WCS_DOMAIN_SUFFIX = '.auf.org'       # only domains ending with this suffix are served
WCS_CACHE_DIR = '/var/tmp'           # directory where cache files are written
WCS_CACHE_DELAY_DEFAULT = 7*24*60*60 # default cache lifetime: 1 week
WCS_CACHE_DELAY_FORMS = 5*60         # cache lifetime for the forms list: 5 minutes

#--------------------------------------------------------------------------
# global variables
#--------------------------------------------------------------------------

# w.c.s. publisher instance, lazily created by set_wcs_publisher()
pub = None

#--------------------------------------------------------------------------
# processing functions
#--------------------------------------------------------------------------
61
def http_redirect(location, code='302'):
    """Send an HTTP redirection to `location` and stop the script.

    code: HTTP status code to send (default '302').  Never returns:
    ends with sys.exit(0).
    """
    headers = {}
    headers['Content-Type'] = 'text/plain; charset=utf-8'
    # BUG FIX: the `code` parameter was ignored (status hard-coded to '302')
    headers['Status'] = '%s Redirection' % code
    headers['Location'] = location
    # BUG FIX: the original used "${location}", which Python never substitutes
    data = """If you see this, it means the automatic redirection has failed.
Please go to %s""" % location
    # send the response
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'wb')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)
78
def http_reply_and_exit(data, mime_type='text/html', charset='utf-8'):
    """Send `data` as a complete HTTP response and stop the script.

    Emits caching headers (Last-Modified now, Expires now+EXPIRE_DELAY),
    honours If-Modified-Since (empty body when not newer) and compresses
    with gzip when the client accepts it.  Never returns: ends with
    sys.exit(0).
    """
    # time references: now (Last-Modified) and now + delay (Expires)
    current_time = time.time()
    mtime = time.gmtime(current_time)
    etime = time.gmtime(current_time + EXPIRE_DELAY)
    # conditional-request date sent by the client, if any
    if 'HTTP_IF_MODIFIED_SINCE' in os.environ:  # was: os.environ.has_key(...)
        try:
            itime = time.strptime(os.environ['HTTP_IF_MODIFIED_SINCE'], TIME_FORMAT)
        except ValueError:
            itime = None
    else:
        itime = None
    # prepare headers and data
    headers = {}
    headers['Content-Type'] = '%s; charset=%s' % (mime_type, charset)
    headers['Last-Modified'] = time.strftime(TIME_FORMAT, mtime)
    headers['Expires'] = time.strftime(TIME_FORMAT, etime)
    if os.environ['REQUEST_METHOD'] == 'GET' and (not itime or mtime > itime):
        # pick the requested representation (compressed or not)
        if os.environ.get('HTTP_ACCEPT_ENCODING', '').split(',').count('gzip') > 0:
            zdata = StringIO()
            GzipFile('', 'w', 9, zdata).write(data)
            data = zdata.getvalue()
            headers['Content-Encoding'] = 'gzip'
            # BUG FIX: Vary must name the *request* header the response
            # depends on (Accept-Encoding), not the response header
            headers['Vary'] = 'Accept-Encoding'
        headers['Content-Length'] = len(data)
    else:
        # non-GET request, or client copy still fresh: send headers only
        # NOTE(review): a '304 Not Modified' status would be more correct
        # for the fresh-copy case — confirm before changing behavior
        data = None
    # send the response
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'wb')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)
116
117
def _reduce_to_alnum(s, replacement_char='-'):
    """Reduce a string to ASCII letters and digits.

    Accents are stripped (NFKD decomposition, non-ASCII dropped); every
    run of other characters collapses to a single `replacement_char`;
    leading/trailing replacement characters are removed.
    """
    if type(s) is not unicode:
        s = unicode(s, 'utf-8')
    # decompose accented characters, then drop the non-ASCII remainder
    s = unicodedata.normalize('NFKD', s).encode('ASCII', 'ignore')
    pieces = []
    for ch in s:
        if ('a' <= ch.lower() <= 'z') or ('0' <= ch <= '9'):
            pieces.append(ch)
        elif pieces and pieces[-1] != replacement_char:
            # start of a run of non-alphanumerics: emit one separator
            pieces.append(replacement_char)
        # else: leading junk or already inside a run — skip
    return ''.join(pieces).strip(replacement_char)
133
134def _make_wcs_cache_name(domain, form, name):
135 return 'wcs-%s-%s-%s' % (domain, form, name)
136
def set_wcs_cache(domain, form, name, data, delay=WCS_CACHE_DELAY_DEFAULT):
    """Write `data` into the cache file for (domain, form, name).

    The file's modification time is pushed `delay` seconds into the
    future; get_wcs_cache() uses it as the expiration date.
    """
    # 0o022 == 0022: same value, but also valid syntax on Python 2.6+/3
    os.umask(0o022)
    cache_filename = _make_wcs_cache_name(domain, form, name)
    cache_filename = os.path.join(WCS_CACHE_DIR, cache_filename)
    f = open(cache_filename, 'wb')
    try:
        f.write(data)
    finally:
        # ROBUSTNESS: close the file even when the write fails
        f.close()
    # the modification time is used as the expiration date
    atime = time.time()
    mtime = atime + delay
    os.utime(cache_filename, (atime, mtime))
148
def get_wcs_cache(domain, form, name):
    """Return the cached bytes for (domain, form, name), or None.

    Returns None when the entry does not exist or has expired; an
    expired entry is deleted on the way.
    """
    cache_filename = _make_wcs_cache_name(domain, form, name)
    cache_filename = os.path.join(WCS_CACHE_DIR, cache_filename)
    try:
        # the modification time is used as the expiration date
        if time.time() < os.path.getmtime(cache_filename):
            f = open(cache_filename, 'rb')
            try:
                # ROBUSTNESS: close the handle instead of leaking it
                return f.read()
            finally:
                f.close()
        else:
            os.unlink(cache_filename)
    except OSError:
        # ROBUSTNESS: entry absent, or removed by a concurrent request
        # between the stat and the open/unlink — treat as a cache miss
        pass
    return None
160
def clear_wcs_cache(domain, form):
    """Delete every cache file belonging to (domain, form)."""
    # the empty trailing name yields the common 'wcs-domain-form-' prefix
    prefix = _make_wcs_cache_name(domain, form, '')
    doomed = [entry for entry in os.listdir(WCS_CACHE_DIR)
              if entry.startswith(prefix)]
    for entry in doomed:
        os.unlink(os.path.join(WCS_CACHE_DIR, entry))
166
f2de318d
P
def set_wcs_publisher(domain):
    """Create the global w.c.s. publisher for `domain` (idempotent)."""
    global pub
    if pub is not None:
        return
    from wcs import publisher
    pub = publisher.WcsPublisher.create_publisher()
    # point the publisher at this domain's application directory
    pub.app_dir = os.path.join(pub.app_dir, domain)
    pub.set_config()
174
20ae1ad6
P
def get_wcs_domains():
    """List the domain directories under WCS_ROOT_DIR.

    Only directories whose name ends with WCS_DOMAIN_SUFFIX are kept.
    Returns None when the root directory cannot be listed.
    """
    try:
        entries = os.listdir(WCS_ROOT_DIR)
    except OSError:
        return None
    domains = []
    for entry in entries:
        if not entry.endswith(WCS_DOMAIN_SUFFIX):
            continue
        if os.path.isdir(os.path.join(WCS_ROOT_DIR, entry)):
            domains.append(entry)
    return domains
183
def get_wcs_forms(domain):
    """Return the list of form url-names available for `domain`.

    The list is cached for WCS_CACHE_DELAY_FORMS seconds; a cache miss
    queries the w.c.s. publisher directly.
    """
    cached = get_wcs_cache(domain, 'ALL', 'ALL.json')
    if cached is not None:
        return json.loads(cached, encoding='utf-8')
    set_wcs_publisher(domain)
    from wcs.formdef import FormDef
    forms = []
    for _, formdef in FormDef.items():
        forms.append(formdef.url_name)
    data = json.dumps(forms, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, 'ALL', 'ALL.json', data, WCS_CACHE_DELAY_FORMS)
    return forms
20ae1ad6
P
195
def get_wcs_form_data(domain, form):
    """Extract all data of `form` for `domain` into the cache.

    Produces the cache entries fields.json, last-run.log, one
    data_<dossier>.json per request, data-files.json (attachment
    paths), liste-dossiers.json and metadata.json, then returns the
    metadata dict ({'fields', 'attachements', 'dossiers'}).  When
    metadata.json is already cached, returns it directly.
    """
    data = get_wcs_cache(domain, form, 'metadata.json')
    if data is not None:
        return json.loads(data, encoding='utf-8')
    # metadata dictionary (cached at the very end)
    metadata = {}

    # 0o022 == 0022: same value, but also valid syntax on Python 2.6+/3
    os.umask(0o022)
    logname = _make_wcs_cache_name(domain, form, 'last-run.log')
    logging.basicConfig(level=logging.DEBUG,
            format='%(asctime)s %(levelname)s %(message)s',
            filename=os.path.join(WCS_CACHE_DIR, logname),
            filemode='w')

    logging.info('Début.')

    set_wcs_publisher(domain)
    from wcs.formdef import FormDef
    from wcs.fields import TitleField, CommentField, TextField, \
            StringField, ItemField, ItemsField, EmailField, \
            DateField, FileField, BoolField, TableField
    formdef = FormDef.get_by_urlname(form)

    # give every field a unique name (varname when set, otherwise a
    # slug of the label, suffixed with a counter on duplicates)
    fields = {}
    field_names = {}
    field_names_duplicates = {}
    for i, field in enumerate(formdef.fields):
        if isinstance(field, TitleField) or isinstance(field, CommentField):
            continue
        if field.varname:
            name = field.varname
        else:
            name = _reduce_to_alnum(field.label,'_').lower()
        if name in field_names.values(): # duplicate
            field_names_duplicates[name] = field_names_duplicates.get(name, 1) + 1
            name = '%s_%d' % (name, field_names_duplicates[name])
        field_names.update({field.id: name})
        fields.update({field.id: {'index': i, 'name': field_names[field.id], 'label': field.label, 'varname': field.varname and field.varname or ''}})

    data = json.dumps(fields, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'fields.json', data)
    metadata.update({'fields': fields})

    # load the MIME type database once and for all
    #magicmime = magic.Magic(mime=True) => for later…
    magicmime = magic.open(magic.MAGIC_MIME)
    magicmime.load()

    liste_dossiers = []
    liste_attachements = {}
    # `formdata` was named `object` before — renamed to stop shadowing
    # the builtin; one iteration per submitted request ("dossier")
    for formdata in formdef.data_class().select():
        if formdata.user is None:
            logging.warning("Dossier '%s' sans utilisateur associé ?!?"\
                    " On ignore...", formdata.id)
            continue

        try:
            workflow_status = formdata.status.startswith('wf-') and \
                    formdata.get_workflow_status().name or None
        except: # legacy w.c.s. may raise anything here; fall back to None
            workflow_status = None

        result = {
            'num_dossier': formdata.id,
            'wcs_status': formdata.status,
            'wcs_workflow_status': workflow_status,
            'wcs_user_email': formdata.user.email,
            'wcs_user_display_name': formdata.user.display_name,
            #'wcs_last_modified': time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(formdata.last_modified())),
            'wcs_comments': [],
        }

        # collect the workflow comments, annotated with author and date
        if formdata.evolution is not None:
            for e in formdata.evolution:
                if e.comment is not None:
                    try:
                        who = pub.user_class.get(e.who).display_name
                    except: # unknown/deleted author
                        who = 'Inconnu(e)'
                    e_time = time.strftime('%Y-%m-%d %H:%M:%S', e.time)
                    comment = '%s -- %s %s' % (e.comment, who, e_time)
                    result['wcs_comments'].append(comment)

        # qfiles maps field name -> stored (quarantined) file name
        qfiles = { }
        for field in formdef.fields:
            field_id = str(field.id)
            if not field_id in formdata.data:
                continue
            if isinstance(field, TitleField) or isinstance(field, CommentField):
                continue
            field_name = fields[field_id]['name']
            data = formdata.data.get(field_id)
            if data is None:
                result[field_name] = None
                continue
            if isinstance(field, StringField) or isinstance(field, TextField) \
                    or isinstance(field, EmailField) or isinstance(field, ItemField):
                # cleanup of the institution name (strip id and Nord/Sud)
                #m = match(ETABLISSEMENT_FORMAT, data)
                #if m is not None:
                #    data = m.groups()[0]
                result[field_name] = data
            elif isinstance(field, ItemsField) or isinstance(field, TableField):
                result[field_name] = data # list => could be joined on ';'
            elif isinstance(field, BoolField):
                result[field_name] = (data == 'True')
            elif isinstance(field, DateField):
                if isinstance(data, time.struct_time):
                    result[field_name] = '%04d-%02d-%02d' % (data.tm_year,
                            data.tm_mon, data.tm_mday)
                else:
                    result[field_name] = data
            elif isinstance(field, FileField):
                if '.' in data.orig_filename:
                    extension = data.orig_filename.rpartition('.')[2].lower()
                else: # no extension in the file name: guess it from content
                    p = os.path.join(pub.app_dir, 'uploads', data.qfilename)
                    try:
                        #m = magicmime.from_file(p) => for later…
                        m = magicmime.file(p).split()[0].strip(';')
                        extension = mimetypes.guess_extension(m)
                    except: # unreadable file or unknown MIME type
                        logging.warning("Type de fichier inconnu pour '%s'.", p)
                        extension = None
                    if extension is not None:
                        extension = extension[1:] # drop the leading dot
                    else:
                        extension = 'unknown'
                result[field_name] = "%s.%s" % (field_name, extension)
                qfiles[field_name] = data.qfilename
            else:
                logging.warning("Type de champ inconnu '%s' pour '%s' (%s).",
                        field.__class__.__name__, field_name, field.label)

        # build a stable, human-readable document name for the dossier
        num_dossier = result['num_dossier']
        nom = _reduce_to_alnum(result.get('nom','sans-nom')).upper()
        prenom = _reduce_to_alnum(result.get('prenom','sans-prenom')).upper()
        adel = result.get('adresse_electronique','sans-adel').replace('@','-').lower()

        filename = "%04d-%s-%s-%s" % (num_dossier, nom, prenom, adel)
        liste_dossiers.append(filename + '.json')

        # record the paths of the attached files
        for f in qfiles:
            dst = filename + '_' + result[f]
            src = os.path.join(pub.app_dir, 'uploads', qfiles[f])
            liste_attachements.update({dst: src})
            # rename the attachment referenced inside the dossier
            result[f] = dst

        # generate the per-dossier JSON file
        data = json.dumps(result, ensure_ascii=False).encode('utf-8')
        set_wcs_cache(domain, form, 'data_%s.json' % filename, data)

        logging.info("Dossier '%s' : %s.",
                filename, result['wcs_workflow_status'])

    data = json.dumps(liste_attachements, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'data-files.json', data)
    metadata.update({'attachements': liste_attachements})

    liste_dossiers.sort()
    data = json.dumps(liste_dossiers, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'liste-dossiers.json', data)
    metadata.update({'dossiers': liste_dossiers})

    logging.info('Fin.')

    data = json.dumps(metadata, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'metadata.json', data)
    # CONSISTENCY FIX: the cached branch above returns the metadata dict,
    # the fresh branch used to return None implicitly — return it too
    return metadata
368
369#if __name__ == '__main__':
370# try:
371# extract_data(formdef, OUTPUT_DIRECTORY)
372# except:
373# logging.exception("Interruption du traitement pour cause d'erreur !")
374
375#--------------------------------------------------------------------------
376# gestion des requêtes web
377#--------------------------------------------------------------------------
378
379#l = []
380#for k in sorted(os.environ):
381# l.append('%s=%s\n' % (k, os.environ[k]))
382#data = ''.join(l)
383#http_reply_and_exit(data, 'text/plain')
384
# the requested virtual host must be one of the known w.c.s. domains
domain = os.environ.get('HTTP_HOST', '')
if domain not in get_wcs_domains():
    http_reply_and_exit("Domaine '%s' inconnu." % domain, 'text/plain')

path_info = os.environ.get('PATH_INFO', '')

# REQUEST_URI == script prefix + PATH_INFO: keep only the prefix,
# so redirections can rebuild absolute paths
path_prefix = os.environ.get('REQUEST_URI', '')
if len(path_info) > 0:
    path_prefix = path_prefix[:-len(path_info)]

# every route below either redirects or replies and exits;
# falling through to the next test means "no match"
# NOTE(review): '.' is unescaped in the regexes, so e.g. 'indexXjson'
# would also match — harmless here but worth confirming

if path_info == '':
    http_redirect(path_prefix + '/')

if path_info == '/':
    # list of the available forms (HTML)
    l = sorted(get_wcs_forms(domain))
    l = ['<li><a href="%s/">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des formulaires disponibles&nbsp;:</p>\n'
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    http_reply_and_exit(data, 'text/html')

if path_info == '/index.json':
    # list of the available forms (JSON)
    l = sorted(get_wcs_forms(domain))
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if path_info == '/domains.json':
    # list of the available domains (JSON)
    l = get_wcs_domains()
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+$', path_info):
    # form URL without trailing slash: normalize
    http_redirect(path_prefix + path_info + '/')

if match(r'^/[a-z0-9-]+/$', path_info):
    # form menu: available documents and actions (HTML)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = [ 'fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json' ]
    l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des informations disponibles&nbsp;:</p>\n'
    action1 = """<p><a href="data/">Export des données</a></p>\n"""
    action2 = """<p><a href="clear-cache">Suppression du cache</a> (pour ré-export)</p>\n"""
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n' + action1 + action2 + '</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/index.json$', path_info):
    # form menu (JSON)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = [ 'fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json', 'data', 'clear-cache' ]
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/clear-cache$', path_info):
    # drop every cache entry of the form (forces a re-export)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    clear_wcs_cache(domain, form)
    http_reply_and_exit('Ok.', 'text/plain')

if match(r'^/[a-z0-9-]+/fields.json$', path_info):
    # full field descriptions (id, index, name, label, varname)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names.json$', path_info):
    # field id -> unique name mapping (JSON)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = dict([(k, d[k]['name']) for k in d])
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names.txt$', path_info):
    # field list as "id:name:label" lines, ordered by numeric id
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = [(k, d[k]['name'], d[k]['label']) for k in d]
    d = sorted(d, key=lambda x: int(x[0]))
    text = u''.join([u'%s:%s:%s\n' % (x[0], x[1], x[2]) for x in d])
    data = text.encode('utf-8')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/last-run.log$', path_info):
    # log of the last export run
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = get_wcs_cache(domain, form, 'last-run.log')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/liste-dossiers.json$', path_info):
    # list of the exported dossier file names
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/data$', path_info):
    # data URL without trailing slash: normalize
    http_redirect(path_prefix + path_info + '/')

if match(r'^/[a-z0-9-]+/data/$', path_info):
    # list of exported documents: dossiers + attachments (HTML)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    l = sorted(dossiers + attachements.keys())
    if len(l) > 0:
        l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
        title = '<p>Liste des documents disponibles&nbsp;:</p>\n'
        data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    else:
        data = '<html>\n<p>Aucun document disponible.</p>\n</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/data/index.json$', path_info):
    # list of exported documents (JSON)
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    l = sorted(dossiers + attachements.keys())
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/data/[^/]+$', path_info):
    # a single document: either a dossier (JSON) or an attached file
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    doc = path_info.split('/')[3]
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    if doc in dossiers:
        data = get_wcs_cache(domain, form, 'data_' + doc)
        data = json.loads(data, encoding='utf-8')
        data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
        http_reply_and_exit(data, 'application/json')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    if doc in attachements:
        # attachments live outside the cache: read from the upload area
        data = open(attachements[doc], 'rb').read()
        mime_type = mimetypes.guess_type(doc)[0]
        if mime_type is None:
            mime_type = 'application/octet-stream'
        http_reply_and_exit(data, mime_type)
    http_reply_and_exit("Document '%s' inconnu." % path_info, 'text/plain')

# no route matched
http_reply_and_exit("Requête '%s' inconnue." % path_info, 'text/plain')