#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
w.c.s. dynamic data export tool.

Copyright: Agence universitaire de la Francophonie — www.auf.org
Licence: GNU General Public Licence, version 2
Author: Jean Christophe André
Created: 13 March 2013

Depends: wcs, python-simplejson, python-magic

Access URLs:
- /dynexport => list of forms for the current domain
- /dynexport/domains.json => list of available domains
- /dynexport/formulaire => list of the options below
- /dynexport/formulaire/fields.json
- /dynexport/formulaire/field-names.json
- /dynexport/formulaire/field-names.txt
- /dynexport/formulaire/data.json
- /dynexport/formulaire/last-run.log
- /dynexport/formulaire/liste-dossiers.json
- /dynexport/formulaire/clear-cache => clears the cache
- /dynexport/formulaire/data/nom-dossier.json
- /dynexport/formulaire/data/nom-dossier_attachement-1.xxx
- /dynexport/formulaire/data/nom-dossier_attachement-2.xxx
- /dynexport/formulaire/data/nom-dossier_attachement-…

Example requests are sketched in the comment block below.
"""
import sys
import os
import os.path
import logging
import time  # time, gmtime, strftime, strptime, struct_time
import simplejson as json
import magic
import mimetypes
import unicodedata
from cStringIO import StringIO
from gzip import GzipFile
from re import match

DELAIS = 5  # keep in cache for at most 5 seconds
TIME_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'  # date format for HTTP headers

WCS_ROOT_DIR = '/var/lib/wcs'
WCS_DOMAIN_SUFFIX = '.auf.org'
WCS_FORM_PREFIX = 'form-'
WCS_CACHE_DIR = '/var/tmp'

#--------------------------------------------------------------------------
# processing functions
#--------------------------------------------------------------------------

def http_redirect(location, code='302'):
    headers = {}
    headers['Content-Type'] = 'text/plain; charset=utf-8'
    headers['Status'] = '%s Redirection' % code
    headers['Location'] = location
    data = """If you see this, it means the automatic redirection has failed.
Please go to %s""" % location
    # send the response
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'wb')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)

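# Typical exchange handled by http_reply_and_exit() (hypothetical values):
#   a GET request is answered with "Content-Type: <mime_type>; charset=<charset>",
#   "Last-Modified: <now>" and "Expires: <now + DELAIS seconds>"; if the client
#   sent an If-Modified-Since date that is not older than <now>, or used a
#   method other than GET, only the headers are returned; if it advertised
#   "Accept-Encoding: gzip", the body is sent gzip-compressed.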
def http_reply_and_exit(data, mime_type='text/html', charset='utf-8'):
    # time references
    current_time = time.time()
    mtime = time.gmtime(current_time)
    etime = time.gmtime(current_time + DELAIS)
    if 'HTTP_IF_MODIFIED_SINCE' in os.environ:
        try:
            itime = time.strptime(os.environ['HTTP_IF_MODIFIED_SINCE'], TIME_FORMAT)
        except ValueError:
            itime = None
    else:
        itime = None
    # prepare headers and data
    headers = {}
    headers['Content-Type'] = '%s; charset=%s' % (mime_type, charset)
    headers['Last-Modified'] = time.strftime(TIME_FORMAT, mtime)
    headers['Expires'] = time.strftime(TIME_FORMAT, etime)
    if os.environ['REQUEST_METHOD'] == 'GET' and (not itime or mtime > itime):
        # pick the requested representation (compressed or not)
        accepted = [x.strip() for x in os.environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
        if 'gzip' in accepted:
            zdata = StringIO()
            GzipFile('', 'w', 9, zdata).write(data)
            data = zdata.getvalue()
            headers['Content-Encoding'] = 'gzip'
            headers['Vary'] = 'Accept-Encoding'
        headers['Content-Length'] = len(data)
    else:
        data = None
    # send the response
    headers = ''.join(map(lambda x: "%s: %s\r\n" % (x, headers[x]), headers))
    f = open('/dev/stdout', 'wb')
    f.write(headers + "\r\n")
    if data:
        f.write(data)
    f.flush()
    # stop processing
    sys.exit(0)


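# Example (hypothetical input): _reduce_to_alnum(u'Éléonore Du Pré', '_')
# returns 'Eleonore_Du_Pre': accents are stripped, every run of
# non-alphanumeric characters is collapsed into a single separator, and
# leading/trailing separators are removed.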
def _reduce_to_alnum(s, replacement_char='-'):
    """reduce a character string to plain alphanumeric characters"""

    if not isinstance(s, unicode):
        s = unicode(s, 'utf-8')
    s = unicodedata.normalize('NFKD', s).encode('ASCII', 'ignore')
    r = ''
    for c in s:
        if ('a' <= c.lower() <= 'z') or ('0' <= c <= '9'):
            r += c
        elif len(r) > 0 and r[-1] != replacement_char:
            r += replacement_char
        else:  # r == '' or r[-1] == replacement_char
            pass
    return r.strip(replacement_char)

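# Cache entries are flat files in WCS_CACHE_DIR; for instance (hypothetical
# domain and form), _make_wcs_cache_name('demo.auf.org', 'inscription',
# 'fields.json') yields 'wcs-demo.auf.org-inscription-fields.json',
# stored under /var/tmp.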
def _make_wcs_cache_name(domain, form, name):
    return 'wcs-%s-%s-%s' % (domain, form, name)

def set_wcs_cache(domain, form, name, data):
    os.umask(0022)
    cache_filename = _make_wcs_cache_name(domain, form, name)
    f = open(os.path.join(WCS_CACHE_DIR, cache_filename), 'wb')
    f.write(data)
    f.close()

def get_wcs_cache(domain, form, name):
    data = None
    cache_filename = _make_wcs_cache_name(domain, form, name)
    cache_filename = os.path.join(WCS_CACHE_DIR, cache_filename)
    if os.path.exists(cache_filename):
        f = open(cache_filename, 'rb')
        data = f.read()
        f.close()
    return data

def clear_wcs_cache(domain, form):
    cache_filename = _make_wcs_cache_name(domain, form, '')
    for f in os.listdir(WCS_CACHE_DIR):
        if f.startswith(cache_filename):
            os.unlink(os.path.join(WCS_CACHE_DIR, f))

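# Assumed on-disk layout (hypothetical names): /var/lib/wcs/demo.auf.org/
# holds one directory per form, e.g. form-inscription/, so get_wcs_domains()
# would return ['demo.auf.org'] and get_wcs_forms('demo.auf.org') would
# return ['inscription'].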
def get_wcs_domains():
    root = WCS_ROOT_DIR
    suffix = WCS_DOMAIN_SUFFIX
    try:
        l = os.listdir(root)
    except OSError:
        return None
    return [x for x in l if os.path.isdir(os.path.join(root, x)) and x.endswith(suffix)]

def get_wcs_forms(domain):
    root = os.path.join(WCS_ROOT_DIR, domain)
    prefix = WCS_FORM_PREFIX
    try:
        l = os.listdir(root)
    except OSError:
        return None
    return [x[len(prefix):] for x in l if os.path.isdir(os.path.join(root, x)) and x.startswith(prefix)]


def get_wcs_form_data(domain, form):
    """extract the form data"""
    data = get_wcs_cache(domain, form, 'metadata.json')
    if data is not None:
        return json.loads(data, encoding='utf-8')
    # metadata dictionary (will be cached at the end)
    metadata = {}

    os.umask(0022)
    logname = _make_wcs_cache_name(domain, form, 'last-run.log')
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename=os.path.join(WCS_CACHE_DIR, logname),
                        filemode='w')

    logging.info('Début.')

    from wcs import publisher
    from wcs.formdef import FormDef
    from wcs.fields import TitleField, CommentField, TextField, \
        StringField, ItemField, ItemsField, EmailField, \
        DateField, FileField, BoolField, TableField

    pub = publisher.WcsPublisher.create_publisher()
    pub.app_dir = os.path.join(pub.app_dir, domain)
    formdef = FormDef.get_by_urlname(form)

    # give each field a unique name
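    # Example (hypothetical): two fields labelled "Nom" with no varname both
    # reduce to 'nom'; the second one is renamed 'nom_2' by the duplicate
    # counter below, so every exported column name stays unique.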
    fields = {}
    field_names = {}
    field_names_duplicates = {}
    for field in formdef.fields:
        if isinstance(field, TitleField) or isinstance(field, CommentField):
            continue
        if field.varname:
            name = field.varname
        else:
            name = _reduce_to_alnum(field.label, '_').lower()
        if name in field_names.values():  # duplicate
            field_names_duplicates[name] = field_names_duplicates.get(name, 1) + 1
            name = '%s_%d' % (name, field_names_duplicates[name])
        field_names.update({field.id: name})
        fields.update({field.id: {'name': field_names[field.id],
                                  'label': field.label,
                                  'varname': field.varname or ''}})

    data = json.dumps(fields, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'fields.json', data)
    metadata.update({'fields': fields})

    # load the MIME type database once and for all
    #magicmime = magic.Magic(mime=True) => for later…
    magicmime = magic.open(magic.MAGIC_MIME)
    magicmime.load()

    liste_dossiers = []
    liste_attachements = {}
    for object in formdef.data_class().select():
        if object.user is None:
            logging.warning("Dossier '%s' sans utilisateur associé ?!?"
                            " On ignore...", object.id)
            continue

        result = {
            'num_dossier': object.id,
            'wcs_status': object.status,
            'wcs_workflow_status': (object.status.startswith('wf-') and
                                    object.get_workflow_status().name or None),
            'wcs_user_email': object.user.email,
            'wcs_user_display_name': object.user.display_name,
            #'wcs_last_modified': time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(object.last_modified())),
            'wcs_comments': [],
        }

        if object.evolution is not None:
            for e in object.evolution:
                if e.comment is not None:
                    who = pub.user_class.get(e.who).display_name
                    e_time = time.strftime('%Y-%m-%d %H:%M:%S', e.time)
                    comment = '%s -- %s %s' % (e.comment, who, e_time)
                    result['wcs_comments'].append(comment)

        qfiles = {}
        for field in formdef.fields:
            field_id = str(field.id)
            if field_id not in object.data:
                continue
            if isinstance(field, TitleField) or isinstance(field, CommentField):
                continue
            field_name = fields[field_id]['name']
            data = object.data.get(field_id)
            if data is None:
                result[field_name] = None
                continue
            if isinstance(field, StringField) or isinstance(field, TextField) \
                    or isinstance(field, EmailField) or isinstance(field, ItemField):
                result[field_name] = data
            elif isinstance(field, ItemsField) or isinstance(field, TableField):
                result[field_name] = data  # list => could perhaps be joined with ';'
            elif isinstance(field, BoolField):
                result[field_name] = (data == 'True')
            elif isinstance(field, DateField):
                if isinstance(data, time.struct_time):
                    result[field_name] = '%04d-%02d-%02d' % (data.tm_year,
                                                             data.tm_mon,
                                                             data.tm_mday)
                else:
                    result[field_name] = data
            elif isinstance(field, FileField):
                if '.' in data.orig_filename:
                    extension = data.orig_filename.rpartition('.')[2].lower()
                else:  # the file name has no extension
                    p = os.path.join(pub.app_dir, 'uploads', data.qfilename)
                    try:
                        #m = magicmime.from_file(p) => for later…
                        m = magicmime.file(p).split()[0].strip(';')
                        extension = mimetypes.guess_extension(m)
                    except Exception:
                        logging.warning("Type de fichier inconnu pour '%s'.", p)
                        extension = None
                    if extension is not None:
                        extension = extension[1:]
                    else:
                        extension = 'unknown'
                result[field_name] = "%s.%s" % (field_name, extension)
                qfiles[field_name] = data.qfilename
            else:
                logging.warning("Type de champ inconnu '%s' pour '%s' (%s).",
                                field.__class__.__name__, field_name, field.label)

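        # Output basename (hypothetical example): dossier 42 for "Anne-Marie
        # Du Pré" <anne@example.org> becomes
        # "0042-DU-PRE-ANNE-MARIE-anne-example.org", used both for the JSON
        # file and as the prefix of its attachments.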
        num_dossier = result['num_dossier']
        nom = _reduce_to_alnum(result.get('nom', 'sans-nom')).upper()
        prenom = _reduce_to_alnum(result.get('prenom', 'sans-prenom')).upper()
        adel = result.get('adresse_electronique', 'sans-adel').replace('@', '-').lower()

        filename = "%04d-%s-%s-%s" % (num_dossier, nom, prenom, adel)
        liste_dossiers.append(filename + '.json')

        # remember the paths to the attached files
        for f in qfiles:
            dst = filename + '_' + result[f]
            src = os.path.join(pub.app_dir, 'uploads', qfiles[f])
            liste_attachements.update({dst: src})

        # generate the JSON file
        data = json.dumps(result, ensure_ascii=False).encode('utf-8')
        set_wcs_cache(domain, form, 'data_%s.json' % filename, data)

        logging.info("Dossier '%s' : %s.",
                     filename, result['wcs_workflow_status'])

    data = json.dumps(liste_attachements, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'data-files.json', data)
    metadata.update({'attachements': liste_attachements})

    liste_dossiers.sort()
    data = json.dumps(liste_dossiers, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'liste-dossiers.json', data)
    metadata.update({'dossiers': liste_dossiers})

    logging.info('Fin.')

    data = json.dumps(metadata, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, form, 'metadata.json', data)
    # return the metadata, as the cached branch above does
    return metadata

#if __name__ == '__main__':
#    try:
#        extract_data(formdef, OUTPUT_DIRECTORY)
#    except:
#        logging.exception("Interruption du traitement pour cause d'erreur !")

#--------------------------------------------------------------------------
# web request handling
#--------------------------------------------------------------------------

#l = []
#for k in sorted(os.environ):
#    l.append('%s=%s\n' % (k, os.environ[k]))
#data = ''.join(l)
#http_reply_and_exit(data, 'text/plain')

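# Example CGI environment for a request to this script (hypothetical host,
# assuming the script is published under /dynexport):
#   GET http://demo.auf.org/dynexport/inscription/fields.json
#   HTTP_HOST='demo.auf.org', REQUEST_URI='/dynexport/inscription/fields.json',
#   PATH_INFO='/inscription/fields.json' => path_prefix='/dynexport'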
domain = os.environ.get('HTTP_HOST', '')
if domain not in (get_wcs_domains() or []):
    http_reply_and_exit("Domaine '%s' inconnu." % domain, 'text/plain')

path_info = os.environ.get('PATH_INFO', '')

path_prefix = os.environ.get('REQUEST_URI', '')
if len(path_info) > 0:
    path_prefix = path_prefix[:-len(path_info)]

if path_info == '':
    http_redirect(path_prefix + '/')

if path_info == '/':
    # list of available forms
    l = sorted(get_wcs_forms(domain))
    l = ['<li><a href="%s/">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des formulaires disponibles&nbsp;:</p>\n'
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    http_reply_and_exit(data, 'text/html')

if path_info == '/index.json':
    # list of available forms
    l = sorted(get_wcs_forms(domain))
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if path_info == '/domains.json':
    # list of available domains
    l = get_wcs_domains()
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+$', path_info):
    http_redirect(path_prefix + path_info + '/')

if match(r'^/[a-z0-9-]+/$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = ['fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json']
    l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
    title = '<p>Liste des informations disponibles&nbsp;:</p>\n'
    action1 = """<p><a href="data/">Export des données</a></p>\n"""
    action2 = """<p><a href="clear-cache">Suppression du cache</a> (pour ré-export)</p>\n"""
    data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n' + action1 + action2 + '</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/index\.json$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    l = ['fields.json', 'field-names.json', 'field-names.txt', 'last-run.log', 'liste-dossiers.json', 'data', 'clear-cache']
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/clear-cache$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    clear_wcs_cache(domain, form)
    http_reply_and_exit('Ok.', 'text/plain')

if match(r'^/[a-z0-9-]+/fields\.json$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names\.json$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = dict([(k, d[k]['name']) for k in d])
    data = json.dumps(d, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/field-names\.txt$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    d = json.loads(get_wcs_cache(domain, form, 'fields.json'), encoding='utf-8')
    d = [(k, d[k]['name'], d[k]['label']) for k in d]
    d = sorted(d, key=lambda x: int(x[0]))
    text = u''.join([u'%s:%s:%s\n' % (x[0], x[1], x[2]) for x in d])
    data = text.encode('utf-8')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/last-run\.log$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = get_wcs_cache(domain, form, 'last-run.log')
    http_reply_and_exit(data, 'text/plain')

if match(r'^/[a-z0-9-]+/liste-dossiers\.json$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    data = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/data$', path_info):
    http_redirect(path_prefix + path_info + '/')

if match(r'^/[a-z0-9-]+/data/$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    l = sorted(dossiers + attachements.keys())
    if len(l) > 0:
        l = ['<li><a href="%s">%s</a></li>' % (f, f) for f in l]
        title = '<p>Liste des documents disponibles&nbsp;:</p>\n'
        data = '<html>\n' + title + '<ul>\n' + '\n'.join(l) + '\n</ul>\n</html>'
    else:
        data = '<html>\n<p>Aucun document disponible.</p>\n</html>'
    http_reply_and_exit(data, 'text/html')

if match(r'^/[a-z0-9-]+/data/index\.json$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    l = sorted(dossiers + attachements.keys())
    data = json.dumps(l, ensure_ascii=False, indent=' ').encode('utf-8')
    http_reply_and_exit(data, 'application/json')

if match(r'^/[a-z0-9-]+/data/[^/]+$', path_info):
    form = path_info.split('/')[1]
    if form not in get_wcs_forms(domain):
        http_reply_and_exit("Formulaire '%s' inconnu." % form, 'text/plain')
    get_wcs_form_data(domain, form)
    doc = path_info.split('/')[3]
    dossiers = json.loads(get_wcs_cache(domain, form, 'liste-dossiers.json'), encoding='utf-8')
    if doc in dossiers:
        data = get_wcs_cache(domain, form, 'data_' + doc)
        data = json.loads(data, encoding='utf-8')
        data = json.dumps(data, ensure_ascii=False, indent=' ').encode('utf-8')
        http_reply_and_exit(data, 'application/json')
    attachements = json.loads(get_wcs_cache(domain, form, 'data-files.json'), encoding='utf-8')
    if doc in attachements:
        data = open(attachements[doc], 'rb').read()
        mime_type = mimetypes.guess_type(doc)[0]
        if mime_type is None:
            mime_type = 'application/octet-stream'
        http_reply_and_exit(data, mime_type)
    http_reply_and_exit("Document '%s' inconnu." % path_info, 'text/plain')

http_reply_and_exit("Requête '%s' inconnue." % path_info, 'text/plain')