#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
w.c.s. data export tool

Copyright : Agence universitaire de la Francophonie — www.auf.org
Licence : GNU General Public Licence, version 2
Author : Jean Christophe André
Created : 15 October 2009

Depends: wcs, python-simplejson, python-magic
"""
import os
import os.path
import shutil
import logging
from time import gmtime, strftime
import simplejson as json
import magic
import mimetypes

from wcs import publisher
from wcs.formdef import FormDef
from wcs.fields import TitleField, CommentField, TextField, \
        StringField, ItemField, ItemsField, EmailField, \
        DateField, FileField, BoolField, TableField


def reduce_to_alnum(s, replacement_char='-'):
    """reduce a character string to plain alphanumeric characters"""

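    # Illustrative example (values assumed, not taken from real form data):
    #   reduce_to_alnum(u'Éléonore Dupont!') == 'Eleonore-Dupont'
    # accented letters are transliterated, other non-alphanumeric characters
    # collapse into the replacement character, and leading/trailing
    # separators are stripped.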
    avec_accent = u'çÇáàâÁÀÂéèêëÉÈÊËíìîïÍÌÎÏóòôöÓÒÔÖúùûüÚÙÛÜýỳyÿÝỲYŸ'
    sans_accent = u'cCaaaAAAeeeeEEEEiiiiIIIIooooOOOOuuuuUUUUyyyyYYYY'
    if type(s) is not unicode:
        s = unicode(s, 'utf-8')
    u = False
    r = ''
    for c in s:
        index = avec_accent.find(c)
        if index >= 0:
            r += sans_accent[index]
        elif ('a' <= c.lower() <= 'z') or ('0' <= c <= '9'):
            r += c
        elif len(r) > 0 and r[-1] != replacement_char:
            r += replacement_char
        else:  # r == '' or r[-1] == replacement_char
            pass
    r = r.strip(replacement_char)
    if not u:
        r = r.encode('utf-8')
    return r


def extract_fields(formdef, output_directory):
    """give every field a unique name"""
    # TODO: should return a result, to be saved by the caller

    # XXX: temporary hack… :-/
    global field_names

    f = open(os.path.join(output_directory, 'field-names.txt'), 'w')

    field_names = {}
    field_names_duplicates = {}
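    # fields without an explicit varname are named after their label;
    # duplicates get a numeric suffix, e.g. 'nom', 'nom_2', 'nom_3', …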
    for field in formdef.fields:
        if isinstance(field, TitleField) or isinstance(field, CommentField):
            continue
        if field.varname:
            name = field.varname
        else:
            name = reduce_to_alnum(field.label, '_').lower()
        if name in field_names.values():  # duplicate
            field_names_duplicates[name] = field_names_duplicates.get(name, 1) + 1
            name = '%s_%d' % (name, field_names_duplicates[name])
        field_names.update({field.id: name})
        print >>f, "%s:%s:%s" % (field.id, field_names[field.id], field.label)

    f.close()

    f = open(os.path.join(output_directory, 'field-names.json'), 'wb')
    f.write(json.dumps(field_names, ensure_ascii=False))
    f.close()


def extract_data(formdef, output_directory):
    """extract the form data"""
    # TODO: should return a result, to be saved by the caller

    # XXX: temporary hack… :-/
    global pub

    # load the MIME type database once and for all
    #magicmime = magic.Magic(mime=True) => for later…
    magicmime = magic.open(magic.MAGIC_MIME)
    magicmime.load()

    liste_dossiers = []
    for object in formdef.data_class().select():
        if object.user is None:
            logging.warning("Dossier '%s' sans utilisateur associé ?!?"\
                            " On ignore...", object.id)
            continue

        result = {
            'num_dossier': object.id,
            'wcs_status': object.status,
            'wcs_workflow_status': (object.status.startswith('wf-') and \
                object.get_workflow_status().name or None),
            'wcs_user_email': object.user.email,
            'wcs_user_display_name': object.user.display_name,
            #'wcs_last_modified': strftime('%Y-%m-%d %H:%M:%S', gmtime(object.last_modified())),
            'wcs_comments': [],
        }

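        # collect the workflow history comments, one entry per remark,
        # formatted as "<comment> -- <author display name> <YYYY-MM-DD HH:MM:SS>"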
        if object.evolution is not None:
            for e in object.evolution:
                if e.comment is not None:
                    who = pub.user_class.get(e.who).display_name
                    time = strftime('%Y-%m-%d %H:%M:%S', e.time)
                    comment = '%s -- %s %s' % (e.comment, who, time)
                    result['wcs_comments'].append(comment)

        qfiles = { }
        for field in formdef.fields:
            field_id = str(field.id)
            if not field_id in object.data:
                continue
            if isinstance(field, TitleField) or isinstance(field, CommentField):
                continue
            field_name = field_names[field_id]
            data = object.data.get(field_id)
            if data is None:
                result[field_name] = None
                continue
            if isinstance(field, StringField) or isinstance(field, TextField) \
               or isinstance(field, EmailField) or isinstance(field, ItemField):
                result[field_name] = data
            elif isinstance(field, ItemsField) or isinstance(field, TableField):
                result[field_name] = data  # list => could perhaps be joined on ';'
            elif isinstance(field, BoolField):
                result[field_name] = (data == 'True')
            elif isinstance(field, DateField):
                result[field_name] = '%04d-%02d-%02d' % (data.tm_year,
                    data.tm_mon, data.tm_mday)
            elif isinstance(field, FileField):
                if '.' in data.orig_filename:
                    extension = data.orig_filename.rpartition('.')[2].lower()
                else:  # the file name has no extension
                    p = os.path.join(pub.app_dir, 'uploads', data.qfilename)
                    try:
                        #m = magicmime.from_file(p) => for later…
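                        # magicmime.file() typically returns something like
                        # "application/pdf; charset=binary": keep only the type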
                        m = magicmime.file(p).split()[0].strip(';')
                        extension = mimetypes.guess_extension(m)
                    except:
                        logging.warning("Type de fichier inconnu pour '%s'.", p)
                        extension = None
                    if extension is not None:
                        extension = extension[1:]
                    else:
                        extension = 'unknown'
                result[field_name] = "%s.%s" % (field_name, extension)
                qfiles[field_name] = data.qfilename
            else:
                logging.warning("Type de champ inconnu '%s' pour '%s' (%s).",
                    field.__class__.__name__, field_name, field.label)

        num_dossier = result['num_dossier']
        nom = reduce_to_alnum(result.get('nom', 'sans-nom')).upper()
        prenom = reduce_to_alnum(result.get('prenom', 'sans-prenom')).upper()
        adel = result.get('adresse_electronique', 'sans-adel').replace('@', '-').lower()

        filename = "%04d-%s-%s-%s" % (num_dossier, nom, prenom, adel)
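        # e.g. "0042-DUPONT-JEAN-jean.dupont-example.org" (illustrative values)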
        liste_dossiers.append(filename + '.json')

        # copy the attached files
        for f in qfiles:
            result[f] = filename + '_' + result[f]
            src = os.path.join(pub.app_dir, 'uploads', qfiles[f])
            dst = os.path.join(output_directory, 'data', result[f])
            if not os.path.exists(dst) or os.path.getmtime(src) > os.path.getmtime(dst):
                shutil.copy2(src, dst)
                os.chmod(dst, 0644)

        # generate the JSON file
        jsonname = os.path.join(output_directory, 'data', filename + '.json')
        f = open(jsonname, 'wb')
        f.write(json.dumps(result, ensure_ascii=False).encode('utf-8'))
        f.close()

        logging.info("Dossier '%s' : %s.",
            filename, result['wcs_workflow_status'])

    liste_dossiers.sort()
    f = open(os.path.join(output_directory, 'liste-dossiers.json'), 'wb')
    f.write(json.dumps(liste_dossiers, ensure_ascii=False))
    f.close()


if __name__ == '__main__':
    import sys

    if len(sys.argv) != 4:
        print >>sys.stderr, "Usage : %s <dossier-destination> <site> <formulaire>" % sys.argv[0]
        sys.exit(1)

    VHOST = sys.argv[2]
    FORM_NAME = sys.argv[3]
    OUTPUT_DIRECTORY = os.path.join(sys.argv[1], VHOST, FORM_NAME)
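    # resulting layout: <dossier-destination>/<site>/<formulaire>/ containing
    #   field-names.txt, field-names.json, liste-dossiers.json, last-run.log
    #   and data/ (one JSON file plus attachments per dossier)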

    os.umask(0022)
    # create the extraction directory if needed
    if not os.path.isdir(os.path.join(OUTPUT_DIRECTORY, 'data')):
        os.makedirs(os.path.join(OUTPUT_DIRECTORY, 'data'), 0755)

    logging.basicConfig(level=logging.DEBUG,
        format='%(asctime)s %(levelname)s %(message)s',
        filename=os.path.join(OUTPUT_DIRECTORY, 'last-run.log'),
        filemode='w')

    logging.info('Début.')

    pub = publisher.WcsPublisher.create_publisher()
    pub.app_dir = os.path.join(pub.app_dir, VHOST)

    formdef = FormDef.get_by_urlname(FORM_NAME)

    extract_fields(formdef, OUTPUT_DIRECTORY)

    try:
        extract_data(formdef, OUTPUT_DIRECTORY)
    except:
        logging.exception("Interruption du traitement pour cause d'erreur !")

    logging.info('Fin.')