from gzip import GzipFile
from re import match
-DELAIS = 5 # maximum 5 secondes en cache
+EXPIRE_DELAY = 5 # maximum 5 secondes en cache web
TIME_FORMAT = '%a, %d %b %Y %H:%M:%S GMT' # format date pour HTTP
+#ETABLISSEMENT_FORMAT = r'^(\w+\s-\s.+)\s\(\d+\s-\s(Nord|Sud)\)$'
WCS_ROOT_DIR = '/var/lib/wcs'
WCS_DOMAIN_SUFFIX = '.auf.org'
-WCS_FORM_PREFIX = 'form-'
WCS_CACHE_DIR = '/var/tmp'
+WCS_CACHE_DELAY_DEFAULT = 7*24*60*60 # 1 semaine
+WCS_CACHE_DELAY_FORMS = 5*60 # 5 minutes
+
+#--------------------------------------------------------------------------
+# variables globales
+#--------------------------------------------------------------------------
+
+pub = None
#--------------------------------------------------------------------------
# fonctions de traitement
sys.exit(0)
def http_reply_and_exit(data, mime_type='text/html', charset='utf-8'):
+ if data is None: data = ''
# références horaires
current_time = time.time()
mtime = time.gmtime(current_time)
- etime = time.gmtime(current_time + DELAIS)
+ etime = time.gmtime(current_time + EXPIRE_DELAY)
if os.environ.has_key('HTTP_IF_MODIFIED_SINCE'):
try:
itime = time.strptime(os.environ['HTTP_IF_MODIFIED_SINCE'], TIME_FORMAT)
def _make_wcs_cache_name(domain, form, name):
return 'wcs-%s-%s-%s' % (domain, form, name)
def set_wcs_cache(domain, form, name, data, delay=WCS_CACHE_DELAY_DEFAULT):
    """Write `data` into the cache file for (domain, form, name).

    The file's modification time is set `delay` seconds in the future;
    get_wcs_cache() uses that mtime as the expiry date of the entry.
    """
    os.umask(0o022)  # 0o022 works on py2.6+ and py3 (plain 0022 is py2-only)
    cache_filename = _make_wcs_cache_name(domain, form, name)
    cache_filename = os.path.join(WCS_CACHE_DIR, cache_filename)
    f = open(cache_filename, 'wb')
    try:
        f.write(data)
    finally:
        # always release the handle, even if the write fails
        f.close()
    # the modification time doubles as the expiry timestamp
    atime = time.time()
    mtime = atime + delay
    os.utime(cache_filename, (atime, mtime))
def get_wcs_cache(domain, form, name):
    """Return the cached data for (domain, form, name), or None.

    The file's mtime is its expiry date (see set_wcs_cache); an expired
    entry is deleted on access and None is returned.
    """
    data = None
    cache_filename = _make_wcs_cache_name(domain, form, name)
    cache_filename = os.path.join(WCS_CACHE_DIR, cache_filename)
    try:
        # the modification time is used as the expiry date
        if time.time() < os.path.getmtime(cache_filename):
            f = open(cache_filename, 'rb')
            try:
                data = f.read()
            finally:
                f.close()
        else:
            os.unlink(cache_filename)
    except OSError:
        # entry absent, or removed by a concurrent request: treat as a miss
        pass
    return data
def clear_wcs_cache(domain, form):
if f.startswith(cache_filename):
os.unlink(os.path.join(WCS_CACHE_DIR, f))
def set_wcs_publisher(domain):
    """Initialise the module-global wcs publisher for `domain` (idempotent)."""
    global pub
    if pub is not None:
        # already initialised by a previous call
        return
    from wcs import publisher
    pub = publisher.WcsPublisher.create_publisher()
    pub.app_dir = os.path.join(pub.app_dir, domain)
    pub.set_config()
def get_wcs_domains():
root = WCS_ROOT_DIR
suffix = WCS_DOMAIN_SUFFIX
return [x for x in l if os.path.isdir(os.path.join(root, x)) and x.endswith(suffix)]
def get_wcs_forms(domain):
    """Return the list of form url names for a wcs domain.

    The list is cached for WCS_CACHE_DELAY_FORMS seconds under the
    pseudo-form name 'ALL'.
    """
    data = get_wcs_cache(domain, 'ALL', 'ALL.json')
    if data is not None:
        # utf-8 is the json module's default; the explicit py2-only
        # encoding= keyword was removed in python 3.9
        return json.loads(data)
    set_wcs_publisher(domain)
    from wcs.formdef import FormDef
    forms = [f.url_name for _, f in FormDef.items()]
    data = json.dumps(forms, ensure_ascii=False).encode('utf-8')
    set_wcs_cache(domain, 'ALL', 'ALL.json', data, WCS_CACHE_DELAY_FORMS)
    return forms
def get_wcs_form_data(domain, form):
"""extraction des données du formulaire"""
os.umask(0022)
logname = _make_wcs_cache_name(domain, form, 'last-run.log')
- logging.basicConfig(level=logging.DEBUG,
- format='%(asctime)s %(levelname)s %(message)s',
- filename=os.path.join(WCS_CACHE_DIR, logname),
- filemode='w')
-
- logging.info('Début.')
-
- from wcs import publisher
+ logger = logging.getLogger('wcs-dynexport')
+ logger.setLevel(logging.DEBUG)
+ log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+ log_handler = logging.FileHandler(os.path.join(WCS_CACHE_DIR, logname))
+ log_handler.setLevel(logging.DEBUG)
+ log_handler.setFormatter(log_formatter)
+ logger.addHandler(log_handler)
+
+ logger.info('Début.')
+ log_handler.flush()
+
+ set_wcs_publisher(domain)
from wcs.formdef import FormDef
from wcs.fields import TitleField, CommentField, TextField, \
StringField, ItemField, ItemsField, EmailField, \
DateField, FileField, BoolField, TableField
-
- pub = publisher.WcsPublisher.create_publisher()
- pub.app_dir = os.path.join(pub.app_dir, domain)
formdef = FormDef.get_by_urlname(form)
# nommage des champs de façon unique
fields = {}
field_names = {}
field_names_duplicates = {}
- for field in formdef.fields:
+ for i, field in enumerate(formdef.fields):
if isinstance(field, TitleField) or isinstance(field, CommentField):
continue
if field.varname:
field_names_duplicates[name] = field_names_duplicates.get(name, 1) + 1
name = '%s_%d' % (name, field_names_duplicates[name])
field_names.update({field.id: name})
- fields.update({field.id: {'name': field_names[field.id], 'label': field.label, 'varname': field.varname and field.varname or ''}})
+ fields.update({field.id: {'index': i, 'name': field_names[field.id], 'label': field.label, 'varname': field.varname and field.varname or ''}})
data = json.dumps(fields, ensure_ascii=False).encode('utf-8')
set_wcs_cache(domain, form, 'fields.json', data)
liste_attachements = {}
for object in formdef.data_class().select():
if object.user is None:
- logging.warning("Dossier '%s' sans utilisateur associé ?!?"\
+ logger.warning("Dossier '%s' sans utilisateur associé ?!?"\
" On ignore...", object.id)
continue
if object.evolution is not None:
for e in object.evolution:
if e.comment is not None:
- who = pub.user_class.get(e.who).display_name
+ try:
+ who = pub.user_class.get(e.who).display_name
+ except:
+ who = 'Inconnu(e)'
e_time = time.strftime('%Y-%m-%d %H:%M:%S', e.time)
comment = '%s -- %s %s' % (e.comment, who, e_time)
result['wcs_comments'].append(comment)
continue
field_name = fields[field_id]['name']
data = object.data.get(field_id)
+                # palliatif aux corrections de formulaires en cours de route
+ # (compensation nécessaire pour l'import depuis Sigma 2)
+ if data is None and field.required:
+ if isinstance(field, StringField) \
+ or isinstance(field, TextField) \
+ or isinstance(field, EmailField) \
+ or isinstance(field, ItemField):
+ result[field_name] = '(vide)'
+ elif isinstance(field, ItemsField) \
+ or isinstance(field, TableField):
+ result[field_name] = '(vide)'
+ elif isinstance(field, BoolField):
+ result[field_name] = False
+ elif isinstance(field, DateField):
+ result[field_name] = "9999-12-31"
+ continue
if data is None:
result[field_name] = None
continue
if isinstance(field, StringField) or isinstance(field, TextField) \
or isinstance(field, EmailField) or isinstance(field, ItemField):
+ # nettoyage du nom d'établissement (suppression id et Nord/Sud)
+ #m = match(ETABLISSEMENT_FORMAT, data)
+ #if m is not None:
+ # data = m.groups()[0]
result[field_name] = data
elif isinstance(field, ItemsField) or isinstance(field, TableField):
result[field_name] = data # liste => peux-être joindre sur ';'
m = magicmime.file(p).split()[0].strip(';')
extension = mimetypes.guess_extension(m)
except:
- logging.warning("Type de fichier inconnu pour '%s'.", p)
+ logger.warning("Type de fichier inconnu pour '%s'.", p)
extension = None
if extension is not None:
extension = extension[1:]
result[field_name] = "%s.%s" % (field_name, extension)
qfiles[field_name] = data.qfilename
else:
- logging.warning("Type de champ inconnu '%s' pour '%s' (%s).",
+ logger.warning("Type de champ inconnu '%s' pour '%s' (%s).",
field.__class__.__name__, field_name, field.label)
num_dossier = result['num_dossier']
data = json.dumps(result, ensure_ascii=False).encode('utf-8')
set_wcs_cache(domain, form, 'data_%s.json' % filename, data)
- logging.info("Dossier '%s' : %s.",
+ logger.info("Dossier '%s' : %s.",
filename, result['wcs_workflow_status'])
data = json.dumps(liste_attachements, ensure_ascii=False).encode('utf-8')
set_wcs_cache(domain, form, 'liste-dossiers.json', data)
metadata.update({'dossiers': liste_dossiers})
- logging.info('Fin.')
+ logger.info('Fin.')
+ log_handler.flush()
data = json.dumps(metadata, ensure_ascii=False).encode('utf-8')
set_wcs_cache(domain, form, 'metadata.json', data)
# try:
# extract_data(formdef, OUTPUT_DIRECTORY)
# except:
-# logging.exception("Interruption du traitement pour cause d'erreur !")
+# logger.exception("Interruption du traitement pour cause d'erreur !")
#--------------------------------------------------------------------------
# gestion des requêtes web