# -*- mode: python -*-
# -*- coding: utf-8 -*-

##############################################################################
#
# Gestion scolarite IUT
#
# Copyright (c) 1999 - 2023 Emmanuel Viennet. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Emmanuel Viennet emmanuel.viennet@viennet.net
#
##############################################################################

""" Common definitions
"""
import base64
import bisect
import copy
import datetime
from enum import IntEnum, Enum
import io
import json
from hashlib import md5
import numbers
import os
import re
import _thread
import time
import unicodedata
import urllib
from urllib.parse import urlparse, parse_qsl, urlunparse, urlencode

import numpy as np
from PIL import Image as PILImage
import pydot
import requests

import dateutil.parser as dtparser
import flask
from flask import g, request
from flask import flash, url_for, make_response, jsonify
from werkzeug.http import HTTP_STATUS_CODES

from config import Config
from app import log, db
from app.scodoc.sco_vdi import ApoEtapeVDI
from app.scodoc.sco_codes_parcours import NOTES_TOLERANCE, CODES_EXPL
from app.scodoc import sco_xml

import sco_version

# le répertoire static, lié à chaque release pour éviter les problèmes de caches
STATIC_DIR = "/ScoDoc/static/links/" + sco_version.SCOVERSION

# ----- CALCUL ET PRESENTATION DES NOTES
NOTES_PRECISION = 1e-4  # evite eventuelles erreurs d'arrondis
NOTES_MIN = 0.0  # valeur minimale admise pour une note (sauf malus, dans [-20, 20])
NOTES_MAX = 1000.0
NOTES_ABSENCE = -999.0  # absences dans les DataFrames, NULL en base
NOTES_NEUTRALISE = -1000.0  # notes non prises en comptes dans moyennes
NOTES_SUPPRESS = -1001.0  # note a supprimer
NOTES_ATTENTE = -1002.0  # note "en attente" (se calcule comme une note neutralisee)


# ---- CODES INSCRIPTION AUX SEMESTRES
# (champ etat de FormSemestreInscription)
INSCRIT = "I"
DEMISSION = "D"
DEF = "DEF"
ETATS_INSCRIPTION = {
    INSCRIT: "Inscrit",
    DEMISSION: "Démission",
    DEF: "Défaillant",
}


def get_model_api_object(model_cls: db.Model, model_id: int):
    from app.models import Identite

    query = model_cls.query.filter_by(id=model_id)
    if g.scodoc_dept:
        query = query.join(Identite).filter_by(dept_id=g.scodoc_dept_id)
    unique: model_cls = query.first_or_404()

    return jsonify(unique.to_dict(format_api=True))


class BiDirectionalEnum(Enum):
    """Permet la recherche inverse d'un enum
    Condition : les clés et les valeurs doivent être uniques
    les clés doivent être en MAJUSCULES
    """

    @classmethod
    def contains(cls, attr: str):
        return attr.upper() in cls._member_names_

    @classmethod
    def get(cls, attr: str, default: any = None):
        val = None
        try:
            val = cls[attr.upper()]
        except (KeyError, AttributeError):
            val = default
        return val

    @classmethod
    def inverse(cls):
        """Retourne un dictionnaire représentant la map inverse de l'Enum"""
        return cls._value2member_map_


class EtatAssiduite(int, BiDirectionalEnum):
    """Code des états d'assiduité"""

    # Stockés en BD ne pas modifier

    PRESENT = 0
    RETARD = 1
    ABSENT = 2


ETAT_ASSIDUITE_NAME = {
    EtatAssiduite.PRESENT: "present",
    EtatAssiduite.RETARD: "retard",
    EtatAssiduite.ABSENT: "absent",
}
ETATS_ASSIDUITE = {
    "present": EtatAssiduite.PRESENT,
    "retard": EtatAssiduite.RETARD,
    "absent": EtatAssiduite.ABSENT,
}
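
# Illustrative usage of the BiDirectionalEnum helpers on a subclass such as
# EtatAssiduite (not executed, for documentation only):
#   EtatAssiduite.contains("retard")   # -> True  (case-insensitive lookup)
#   EtatAssiduite.get("absent")        # -> EtatAssiduite.ABSENT
#   EtatAssiduite.get("inconnu", -1)   # -> -1    (default value)
#   EtatAssiduite.inverse()[2]         # -> EtatAssiduite.ABSENT  (value -> member map)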


class EtatJustificatif(int, BiDirectionalEnum):
    """Code des états des justificatifs"""

    # Stockés en BD ne pas modifier

    VALIDE = 0
    NON_VALIDE = 1
    ATTENTE = 2
    MODIFIE = 3


def is_iso_formated(date: str, convert=False) -> bool or datetime.datetime or None:
    """
    Vérifie si une date est au format iso

    Retourne un booléen Vrai (ou un objet Datetime si convert = True)
    si l'objet est au format iso

    Retourne Faux si l'objet n'est pas au format et convert = False

    Retourne None sinon
    """

    try:
        date: datetime.datetime = dtparser.isoparse(date)
        return date if convert else True
    except Exception:
        return None if convert else False


def localize_datetime(date: datetime.datetime or str) -> datetime.datetime:
    if isinstance(date, str):
        date = is_iso_formated(date, convert=True)

    new_date: datetime.datetime = date
    if date.tzinfo is None:
        from app.models.assiduites import Assiduite

        first_assiduite = Assiduite.query.first()
        if first_assiduite is not None:
            new_date = date.replace(tzinfo=first_assiduite.date_debut.tzinfo)
        else:
            # TOTALK: Paramètre permettant d'avoir l'UTC par défaut
            tmp = is_iso_formated("2022-01-01T08:00:00+01:00", True)
            new_date = date.replace(tzinfo=tmp.tzinfo)
    return new_date


def is_period_overlapping(
    periode: tuple[datetime.datetime, datetime.datetime],
    interval: tuple[datetime.datetime, datetime.datetime],
) -> bool:
    """
    Vérifie si la période et l'interval s'intersectent

    Retourne Vrai si c'est le cas, faux sinon
    """
    p_deb, p_fin = periode
    i_deb, i_fin = interval

    # i = intervalmap()
    # p = intervalmap()
    # i[:] = 0
    # p[:] = 0
    # i[i_deb:i_fin] = 1
    # p[p_deb:p_fin] = 1

    # # TOTALK: Vérification des bornes de la période dans l'interval et inversement
    # res: int = sum((i[p_deb], i[p_fin], p[i_deb], p[i_fin]))

    # return res > 0
    return p_deb <= i_fin and p_fin >= i_deb
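
# Illustrative example (not executed): two ranges sharing at least one instant overlap.
#   d = datetime.datetime
#   is_period_overlapping((d(2023, 1, 10), d(2023, 1, 20)),
#                         (d(2023, 1, 15), d(2023, 1, 25)))   # -> True
#   is_period_overlapping((d(2023, 1, 10), d(2023, 1, 12)),
#                         (d(2023, 1, 15), d(2023, 1, 25)))   # -> False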


# Types de modules
class ModuleType(IntEnum):
    """Code des types de module."""

    # Stockés en BD dans Module.module_type: ne pas modifier ces valeurs
    STANDARD = 0
    MALUS = 1
    RESSOURCE = 2  # BUT
    SAE = 3  # BUT

    @classmethod
    def get_abbrev(cls, code) -> str:
        """Abbréviation décrivant le type de module à partir du code integer:
        "mod", "malus", "res", "sae"
        (utilisées pour style CSS)
        """
        return {
            ModuleType.STANDARD: "mod",
            ModuleType.MALUS: "malus",
            ModuleType.RESSOURCE: "res",
            ModuleType.SAE: "sae",
        }.get(code, "???")
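
# Illustrative example (not executed):
#   ModuleType.get_abbrev(ModuleType.RESSOURCE)  # -> "res"
#   ModuleType.get_abbrev(2)                     # -> "res"  (IntEnum: 2 == RESSOURCE)
#   ModuleType.get_abbrev(None)                  # -> "???"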


MODULE_TYPE_NAMES = {
    ModuleType.STANDARD: "Module",
    ModuleType.MALUS: "Malus",
    ModuleType.RESSOURCE: "Ressource",
    ModuleType.SAE: "SAÉ",
    None: "Module",
}

PARTITION_PARCOURS = "Parcours"

MALUS_MAX = 20.0
MALUS_MIN = -20.0

APO_MISSING_CODE_STR = "----"  # shown in HTML pages in place of missing code Apogée
EDIT_NB_ETAPES = 6  # Nombre max de codes étapes / semestre presentés dans l'UI

IT_SITUATION_MISSING_STR = (
    "____"  # shown on ficheEtud (devenir) in place of empty situation
)

RANG_ATTENTE_STR = "(attente)"  # rang affiché sur bulletins quand notes en attente

# borne supérieure de chaque mention
NOTES_MENTIONS_TH = (
    NOTES_TOLERANCE,
    7.0,
    10.0,
    12.0,
    14.0,
    16.0,
    18.0,
    20.0 + NOTES_TOLERANCE,
)
NOTES_MENTIONS_LABS = (
    "Nul",
    "Faible",
    "Insuffisant",
    "Passable",
    "Assez bien",
    "Bien",
    "Très bien",
    "Excellent",
)

EVALUATION_NORMALE = 0
EVALUATION_RATTRAPAGE = 1
EVALUATION_SESSION2 = 2

# Dates et années scolaires
# Ces dates "pivot" sont paramétrables dans les préférences générales
# on donne ici les valeurs par défaut.
# Les semestres commençant à partir du 1er août 20XX sont
# dans l'année scolaire 20XX
MONTH_DEBUT_ANNEE_SCOLAIRE = 8  # août
# Les semestres commençant à partir du 1er décembre
# sont "2eme période" (S_pair):
MONTH_DEBUT_PERIODE2 = MONTH_DEBUT_ANNEE_SCOLAIRE + 4

MONTH_NAMES_ABBREV = (
    "Jan ",
    "Fév ",
    "Mars",
    "Avr ",
    "Mai ",
    "Juin",
    "Jul ",
    "Août",
    "Sept",
    "Oct ",
    "Nov ",
    "Déc ",
)

MONTH_NAMES = (
    "janvier",
    "février",
    "mars",
    "avril",
    "mai",
    "juin",
    "juillet",
    "août",
    "septembre",
    "octobre",
    "novembre",
    "décembre",
)
DAY_NAMES = ("lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi", "dimanche")


def fmt_note(val, note_max=None, keep_numeric=False):
    """conversion note en str pour affichage dans tables HTML ou PDF.
    Si keep_numeric, laisse les valeur numeriques telles quelles (pour export Excel)
    """
    if val is None or val == NOTES_ABSENCE:
        return "ABS"
    if val == NOTES_NEUTRALISE:
        return "EXC"  # excuse, note neutralise
    if val == NOTES_ATTENTE:
        return "ATT"  # attente, note neutralisee
    if not isinstance(val, str):
        if np.isnan(val):
            return "~"
        if (note_max is not None) and note_max > 0:
            val = val * 20.0 / note_max
        if keep_numeric:
            return val
        else:
            s = "%2.2f" % round(float(val), 2)  # 2 chiffres apres la virgule
            s = "0" * (5 - len(s)) + s  # padding: 0 à gauche pour longueur 5: "12.34"
            return s
    else:
        return val.replace("NA", "-")
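
# Illustrative example (not executed):
#   fmt_note(12.5)                 # -> "12.50"
#   fmt_note(15.0, note_max=30.0)  # -> "10.00"  (rescaled to /20)
#   fmt_note(NOTES_ABSENCE)        # -> "ABS"
#   fmt_note(NOTES_NEUTRALISE)     # -> "EXC"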


def fmt_coef(val):
    """Conversion valeur coefficient (float) en chaine"""
    if val < 0.01:
        return "%g" % val  # unusually small value
    return "%g" % round(val, 2)


def fmt_abs(val):
    """Conversion absences en chaine. val est une liste [nb_abs_total, nb_abs_justifiees]
    => NbAbs / Nb_justifiees
    """
    return "%s / %s" % (val[0], val[1])


def isnumber(x):
    "True if x is a number (int, float, etc.)"
    return isinstance(x, numbers.Number)


def jsnan(x):
    "if x is NaN, returns None"
    if isinstance(x, numbers.Number) and np.isnan(x):
        return None
    return x


def join_words(*words):
    words = [str(w).strip() for w in words if w is not None]
    return " ".join([w for w in words if w])


def get_mention(moy):
    """Texte "mention" en fonction de la moyenne générale"""
    try:
        moy = float(moy)
    except:
        return ""
    if moy > 0.0:
        return NOTES_MENTIONS_LABS[bisect.bisect_right(NOTES_MENTIONS_TH, moy)]
    else:
        return ""


class DictDefault(dict):  # obsolete, use collections.defaultdict
    """A dictionary with a default value for all keys
    Each time a non existent key is requested, it is added to the dict.
    (used in python 2.4, can't use new __missing__ method)
    """

    defaultvalue = 0

    def __init__(self, defaultvalue=0, kv_dict={}):
        dict.__init__(self)
        self.defaultvalue = defaultvalue
        self.update(kv_dict)

    def __getitem__(self, k):
        if k in self:
            return self.get(k)
        value = copy.copy(self.defaultvalue)
        self[k] = value
        return value


class WrapDict(object):
    """Wrap a dict so that getitem returns '' when values are None"""

    def __init__(self, adict, NoneValue=""):
        self.dict = adict
        self.NoneValue = NoneValue

    def __getitem__(self, key):
        value = self.dict[key]
        if value is None:
            return self.NoneValue
        else:
            return value


def group_by_key(d, key):
    gr = DictDefault(defaultvalue=[])
    for e in d:
        gr[e[key]].append(e)
    return gr


# ----- Global lock for critical sections (except notes_tables caches)
GSL = _thread.allocate_lock()  # Global ScoDoc Lock

SCODOC_DIR = Config.SCODOC_DIR

# ----- Repertoire "config" modifiable
# /opt/scodoc-data/config
SCODOC_CFG_DIR = os.path.join(Config.SCODOC_VAR_DIR, "config")
# ----- Version information
SCODOC_VERSION_DIR = os.path.join(SCODOC_CFG_DIR, "version")
# ----- Repertoire tmp : /opt/scodoc-data/tmp
SCO_TMP_DIR = os.path.join(Config.SCODOC_VAR_DIR, "tmp")
if not os.path.exists(SCO_TMP_DIR) and os.path.exists(Config.SCODOC_VAR_DIR):
    os.mkdir(SCO_TMP_DIR, 0o755)
# ----- Les logos: /opt/scodoc-data/config/logos
SCODOC_LOGOS_DIR = os.path.join(SCODOC_CFG_DIR, "logos")
LOGOS_IMAGES_ALLOWED_TYPES = ("jpg", "jpeg", "png")  # remind that PIL does not read pdf
LOGOS_DIR_PREFIX = "logos_"
LOGO_FILE_PREFIX = "logo_"

# forme générale des noms des fichiers logos/background:
#   SCODOC_LOGO_DIR/LOGO_FILE_PREFIX<name>.<suffix> (fichier global) ou
#   SCODOC_LOGO_DIR/LOGOS_DIR_PREFIX<dept_id>/LOGO_FILE_PREFIX<name>.<suffix> (fichier départemental)
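# Illustrative examples of resulting paths (assuming a hypothetical logo named
# "header" and a department with dept_id 3, with SCODOC_LOGOS_DIR = /opt/scodoc-data/config/logos):
#   /opt/scodoc-data/config/logos/logo_header.png           (fichier global)
#   /opt/scodoc-data/config/logos/logos_3/logo_header.png   (fichier départemental)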

# ----- Les outils distribués
SCO_TOOLS_DIR = os.path.join(Config.SCODOC_DIR, "tools")


# ----- Lecture du fichier de configuration
from app.scodoc import sco_config
from app.scodoc import sco_config_load

sco_config_load.load_local_configuration(SCODOC_CFG_DIR)
CONFIG = sco_config.CONFIG
if hasattr(CONFIG, "CODES_EXPL"):
    CODES_EXPL.update(
        CONFIG.CODES_EXPL
    )  # permet de customiser les explications de codes

if CONFIG.CUSTOM_HTML_HEADER:
    CUSTOM_HTML_HEADER = open(CONFIG.CUSTOM_HTML_HEADER).read()
else:
    CUSTOM_HTML_HEADER = ""

if CONFIG.CUSTOM_HTML_HEADER_CNX:
    CUSTOM_HTML_HEADER_CNX = open(CONFIG.CUSTOM_HTML_HEADER_CNX).read()
else:
    CUSTOM_HTML_HEADER_CNX = ""

if CONFIG.CUSTOM_HTML_FOOTER:
    CUSTOM_HTML_FOOTER = open(CONFIG.CUSTOM_HTML_FOOTER).read()
else:
    CUSTOM_HTML_FOOTER = ""

if CONFIG.CUSTOM_HTML_FOOTER_CNX:
    CUSTOM_HTML_FOOTER_CNX = open(CONFIG.CUSTOM_HTML_FOOTER_CNX).read()
else:
    CUSTOM_HTML_FOOTER_CNX = ""

SCO_ENCODING = "utf-8"  # used by Excel, XML, PDF, ...

SCO_DEFAULT_SQL_USER = "scodoc"  # should match Zope process UID
SCO_DEFAULT_SQL_PORT = "5432"
SCO_DEFAULT_SQL_USERS_CNX = "dbname=SCOUSERS port=%s" % SCO_DEFAULT_SQL_PORT

# Valeurs utilisées pour affichage seulement, pas de requetes ni de mails envoyés:
SCO_WEBSITE = "https://scodoc.org"
SCO_USER_MANUAL = "https://scodoc.org/GuideUtilisateur"
SCO_ANNONCES_WEBSITE = "https://listes.univ-paris13.fr/mailman/listinfo/scodoc-annonces"
SCO_DEVEL_LIST = "scodoc-devel@listes.univ-paris13.fr"
SCO_USERS_LIST = "notes@listes.univ-paris13.fr"
SCO_LISTS_URL = "https://scodoc.org/ListesDeDiffusion/"
SCO_DISCORD_ASSISTANCE = "https://discord.gg/ybw6ugtFsZ"

# Mails avec exceptions (erreurs) anormales envoyés à cette adresse:
# mettre '' pour désactiver complètement l'envoi de mails d'erreurs.
# (ces mails sont précieux pour corriger les erreurs, ne les désactiver que si
# vous avez de bonnes raisons de le faire: vous pouvez me contacter avant)
SCO_EXC_MAIL = "scodoc-exception@viennet.net"

# L'adresse du mainteneur (non utilisée automatiquement par ScoDoc: ne pas changer)
SCO_DEV_MAIL = "emmanuel.viennet@gmail.com"  # SVP ne pas changer

# Adresse pour l'envoi des dumps (pour assistance technique):
# ne pas changer (ou vous perdez le support)
SCO_DUMP_UP_URL = "https://scodoc.org/scodoc-installmgr/upload-dump"
SCO_UP2DATE = "https://scodoc.org/scodoc-installmgr/check_version"
CSV_FIELDSEP = ";"
CSV_LINESEP = "\n"
CSV_MIMETYPE = "text/comma-separated-values"
CSV_SUFFIX = ".csv"
DOCX_MIMETYPE = (
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
)
DOCX_SUFFIX = ".docx"
JSON_MIMETYPE = "application/json"
JSON_SUFFIX = ".json"
PDF_MIMETYPE = "application/pdf"
PDF_SUFFIX = ".pdf"
XLS_MIMETYPE = "application/vnd.ms-excel"
XLS_SUFFIX = ".xls"
XLSX_MIMETYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
XLSX_SUFFIX = ".xlsx"
XML_MIMETYPE = "text/xml"
XML_SUFFIX = ".xml"

# Formats pour lesquels on exporte sans formatage des nombres (pas de perte de précision)
FORMATS_NUMERIQUES = {"csv", "xls", "xlsx", "xml", "json"}


def get_mime_suffix(format_code: str) -> tuple[str, str]:
    """Returns (MIME, SUFFIX) from format_code == "xls", "xml", ...
    SUFFIX includes the dot: ".xlsx", ".xml", ...
    "xls" and "xlsx" format codes give XLSX
    """
    d = {
        "csv": (CSV_MIMETYPE, CSV_SUFFIX),
        "docx": (DOCX_MIMETYPE, DOCX_SUFFIX),
        "xls": (XLSX_MIMETYPE, XLSX_SUFFIX),
        "xlsx": (XLSX_MIMETYPE, XLSX_SUFFIX),
        "pdf": (PDF_MIMETYPE, PDF_SUFFIX),
        "xml": (XML_MIMETYPE, XML_SUFFIX),
        "json": (JSON_MIMETYPE, JSON_SUFFIX),
    }
    return d[format_code]
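
# Illustrative example (not executed):
#   get_mime_suffix("xls")   # -> (XLSX_MIMETYPE, ".xlsx")  (legacy "xls" mapped to XLSX)
#   get_mime_suffix("json")  # -> ("application/json", ".json")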


# Admissions des étudiants
# Différents types de voies d'admission:
# (stocké en texte libre dans la base, mais saisie par menus pour harmoniser)
TYPE_ADMISSION_DEFAULT = "Inconnue"
TYPES_ADMISSION = (TYPE_ADMISSION_DEFAULT, "APB", "APB-PC", "CEF", "Direct")

BULLETINS_VERSIONS = ("short", "selectedevals", "long")

# Support for ScoDoc7 compatibility


def ScoURL():
    """base URL for this sco instance.
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite
    = page accueil département
    """
    return url_for("scolar.index_html", scodoc_dept=g.scodoc_dept)[
        : -len("/index_html")
    ]


def NotesURL():
    """URL of Notes
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite/Notes
    = url de base des méthodes de notes
    (page accueil programmes).
    """
    return url_for("notes.index_html", scodoc_dept=g.scodoc_dept)[: -len("/index_html")]


def AbsencesURL():
    """URL of Absences"""
    return url_for("absences.index_html", scodoc_dept=g.scodoc_dept)[
        : -len("/index_html")
    ]


def UsersURL():
    """URL of Users
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite/Users
    = url de base des requêtes ZScoUsers
    et page accueil users
    """
    return url_for("users.index_html", scodoc_dept=g.scodoc_dept)[: -len("/index_html")]


# ---- Simple python utilities


def simplesqlquote(s, maxlen=50):
    """simple SQL quoting to avoid most SQL injection attacks.
    Note: we use this function in the (rare) cases where we have to
    construct SQL code manually"""
    s = s[:maxlen]
    s = s.replace("'", r"\'")
    s = s.replace(";", r"\;")
    for bad in ("select", "drop", ";", "--", "insert", "delete", "xp_"):
        s = s.replace(bad, "")
    return s


def unescape_html(s):
    """un-escape html entities"""
    s = s.strip().replace("&amp;", "&")
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    return s


def build_url_query(url: str, **params) -> str:
    """Add parameters to existing url, as a query string"""
    url_parse = urlparse(url)
    query = url_parse.query
    url_dict = dict(parse_qsl(query))
    url_dict.update(params)
    url_new_query = urlencode(url_dict)
    url_parse = url_parse._replace(query=url_new_query)
    new_url = urlunparse(url_parse)
    return new_url
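
# Illustrative example (not executed), with a generic placeholder URL:
#   build_url_query("https://example.org/page?x=1", y=2)
#   # -> "https://example.org/page?x=1&y=2"  (existing query string is preserved)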


# test if obj is iterable (but not a string)
isiterable = lambda obj: getattr(obj, "__iter__", False)


def unescape_html_dict(d):
    """un-escape all dict values, recursively"""
    try:
        indices = list(d.keys())
    except:
        indices = list(range(len(d)))
    for k in indices:
        v = d[k]
        if isinstance(v, bytes):
            d[k] = unescape_html(v)
        elif isiterable(v):
            unescape_html_dict(v)


# Expressions used to check noms/prenoms
FORBIDDEN_CHARS_EXP = re.compile(r"[*\|~\(\)\\]")
ALPHANUM_EXP = re.compile(r"^[\w-]+$", re.UNICODE)


def is_valid_code_nip(s):
    """True si s peut être un code NIP: au moins 6 chiffres décimaux"""
    if not s:
        return False
    return re.match(r"^[0-9]{6,32}$", s)


def strnone(s):
    "convert s to string, '' if s is false"
    if s:
        return str(s)
    else:
        return ""


def stripquotes(s):
    "strip s from spaces and quotes"
    s = s.strip()
    if s and ((s[0] == '"' and s[-1] == '"') or (s[0] == "'" and s[-1] == "'")):
        s = s[1:-1]
    return s


def suppress_accents(s):
    "remove accents and suppress non ascii characters from string s"
    if isinstance(s, str):
        return (
            unicodedata.normalize("NFD", s)
            .encode("ascii", "ignore")
            .decode(SCO_ENCODING)
        )
    return s  # may be int


class PurgeChars:
    """delete all chars except those belonging to the specified string"""

    def __init__(self, allowed_chars=""):
        self.allowed_chars_set = {ord(c) for c in allowed_chars}

    def __getitem__(self, x):
        if x not in self.allowed_chars_set:
            return None
        raise LookupError()


def purge_chars(s, allowed_chars=""):
    return s.translate(PurgeChars(allowed_chars=allowed_chars))


def sanitize_string(s, remove_spaces=True):
    """s is an ordinary string, encoding given by SCO_ENCODING
    suppress accents and chars interpreted in XML
    Irreversible (not a quote)

    For ids and some filenames
    """
    # Table suppressing some chars:
    to_del = "'`\"<>!&\\ " if remove_spaces else "'`\"<>!&"
    trans = str.maketrans("", "", to_del)

    return suppress_accents(s.translate(trans)).replace("\t", "_")

_BAD_FILENAME_CHARS = str.maketrans("", "", ":/\\&[]*?'")


def make_filename(name):
    """Try to convert name to a reasonable filename
    without spaces, (back)slashes, : and without accents
    """
    return (
        suppress_accents(name.translate(_BAD_FILENAME_CHARS)).replace(" ", "_")
        or "scodoc"
    )


VALID_CARS = (
    "-abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.!"  # no / !
)
VALID_CARS_SET = set(VALID_CARS)
VALID_EXP = re.compile("^[" + VALID_CARS + "]+$")


def sanitize_filename(filename):
    """Keep only valid chars
    used for archives filenames
    """
    filename = suppress_accents(filename.replace(" ", "_"))
    sane = "".join([c for c in filename if c in VALID_CARS_SET])
    if len(sane) < 2:
        sane = time.strftime("%Y-%m-%d-%H%M%S") + "-" + sane
    return sane
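
# Illustrative example (not executed):
#   make_filename("Bulletin été 2023")      # -> "Bulletin_ete_2023"
#   sanitize_filename("rapport final.pdf")  # -> "rapport_final.pdf"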


def is_valid_filename(filename):
    """True if filename is safe"""
    return VALID_EXP.match(filename)


BOOL_STR = {
    "": False,
    "false": False,
    "0": False,
    "1": True,
    "true": True,
}


def to_bool(x) -> bool:
    """a boolean, may also be encoded as a string "0", "False", "1", "True" """
    if isinstance(x, str):
        return BOOL_STR.get(x.lower().strip(), True)
    return bool(x)
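
# Illustrative example (not executed):
#   to_bool("False")  # -> False
#   to_bool(" 1 ")    # -> True
#   to_bool("")       # -> False
#   to_bool(0)        # -> False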


# Min/Max values for numbers stored in database:
DB_MIN_FLOAT = -1e30
DB_MAX_FLOAT = 1e30
DB_MIN_INT = -(1 << 31)
DB_MAX_INT = (1 << 31) - 1


def bul_filename_old(sem: dict, etud: dict, format):
    """Build a filename for this bulletin"""
    dt = time.strftime("%Y-%m-%d")
    filename = f"bul-{sem['titre_num']}-{dt}-{etud['nom']}.{format}"
    filename = make_filename(filename)
    return filename


def bul_filename(formsemestre, etud, format):
    """Build a filename for this bulletin"""
    dt = time.strftime("%Y-%m-%d")
    filename = f"bul-{formsemestre.titre_num()}-{dt}-{etud.nom}.{format}"
    filename = make_filename(filename)
    return filename


def flash_errors(form):
    """Flashes form errors (version sommaire)"""
    for field, errors in form.errors.items():
        flash(
            "Erreur: voir le champ %s" % (getattr(form, field).label.text,),
            "warning",
        )
        # see https://getbootstrap.com/docs/4.0/components/alerts/


def flash_once(message: str):
    """Flash the message, but only once per request"""
    if not hasattr(g, "sco_flashed_once"):
        g.sco_flashed_once = set()
    if not message in g.sco_flashed_once:
        flash(message)
        g.sco_flashed_once.add(message)


def sendCSVFile(data, filename):  # DEPRECATED utiliser send_file
    """publication fichier CSV."""
    return send_file(data, filename=filename, mime=CSV_MIMETYPE, attached=True)


def sendPDFFile(data, filename):  # DEPRECATED utiliser send_file
    return send_file(data, filename=filename, mime=PDF_MIMETYPE, attached=True)


class ScoDocJSONEncoder(json.JSONEncoder):
    def default(self, o):  # pylint: disable=E0202
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()
        elif isinstance(o, ApoEtapeVDI):
            return str(o)
        else:
            return json.JSONEncoder.default(self, o)


def sendJSON(data, attached=False, filename=None):
    js = json.dumps(data, indent=1, cls=ScoDocJSONEncoder)
    return send_file(
        js, filename=filename or "sco_data.json", mime=JSON_MIMETYPE, attached=attached
    )


def sendXML(
    data,
    tagname=None,
    force_outer_xml_tag=True,
    attached=False,
    quote=False,
    filename=None,
):
    if type(data) != list:
        data = [data]  # always list-of-dicts
    if force_outer_xml_tag:
        data = [{tagname: data}]
        tagname += "_list"
    doc = sco_xml.simple_dictlist2xml(data, tagname=tagname, quote=quote)
    return send_file(
        doc, filename=filename or "sco_data.xml", mime=XML_MIMETYPE, attached=attached
    )


def sendResult(
    data,
    name=None,
    format=None,
    force_outer_xml_tag=True,
    attached=False,
    quote_xml=False,
    filename=None,
):
    if (format is None) or (format == "html"):
        return data
    elif format == "xml":  # name is outer tagname
        return sendXML(
            data,
            tagname=name,
            force_outer_xml_tag=force_outer_xml_tag,
            attached=attached,
            quote=quote_xml,
            filename=filename,
        )
    elif format == "json":
        return sendJSON(data, attached=attached, filename=filename)
    else:
        raise ValueError("invalid format: %s" % format)


def send_file(data, filename="", suffix="", mime=None, attached=None):
    """Build Flask Response for file download of given type
    By default (attached is None), json and xml are inlined and other types are attached.
    """
    if attached is None:
        if mime == XML_MIMETYPE or mime == JSON_MIMETYPE:
            attached = False
        else:
            attached = True
    if filename:
        if suffix:
            filename += suffix
        filename = make_filename(filename)
    response = make_response(data)
    response.headers["Content-Type"] = mime
    if attached and filename:
        response.headers["Content-Disposition"] = 'attachment; filename="%s"' % filename
    return response


def send_docx(document, filename):
    "Send a python-docx document"
    buffer = io.BytesIO()  # in-memory document, no disk file
    document.save(buffer)
    buffer.seek(0)
    return flask.send_file(
        buffer,
        download_name=sanitize_filename(filename),
        mimetype=DOCX_MIMETYPE,
    )


def get_request_args():
    """returns a dict with request (POST or GET) arguments
    converted to suit legacy Zope style (scodoc7) functions.
    """
    # copy to get a mutable object (necessary for TrivialFormulator and several methods)
    if request.method == "POST":
        # request.form is a werkzeug.datastructures.ImmutableMultiDict
        # must copy to get a mutable version (needed by TrivialFormulator)
        vals = request.form.copy()
        if request.files:
            # Add files in form:
            vals.update(request.files)
        for k in request.form:
            if k.endswith(":list"):
                vals[k[:-5]] = request.form.getlist(k)
    elif request.method == "GET":
        vals = {}
        for k in request.args:
            # current_app.logger.debug("%s\t%s" % (k, request.args.getlist(k)))
            if k.endswith(":list"):
                vals[k[:-5]] = request.args.getlist(k)
            else:
                values = request.args.getlist(k)
                vals[k] = values[0] if len(values) == 1 else values
    return vals


def json_error(status_code, message=None):
    """Simple JSON response, for errors"""
    payload = {
        "error": HTTP_STATUS_CODES.get(status_code, "Unknown error"),
        "status": status_code,
    }
    if message:
        payload["message"] = message
    response = jsonify(payload)
    response.status_code = status_code
    log(f"Error: {response}")
    return response


def json_ok_response(status_code=200, payload=None):
    """Simple JSON response for "success" """
    payload = payload or {"OK": True}
    response = jsonify(payload)
    response.status_code = status_code
    return response


def get_scodoc_version():
    "return a string identifying ScoDoc version"
    return sco_version.SCOVERSION


def check_scodoc7_password(scodoc7_hash, password):
    """Check a password vs scodoc7 hash
    used only during old databases migrations"""
    m = md5()
    m.update(password.encode("utf-8"))
    h = base64.encodebytes(m.digest()).decode("utf-8").strip()
    return h == scodoc7_hash


# Simple string manipulations


def abbrev_prenom(prenom):
    "Donne l'abreviation d'un prenom"
    # un peu lent, mais espère traiter tous les cas
    # Jean -> J.
    # Charles -> Ch.
    # Jean-Christophe -> J.-C.
    # Marie Odile -> M. O.
    prenom = prenom.replace(".", " ").strip()
    if not prenom:
        return ""
    d = prenom[:3].upper()
    if d == "CHA":
        abrv = "Ch."  # 'Charles' donne 'Ch.'
        i = 3
    else:
        abrv = prenom[0].upper() + "."
        i = 1
    n = len(prenom)
    while i < n:
        c = prenom[i]
        if c == " " or c == "-" and i < n - 1:
            sep = c
            i += 1
            # gobbe tous les separateurs
            while i < n and (prenom[i] == " " or prenom[i] == "-"):
                if prenom[i] == "-":
                    sep = "-"
                i += 1
            if i < n:
                abrv += sep + prenom[i].upper() + "."
        i += 1
    return abrv


#
def timedate_human_repr():
    "representation du temps courant pour utilisateur"
    return time.strftime("%d/%m/%Y à %Hh%M")


def annee_scolaire_repr(year, month):
    """representation de l'annee scolaire : '2009 - 2010'
    à partir d'une date.
    """
    if month >= MONTH_DEBUT_ANNEE_SCOLAIRE:  # apres le 1er aout
        return f"{year} - {year + 1}"
    else:
        return f"{year - 1} - {year}"


def annee_scolaire() -> int:
    """Année de debut de l'annee scolaire courante"""
    t = time.localtime()
    year, month = t[0], t[1]
    return annee_scolaire_debut(year, month)


def annee_scolaire_debut(year, month) -> int:
    """Annee scolaire de début.
    Par défaut (hémisphère nord), l'année du mois d'août
    précédant la date indiquée.
    """
    if int(month) >= MONTH_DEBUT_ANNEE_SCOLAIRE:
        return int(year)
    else:
        return int(year) - 1
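
# Illustrative example (not executed), with the default pivot month (August):
#   annee_scolaire_debut(2023, 9)  # -> 2023  (September 2023 is in school year 2023-2024)
#   annee_scolaire_debut(2024, 3)  # -> 2023  (March 2024 is still in school year 2023-2024)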


def date_debut_anne_scolaire(annee_sco: int) -> datetime:
    """La date de début de l'année scolaire
    (par défaut, le 1er aout)
    """
    return datetime.datetime(year=annee_sco, month=MONTH_DEBUT_ANNEE_SCOLAIRE, day=1)


def date_fin_anne_scolaire(annee_sco: int) -> datetime:
    """La date de fin de l'année scolaire
    (par défaut, le 31 juillet de l'année suivante)
    """
    # on prend la date de début de l'année scolaire suivante,
    # et on lui retire 1 jour.
    # On s'affranchit ainsi des problèmes de durées de mois.
    return datetime.datetime(
        year=annee_sco + 1, month=MONTH_DEBUT_ANNEE_SCOLAIRE, day=1
    ) - datetime.timedelta(days=1)


def sem_decale_str(sem):
    """'D' si semestre decalé, ou ''"""
    # considère "décalé" les semestre impairs commençant entre janvier et juin
    # et les pairs entre juillet et decembre
    if sem["semestre_id"] <= 0:
        return ""
    if (sem["semestre_id"] % 2 and sem["mois_debut_ord"] <= 6) or (
        not sem["semestre_id"] % 2 and sem["mois_debut_ord"] > 6
    ):
        return "D"
    else:
        return ""


def is_valid_mail(email):
    """True if well-formed email address"""
    return re.match(r"^.+@.+\..{2,3}$", email)


def graph_from_edges(edges, graph_name="mygraph"):
    """Crée un graph pydot
    à partir d'une liste d'arêtes [ (n1, n2), (n2, n3), ... ]
    où n1, n2, ... sont des chaînes donnant l'id des nœuds.

    Fonction remplaçant celle de pydot qui est buggée.
    """
    nodes = set([it for tup in edges for it in tup])
    graph = pydot.Dot(graph_name)
    for n in nodes:
        graph.add_node(pydot.Node(n))
    for e in edges:
        graph.add_edge(pydot.Edge(src=e[0], dst=e[1]))
    return graph


ICONSIZES = {}  # name : (width, height) cache image sizes


def icontag(name, file_format="png", no_size=False, **attrs):
    """tag HTML pour un icone.
    (dans les versions anterieures on utilisait Zope)
    Les icones sont des fichiers PNG dans .../static/icons
    Si la taille (width et height) n'est pas spécifiée, lit l'image
    pour la mesurer (et cache le résultat).
    """
    if (not no_size) and (("width" not in attrs) or ("height" not in attrs)):
        if name not in ICONSIZES:
            img_file = os.path.join(
                Config.SCODOC_DIR,
                "app/static/icons/%s.%s"
                % (
                    name,
                    file_format,
                ),
            )
            im = PILImage.open(img_file)
            width, height = im.size[0], im.size[1]
            ICONSIZES[name] = (width, height)  # cache
        else:
            width, height = ICONSIZES[name]
        attrs["width"] = width
        attrs["height"] = height
    if "border" not in attrs:
        attrs["border"] = 0
    if "alt" not in attrs:
        attrs["alt"] = "logo %s" % name
    s = " ".join(['%s="%s"' % (k, attrs[k]) for k in attrs])
    return f'<img class="{name}" {s} src="{STATIC_DIR}/icons/{name}.{file_format}" />'


ICON_PDF = icontag("pdficon16x20_img", title="Version PDF")
ICON_XLS = icontag("xlsicon_img", title="Version tableur")

# HTML emojis
EMO_WARNING = "⚠️"  # warning /!\
EMO_RED_TRIANGLE_DOWN = "🔻"  # red triangle pointed down
EMO_PREV_ARROW = "❮"
EMO_NEXT_ARROW = "❯"


def sort_dates(L, reverse=False):
    """Return sorted list of dates, allowing None items (they are put at the beginning)"""
    mindate = datetime.datetime(datetime.MINYEAR, 1, 1)
    try:
        return sorted(L, key=lambda x: x or mindate, reverse=reverse)
    except:
        # Helps debugging
        log("sort_dates( %s )" % L)
        raise


def heterogeneous_sorting_key(x):
    "key to sort non homogeneous sequences"
    return (float(x), "") if isinstance(x, (bool, float, int)) else (-1e34, str(x))


def query_portal(req, msg="Portail Apogee", timeout=3):
    """Retrieves external data using HTTP request
    (used to connect to Apogee portal, or ScoDoc server)
    returns a string, "" on error
    """
    log("query_portal: %s" % req)
    error_message = None
    try:
        r = requests.get(req, timeout=timeout)  # seconds / request
    except requests.ConnectionError:
        error_message = "ConnectionError"
    except requests.Timeout:
        error_message = "Timeout"
    except requests.TooManyRedirects:
        error_message = "TooManyRedirects"
    except requests.RequestException:
        error_message = f"can't connect to {msg}"
    if error_message is not None:
        log(f"query_portal: {error_message}")
        return ""
    if r.status_code != 200:
        log(f"query_portal: http error {r.status_code}")
        return ""

    return r.text


def confirm_dialog(
    message="<p>Confirmer ?</p>",
    OK="OK",
    Cancel="Annuler",
    dest_url="",
    cancel_url="",
    target_variable="dialog_confirmed",
    parameters={},
    add_headers=True,  # complete page
    helpmsg=None,
):
    from app.scodoc import html_sco_header

    # dialog de confirmation simple
    parameters[target_variable] = 1
    # Attention: la page a pu etre servie en GET avec des parametres
    # si on laisse l'url "action" vide, les parametres restent alors que l'on passe en POST...
    if not dest_url:
        action = ""
    else:
        # strip remaining parameters from destination url:
        dest_url = urllib.parse.splitquery(dest_url)[0]
        action = f'action="{dest_url}"'

    H = [
        f"""<form {action} method="POST">
        {message}
        """,
    ]
    if OK or not cancel_url:
        H.append(f'<input type="submit" value="{OK}"/>')
    if cancel_url:
        H.append(
            """<input type ="button" value="%s"
             onClick="document.location='%s';"/>"""
            % (Cancel, cancel_url)
        )
    for param in parameters.keys():
        if parameters[param] is None:
            parameters[param] = ""
        if type(parameters[param]) == type([]):
            for e in parameters[param]:
                H.append('<input type="hidden" name="%s" value="%s"/>' % (param, e))
        else:
            H.append(
                '<input type="hidden" name="%s" value="%s"/>'
                % (param, parameters[param])
            )
    H.append("</form>")
    if helpmsg:
        H.append('<p class="help">' + helpmsg + "</p>")
    if add_headers:
        return (
            html_sco_header.sco_header() + "\n".join(H) + html_sco_header.sco_footer()
        )
    else:
        return "\n".join(H)


def objects_renumber(db, obj_list) -> None:
    """fixe les numeros des objets d'une liste de modèles
    pour ne pas changer son ordre"""
    log(f"objects_renumber")
    for i, obj in enumerate(obj_list):
        obj.numero = i
        db.session.add(obj)
    db.session.commit()


def gen_cell(key: str, row: dict, elt="td", with_col_class=False):
    "html table cell"
    klass = row.get(f"_{key}_class", "")
    if with_col_class:
        klass = key + " " + klass
    attrs = f'class="{klass}"' if klass else ""
    if elt == "th":
        attrs += ' scope="row"'
    data = row.get(f"_{key}_data")  # dict
    if data:
        for k in data:
            attrs += f' data-{k}="{data[k]}"'
    order = row.get(f"_{key}_order")
    if order:
        attrs += f' data-order="{order}"'
    content = row.get(key, "")
    target = row.get(f"_{key}_target")
    target_attrs = row.get(f"_{key}_target_attrs", "")
    if target or target_attrs:  # avec lien
        href = f'href="{target}"' if target else ""
        content = f"<a {href} {target_attrs}>{content}</a>"
    return f"<{elt} {attrs}>{content}</{elt}>"


def gen_row(
    keys: list[str], row, elt="td", selected_etudid=None, with_col_classes=False
):
    "html table row"
    klass = row.get("_tr_class")
    tr_class = f'class="{klass}"' if klass else ""
    tr_id = (
        f"""id="row_selected" """ if (row.get("etudid", "") == selected_etudid) else ""
    )
    return f"""<tr {tr_id} {tr_class}>{
        "".join([gen_cell(key, row, elt, with_col_class=with_col_classes)
                 for key in keys if not key.startswith('_')])
    }</tr>"""
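
# Illustrative example of the row dict conventions used above (not executed):
#   row = {
#       "nom": "Dupont",                      # cell content for column "nom"
#       "_nom_class": "etudinfo",             # CSS class of the "nom" cell
#       "_nom_target": "ficheEtud?etudid=1",  # hypothetical link target for the cell
#       "_tr_class": "selected",              # CSS class of the whole <tr>
#   }
#   gen_row(["nom"], row)  # -> a <tr> whose "nom" cell is rendered as a link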


# Pour accès depuis les templates jinja
def is_entreprises_enabled():
    from app.models import ScoDocSiteConfig

    return ScoDocSiteConfig.is_entreprises_enabled()