# -*- mode: python -*-
# -*- coding: utf-8 -*-

##############################################################################
#
# Gestion scolarite IUT
#
# Copyright (c) 1999 - 2023 Emmanuel Viennet. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Emmanuel Viennet emmanuel.viennet@viennet.net
#
##############################################################################

""" Common definitions
"""

import base64
import bisect
import collections
import datetime
from enum import IntEnum, Enum
import io
import json
from hashlib import md5
import numbers
import os
import re
from shutil import get_terminal_size
import _thread
import time
import unicodedata
import urllib
from urllib.parse import urlparse, parse_qsl, urlunparse, urlencode

import numpy as np
from PIL import Image as PILImage
import pydot
import requests

from pytz import timezone

import dateutil.parser as dtparser

import flask
from flask import g, request, Response
from flask import flash, url_for, make_response
from flask_json import json_response
from werkzeug.http import HTTP_STATUS_CODES

from config import Config
from app import log, ScoDocJSONEncoder

from app.scodoc.codes_cursus import NOTES_TOLERANCE, CODES_EXPL
from app.scodoc import sco_xml
import sco_version

# En principe, aucun champ text ne devrait excéder cette taille
MAX_TEXT_LEN = 64 * 1024

# le répertoire static, lié à chaque release pour éviter les problèmes de caches
STATIC_DIR = (
    os.environ.get("SCRIPT_NAME", "") + "/ScoDoc/static/links/" + sco_version.SCOVERSION
)

# La time zone du serveur:
# Attention: suppose que la timezone utilisée par postgresql soit la même !
TIME_ZONE = timezone("/".join(os.path.realpath("/etc/localtime").split("/")[-2:]))

# ----- CALCUL ET PRESENTATION DES NOTES
NOTES_PRECISION = 1e-4  # evite eventuelles erreurs d'arrondis
NOTES_MIN = 0.0  # valeur minimale admise pour une note (sauf malus, dans [-20, 20])
NOTES_MAX = 1000.0
NOTES_ABSENCE = -999.0  # absences dans les DataFrames, NULL en base
NOTES_NEUTRALISE = -1000.0  # notes non prises en comptes dans moyennes
NOTES_SUPPRESS = -1001.0  # note a supprimer
NOTES_ATTENTE = -1002.0  # note "en attente" (se calcule comme une note neutralisee)

NO_NOTE_STR = "-"  # contenu des cellules de tableaux html sans notes

# ---- CODES INSCRIPTION AUX SEMESTRES
# (champ etat de FormSemestreInscription)
INSCRIT = "I"
DEMISSION = "D"
DEF = "DEF"
ETATS_INSCRIPTION = {
    INSCRIT: "Inscrit",
    DEMISSION: "Démission",
    DEF: "Défaillant",
}


def print_progress_bar(
    iteration,
    total,
    prefix="",
    suffix="",
    finish_msg="",
    decimals=1,
    length=100,
    fill="█",
    autosize=False,
):
    """
    Affiche une progress bar à un point donné (mettre dans une boucle pour rendre dynamique)
    @params:
        iteration   - Required  : index du point donné (Int)
        total       - Required  : nombre total avant complétion (eg: len(List))
        prefix      - Optional  : préfixe écrit à gauche de la barre (Str)
        suffix      - Optional  : suffixe écrit à droite de la barre (Str)
        finish_msg  - Optional  : message affiché quand la barre est complète (Str)
        decimals    - Optional  : nombre de chiffres après la virgule (Int)
        length      - Optional  : taille de la barre en nombre de caractères (Int)
        fill        - Optional  : caractère de remplissage de la barre (Str)
        autosize    - Optional  : choisir automatiquement la taille de la barre en fonction du terminal (Bool)
    """
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    color = TerminalColor.RED
    if 50 >= float(percent) > 25:
        color = TerminalColor.MAGENTA
    if 75 >= float(percent) > 50:
        color = TerminalColor.BLUE
    if 90 >= float(percent) > 75:
        color = TerminalColor.CYAN
    if 100 >= float(percent) > 90:
        color = TerminalColor.GREEN
    styling = f"{prefix} |{fill}| {percent}% {suffix}"
    if autosize:
        cols, _ = get_terminal_size(fallback=(length, 1))
        length = cols - len(styling)
    filled_length = int(length * iteration // total)
    pg_bar = fill * filled_length + "-" * (length - filled_length)
    print(f"\r{color}{styling.replace(fill, pg_bar)}{TerminalColor.RESET}", end="\r")
    # Affiche une nouvelle ligne vide
    if iteration == total:
        print(f"\n{finish_msg}")


class TerminalColor:
    """Ensemble de couleurs pour terminaux"""

    BLUE = "\033[94m"
    CYAN = "\033[96m"
    GREEN = "\033[92m"
    MAGENTA = "\033[95m"
    RED = "\033[91m"
    RESET = "\033[0m"


class BiDirectionalEnum(Enum):
    """Permet la recherche inverse d'un enum
    Condition : les clés et les valeurs doivent être uniques
    les clés doivent être en MAJUSCULES
    """

    @classmethod
    def contains(cls, attr: str):
        """Vérifie si un attribut existe dans l'enum"""
        return attr.upper() in cls._member_names_

    @classmethod
    def all(cls, keys=True):
        """Retourne toutes les clés de l'enum (ou les valeurs si keys est faux)"""
        return cls._member_names_ if keys else list(cls._value2member_map_.keys())

    @classmethod
    def get(cls, attr: str, default: any = None):
        """Récupère une valeur à partir de son attribut"""
        val = None
        try:
            val = cls[attr.upper()]
        except (KeyError, AttributeError):
            val = default
        return val

    @classmethod
    def inverse(cls):
        """Retourne un dictionnaire représentant la map inverse de l'Enum"""
        return cls._value2member_map_
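
# Exemple d'utilisation de BiDirectionalEnum (esquisse illustrative ajoutée en
# commentaire, non utilisée par ScoDoc ; la classe _Couleur est hypothétique) :
#
#   class _Couleur(BiDirectionalEnum):
#       ROUGE = 1
#       VERT = 2
#
#   _Couleur.contains("rouge")   # True (recherche insensible à la casse)
#   _Couleur.get("vert")         # _Couleur.VERT
#   _Couleur.get("bleu", None)   # None (clé absente)
#   _Couleur.all()               # ["ROUGE", "VERT"]
#   _Couleur.inverse()[1]        # _Couleur.ROUGE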


class EtatAssiduite(int, BiDirectionalEnum):
    """Code des états d'assiduité"""

    # Stockés en BD ne pas modifier

    PRESENT = 0
    RETARD = 1
    ABSENT = 2


class EtatJustificatif(int, BiDirectionalEnum):
    """Code des états des justificatifs"""

    # Stockés en BD ne pas modifier

    VALIDE = 0
    NON_VALIDE = 1
    ATTENTE = 2
    MODIFIE = 3


def is_iso_formated(date: str, convert=False) -> bool or datetime.datetime or None:
    """
    Vérifie si une date est au format iso

    Retourne un booléen Vrai (ou un objet Datetime si convert = True)
    si l'objet est au format iso

    Retourne Faux si l'objet n'est pas au format et convert = False

    Retourne None sinon
    """
    try:
        date: datetime.datetime = dtparser.isoparse(date)
        return date if convert else True
    except (dtparser.ParserError, ValueError, TypeError):
        return None if convert else False


def localize_datetime(date: datetime.datetime or str) -> datetime.datetime:
    """Rend la date donnée "aware" en lui ajoutant la timezone du serveur
    (ou UTC en cas d'overflow)."""
    if isinstance(date, str):
        date = is_iso_formated(date, convert=True)

    new_date: datetime.datetime = date
    if new_date.tzinfo is None:
        try:
            new_date = TIME_ZONE.localize(date)
        except OverflowError:
            new_date = timezone("UTC").localize(date)
    return new_date
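
# Exemples (illustratifs, ajoutés en commentaire ; le résultat de localize_datetime
# dépend de la TIME_ZONE du serveur) :
#
#   is_iso_formated("2023-09-01T08:00:00")                 # True
#   is_iso_formated("2023-09-01T08:00:00", convert=True)   # datetime.datetime(2023, 9, 1, 8, 0)
#   is_iso_formated("pas une date")                        # False
#   localize_datetime("2023-09-01T08:00:00")               # datetime "aware", dans TIME_ZONE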


def is_period_overlapping(
    periode: tuple[datetime.datetime, datetime.datetime],
    interval: tuple[datetime.datetime, datetime.datetime],
    bornes: bool = True,
) -> bool:
    """
    Vérifie si la période et l'intervalle s'intersectent
    si bornes == False : les extrémités ne comptent pas

    Retourne Vrai si c'est le cas, faux sinon.
    Attention: offset-aware datetimes
    """
    p_deb, p_fin = periode
    i_deb, i_fin = interval

    if bornes:
        return p_deb <= i_fin and p_fin >= i_deb
    return p_deb < i_fin and p_fin > i_deb
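
# Exemple (illustratif) : deux périodes qui ne se touchent que par une borne
#
#   d = datetime.datetime
#   p = (d(2023, 9, 1), d(2023, 9, 10))
#   i = (d(2023, 9, 10), d(2023, 9, 20))
#   is_period_overlapping(p, i)                # True  (bornes incluses)
#   is_period_overlapping(p, i, bornes=False)  # False (bornes exclues)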


class AssiduitesMetrics:
    """Labels associés aux métriques de l'assiduité"""

    SHORT: list[str] = ["1/2 J.", "J.", "H."]
    LONG: list[str] = ["Demi-journée", "Journée", "Heure"]
    TAG: list[str] = ["demi", "journee", "heure"]


def translate_assiduites_metric(metric, inverse=True, short=True) -> str:
    """
    translate_assiduites_metric

    SHORT[true] : "J." "H." "N." "1/2 J."
    SHORT[false] : "Journée" "Heure" "Nombre" "Demi-Journée"

    inverse[false] : "demi" -> "1/2 J."
    inverse[true] : "1/2 J." -> "demi"

    Args:
        metric (str): la métrique à traduire
        inverse (bool, optional). Defaults to True.
        short (bool, optional). Defaults to True.

    Returns:
        str: la métrique traduite
    """
    index: int = None
    if not inverse:
        try:
            index = AssiduitesMetrics.TAG.index(metric)
            return (
                AssiduitesMetrics.SHORT[index]
                if short
                else AssiduitesMetrics.LONG[index]
            )
        except ValueError:
            return None

    try:
        index = (
            AssiduitesMetrics.SHORT.index(metric)
            if short
            else AssiduitesMetrics.LONG.index(metric)
        )
        return AssiduitesMetrics.TAG[index]
    except ValueError:
        return None
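
# Exemples (illustratifs) :
#
#   translate_assiduites_metric("demi", inverse=False)               # "1/2 J."
#   translate_assiduites_metric("demi", inverse=False, short=False)  # "Demi-journée"
#   translate_assiduites_metric("1/2 J.")                            # "demi"
#   translate_assiduites_metric("Heure", short=False)                # "heure"
#   translate_assiduites_metric("inconnu")                           # None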


# Types de modules
class ModuleType(IntEnum):
    """Code des types de module."""

    # Stockés en BD dans Module.module_type: ne pas modifier ces valeurs
    STANDARD = 0
    MALUS = 1
    RESSOURCE = 2  # BUT
    SAE = 3  # BUT

    @classmethod
    def get_abbrev(cls, code) -> str:
        """Abréviation décrivant le type de module à partir du code integer:
        "mod", "malus", "res", "sae"
        (utilisées pour style CSS)
        """
        return {
            ModuleType.STANDARD: "mod",
            ModuleType.MALUS: "malus",
            ModuleType.RESSOURCE: "res",
            ModuleType.SAE: "sae",
        }.get(code, "???")


MODULE_TYPE_NAMES = {
    ModuleType.STANDARD: "Module",
    ModuleType.MALUS: "Malus",
    ModuleType.RESSOURCE: "Ressource",
    ModuleType.SAE: "SAÉ",
    None: "Module",
}

PARTITION_PARCOURS = "Parcours"

MALUS_MAX = 20.0
MALUS_MIN = -20.0

APO_MISSING_CODE_STR = "----"  # shown in HTML pages in place of missing code Apogée
EDIT_NB_ETAPES = 6  # Nombre max de codes étapes / semestre presentés dans l'UI

IT_SITUATION_MISSING_STR = (
    "____"  # shown on ficheEtud (devenir) in place of empty situation
)

RANG_ATTENTE_STR = "(attente)"  # rang affiché sur bulletins quand notes en attente

# borne supérieure de chaque mention
NOTES_MENTIONS_TH = (
    NOTES_TOLERANCE,
    7.0,
    10.0,
    12.0,
    14.0,
    16.0,
    18.0,
    20.0 + NOTES_TOLERANCE,
)
NOTES_MENTIONS_LABS = (
    "Nul",
    "Faible",
    "Insuffisant",
    "Passable",
    "Assez bien",
    "Bien",
    "Très bien",
    "Excellent",
)

EVALUATION_NORMALE = 0
EVALUATION_RATTRAPAGE = 1
EVALUATION_SESSION2 = 2

# Dates et années scolaires
# Ces dates "pivot" sont paramétrables dans les préférences générales
# on donne ici les valeurs par défaut.
# Les semestres commençant à partir du 1er août 20XX sont
# dans l'année scolaire 20XX
MONTH_DEBUT_ANNEE_SCOLAIRE = 8  # août
# Les semestres commençant à partir du 1er décembre
# sont "2eme période" (S_pair):
MONTH_DEBUT_PERIODE2 = MONTH_DEBUT_ANNEE_SCOLAIRE + 4

MONTH_NAMES_ABBREV = (
    "Jan ", "Fév ", "Mars", "Avr ", "Mai ", "Juin",
    "Jul ", "Août", "Sept", "Oct ", "Nov ", "Déc ",
)

MONTH_NAMES = (
    "janvier", "février", "mars", "avril", "mai", "juin",
    "juillet", "août", "septembre", "octobre", "novembre", "décembre",
)

DAY_NAMES = ("lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi", "dimanche")


def fmt_note(val, note_max=None, keep_numeric=False):
    """conversion note en str pour affichage dans tables HTML ou PDF.
    Si keep_numeric, laisse les valeurs numériques telles quelles (pour export Excel)
    """
    if val is None or val == NOTES_ABSENCE:
        return "ABS"
    if val == NOTES_NEUTRALISE:
        return "EXC"  # excuse, note neutralisée
    if val == NOTES_ATTENTE:
        return "ATT"  # attente, note neutralisée
    if not isinstance(val, str):
        if np.isnan(val):
            return "~"
        if (note_max is not None) and note_max > 0:
            val = val * 20.0 / note_max
        if keep_numeric:
            return val
        else:
            s = "%2.2f" % round(float(val), 2)  # 2 chiffres apres la virgule
            s = "0" * (5 - len(s)) + s  # padding: 0 à gauche pour longueur 5: "12.34"
            return s
    else:
        return val.replace("NA", "-")
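
# Exemples (illustratifs) :
#
#   fmt_note(12.3456)                # "12.35"
#   fmt_note(9.5)                    # "09.50" (padding à gauche)
#   fmt_note(45, note_max=50)        # "18.00" (ramenée sur 20)
#   fmt_note(None)                   # "ABS"
#   fmt_note(NOTES_NEUTRALISE)       # "EXC"
#   fmt_note(15, keep_numeric=True)  # 15 (inchangé, pour export tableur)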


def fmt_coef(val):
    """Conversion valeur coefficient (float) en chaine"""
    if val < 0.01:
        return "%g" % val  # unusually small value
    return "%g" % round(val, 2)


def fmt_abs(val):
    """Conversion absences en chaine. val est une liste [nb_abs_total, nb_abs_justifiees]
    => NbAbs / Nb_justifiees
    """
    return "%s / %s" % (val[0], val[1])


def isnumber(x):
    "True if x is a number (int, float, etc.)"
    return isinstance(x, numbers.Number)


def jsnan(x):
    "if x is NaN, returns None"
    if isinstance(x, numbers.Number) and np.isnan(x):
        return None
    return x


def join_words(*words):
    "Concatène les mots non vides, séparés par des espaces"
    words = [str(w).strip() for w in words if w is not None]
    return " ".join([w for w in words if w])


def get_mention(moy):
    """Texte "mention" en fonction de la moyenne générale"""
    try:
        moy = float(moy)
    except (ValueError, TypeError):
        return ""
    if moy > 0.0:
        return NOTES_MENTIONS_LABS[bisect.bisect_right(NOTES_MENTIONS_TH, moy)]
    else:
        return ""
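
# Exemples (illustratifs) : les seuils de NOTES_MENTIONS_TH sont des bornes supérieures
#
#   get_mention(11.0)  # "Passable"  (10.0 <= 11.0 < 12.0)
#   get_mention(14.0)  # "Bien"      (une moyenne égale au seuil passe à la mention supérieure)
#   get_mention("x")   # ""          (valeur non numérique)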


def group_by_key(d: dict, key) -> dict:
    """Regroupe les éléments (dicts) de la séquence d selon la valeur de leur clé key"""
    grouped = collections.defaultdict(lambda: [])
    for e in d:
        grouped[e[key]].append(e)
    return grouped
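
# Exemple (illustratif) :
#
#   notes = [{"ue": "UE1", "note": 12}, {"ue": "UE2", "note": 9}, {"ue": "UE1", "note": 15}]
#   group_by_key(notes, "ue")
#   # => {"UE1": [{"ue": "UE1", "note": 12}, {"ue": "UE1", "note": 15}],
#   #     "UE2": [{"ue": "UE2", "note": 9}]}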


# ----- Global lock for critical sections (except notes_tables caches)
GSL = _thread.allocate_lock()  # Global ScoDoc Lock

SCODOC_DIR = Config.SCODOC_DIR

# ----- Repertoire "config" modifiable
# /opt/scodoc-data/config
SCODOC_CFG_DIR = os.path.join(Config.SCODOC_VAR_DIR, "config")
# ----- Version information
SCODOC_VERSION_DIR = os.path.join(SCODOC_CFG_DIR, "version")
# ----- Repertoire tmp : /opt/scodoc-data/tmp
SCO_TMP_DIR = os.path.join(Config.SCODOC_VAR_DIR, "tmp")
if not os.path.exists(SCO_TMP_DIR) and os.path.exists(Config.SCODOC_VAR_DIR):
    os.mkdir(SCO_TMP_DIR, 0o755)
# ----- Les logos: /opt/scodoc-data/config/logos
SCODOC_LOGOS_DIR = os.path.join(SCODOC_CFG_DIR, "logos")
LOGOS_IMAGES_ALLOWED_TYPES = ("jpg", "jpeg", "png")  # remind that PIL does not read pdf
LOGOS_DIR_PREFIX = "logos_"
LOGO_FILE_PREFIX = "logo_"

# forme générale des noms des fichiers logos/background:
# SCODOC_LOGO_DIR/LOGO_FILE_PREFIX<name>.<suffix> (fichier global) ou
# SCODOC_LOGO_DIR/LOGOS_DIR_PREFIX<dept_id>/LOGO_FILE_PREFIX<name>.<suffix> (fichier départemental)

# ----- Les outils distribués
SCO_TOOLS_DIR = os.path.join(Config.SCODOC_DIR, "tools")


# ----- Lecture du fichier de configuration
from app.scodoc import sco_config
from app.scodoc import sco_config_load

sco_config_load.load_local_configuration(SCODOC_CFG_DIR)
CONFIG = sco_config.CONFIG
if hasattr(CONFIG, "CODES_EXPL"):
    CODES_EXPL.update(
        CONFIG.CODES_EXPL
    )  # permet de customiser les explications de codes

if CONFIG.CUSTOM_HTML_HEADER:
    CUSTOM_HTML_HEADER = open(CONFIG.CUSTOM_HTML_HEADER).read()
else:
    CUSTOM_HTML_HEADER = ""

if CONFIG.CUSTOM_HTML_HEADER_CNX:
    CUSTOM_HTML_HEADER_CNX = open(CONFIG.CUSTOM_HTML_HEADER_CNX).read()
else:
    CUSTOM_HTML_HEADER_CNX = ""

if CONFIG.CUSTOM_HTML_FOOTER:
    CUSTOM_HTML_FOOTER = open(CONFIG.CUSTOM_HTML_FOOTER).read()
else:
    CUSTOM_HTML_FOOTER = ""

if CONFIG.CUSTOM_HTML_FOOTER_CNX:
    CUSTOM_HTML_FOOTER_CNX = open(CONFIG.CUSTOM_HTML_FOOTER_CNX).read()
else:
    CUSTOM_HTML_FOOTER_CNX = ""

SCO_ENCODING = "utf-8"  # used by Excel, XML, PDF, ...

SCO_DEFAULT_SQL_USER = "scodoc"  # should match Zope process UID
SCO_DEFAULT_SQL_PORT = "5432"
SCO_DEFAULT_SQL_USERS_CNX = "dbname=SCOUSERS port=%s" % SCO_DEFAULT_SQL_PORT

# Valeurs utilisées pour affichage seulement, pas de requetes ni de mails envoyés:
SCO_WEBSITE = "https://scodoc.org"
SCO_USER_MANUAL = "https://scodoc.org/GuideUtilisateur"
SCO_ANNONCES_WEBSITE = "https://scodoc.org/Contact"
SCO_DEVEL_LIST = "scodoc-devel@listes.univ-paris13.fr"
SCO_USERS_LIST = "notes@listes.univ-paris13.fr"
SCO_LISTS_URL = "https://scodoc.org/Contact"
SCO_DISCORD_ASSISTANCE = "https://discord.gg/ybw6ugtFsZ"

# Mails avec exceptions (erreurs) anormales envoyés à cette adresse:
# mettre '' pour désactiver complètement l'envoi de mails d'erreurs.
# (ces mails sont précieux pour corriger les erreurs, ne les désactiver que si
# vous avez de bonnes raisons de le faire: vous pouvez me contacter avant)
SCO_EXC_MAIL = "scodoc-exception@viennet.net"

# L'adresse du mainteneur (non utilisée automatiquement par ScoDoc: ne pas changer)
SCO_DEV_MAIL = "emmanuel.viennet@gmail.com"  # SVP ne pas changer

# Adresse pour l'envoi des dumps (pour assistance technique):
# ne pas changer (ou vous perdez le support)
SCO_DUMP_UP_URL = "https://scodoc.org/scodoc-installmgr/upload-dump"
SCO_UP2DATE = "https://scodoc.org/scodoc-installmgr/check_version"
SCO_ORG_TIMEOUT = 180  # contacts scodoc.org
SCO_EXT_TIMEOUT = 180  # appels à des ressources extérieures (siret, ...)
SCO_TEST_API_TIMEOUT = 5  # pour tests unitaires API
CSV_FIELDSEP = ";"
CSV_LINESEP = "\n"
CSV_MIMETYPE = "text/comma-separated-values"
CSV_SUFFIX = ".csv"
DOCX_MIMETYPE = (
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
)
DOCX_SUFFIX = ".docx"
JSON_MIMETYPE = "application/json"
JSON_SUFFIX = ".json"
PDF_MIMETYPE = "application/pdf"
PDF_SUFFIX = ".pdf"
XLS_MIMETYPE = "application/vnd.ms-excel"
XLS_SUFFIX = ".xls"
XLSX_MIMETYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
XLSX_SUFFIX = ".xlsx"
XML_MIMETYPE = "text/xml"
XML_SUFFIX = ".xml"

# Formats pour lesquels on exporte sans formatage des nombres (pas de perte de précision)
FORMATS_NUMERIQUES = {"csv", "xls", "xlsx", "xml", "json"}


def get_mime_suffix(format_code: str) -> tuple[str, str]:
    """Returns (MIME, SUFFIX) from format_code == "xls", "xml", ...
    SUFFIX includes the dot: ".xlsx", ".xml", ...
    "xls" and "xlsx" format codes give XLSX
    """
    d = {
        "csv": (CSV_MIMETYPE, CSV_SUFFIX),
        "docx": (DOCX_MIMETYPE, DOCX_SUFFIX),
        "xls": (XLSX_MIMETYPE, XLSX_SUFFIX),
        "xlsx": (XLSX_MIMETYPE, XLSX_SUFFIX),
        "pdf": (PDF_MIMETYPE, PDF_SUFFIX),
        "xml": (XML_MIMETYPE, XML_SUFFIX),
        "json": (JSON_MIMETYPE, JSON_SUFFIX),
    }
    return d[format_code]
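
# Exemples (illustratifs) :
#
#   get_mime_suffix("xls")   # (XLSX_MIMETYPE, ".xlsx") : les exports "xls" sont produits en xlsx
#   get_mime_suffix("json")  # (JSON_MIMETYPE, ".json")
#   get_mime_suffix("txt")   # KeyError : format non géré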


# Admissions des étudiants
# Différents types de voies d'admission:
# (stocké en texte libre dans la base, mais saisie par menus pour harmoniser)
TYPE_ADMISSION_DEFAULT = "Inconnue"
TYPES_ADMISSION = (TYPE_ADMISSION_DEFAULT, "APB", "APB-PC", "CEF", "Direct")

BULLETINS_VERSIONS = {
    "short": "Version courte",
    "selectedevals": "Version intermédiaire",
    "long": "Version complète",
}
BULLETINS_VERSIONS_BUT = BULLETINS_VERSIONS | {
    "butcourt": "Version courte spéciale BUT"
}

# ----- Support for ScoDoc7 compatibility


def ScoURL():
    """base URL for this sco instance.
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite
    = page accueil département
    """
    return url_for("scolar.index_html", scodoc_dept=g.scodoc_dept)[
        : -len("/index_html")
    ]


def NotesURL():
    """URL of Notes
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite/Notes
    = url de base des méthodes de notes
    (page accueil programmes).
    """
    return url_for("notes.index_html", scodoc_dept=g.scodoc_dept)[: -len("/index_html")]


def AbsencesURL():
    """URL of Absences"""
    return url_for("absences.index_html", scodoc_dept=g.scodoc_dept)[
        : -len("/index_html")
    ]


def AssiduitesURL():
    """URL of Assiduités"""
    return url_for("assiduites.bilan_dept", scodoc_dept=g.scodoc_dept)[
        : -len("/BilanDept")
    ]


def UsersURL():
    """URL of Users
    e.g. https://scodoc.xxx.fr/ScoDoc/DEPT/Scolarite/Users
    = url de base des requêtes ZScoUsers
    et page accueil users
    """
    return url_for("users.index_html", scodoc_dept=g.scodoc_dept)[: -len("/index_html")]


# ---- Simple python utilities


def simplesqlquote(s, maxlen=50):
    """simple SQL quoting to avoid most SQL injection attacks.
    Note: we use this function in the (rare) cases where we have to
    construct SQL code manually"""
    s = s[:maxlen]
    s = s.replace("'", r"\'")
    s = s.replace(";", r"\;")
    for bad in ("select", "drop", ";", "--", "insert", "delete", "xp_"):
        s = s.replace(bad, "")
    return s


def unescape_html(s):
    """un-escape html entities"""
    s = s.strip().replace("&amp;", "&")
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    return s


def build_url_query(url: str, **params) -> str:
    """Add parameters to existing url, as a query string"""
    url_parse = urlparse(url)
    query = url_parse.query
    url_dict = dict(parse_qsl(query))
    url_dict.update(params)
    url_new_query = urlencode(url_dict)
    url_parse = url_parse._replace(query=url_new_query)
    new_url = urlunparse(url_parse)
    return new_url
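
# Exemple (illustratif ; le domaine est fictif) :
#
#   build_url_query("https://scodoc.example.fr/page?x=1", etudid=42, fmt="json")
#   # => "https://scodoc.example.fr/page?x=1&etudid=42&fmt=json"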


# test if obj is iterable (but not a string)
isiterable = lambda obj: getattr(obj, "__iter__", False)


def unescape_html_dict(d):
    """un-escape all dict values, recursively"""
    try:
        indices = list(d.keys())
    except AttributeError:
        indices = list(range(len(d)))
    for k in indices:
        v = d[k]
        if isinstance(v, bytes):
            d[k] = unescape_html(v)
        elif isiterable(v):
            unescape_html_dict(v)


# Expressions used to check noms/prenoms
FORBIDDEN_CHARS_EXP = re.compile(r"[*\|~\(\)\\]")
ALPHANUM_EXP = re.compile(r"^[\w-]+$", re.UNICODE)


def is_valid_code_nip(s):
    """True si s peut être un code NIP: au moins 6 chiffres décimaux"""
    if not s:
        return False
    return re.match(r"^[0-9]{6,32}$", s)


def strnone(s):
    "convert s to string, '' if s is false"
    if s:
        return str(s)
    else:
        return ""


def strip_str(s):
    "if s is a string, strip it, if is None, do nothing"
    return s.strip() if s else s


def stripquotes(s):
    "strip s from spaces and quotes"
    s = s.strip()
    if s and ((s[0] == '"' and s[-1] == '"') or (s[0] == "'" and s[-1] == "'")):
        s = s[1:-1]
    return s


def suppress_accents(s):
    "remove accents and suppress non ascii characters from string s"
    if isinstance(s, str):
        return (
            unicodedata.normalize("NFD", s)
            .encode("ascii", "ignore")
            .decode(SCO_ENCODING)
        )
    return s  # may be int


class PurgeChars:
    """delete all chars except those belonging to the specified string"""

    def __init__(self, allowed_chars=""):
        self.allowed_chars_set = {ord(c) for c in allowed_chars}

    def __getitem__(self, x):
        if x not in self.allowed_chars_set:
            return None
        raise LookupError()


def purge_chars(s, allowed_chars=""):
    return s.translate(PurgeChars(allowed_chars=allowed_chars))


def sanitize_string(s, remove_spaces=True):
    """s is an ordinary string, encoding given by SCO_ENCODING
    suppress accents and chars interpreted in XML
    Irreversible (not a quote)

    For ids and some filenames
    """
    # Table suppressing some chars:
    to_del = "'`\"<>!&\\ " if remove_spaces else "'`\"<>!&"
    trans = str.maketrans("", "", to_del)

    return suppress_accents(s.translate(trans)).replace("\t", "_")


_BAD_FILENAME_CHARS = str.maketrans("", "", ":/\\&[]*?'")


def make_filename(name):
    """Try to convert name to a reasonable filename
    without spaces, (back)slashes, : and without accents
    """
    return (
        suppress_accents(name.translate(_BAD_FILENAME_CHARS)).replace(" ", "_")
        or "scodoc"
    )


VALID_CARS = (
    "-abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.!"  # no / !
)
VALID_CARS_SET = set(VALID_CARS)
VALID_EXP = re.compile("^[" + VALID_CARS + "]+$")


def sanitize_filename(filename):
    """Keep only valid chars
    used for archives filenames
    """
    filename = suppress_accents(filename.replace(" ", "_"))
    sane = "".join([c for c in filename if c in VALID_CARS_SET])
    if len(sane) < 2:
        sane = time.strftime("%Y-%m-%d-%H%M%S") + "-" + sane
    return sane
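
# Exemples (illustratifs) :
#
#   make_filename("Départ. Info / Bulletin 2023")  # "Depart._Info__Bulletin_2023"
#   sanitize_filename("été 2024 (v2).pdf")         # "ete_2024_v2.pdf"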


def is_valid_filename(filename):
    """True if filename is safe"""
    return VALID_EXP.match(filename)


BOOL_STR = {
    "": False,
    "0": False,
    "1": True,
    "f": False,
    "false": False,
    "n": False,
    "t": True,
    "true": True,
    "y": True,
}


def to_bool(x) -> bool:
    """Cast value to boolean.
    The value may be encoded as a string
    False are: empty, "0", "False", "f", "n".
    True: all other values, such as "1", "True", "foo", "bar"...
    Case insensitive, ignore leading and trailing spaces.
    """
    if isinstance(x, str):
        return BOOL_STR.get(x.lower().strip(), True)
    return bool(x)
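
# Exemples (illustratifs) :
#
#   to_bool(" FALSE ")  # False
#   to_bool("0")        # False
#   to_bool("oui")      # True (toute chaîne non reconnue vaut True)
#   to_bool(0)          # False (valeurs non-chaînes: bool(x))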


# Min/Max values for numbers stored in database:
DB_MIN_FLOAT = -1e30
DB_MAX_FLOAT = 1e30
DB_MIN_INT = -(1 << 31)
DB_MAX_INT = (1 << 31) - 1


def bul_filename_old(sem: dict, etud: dict, fmt):
    """Build a filename for this bulletin"""
    dt = time.strftime("%Y-%m-%d")
    filename = f"bul-{sem['titre_num']}-{dt}-{etud['nom']}.{fmt}"
    filename = make_filename(filename)
    return filename


def bul_filename(formsemestre, etud, prefix="bul"):
    """Build a filename for this bulletin (without suffix)"""
    dt = time.strftime("%Y-%m-%d")
    filename = f"{prefix}-{formsemestre.titre_num()}-{dt}-{etud.nom}"
    filename = make_filename(filename)
    return filename


def flash_errors(form):
    """Flashes form errors (version sommaire)"""
    for field, errors in form.errors.items():
        flash(
            "Erreur: voir le champ %s" % (getattr(form, field).label.text,),
            "warning",
        )
    # see https://getbootstrap.com/docs/4.0/components/alerts/


def flash_once(message: str):
    """Flash the message, but only once per request"""
    if not hasattr(g, "sco_flashed_once"):
        g.sco_flashed_once = set()
    if message not in g.sco_flashed_once:
        flash(message)
        g.sco_flashed_once.add(message)


def sendCSVFile(data, filename):  # DEPRECATED: utiliser send_file
    """publication fichier CSV."""
    return send_file(data, filename=filename, mime=CSV_MIMETYPE, attached=True)


def sendPDFFile(data, filename):  # DEPRECATED: utiliser send_file
    return send_file(data, filename=filename, mime=PDF_MIMETYPE, attached=True)


def sendJSON(data, attached=False, filename=None):
    js = json.dumps(data, indent=1, cls=ScoDocJSONEncoder)
    return send_file(
        js, filename=filename or "sco_data.json", mime=JSON_MIMETYPE, attached=attached
    )


def sendXML(
    data,
    tagname=None,
    force_outer_xml_tag=True,
    attached=False,
    quote=False,
    filename=None,
):
    if type(data) != list:
        data = [data]  # always list-of-dicts
    if force_outer_xml_tag:
        data = [{tagname: data}]
        tagname += "_list"
    doc = sco_xml.simple_dictlist2xml(data, tagname=tagname, quote=quote)
    return send_file(
        doc, filename=filename or "sco_data.xml", mime=XML_MIMETYPE, attached=attached
    )


def sendResult(
    data,
    name=None,
    fmt=None,
    force_outer_xml_tag=True,
    attached=False,
    quote_xml=False,
    filename=None,
):
    if (fmt is None) or (fmt == "html"):
        return data
    elif fmt == "xml":  # name is outer tagname
        return sendXML(
            data,
            tagname=name,
            force_outer_xml_tag=force_outer_xml_tag,
            attached=attached,
            quote=quote_xml,
            filename=filename,
        )
    elif fmt == "json":
        return sendJSON(data, attached=attached, filename=filename)
    else:
        raise ValueError(f"invalid format: {fmt}")


def send_file(data, filename="", suffix="", mime=None, attached=None):
    """Build Flask Response for file download of given type
    By default (attached is None), json and xml are inlined and other types are attached.
    """
    if attached is None:
        if mime == XML_MIMETYPE or mime == JSON_MIMETYPE:
            attached = False
        else:
            attached = True
    if filename:
        if suffix:
            filename += suffix
        filename = make_filename(filename)
    response = make_response(data)
    response.headers["Content-Type"] = mime
    if attached and filename:
        response.headers["Content-Disposition"] = 'attachment; filename="%s"' % filename
    return response


def send_docx(document, filename):
    "Send a python-docx document"
    buffer = io.BytesIO()  # in-memory document, no disk file
    document.save(buffer)
    buffer.seek(0)
    return flask.send_file(
        buffer,
        download_name=sanitize_filename(filename),
        mimetype=DOCX_MIMETYPE,
    )


def get_request_args():
    """returns a dict with request (POST or GET) arguments
    converted to suit legacy Zope style (scodoc7) functions.
    """
    # copy to get a mutable object (necessary for TrivialFormulator and several methods)
    if request.method == "POST":
        # request.form is a werkzeug.datastructures.ImmutableMultiDict
        # must copy to get a mutable version (needed by TrivialFormulator)
        vals = request.form.copy()
        if request.files:
            # Add files in form:
            vals.update(request.files)
        for k in request.form:
            if k.endswith(":list"):
                vals[k[:-5]] = request.form.getlist(k)
    elif request.method == "GET":
        vals = {}
        for k in request.args:
            # current_app.logger.debug("%s\t%s" % (k, request.args.getlist(k)))
            if k.endswith(":list"):
                vals[k[:-5]] = request.args.getlist(k)
            else:
                values = request.args.getlist(k)
                vals[k] = values[0] if len(values) == 1 else values
    return vals


def json_error(status_code, message=None) -> Response:
    """Simple JSON for errors."""
    payload = {
        "error": HTTP_STATUS_CODES.get(status_code, "Unknown error"),
        "status": status_code,
    }
    if message:
        payload["message"] = message
    response = json_response(status_=status_code, data_=payload)
    response.status_code = status_code
    log(f"Error: {response}")
    return response


def json_ok_response(status_code=200, payload=None) -> Response:
    """Simple JSON response for "success" """
    payload = payload or {"OK": True}
    response = json_response(status_=status_code, data_=payload)
    response.status_code = status_code
    return response


def get_scodoc_version():
    "return a string identifying ScoDoc version"
    return sco_version.SCOVERSION


def check_scodoc7_password(scodoc7_hash, password):
    """Check a password vs scodoc7 hash
    used only during old databases migrations"""
    m = md5()
    m.update(password.encode("utf-8"))
    h = base64.encodebytes(m.digest()).decode("utf-8").strip()
    return h == scodoc7_hash


# Simple string manipulations


def abbrev_prenom(prenom):
    "Donne l'abréviation d'un prénom"
    # un peu lent, mais espère traiter tous les cas
    # Jean -> J.
    # Charles -> Ch.
    # Jean-Christophe -> J.-C.
    # Marie Odile -> M. O.
    prenom = prenom.replace(".", " ").strip()
    if not prenom:
        return ""
    d = prenom[:3].upper()
    if d == "CHA":
        abrv = "Ch."  # 'Charles' donne 'Ch.'
        i = 3
    else:
        abrv = prenom[0].upper() + "."
        i = 1
    n = len(prenom)
    while i < n:
        c = prenom[i]
        if c == " " or c == "-" and i < n - 1:
            sep = c
            i += 1
            # gobe tous les séparateurs
            while i < n and (prenom[i] == " " or prenom[i] == "-"):
                if prenom[i] == "-":
                    sep = "-"
                i += 1
            if i < n:
                abrv += sep + prenom[i].upper() + "."
        i += 1
    return abrv
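
# Exemples (repris des cas listés ci-dessus) :
#
#   abbrev_prenom("Jean")             # "J."
#   abbrev_prenom("Charles")          # "Ch."
#   abbrev_prenom("Jean-Christophe")  # "J.-C."
#   abbrev_prenom("Marie Odile")      # "M. O."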


def timedate_human_repr():
    "représentation du temps courant pour l'utilisateur"
    return time.strftime("%d/%m/%Y à %Hh%M")


def annee_scolaire_repr(year, month):
    """représentation de l'année scolaire : '2009 - 2010'
    à partir d'une date.
    """
    if month >= MONTH_DEBUT_ANNEE_SCOLAIRE:  # après le 1er août
        return f"{year} - {year + 1}"
    else:
        return f"{year - 1} - {year}"


def annee_scolaire() -> int:
    """Année de début de l'année scolaire courante"""
    t = time.localtime()
    year, month = t[0], t[1]
    return annee_scolaire_debut(year, month)


def annee_scolaire_debut(year, month) -> int:
    """Année scolaire de début.
    Par défaut (hémisphère nord), l'année du mois d'août
    précédant la date indiquée.
    """
    if int(month) >= MONTH_DEBUT_ANNEE_SCOLAIRE:
        return int(year)
    else:
        return int(year) - 1


def date_debut_anne_scolaire(annee_sco: int) -> datetime:
    """La date de début de l'année scolaire
    (par défaut, le 1er août)
    """
    return datetime.datetime(year=annee_sco, month=MONTH_DEBUT_ANNEE_SCOLAIRE, day=1)


def date_fin_anne_scolaire(annee_sco: int) -> datetime:
    """La date de fin de l'année scolaire
    (par défaut, le 31 juillet de l'année suivante)
    """
    # on prend la date de début de l'année scolaire suivante,
    # et on lui retire 1 jour.
    # On s'affranchit ainsi des problèmes de durées de mois.
    return datetime.datetime(
        year=annee_sco + 1, month=MONTH_DEBUT_ANNEE_SCOLAIRE, day=1
    ) - datetime.timedelta(days=1)
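
# Exemples (illustratifs, avec le pivot par défaut MONTH_DEBUT_ANNEE_SCOLAIRE = 8) :
#
#   annee_scolaire_debut(2023, 9)  # 2023 (septembre 2023 -> année scolaire 2023)
#   annee_scolaire_debut(2024, 2)  # 2023 (février 2024 -> encore l'année scolaire 2023)
#   annee_scolaire_repr(2024, 2)   # "2023 - 2024"
#   date_fin_anne_scolaire(2023)   # datetime.datetime(2024, 7, 31, 0, 0)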


def sem_decale_str(sem):
    """'D' si semestre décalé, ou ''"""
    # considère "décalés" les semestres impairs commençant entre janvier et juin
    # et les pairs entre juillet et décembre
    if sem["semestre_id"] <= 0:
        return ""
    if (sem["semestre_id"] % 2 and sem["mois_debut_ord"] <= 6) or (
        not sem["semestre_id"] % 2 and sem["mois_debut_ord"] > 6
    ):
        return "D"
    else:
        return ""


def is_valid_mail(email):
    """True if well-formed email address"""
    return re.match(r"^.+@.+\..{2,3}$", email)


def graph_from_edges(edges, graph_name="mygraph"):
    """Crée un graphe pydot
    à partir d'une liste d'arêtes [ (n1, n2), (n2, n3), ... ]
    où n1, n2, ... sont des chaînes donnant l'id des nœuds.

    Fonction remplaçant celle de pydot qui est buggée.
    """
    nodes = set([it for tup in edges for it in tup])
    graph = pydot.Dot(graph_name)
    for n in nodes:
        graph.add_node(pydot.Node(n))
    for e in edges:
        graph.add_edge(pydot.Edge(src=e[0], dst=e[1]))
    return graph


ICONSIZES = {}  # name : (width, height) cache image sizes


def icontag(name, file_format="png", no_size=False, **attrs):
    """tag HTML pour un icone.
    (dans les versions antérieures on utilisait Zope)
    Les icones sont des fichiers PNG dans .../static/icons
    Si la taille (width et height) n'est pas spécifiée, lit l'image
    pour la mesurer (et cache le résultat).
    """
    if (not no_size) and (("width" not in attrs) or ("height" not in attrs)):
        if name not in ICONSIZES:
            img_file = os.path.join(
                Config.SCODOC_DIR,
                "app/static/icons/%s.%s"
                % (
                    name,
                    file_format,
                ),
            )
            with PILImage.open(img_file) as image:
                width, height = image.size[0], image.size[1]
            ICONSIZES[name] = (width, height)  # cache
        else:
            width, height = ICONSIZES[name]
        attrs["width"] = width
        attrs["height"] = height
    if "border" not in attrs:
        attrs["border"] = 0
    if "alt" not in attrs:
        attrs["alt"] = "logo %s" % name
    s = " ".join(['%s="%s"' % (k, attrs[k]) for k in attrs])
    return f'<img class="{name}" {s} src="{STATIC_DIR}/icons/{name}.{file_format}" />'


ICON_PDF = icontag("pdficon16x20_img", title="Version PDF")
ICON_XLS = icontag("xlsicon_img", title="Version tableur")

# HTML emojis
EMO_WARNING = "⚠️"  # warning /!\
EMO_RED_TRIANGLE_DOWN = "🔻"  # red triangle pointed down
EMO_PREV_ARROW = "❮"
EMO_NEXT_ARROW = "❯"


def sort_dates(L, reverse=False):
    """Return sorted list of dates, allowing None items (they are put at the beginning)"""
    mindate = datetime.datetime(datetime.MINYEAR, 1, 1)
    try:
        return sorted(L, key=lambda x: x or mindate, reverse=reverse)
    except:
        # Helps debugging
        log("sort_dates( %s )" % L)
        raise


def heterogeneous_sorting_key(x):
    "key to sort non homogeneous sequences"
    return (float(x), "") if isinstance(x, (bool, float, int)) else (-1e34, str(x))


def query_portal(req, msg="Portail Apogee", timeout=3):
    """Retrieves external data using HTTP request
    (used to connect to Apogee portal, or ScoDoc server)
    returns a string, "" on error
    """
    log("query_portal: %s" % req)
    error_message = None
    try:
        r = requests.get(req, timeout=timeout)  # seconds / request
    except requests.ConnectionError:
        error_message = "ConnectionError"
    except requests.Timeout:
        error_message = "Timeout"
    except requests.TooManyRedirects:
        error_message = "TooManyRedirects"
    except requests.RequestException:
        error_message = f"can't connect to {msg}"
    if error_message is not None:
        log(f"query_portal: {error_message}")
        return ""
    if r.status_code != 200:
        log(f"query_portal: http error {r.status_code}")
        return ""

    return r.text


def confirm_dialog(
    message="<p>Confirmer ?</p>",
    OK="OK",
    add_headers=True,  # complete page
    cancel_label="Annuler",
    cancel_url="",
    dest_url="",
    help_msg=None,
    parameters: dict = None,
    target_variable="dialog_confirmed",
):
    """HTML confirmation dialog: submit (POST) to same page or dest_url if given."""
    from app.scodoc import html_sco_header

    parameters = parameters or {}
    # dialog de confirmation simple
    parameters[target_variable] = 1
    # Attention: la page a pu etre servie en GET avec des parametres
    # si on laisse l'url "action" vide, les parametres restent alors que l'on passe en POST...
    if not dest_url:
        action = ""
    else:
        # strip remaining parameters from destination url:
        dest_url = urllib.parse.splitquery(dest_url)[0]
        action = f'action="{dest_url}"'

    H = [
        f"""<form {action} method="POST">
        {message}
        """,
    ]
    if OK or not cancel_url:
        H.append(f'<input type="submit" value="{OK}"/>')
    if cancel_url:
        H.append(
            f"""<input type ="button" value="{cancel_label}"
            onClick="document.location='{cancel_url}';"/>"""
        )
    for param in parameters.keys():
        if parameters[param] is None:
            parameters[param] = ""
        if isinstance(parameters[param], list):
            for e in parameters[param]:
                H.append(f"""<input type="hidden" name="{param}" value="{e}"/>""")
        else:
            H.append(
                f"""<input type="hidden" name="{param}" value="{parameters[param]}"/>"""
            )
    H.append("</form>")
    if help_msg:
        H.append('<p class="help">' + help_msg + "</p>")
    if add_headers:
        return (
            html_sco_header.sco_header() + "\n".join(H) + html_sco_header.sco_footer()
        )
    else:
        return "\n".join(H)


def objects_renumber(db, obj_list) -> None:
    """fixe les numeros des objets d'une liste de modèles
    pour ne pas changer son ordre"""
    log("objects_renumber")
    for i, obj in enumerate(obj_list):
        obj.numero = i
        db.session.add(obj)
    db.session.commit()


def comp_ranks(tab: list[tuple]) -> dict[int, str]:
    """Calcul rangs à partir d'une liste ordonnée de tuples [ (valeur, ..., etudid) ]
    (valeur est une note numérique), en tenant compte des ex-aequos

    Le resultat est: { etudid : rang } où rang est une chaine decrivant le rang
    """
    rangs = {}  # { etudid : rang } (rang est une chaine)
    nb_ex = 0  # nb d'ex-aequo consécutifs en cours
    for i, row in enumerate(tab):
        # test ex-aequo
        if i < len(tab) - 1:
            next_val = tab[i + 1][0]
        else:
            next_val = None
        moy = row[0]
        if nb_ex:
            srang = "%d ex" % (i + 1 - nb_ex)
            if moy == next_val:
                nb_ex += 1
            else:
                nb_ex = 0
        else:
            if moy == next_val:
                srang = "%d ex" % (i + 1 - nb_ex)
                nb_ex = 1
            else:
                srang = "%d" % (i + 1)
        rangs[row[-1]] = srang
    return rangs
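
# Exemple (illustratif) : classement avec ex-aequo
#
#   tab = [(15.0, "e1"), (12.0, "e2"), (12.0, "e3"), (10.0, "e4")]  # trié par ordre décroissant
#   comp_ranks(tab)
#   # => {"e1": "1", "e2": "2 ex", "e3": "2 ex", "e4": "4"}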


def gen_cell(key: str, row: dict, elt="td", with_col_class=False):
    "html table cell"
    klass = row.get(f"_{key}_class", "")
    if with_col_class:
        klass = key + " " + klass
    attrs = f'class="{klass}"' if klass else ""
    if elt == "th":
        attrs += ' scope="row"'
    data = row.get(f"_{key}_data")  # dict
    if data:
        for k in data:
            attrs += f' data-{k}="{data[k]}"'
    order = row.get(f"_{key}_order")
    if order:
        attrs += f' data-order="{order}"'
    content = row.get(key, "")
    target = row.get(f"_{key}_target")
    target_attrs = row.get(f"_{key}_target_attrs", "")
    if target or target_attrs:  # avec lien
        href = f'href="{target}"' if target else ""
        content = f"<a {href} {target_attrs}>{content}</a>"
    return f"<{elt} {attrs}>{content}</{elt}>"


def gen_row(
    keys: list[str], row, elt="td", selected_etudid=None, with_col_classes=False
):
    "html table row"
    klass = row.get("_tr_class", "")
    if row.get("etudid", "") == selected_etudid:
        klass += " row_selected"
    tr_class = f'class="{klass}"' if klass else ""
    return f"""<tr {tr_class}>{
        "".join([gen_cell(key, row, elt, with_col_class=with_col_classes)
            for key in keys if not key.startswith('_')])
        }</tr>"""


# Pour accès depuis les templates jinja
def is_entreprises_enabled():
    from app.models import ScoDocSiteConfig

    return ScoDocSiteConfig.is_entreprises_enabled()