forked from ScoDoc/ScoDoc
Update opolka/ScoDoc from ScoDoc/ScoDoc #2
@ -7,6 +7,7 @@
|
||||
Usage:
|
||||
cd /opt/scodoc/tests/api
|
||||
python make_samples.py [entry_names]
|
||||
python make_samples.py -i <filepath> [entry_names]
|
||||
|
||||
si entry_names est spécifié, la génération est restreinte aux exemples cités. Exemple : `python make_samples.py departements departement-formsemestres`
|
||||
Ce script doit être exécuté immédiatement après une initialisation de la base pour test API ! (car il dépend des identifiants générés lors de la création des objets)
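Exemples d'appel (illustratifs : les noms d'entrées et le chemin du CSV reprennent ceux ajoutés par ce commit, à adapter à votre base de test) :

    python make_samples.py
    python make_samples.py departements departement-formsemestres
    python make_samples.py -i tests/ressources/samples/assiduites_samples.csv assiduite assiduites_count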
|
||||
@ -37,7 +38,6 @@ Quand la structure est complète, on génère tous les fichiers textes
|
||||
- le résultat
|
||||
Le tout mis en forme au format markdown et rangé dans le répertoire DATA_DIR (/tmp/samples) qui est créé ou écrasé si déjà existant
|
||||
|
||||
TODO: ajouter un argument au script permettant de ne générer qu'un seul fichier (exemple: `python make_samples.py nom_exemple`)
|
||||
|
||||
"""
|
||||
import os
|
||||
@ -65,7 +65,7 @@ from setup_test_api import (
|
||||
)
|
||||
|
||||
DATA_DIR = "/tmp/samples/"
|
||||
SAMPLES_FILENAME = "tests/ressources/samples.csv"
|
||||
SAMPLES_FILENAME = "tests/ressources/samples/samples.csv"
|
||||
|
||||
|
||||
class Sample:
|
||||
@ -180,11 +180,13 @@ class Samples:
|
||||
file.close()
|
||||
|
||||
|
||||
def make_samples():
|
||||
def make_samples(samples_filename):
|
||||
if len(sys.argv) == 1:
|
||||
entry_names = None
|
||||
else:
|
||||
entry_names = sys.argv[1:]
|
||||
elif len(sys.argv) >= 3 and sys.argv[1] == "-i":
|
||||
samples_filename = sys.argv[2]
|
||||
entry_names = sys.argv[3:] if len(sys.argv) > 3 else None
|
||||
|
||||
if os.path.exists(DATA_DIR):
|
||||
if not os.path.isdir(DATA_DIR):
|
||||
raise f"{DATA_DIR} existe déjà et n'est pas un répertoire"
|
||||
@ -197,7 +199,7 @@ def make_samples():
|
||||
|
||||
samples = Samples(entry_names)
|
||||
df = read_csv(
|
||||
SAMPLES_FILENAME,
|
||||
samples_filename,
|
||||
sep=";",
|
||||
quotechar='"',
|
||||
dtype={
|
||||
@ -217,4 +219,4 @@ def make_samples():
|
||||
|
||||
if not CHECK_CERTIFICATE:
|
||||
urllib3.disable_warnings()
|
||||
make_samples()
|
||||
make_samples(SAMPLES_FILENAME)
|
||||
|
392
tests/api/test_api_assiduites.py
Normal file
@ -0,0 +1,392 @@
|
||||
"""
|
||||
Test de l'api Assiduité
|
||||
|
||||
Ecrit par HARTMANN Matthias
|
||||
|
||||
"""
|
||||
|
||||
from random import randint
|
||||
|
||||
from tests.api.setup_test_api import GET, POST_JSON, APIError, api_headers
|
||||
|
||||
ETUDID = 1
|
||||
FAUX = 42069
|
||||
FORMSEMESTREID = 1
|
||||
MODULE = 1
|
||||
|
||||
|
||||
ASSIDUITES_FIELDS = {
|
||||
"assiduite_id": int,
|
||||
"etudid": int,
|
||||
"moduleimpl_id": int,
|
||||
"date_debut": str,
|
||||
"date_fin": str,
|
||||
"etat": str,
|
||||
"desc": str,
|
||||
"entry_date": str,
|
||||
"user_id": str,
|
||||
"est_just": bool,
|
||||
}
|
||||
|
||||
CREATE_FIELD = {"assiduite_id": int}
|
||||
BATCH_FIELD = {"errors": dict, "success": dict}
|
||||
|
||||
COUNT_FIELDS = {"compte": int, "journee": int, "demi": int, "heure": float}
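À titre d'illustration (valeurs purement fictives : seuls les noms de clés et les types proviennent des dictionnaires ci-dessus), une réponse plausible de l'API ressemblerait à ceci :

# Valeurs fictives ; seules les clés et les types sont garantis par ASSIDUITES_FIELDS / COUNT_FIELDS
exemple_assiduite = {
    "assiduite_id": 1,
    "etudid": 1,
    "moduleimpl_id": None,   # nullable (cf. check_fields ci-dessous)
    "date_debut": "2022-01-01T08:00",
    "date_fin": "2022-01-01T10:00",
    "etat": "present",       # casse exacte de l'état non garantie ici
    "desc": None,            # nullable
    "entry_date": "2022-01-01T08:05",
    "user_id": None,         # nullable
    "est_just": False,
}
exemple_count = {"compte": 3, "journee": 2, "demi": 3, "heure": 6.0}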
|
||||
|
||||
TO_REMOVE = []
|
||||
|
||||
|
||||
def check_fields(data: dict, fields: dict = None):
|
||||
"""
|
||||
Cette fonction permet de vérifier que le dictionnaire data
|
||||
contient les bonnes clés et les bons types de valeurs.
|
||||
|
||||
Args:
|
||||
data (dict): un dictionnaire (json de retour de l'api)
|
||||
fields (dict, optional): Un dictionnaire représentant les clés et les types d'une réponse.
|
||||
"""
|
||||
if fields is None:
|
||||
fields = ASSIDUITES_FIELDS
|
||||
assert set(data.keys()) == set(fields.keys())
|
||||
for key in data:
|
||||
if key in ("moduleimpl_id", "desc", "user_id"):
|
||||
assert isinstance(data[key], fields[key]) or data[key] is None
|
||||
else:
|
||||
assert isinstance(data[key], fields[key])
|
||||
|
||||
|
||||
def check_failure_get(path: str, headers: dict, err: str = None):
|
||||
"""
|
||||
Cette fonction vérifie que la requête GET renvoie bien un 404
|
||||
|
||||
Args:
|
||||
path (str): la route de l'api
|
||||
headers (dict): le token d'auth de l'api
|
||||
err (str, optional): L'erreur qui est censée être fournie par l'api.
|
||||
|
||||
Raises:
|
||||
APIError: Une erreur car la requête a fonctionné (mauvais comportement)
|
||||
"""
|
||||
|
||||
try:
|
||||
GET(path=path, headers=headers)
|
||||
# ^ Renvoie un 404
|
||||
except APIError as api_err:
|
||||
if err is not None:
|
||||
assert api_err.payload["message"] == err
|
||||
else:
|
||||
raise APIError("Le GET n'aurait pas du fonctionner")
|
||||
|
||||
|
||||
def check_failure_post(path: str, headers: dict, data: dict, err: str = None):
|
||||
"""
|
||||
Cette fonction vérifie que la requête POST renvoie bien un 404
|
||||
|
||||
Args:
|
||||
path (str): la route de l'api
|
||||
headers (dict): le token d'auth
|
||||
data (dict): un dictionnaire (json) à envoyer
|
||||
err (str, optional): L'erreur qui est censée être fournie par l'api.
|
||||
|
||||
Raises:
|
||||
APIError: Une erreur car la requête a fonctionné (mauvais comportement)
|
||||
"""
|
||||
|
||||
try:
|
||||
data = POST_JSON(path=path, headers=headers, data=data)
|
||||
# ^ Renvoie un 404
|
||||
except APIError as api_err:
|
||||
if err is not None:
|
||||
assert api_err.payload["message"] == err
|
||||
else:
|
||||
raise APIError("Le GET n'aurait pas du fonctionner")
|
||||
|
||||
|
||||
def create_data(etat: str, day: str, module: int = None, desc: str = None):
|
||||
"""
|
||||
Permet de créer un dictionnaire assiduité
|
||||
|
||||
Args:
|
||||
etat (str): l'état de l'assiduité (PRESENT,ABSENT,RETARD)
|
||||
day (str): Le jour de l'assiduité
|
||||
module (int, optional): Le moduleimpl_id associé
|
||||
desc (str, optional): Une description de l'assiduité (eg: motif retard )
|
||||
|
||||
Returns:
|
||||
dict: la représentation d'une assiduité
|
||||
"""
|
||||
data = {
|
||||
"date_debut": f"2022-01-{day}T08:00",
|
||||
"date_fin": f"2022-01-{day}T10:00",
|
||||
"etat": etat,
|
||||
}
|
||||
|
||||
if module is not None:
|
||||
data["moduleimpl_id"] = module
|
||||
if desc is not None:
|
||||
data["desc"] = desc
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def test_route_assiduite(api_headers):
|
||||
"""test de la route /assiduite/<assiduite_id:int>"""
|
||||
|
||||
# Bon fonctionnement == id connu
|
||||
data = GET(path="/assiduite/1", headers=api_headers)
|
||||
check_fields(data)
|
||||
|
||||
# Mauvais Fonctionnement == id inconnu
|
||||
|
||||
check_failure_get(
|
||||
f"/assiduite/{FAUX}",
|
||||
api_headers,
|
||||
)
|
||||
|
||||
|
||||
def test_route_count_assiduites(api_headers):
|
||||
"""test de la route /assiduites/<etudid:int>/count"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
data = GET(path=f"/assiduites/{ETUDID}/count", headers=api_headers)
|
||||
check_fields(data, COUNT_FIELDS)
|
||||
|
||||
metrics = {"heure", "compte"}
|
||||
data = GET(
|
||||
path=f"/assiduites/{ETUDID}/count/query?metric={','.join(metrics)}",
|
||||
headers=api_headers,
|
||||
)
|
||||
|
||||
assert set(data.keys()) == metrics
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_get(f"/assiduites/{FAUX}/count", api_headers)
|
||||
|
||||
|
||||
def test_route_assiduites(api_headers):
|
||||
"""test de la route /assiduites/<etudid:int>"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
data = GET(path=f"/assiduites/{ETUDID}", headers=api_headers)
|
||||
assert isinstance(data, list)
|
||||
for ass in data:
|
||||
check_fields(ass, ASSIDUITES_FIELDS)
|
||||
|
||||
data = GET(path=f"/assiduites/{ETUDID}/query?", headers=api_headers)
|
||||
assert isinstance(data, list)
|
||||
for ass in data:
|
||||
check_fields(ass, ASSIDUITES_FIELDS)
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_get(f"/assiduites/{FAUX}", api_headers)
|
||||
check_failure_get(f"/assiduites/{FAUX}/query?", api_headers)
|
||||
|
||||
|
||||
def test_route_formsemestre_assiduites(api_headers):
|
||||
"""test de la route /assiduites/formsemestre/<formsemestre_id:int>"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
data = GET(path=f"/assiduites/formsemestre/{FORMSEMESTREID}", headers=api_headers)
|
||||
assert isinstance(data, list)
|
||||
for ass in data:
|
||||
check_fields(ass, ASSIDUITES_FIELDS)
|
||||
|
||||
data = GET(
|
||||
path=f"/assiduites/formsemestre/{FORMSEMESTREID}/query?", headers=api_headers
|
||||
)
|
||||
assert isinstance(data, list)
|
||||
for ass in data:
|
||||
check_fields(ass, ASSIDUITES_FIELDS)
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_get(
|
||||
f"/assiduites/formsemestre/{FAUX}",
|
||||
api_headers,
|
||||
err="le paramètre 'formsemestre_id' n'existe pas",
|
||||
)
|
||||
check_failure_get(
|
||||
f"/assiduites/formsemestre/{FAUX}/query?",
|
||||
api_headers,
|
||||
err="le paramètre 'formsemestre_id' n'existe pas",
|
||||
)
|
||||
|
||||
|
||||
def test_route_count_formsemestre_assiduites(api_headers):
|
||||
"""test de la route /assiduites/formsemestre/<formsemestre_id:int>/count"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
data = GET(
|
||||
path=f"/assiduites/formsemestre/{FORMSEMESTREID}/count", headers=api_headers
|
||||
)
|
||||
check_fields(data, COUNT_FIELDS)
|
||||
metrics = {"heure", "compte"}
|
||||
data = GET(
|
||||
path=f"/assiduites/formsemestre/{FORMSEMESTREID}/count/query?metric={','.join(metrics)}",
|
||||
headers=api_headers,
|
||||
)
|
||||
assert set(data.keys()) == metrics
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_get(
|
||||
f"/assiduites/formsemestre/{FAUX}/count",
|
||||
api_headers,
|
||||
err="le paramètre 'formsemestre_id' n'existe pas",
|
||||
)
|
||||
check_failure_get(
|
||||
f"/assiduites/formsemestre/{FAUX}/count/query?",
|
||||
api_headers,
|
||||
err="le paramètre 'formsemestre_id' n'existe pas",
|
||||
)
|
||||
|
||||
|
||||
def test_route_create(api_headers):
|
||||
"""test de la route /assiduite/<etudid:int>/create"""
|
||||
|
||||
# -== Unique ==-
|
||||
|
||||
# Bon fonctionnement
|
||||
data = create_data("present", "01")
|
||||
|
||||
res = POST_JSON(f"/assiduite/{ETUDID}/create", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["success"]) == 1
|
||||
|
||||
TO_REMOVE.append(res["success"]["0"]["assiduite_id"])
|
||||
|
||||
data2 = create_data("absent", "02", MODULE, "desc")
|
||||
res = POST_JSON(f"/assiduite/{ETUDID}/create", [data2], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["success"]) == 1
|
||||
|
||||
TO_REMOVE.append(res["success"]["0"]["assiduite_id"])
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_post(f"/assiduite/{FAUX}/create", api_headers, [data])
|
||||
|
||||
res = POST_JSON(f"/assiduite/{ETUDID}/create", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 1
|
||||
assert (
|
||||
res["errors"]["0"]
|
||||
== "Duplication des assiduités (la période rentrée rentre en conflit avec une assiduité enregistrée)"
|
||||
)
|
||||
|
||||
res = POST_JSON(
|
||||
f"/assiduite/{ETUDID}/create", [create_data("absent", "03", FAUX)], api_headers
|
||||
)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 1
|
||||
assert res["errors"]["0"] == "param 'moduleimpl_id': invalide"
|
||||
|
||||
# -== Multiple ==-
|
||||
|
||||
# Bon Fonctionnement
|
||||
|
||||
etats = ["present", "absent", "retard"]
|
||||
data = [
|
||||
create_data(etats[d % 3], 10 + d, MODULE if d % 2 else None)
|
||||
for d in range(randint(3, 5))
|
||||
]
|
||||
|
||||
res = POST_JSON(f"/assiduite/{ETUDID}/create", data, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
check_fields(res["success"][dat], CREATE_FIELD)
|
||||
TO_REMOVE.append(res["success"][dat]["assiduite_id"])
|
||||
|
||||
# Mauvais Fonctionnement
|
||||
|
||||
data2 = [
|
||||
create_data("present", "01"),
|
||||
create_data("present", "25", FAUX),
|
||||
create_data("blabla", 26),
|
||||
create_data("absent", 32),
|
||||
]
|
||||
|
||||
res = POST_JSON(f"/assiduite/{ETUDID}/create", data2, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 4
|
||||
|
||||
assert (
|
||||
res["errors"]["0"]
|
||||
== "Duplication des assiduités (la période rentrée rentre en conflit avec une assiduité enregistrée)"
|
||||
)
|
||||
assert res["errors"]["1"] == "param 'moduleimpl_id': invalide"
|
||||
assert res["errors"]["2"] == "param 'etat': invalide"
|
||||
assert (
|
||||
res["errors"]["3"]
|
||||
== "param 'date_debut': format invalide, param 'date_fin': format invalide"
|
||||
)
|
||||
|
||||
|
||||
def test_route_edit(api_headers):
|
||||
"""test de la route /assiduite/<assiduite_id:int>/edit"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
data = {"etat": "retard", "moduleimpl_id": MODULE}
|
||||
res = POST_JSON(f"/assiduite/{TO_REMOVE[0]}/edit", data, api_headers)
|
||||
assert res == {"OK": True}
|
||||
|
||||
data["moduleimpl_id"] = None
|
||||
res = POST_JSON(f"/assiduite/{TO_REMOVE[1]}/edit", data, api_headers)
|
||||
assert res == {"OK": True}
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_post(f"/assiduite/{FAUX}/edit", api_headers, data)
|
||||
data["etat"] = "blabla"
|
||||
check_failure_post(
|
||||
f"/assiduite/{TO_REMOVE[2]}/edit",
|
||||
api_headers,
|
||||
data,
|
||||
err="param 'etat': invalide",
|
||||
)
|
||||
|
||||
|
||||
def test_route_delete(api_headers):
|
||||
"""test de la route /assiduite/delete"""
|
||||
# -== Unique ==-
|
||||
|
||||
# Bon fonctionnement
|
||||
data = TO_REMOVE[0]
|
||||
|
||||
res = POST_JSON("/assiduite/delete", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
assert res["success"][dat] == {"OK": True}
|
||||
|
||||
# Mauvais fonctionnement
|
||||
res = POST_JSON("/assiduite/delete", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 1
|
||||
|
||||
# -== Multiple ==-
|
||||
|
||||
# Bon Fonctionnement
|
||||
|
||||
data = TO_REMOVE[1:]
|
||||
|
||||
res = POST_JSON("/assiduite/delete", data, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
assert res["success"][dat] == {"OK": True}
|
||||
|
||||
# Mauvais Fonctionnement
|
||||
|
||||
data2 = [
|
||||
FAUX,
|
||||
FAUX + 1,
|
||||
FAUX + 2,
|
||||
]
|
||||
|
||||
res = POST_JSON("/assiduite/delete", data2, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 3
|
||||
|
||||
assert all([res["errors"][i] == "Assiduite non existante" for i in res["errors"]])
|
1
tests/api/test_api_justificatif.txt
Normal file
@ -0,0 +1 @@
|
||||
test de l'importation des fichiers / archive justificatif
|
1
tests/api/test_api_justificatif2.txt
Normal file
@ -0,0 +1 @@
|
||||
test de l'importation des fichiers / archive justificatif
|
469
tests/api/test_api_justificatifs.py
Normal file
@ -0,0 +1,469 @@
|
||||
"""
|
||||
Test de l'api justificatif
|
||||
|
||||
Ecrit par HARTMANN Matthias
|
||||
|
||||
"""
|
||||
|
||||
from random import randint
|
||||
|
||||
import requests
|
||||
from tests.api.setup_test_api import (
|
||||
API_URL,
|
||||
CHECK_CERTIFICATE,
|
||||
GET,
|
||||
POST_JSON,
|
||||
APIError,
|
||||
api_headers,
|
||||
)
|
||||
|
||||
ETUDID = 1
|
||||
FAUX = 42069
|
||||
|
||||
|
||||
JUSTIFICATIFS_FIELDS = {
|
||||
"justif_id": int,
|
||||
"etudid": int,
|
||||
"date_debut": str,
|
||||
"date_fin": str,
|
||||
"etat": str,
|
||||
"raison": str,
|
||||
"entry_date": str,
|
||||
"fichier": str,
|
||||
"user_id": int,
|
||||
}
|
||||
|
||||
CREATE_FIELD = {"justif_id": int, "couverture": list}
|
||||
BATCH_FIELD = {"errors": dict, "success": dict}
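De même, à titre indicatif (valeurs fictives ; seules les clés et les types proviennent de JUSTIFICATIFS_FIELDS), un justificatif renvoyé par l'API aurait cette forme :

# Valeurs fictives, uniquement pour visualiser la structure attendue par check_fields
exemple_justificatif = {
    "justif_id": 1,
    "etudid": 1,
    "date_debut": "2022-01-01T08:00",
    "date_fin": "2022-01-01T10:00",
    "etat": "valide",
    "raison": None,     # nullable (cf. check_fields ci-dessous)
    "entry_date": "2022-01-01T08:05",
    "fichier": None,    # nullable
    "user_id": None,    # nullable
}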
|
||||
|
||||
TO_REMOVE = []
|
||||
|
||||
|
||||
def check_fields(data, fields: dict = None):
|
||||
"""
|
||||
Cette fonction permet de vérifier que le dictionnaire data
|
||||
contient les bonnes clés et les bons types de valeurs.
|
||||
|
||||
Args:
|
||||
data (dict): un dictionnaire (json de retour de l'api)
|
||||
fields (dict, optional): Un dictionnaire représentant les clés et les types d'une réponse.
|
||||
"""
|
||||
if fields is None:
|
||||
fields = JUSTIFICATIFS_FIELDS
|
||||
assert set(data.keys()) == set(fields.keys())
|
||||
for key in data:
|
||||
if key in ("raison", "fichier", "user_id"):
|
||||
assert isinstance(data[key], fields[key]) or data[key] is None
|
||||
else:
|
||||
assert isinstance(data[key], fields[key])
|
||||
|
||||
|
||||
def check_failure_get(path, headers, err=None):
|
||||
"""
|
||||
Cette fonction vérifie que la requête GET renvoie bien un 404
|
||||
|
||||
Args:
|
||||
path (str): la route de l'api
|
||||
headers (dict): le token d'auth de l'api
|
||||
err (str, optional): L'erreur qui est censée être fournie par l'api.
|
||||
|
||||
Raises:
|
||||
APIError: Une erreur car la requête a fonctionné (mauvais comportement)
|
||||
"""
|
||||
try:
|
||||
GET(path=path, headers=headers)
|
||||
# ^ Renvoie un 404
|
||||
except APIError as api_err:
|
||||
if err is not None:
|
||||
assert api_err.payload["message"] == err
|
||||
else:
|
||||
raise APIError("Le GET n'aurait pas du fonctionner")
|
||||
|
||||
|
||||
def check_failure_post(path, headers, data, err=None):
|
||||
"""
|
||||
Cette fonction vérifie que la requête POST renvoie bien un 404
|
||||
|
||||
Args:
|
||||
path (str): la route de l'api
|
||||
headers (dict): le token d'auth
|
||||
data (dict): un dictionnaire (json) à envoyer
|
||||
err (str, optional): L'erreur qui est censée être fournie par l'api.
|
||||
|
||||
Raises:
|
||||
APIError: Une erreur car la requête a fonctionné (mauvais comportement)
|
||||
"""
|
||||
try:
|
||||
data = POST_JSON(path=path, headers=headers, data=data)
|
||||
# ^ Renvoie un 404
|
||||
except APIError as api_err:
|
||||
if err is not None:
|
||||
assert api_err.payload["message"] == err
|
||||
else:
|
||||
raise APIError("Le POST n'aurait pas du fonctionner")
|
||||
|
||||
|
||||
def create_data(etat: str, day: str, raison: str = None):
|
||||
"""
|
||||
Permet de créer un dictionnaire justificatif
|
||||
|
||||
Args:
|
||||
etat (str): l'état du justificatif (VALIDE,NON_VALIDE,MODIFIE, ATTENTE)
|
||||
day (str): Le jour du justificatif
|
||||
raison (str, optional): Une description du justificatif (eg: motif retard )
|
||||
|
||||
Returns:
|
||||
dict: la représentation d'un justificatif
|
||||
"""
|
||||
data = {
|
||||
"date_debut": f"2022-01-{day}T08:00",
|
||||
"date_fin": f"2022-01-{day}T10:00",
|
||||
"etat": etat,
|
||||
}
|
||||
if raison is not None:
|
||||
data["desc"] = raison
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def test_route_justificatif(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>"""
|
||||
|
||||
# Bon fonctionnement == id connu
|
||||
data = GET(path="/justificatif/1", headers=api_headers)
|
||||
check_fields(data)
|
||||
|
||||
# Mauvais Fonctionnement == id inconnu
|
||||
|
||||
check_failure_get(
|
||||
f"/justificatif/{FAUX}",
|
||||
api_headers,
|
||||
)
|
||||
|
||||
|
||||
def test_route_justificatifs(api_headers):
|
||||
"""test de la route /justificatifs/<etudid:int>"""
|
||||
# Bon fonctionnement
|
||||
|
||||
data = GET(path=f"/justificatifs/{ETUDID}", headers=api_headers)
|
||||
assert isinstance(data, list)
|
||||
for just in data:
|
||||
check_fields(just, JUSTIFICATIFS_FIELDS)
|
||||
|
||||
data = GET(path=f"/justificatifs/{ETUDID}/query?", headers=api_headers)
|
||||
assert isinstance(data, list)
|
||||
for just in data:
|
||||
check_fields(just, JUSTIFICATIFS_FIELDS)
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_get(f"/justificatifs/{FAUX}", api_headers)
|
||||
check_failure_get(f"/justificatifs/{FAUX}/query?", api_headers)
|
||||
|
||||
|
||||
def test_route_create(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/create"""
|
||||
# -== Unique ==-
|
||||
|
||||
# Bon fonctionnement
|
||||
data = create_data("valide", "01")
|
||||
|
||||
res = POST_JSON(f"/justificatif/{ETUDID}/create", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["success"]) == 1
|
||||
|
||||
TO_REMOVE.append(res["success"]["0"]["justif_id"])
|
||||
|
||||
data2 = create_data("modifie", "02", "raison")
|
||||
res = POST_JSON(f"/justificatif/{ETUDID}/create", [data2], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["success"]) == 1
|
||||
|
||||
TO_REMOVE.append(res["success"]["0"]["justif_id"])
|
||||
|
||||
# Mauvais fonctionnement
|
||||
check_failure_post(f"/justificatif/{FAUX}/create", api_headers, [data])
|
||||
|
||||
res = POST_JSON(
|
||||
f"/justificatif/{ETUDID}/create",
|
||||
[create_data("absent", "03")],
|
||||
api_headers,
|
||||
)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 1
|
||||
assert res["errors"]["0"] == "param 'etat': invalide"
|
||||
|
||||
# -== Multiple ==-
|
||||
|
||||
# Bon Fonctionnement
|
||||
|
||||
etats = ["valide", "modifie", "non_valide", "attente"]
|
||||
data = [
|
||||
create_data(etats[d % 4], 10 + d, "raison" if d % 2 else None)
|
||||
for d in range(randint(3, 5))
|
||||
]
|
||||
|
||||
res = POST_JSON(f"/justificatif/{ETUDID}/create", data, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
check_fields(res["success"][dat], CREATE_FIELD)
|
||||
TO_REMOVE.append(res["success"][dat]["justif_id"])
|
||||
|
||||
# Mauvais Fonctionnement
|
||||
|
||||
data2 = [
|
||||
create_data(None, "25"),
|
||||
create_data("blabla", 26),
|
||||
create_data("valide", 32),
|
||||
]
|
||||
|
||||
res = POST_JSON(f"/justificatif/{ETUDID}/create", data2, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 3
|
||||
|
||||
assert res["errors"]["0"] == "param 'etat': manquant"
|
||||
assert res["errors"]["1"] == "param 'etat': invalide"
|
||||
assert (
|
||||
res["errors"]["2"]
|
||||
== "param 'date_debut': format invalide, param 'date_fin': format invalide"
|
||||
)
|
||||
|
||||
|
||||
def test_route_edit(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/edit"""
|
||||
# Bon fonctionnement
|
||||
|
||||
data = {"etat": "modifie", "raison": "test"}
|
||||
res = POST_JSON(f"/justificatif/{TO_REMOVE[0]}/edit", data, api_headers)
|
||||
assert isinstance(res, dict) and "couverture" in res.keys()
|
||||
|
||||
data["raison"] = None
|
||||
res = POST_JSON(f"/justificatif/{TO_REMOVE[1]}/edit", data, api_headers)
|
||||
assert isinstance(res, dict) and "couverture" in res.keys()
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_post(f"/justificatif/{FAUX}/edit", api_headers, data)
|
||||
data["etat"] = "blabla"
|
||||
check_failure_post(
|
||||
f"/justificatif/{TO_REMOVE[2]}/edit",
|
||||
api_headers,
|
||||
data,
|
||||
err="param 'etat': invalide",
|
||||
)
|
||||
|
||||
|
||||
def test_route_delete(api_headers):
|
||||
"""test de la route /justificatif/delete"""
|
||||
# -== Unique ==-
|
||||
|
||||
# Bon fonctionnement
|
||||
data = TO_REMOVE[0]
|
||||
|
||||
res = POST_JSON("/justificatif/delete", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
assert res["success"][dat] == {"OK": True}
|
||||
|
||||
# Mauvais fonctionnement
|
||||
res = POST_JSON("/justificatif/delete", [data], api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 1
|
||||
|
||||
# -== Multiple ==-
|
||||
|
||||
# Bon Fonctionnement
|
||||
|
||||
data = TO_REMOVE[1:]
|
||||
|
||||
res = POST_JSON("/justificatif/delete", data, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
for dat in res["success"]:
|
||||
assert res["success"][dat] == {"OK": True}
|
||||
|
||||
# Mauvais Fonctionnement
|
||||
|
||||
data2 = [
|
||||
FAUX,
|
||||
FAUX + 1,
|
||||
FAUX + 2,
|
||||
]
|
||||
|
||||
res = POST_JSON("/justificatif/delete", data2, api_headers)
|
||||
check_fields(res, BATCH_FIELD)
|
||||
assert len(res["errors"]) == 3
|
||||
|
||||
assert all([res["errors"][i] == "Justificatif non existant" for i in res["errors"]])
|
||||
|
||||
|
||||
# Gestion de l'archivage
|
||||
|
||||
|
||||
def send_file(justif_id: int, filename: str, headers):
|
||||
"""
|
||||
Envoie un fichier vers la route d'importation
|
||||
"""
|
||||
with open(filename, "rb") as file:
|
||||
url: str = API_URL + f"/justificatif/{justif_id}/import"
|
||||
req = requests.post(
|
||||
url,
|
||||
files={filename: file},
|
||||
headers=headers,
|
||||
verify=CHECK_CERTIFICATE,
|
||||
)
|
||||
|
||||
if req.status_code != 200:
|
||||
raise APIError(f"erreur status={req.status_code} !", req.json())
|
||||
|
||||
return req.json()
|
||||
|
||||
|
||||
def check_failure_send(
|
||||
justif_id: int,
|
||||
headers,
|
||||
filename: str = "tests/api/test_api_justificatif.txt",
|
||||
err: str = None,
|
||||
):
|
||||
"""
|
||||
Vérifie que l'envoi d'un fichier renvoie bien un 404
|
||||
|
||||
Args:
|
||||
justif_id (int): l'id du justificatif
|
||||
headers (dict): token d'auth de l'api
|
||||
filename (str, optional): le chemin vers le fichier.
|
||||
Defaults to "tests/api/test_api_justificatif.txt".
|
||||
err (str, optional): l'erreur attendue.
|
||||
|
||||
Raises:
|
||||
APIError: Si l'envoi fonctionne (mauvais comportement)
|
||||
"""
|
||||
try:
|
||||
send_file(justif_id, filename, headers)
|
||||
# ^ Renvoie un 404
|
||||
except APIError as api_err:
|
||||
if err is not None:
|
||||
assert api_err.payload["message"] == err
|
||||
else:
|
||||
raise APIError("Le POST n'aurait pas du fonctionner")
|
||||
|
||||
|
||||
def test_import_justificatif(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/import"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
filename: str = "tests/api/test_api_justificatif.txt"
|
||||
|
||||
resp: dict = send_file(1, filename, api_headers)
|
||||
assert "filename" in resp
|
||||
assert resp["filename"] == "test_api_justificatif.txt"
|
||||
|
||||
filename: str = "tests/api/test_api_justificatif2.txt"
|
||||
resp: dict = send_file(1, filename, api_headers)
|
||||
assert "filename" in resp
|
||||
assert resp["filename"] == "test_api_justificatif2.txt"
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_send(FAUX, api_headers)
|
||||
|
||||
|
||||
def test_list_justificatifs(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/list"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
res: list = GET("/justificatif/1/list", api_headers)
|
||||
|
||||
assert isinstance(res, list)
|
||||
assert len(res) == 2
|
||||
|
||||
res: list = GET("/justificatif/2/list", api_headers)
|
||||
|
||||
assert isinstance(res, list)
|
||||
assert len(res) == 0
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_get(f"/justificatif/{FAUX}/list", api_headers)
|
||||
|
||||
|
||||
def post_export(justif_id: int, fname: str, api_headers):
|
||||
"""
|
||||
Envoie une requête POST sans data et retourne la réponse
|
||||
|
||||
Args:
|
||||
id (int): justif_id
|
||||
fname (str): nom du fichier (côté serveur)
|
||||
api_headers (dict): token auth de l'api
|
||||
|
||||
Returns:
|
||||
requests.Response: la réponse de l'api
|
||||
"""
|
||||
url: str = API_URL + f"/justificatif/{justif_id}/export/{fname}"
|
||||
res = requests.post(url, headers=api_headers)
|
||||
return res
|
||||
|
||||
|
||||
def test_export(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/export/<filename:str>"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
assert post_export(1, "test_api_justificatif.txt", api_headers).status_code == 200
|
||||
|
||||
# Mauvais fonctionnement
|
||||
assert (
|
||||
post_export(FAUX, "test_api_justificatif.txt", api_headers).status_code == 404
|
||||
)
|
||||
assert post_export(1, "blabla.txt", api_headers).status_code == 404
|
||||
assert post_export(2, "blabla.txt", api_headers).status_code == 404
|
||||
|
||||
|
||||
def test_remove_justificatif(api_headers):
|
||||
"""test de la route /justificatif/<justif_id:int>/remove"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
filename: str = "tests/api/test_api_justificatif.txt"
|
||||
send_file(2, filename, api_headers)
|
||||
filename: str = "tests/api/test_api_justificatif2.txt"
|
||||
send_file(2, filename, api_headers)
|
||||
|
||||
res: dict = POST_JSON("/justificatif/1/remove", {"remove": "all"}, api_headers)
|
||||
assert res == {"response": "removed"}
|
||||
assert len(GET("/justificatif/1/list", api_headers)) == 0
|
||||
|
||||
res: dict = POST_JSON(
|
||||
"/justificatif/2/remove",
|
||||
{"remove": "list", "filenames": ["test_api_justificatif2.txt"]},
|
||||
api_headers,
|
||||
)
|
||||
assert res == {"response": "removed"}
|
||||
assert len(GET("/justificatif/2/list", api_headers)) == 1
|
||||
|
||||
res: dict = POST_JSON(
|
||||
"/justificatif/2/remove",
|
||||
{"remove": "list", "filenames": ["test_api_justificatif.txt"]},
|
||||
api_headers,
|
||||
)
|
||||
assert res == {"response": "removed"}
|
||||
assert len(GET("/justificatif/2/list", api_headers)) == 0
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_post("/justificatif/2/remove", api_headers, {})
|
||||
check_failure_post(f"/justificatif/{FAUX}/remove", api_headers, {"remove": "all"})
|
||||
check_failure_post("/justificatif/1/remove", api_headers, {"remove": "all"})
|
||||
|
||||
|
||||
def test_justifies(api_headers):
|
||||
"""test la route /justificatif/<justif_id:int>/justifies"""
|
||||
|
||||
# Bon fonctionnement
|
||||
|
||||
res: list = GET("/justificatif/1/justifies", api_headers)
|
||||
assert isinstance(res, list)
|
||||
|
||||
# Mauvais fonctionnement
|
||||
|
||||
check_failure_get(f"/justificatif/{FAUX}/justifies", api_headers)
|
3
tests/api/test_api_permissions.py
Normal file → Executable file
@ -60,6 +60,9 @@ def test_permissions(api_headers):
|
||||
"role_name": "Ens",
|
||||
"uid": 1,
|
||||
"version": "long",
|
||||
"assiduite_id": 1,
|
||||
"justif_id": 1,
|
||||
"etudids": "1",
|
||||
}
|
||||
for rule in api_rules:
|
||||
path = rule.build(args)[1]
|
||||
|
26
tests/ressources/samples/assiduites_samples.csv
Normal file
@ -0,0 +1,26 @@
|
||||
"entry_name";"url";"permission";"method";"content"
|
||||
"assiduite";"/assiduite/1";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1/query?moduleimpl_id=1";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count/query?etat=present,retard&metric=compte,heure";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1/query?moduleimpl_id=1";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count/query?etat=present,retard&metric=compte,heure";"ScoView";"GET";
|
||||
"assiduite_create";"/assiduite/1/create";"ScoView";"POST";"[{""date_debut"": ""2022-10-27T08:00"",""date_fin"": ""2022-10-27T10:00"",""etat"": ""absent""}]"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""etat"":""absent""}"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""moduleimpl_id"":2}"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""etat"": ""retard"",""moduleimpl_id"":3}"
|
||||
"assiduite_delete";"/assiduite/delete";"ScoView";"POST";"[2,2,3]"
|
||||
"justificatif";"/justificatif/1";"ScoView";"GET";
|
||||
"justificatifs";"/justificatifs/1";"ScoView";"GET";
|
||||
"justificatifs";"/justificatifs/1/query?etat=attente";"ScoView";"GET";
|
||||
"justificatif_create";"/justificatif/1/create";"ScoView";"POST";"[{""date_debut"": ""2022-10-27T08:00"",""date_fin"": ""2022-10-27T10:00"",""etat"": ""attente""}]"
|
||||
"justificatif_edit";"/justificatif/1/edit";"ScoView";"POST";"{""etat"":""valide""}"
|
||||
"justificatif_edit";"/justificatif/1/edit";"ScoView";"POST";"{""raison"":""MEDIC""}"
|
||||
"justificatif_delete";"/justificatif/delete";"ScoView";"POST";"[2,2,3]"
|
|
@ -1,4 +1,24 @@
|
||||
"entry_name";"url";"permission";"method";"content"
|
||||
"assiduite";"/assiduite/1";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites";"/assiduites/1/query?moduleimpl_id=1";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_count";"/assiduites/1/count/query?etat=present,retard&metric=compte,heure";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_formsemestre";"/assiduites/formsemestre/1/query?moduleimpl_id=1";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count/query?etat=retard";"ScoView";"GET";
|
||||
"assiduites_formsemestre_count";"/assiduites/formsemestre/1/count/query?etat=present,retard&metric=compte,heure";"ScoView";"GET";
|
||||
"assiduite_create";"/assiduite/1/create";"ScoView";"POST";"{""date_debut"": ""2022-10-27T08:00"",""date_fin"": ""2022-10-27T10:00"",""etat"": ""absent""}"
|
||||
"assiduite_create";"/assiduite/1/create/batch";"ScoView";"POST";"{""batch"":[{""date_debut"": ""2022-10-27T08:00"",""date_fin"": ""2022-10-27T10:00"",""etat"": ""absent""},{""date_debut"": ""2022-10-27T08:00"",""date_fin"": ""2022-10-27T10:00"",""etat"": ""retard""},{""date_debut"": ""2022-10-27T11:00"",""date_fin"": ""2022-10-27T13:00"",""etat"": ""present""}]}"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""etat"":""absent""}"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""moduleimpl_id"":2}"
|
||||
"assiduite_edit";"/assiduite/1/edit";"ScoView";"POST";"{""etat"": ""retard"",""moduleimpl_id"":3}"
|
||||
"assiduite_delete";"/assiduite/delete";"ScoView";"POST";"{""assiduite_id"": 1}"
|
||||
"assiduite_delete";"/assiduite/delete/batch";"ScoView";"POST";"{""batch"":[2,2,3]}"
|
||||
"departements";"/departements";"ScoView";"GET";
|
||||
"departements-ids";"/departements_ids";"ScoView";"GET";
|
||||
"departement";"/departement/TAPI";"ScoView";"GET";
|
|
728
tests/unit/test_assiduites.py
Normal file
@ -0,0 +1,728 @@
|
||||
# -*- mode: python -*-
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Tests unitaires vérifiant le bon fonctionnement du modèle Assiduité et de
|
||||
ses fonctions liées
|
||||
|
||||
Ecrit par HARTMANN Matthias (en s'inspirant de tests.unit.test_abs_count.py par Fares Amer )
|
||||
"""
|
||||
|
||||
from tests.unit import sco_fake_gen
|
||||
|
||||
from app import db
|
||||
|
||||
from app.scodoc import sco_formsemestre
|
||||
import app.scodoc.sco_assiduites as scass
|
||||
from app.models import Assiduite, Justificatif, Identite, FormSemestre, ModuleImpl
|
||||
from app.scodoc.sco_exceptions import ScoValueError
|
||||
import app.scodoc.sco_utils as scu
|
||||
from app.scodoc.sco_abs import (
|
||||
get_abs_count_in_interval,
|
||||
get_assiduites_count_in_interval,
|
||||
)
|
||||
from app.scodoc import sco_abs_views
|
||||
from tools import migrate_abs_to_assiduites, downgrade_module
|
||||
|
||||
|
||||
class BiInt(int, scu.BiDirectionalEnum):
|
||||
"""Classe pour tester la classe BiDirectionalEnum"""
|
||||
|
||||
A = 1
|
||||
B = 2
|
||||
|
||||
|
||||
def test_bi_directional_enum(test_client):
|
||||
"""Test le bon fonctionnement de la classe BiDirectionalEnum"""
|
||||
|
||||
assert BiInt.get("A") == BiInt.get("a") == BiInt.A == 1
|
||||
assert BiInt.get("B") == BiInt.get("b") == BiInt.B == 2
|
||||
assert BiInt.get("blabla") is None
|
||||
assert BiInt.get("blabla", -1) == -1
|
||||
assert isinstance(BiInt.inverse(), dict)
|
||||
assert BiInt.inverse()[1] == BiInt.A and BiInt.inverse()[2] == BiInt.B
|
||||
|
||||
|
||||
def test_general(test_client):
|
||||
"""tests général du modèle assiduite"""
|
||||
|
||||
g_fake = sco_fake_gen.ScoFake(verbose=False)
|
||||
|
||||
# Création d'une formation (1)
|
||||
|
||||
formation_id = g_fake.create_formation()
|
||||
ue_id = g_fake.create_ue(
|
||||
formation_id=formation_id, acronyme="T1", titre="UE TEST 1"
|
||||
)
|
||||
matiere_id = g_fake.create_matiere(ue_id=ue_id, titre="test matière")
|
||||
module_id_1 = g_fake.create_module(
|
||||
matiere_id=matiere_id, code="Mo1", coefficient=1.0, titre="test module"
|
||||
)
|
||||
module_id_2 = g_fake.create_module(
|
||||
matiere_id=matiere_id, code="Mo2", coefficient=1.0, titre="test module2"
|
||||
)
|
||||
|
||||
# Création semestre (2)
|
||||
|
||||
formsemestre_id_1 = g_fake.create_formsemestre(
|
||||
formation_id=formation_id,
|
||||
semestre_id=1,
|
||||
date_debut="01/09/2022",
|
||||
date_fin="31/12/2022",
|
||||
)
|
||||
formsemestre_id_2 = g_fake.create_formsemestre(
|
||||
formation_id=formation_id,
|
||||
semestre_id=2,
|
||||
date_debut="01/01/2023",
|
||||
date_fin="31/07/2023",
|
||||
)
|
||||
formsemestre_id_3 = g_fake.create_formsemestre(
|
||||
formation_id=formation_id,
|
||||
semestre_id=3,
|
||||
date_debut="01/01/2024",
|
||||
date_fin="31/07/2024",
|
||||
)
|
||||
|
||||
formsemestre_1 = sco_formsemestre.get_formsemestre(formsemestre_id_1)
|
||||
formsemestre_2 = sco_formsemestre.get_formsemestre(formsemestre_id_2)
|
||||
formsemestre_3 = sco_formsemestre.get_formsemestre(formsemestre_id_3)
|
||||
|
||||
# Création des modulesimpls (4, 2 par semestre)
|
||||
|
||||
moduleimpl_1_1 = g_fake.create_moduleimpl(
|
||||
module_id=module_id_1,
|
||||
formsemestre_id=formsemestre_id_1,
|
||||
)
|
||||
moduleimpl_1_2 = g_fake.create_moduleimpl(
|
||||
module_id=module_id_2,
|
||||
formsemestre_id=formsemestre_id_1,
|
||||
)
|
||||
|
||||
moduleimpl_2_1 = g_fake.create_moduleimpl(
|
||||
module_id=module_id_1,
|
||||
formsemestre_id=formsemestre_id_2,
|
||||
)
|
||||
moduleimpl_2_2 = g_fake.create_moduleimpl(
|
||||
module_id=module_id_2,
|
||||
formsemestre_id=formsemestre_id_2,
|
||||
)
|
||||
|
||||
moduleimpls = [
|
||||
moduleimpl_1_1,
|
||||
moduleimpl_1_2,
|
||||
moduleimpl_2_1,
|
||||
moduleimpl_2_2,
|
||||
]
|
||||
|
||||
moduleimpls = [
|
||||
ModuleImpl.query.filter_by(id=mi_id).first() for mi_id in moduleimpls
|
||||
]
|
||||
|
||||
# Création des étudiants (3)
|
||||
|
||||
etuds_dict = [
|
||||
g_fake.create_etud(code_nip=None, prenom=f"etud{i}") for i in range(3)
|
||||
]
|
||||
|
||||
etuds = []
|
||||
for etud in etuds_dict:
|
||||
g_fake.inscrit_etudiant(formsemestre_id=formsemestre_id_1, etud=etud)
|
||||
g_fake.inscrit_etudiant(formsemestre_id=formsemestre_id_2, etud=etud)
|
||||
|
||||
etuds.append(Identite.query.filter_by(id=etud["id"]).first())
|
||||
|
||||
assert None not in etuds, "Problème avec la conversion en Identite"
|
||||
|
||||
# Etudiant faux
|
||||
|
||||
etud_faux_dict = g_fake.create_etud(code_nip=None, prenom="etudfaux")
|
||||
etud_faux = Identite.query.filter_by(id=etud_faux_dict["id"]).first()
|
||||
|
||||
verif_migration_abs_assiduites()
|
||||
|
||||
ajouter_assiduites(etuds, moduleimpls, etud_faux)
|
||||
justificatifs: list[Justificatif] = ajouter_justificatifs(etuds[0])
|
||||
verifier_comptage_et_filtrage_assiduites(
|
||||
etuds, moduleimpls, (formsemestre_1, formsemestre_2, formsemestre_3)
|
||||
)
|
||||
verifier_filtrage_justificatifs(etuds[0], justificatifs)
|
||||
editer_supprimer_assiduites(etuds, moduleimpls)
|
||||
editer_supprimer_justificatif(etuds[0])
|
||||
|
||||
|
||||
def verif_migration_abs_assiduites():
|
||||
"""Vérification que le script de migration fonctionne correctement"""
|
||||
downgrade_module(assiduites=True, justificatifs=True)
|
||||
|
||||
etudid: int = 1
|
||||
|
||||
for debut, fin, demijournee in [
|
||||
(
|
||||
"02/01/2023",
|
||||
"10/01/2023",
|
||||
2,
|
||||
), # 2 assiduités 02/01: 08h -> 06/01: 18h & assiduités 09/01: 08h -> 10/01: 18h | 14dj
|
||||
("16/01/2023", "16/01/2023", 1), # 1 assiduité 16/01: 08h -> 16/01: 12h | 1dj
|
||||
("19/01/2023", "19/01/2023", 0), # 1 assiduité 19/01: 12h -> 19/01: 18h | 1dj
|
||||
("18/01/2023", "18/01/2023", 2), # 1 assiduité 18/01: 08h -> 18/01: 18h | 2dj
|
||||
("23/01/2023", "23/01/2023", 0), # 1 assiduité 23/01: 12h -> 24/01: 18h | 3dj
|
||||
("24/01/2023", "24/01/2023", 2),
|
||||
]:
|
||||
sco_abs_views.doSignaleAbsence(
|
||||
datedebut=debut,
|
||||
datefin=fin,
|
||||
demijournee=demijournee,
|
||||
etudid=etudid,
|
||||
)
|
||||
|
||||
# --- Justification de certaines absences
|
||||
|
||||
for debut, fin, demijournee in [
|
||||
(
|
||||
"02/01/2023",
|
||||
"10/01/2023",
|
||||
2,
|
||||
), # 2 justificatif 02/01: 08h -> 06/01: 18h & justificatif 09/01: 08h -> 10/01: 18h | 14dj
|
||||
(
|
||||
"19/01/2023",
|
||||
"19/01/2023",
|
||||
0,
|
||||
), # 1 justificatif 19/01: 12h -> 19/01: 18h | 1dj
|
||||
(
|
||||
"18/01/2023",
|
||||
"18/01/2023",
|
||||
2,
|
||||
), # 1 justificatif 18/01: 08h -> 18/01: 18h | 2dj
|
||||
]:
|
||||
sco_abs_views.doJustifAbsence(
|
||||
datedebut=debut,
|
||||
datefin=fin,
|
||||
demijournee=demijournee,
|
||||
etudid=etudid,
|
||||
)
|
||||
|
||||
migrate_abs_to_assiduites()
|
||||
|
||||
assert Assiduite.query.count() == 6, "Erreur migration assiduites"
|
||||
assert Justificatif.query.count() == 4, "Erreur migration justificatifs"
|
||||
|
||||
essais_cache(etudid)
|
||||
|
||||
downgrade_module(assiduites=True, justificatifs=True)
|
||||
|
||||
|
||||
def essais_cache(etudid):
|
||||
"""Vérification des fonctionnalités du cache TODO:WIP"""
|
||||
|
||||
date_deb: str = "2023-01-01T07:00"
|
||||
date_fin: str = "2023-03-31T19:00"
|
||||
|
||||
abs_count_no_cache: int = get_abs_count_in_interval(etudid, date_deb, date_fin)
|
||||
abs_count_cache = get_abs_count_in_interval(etudid, date_deb, date_fin)
|
||||
assiduites_count_no_cache = get_assiduites_count_in_interval(
|
||||
etudid, date_deb, date_fin
|
||||
)
|
||||
assiduites_count_cache = get_assiduites_count_in_interval(
|
||||
etudid, date_deb, date_fin
|
||||
)
|
||||
|
||||
assert (
|
||||
abs_count_cache
|
||||
== abs_count_no_cache
|
||||
== assiduites_count_cache
|
||||
== assiduites_count_no_cache
|
||||
== (21, 17)
|
||||
), "Erreur cache"
|
||||
|
||||
|
||||
def ajouter_justificatifs(etud):
|
||||
"""test de l'ajout des justificatifs"""
|
||||
|
||||
obj_justificatifs = [
|
||||
{
|
||||
"etat": scu.EtatJustificatif.ATTENTE,
|
||||
"deb": "2022-09-03T08:00+01:00",
|
||||
"fin": "2022-09-03T09:59:59+01:00",
|
||||
"raison": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatJustificatif.VALIDE,
|
||||
"deb": "2023-01-03T07:00+01:00",
|
||||
"fin": "2023-01-03T11:00+01:00",
|
||||
"raison": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatJustificatif.VALIDE,
|
||||
"deb": "2022-09-03T10:00:00+01:00",
|
||||
"fin": "2022-09-03T12:00+01:00",
|
||||
"raison": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatJustificatif.NON_VALIDE,
|
||||
"deb": "2022-09-03T14:00:00+01:00",
|
||||
"fin": "2022-09-03T15:00+01:00",
|
||||
"raison": "Description",
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatJustificatif.MODIFIE,
|
||||
"deb": "2023-01-03T11:30+01:00",
|
||||
"fin": "2023-01-03T12:00+01:00",
|
||||
"raison": None,
|
||||
},
|
||||
]
|
||||
|
||||
justificatifs = [
|
||||
Justificatif.create_justificatif(
|
||||
etud,
|
||||
scu.is_iso_formated(just["deb"], True),
|
||||
scu.is_iso_formated(just["fin"], True),
|
||||
just["etat"],
|
||||
just["raison"],
|
||||
)
|
||||
for just in obj_justificatifs
|
||||
]
|
||||
# Vérification de la création des justificatifs
|
||||
assert [
|
||||
justi for justi in justificatifs if not isinstance(justi, Justificatif)
|
||||
] == [], "La création des justificatifs de base n'est pas OK"
|
||||
|
||||
# Vérification de la gestion des erreurs
|
||||
|
||||
test_assiduite = {
|
||||
"etat": scu.EtatJustificatif.ATTENTE,
|
||||
"deb": "2023-01-03T11:00:01+01:00",
|
||||
"fin": "2023-01-03T12:00+01:00",
|
||||
"raison": "Description",
|
||||
}
|
||||
return justificatifs
|
||||
|
||||
|
||||
def verifier_filtrage_justificatifs(etud: Identite, justificatifs: list[Justificatif]):
|
||||
"""
|
||||
- vérifier le filtrage des justificatifs (etat, debut, fin)
|
||||
"""
|
||||
|
||||
# Vérification du filtrage classique
|
||||
|
||||
# Etat
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "valide").count() == 2
|
||||
), "Filtrage de l'état 'valide' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "attente").count() == 1
|
||||
), "Filtrage de l'état 'attente' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "modifie").count() == 1
|
||||
), "Filtrage de l'état 'modifie' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "non_valide").count()
|
||||
== 1
|
||||
), "Filtrage de l'état 'non_valide' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "valide,modifie").count()
|
||||
== 3
|
||||
), "Filtrage de l'état 'valide,modifie' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(
|
||||
etud.justificatifs, "valide,modifie,attente"
|
||||
).count()
|
||||
== 4
|
||||
), "Filtrage de l'état 'valide,modifie,attente' mauvais"
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(
|
||||
etud.justificatifs, "valide,modifie,attente,non_valide"
|
||||
).count()
|
||||
== 5
|
||||
), "Filtrage de l'état 'valide,modifie,attente,_non_valide' mauvais"
|
||||
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "autre").count() == 0
|
||||
), "Filtrage de l'état 'autre' mauvais"
|
||||
|
||||
# Dates
|
||||
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif).count() == 5
|
||||
), "Filtrage 'Toute Date' mauvais 1"
|
||||
|
||||
date = scu.localize_datetime("2022-09-01T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_deb=date).count()
|
||||
== 5
|
||||
), "Filtrage 'Toute Date' mauvais 2"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T08:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_deb=date).count()
|
||||
== 5
|
||||
), "Filtrage 'date début' mauvais 3"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T08:00:01+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_deb=date).count()
|
||||
== 5
|
||||
), "Filtrage 'date début' mauvais 4"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_deb=date).count()
|
||||
== 4
|
||||
), "Filtrage 'date début' mauvais 5"
|
||||
|
||||
date = scu.localize_datetime("2022-09-01T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_fin=date).count()
|
||||
== 0
|
||||
), "Filtrage 'Toute Date' mauvais 6"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T08:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_fin=date).count()
|
||||
== 1
|
||||
), "Filtrage 'date début' mauvais 7"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T10:00:01+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_fin=date).count()
|
||||
== 2
|
||||
), "Filtrage 'date début' mauvais 8"
|
||||
|
||||
date = scu.localize_datetime("2023-01-03T12:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etud.justificatifs, Justificatif, date_fin=date).count()
|
||||
== 5
|
||||
), "Filtrage 'date début' mauvais 9"
|
||||
|
||||
# Justifications des assiduites
|
||||
|
||||
assert len(scass.justifies(justificatifs[2])) == 1, "Justifications mauvais"
|
||||
assert len(scass.justifies(justificatifs[0])) == 0, "Justifications mauvais"
|
||||
|
||||
|
||||
def editer_supprimer_justificatif(etud: Identite):
|
||||
"""
|
||||
Troisième Partie:
|
||||
- Vérification de l'édition des justificatifs
|
||||
- Vérification de la suppression des justificatifs
|
||||
"""
|
||||
|
||||
justi: Justificatif = etud.justificatifs.first()
|
||||
|
||||
# Modification de l'état
|
||||
justi.etat = scu.EtatJustificatif.MODIFIE
|
||||
# Modification des dates
|
||||
justi.date_debut = scu.localize_datetime("2023-02-03T11:00:01+01:00")
|
||||
justi.date_fin = scu.localize_datetime("2023-02-03T12:00:01+01:00")
|
||||
|
||||
db.session.add(justi)
|
||||
db.session.commit()
|
||||
|
||||
# Vérification du changement
|
||||
assert (
|
||||
scass.filter_justificatifs_by_etat(etud.justificatifs, "modifie").count() == 2
|
||||
), "Edition de justificatif mauvais"
|
||||
|
||||
assert (
|
||||
scass.filter_by_date(
|
||||
etud.justificatifs,
|
||||
Justificatif,
|
||||
date_deb=scu.localize_datetime("2023-02-01T11:00:00+01:00"),
|
||||
).count()
|
||||
== 1
|
||||
), "Edition de justificatif mauvais 2"
|
||||
|
||||
# Suppression d'un justificatif
|
||||
|
||||
db.session.delete(justi)
|
||||
db.session.commit()
|
||||
|
||||
assert etud.justificatifs.count() == 4, "Suppression de justificatif mauvais"
|
||||
|
||||
|
||||
def editer_supprimer_assiduites(etuds: list[Identite], moduleimpls: list[int]):
|
||||
"""
|
||||
Troisième Partie:
|
||||
- Vérification de l'édition des assiduités
|
||||
- Vérification de la suppression des assiduités
|
||||
"""
|
||||
|
||||
ass1: Assiduite = etuds[0].assiduites.first()
|
||||
ass2: Assiduite = etuds[1].assiduites.first()
|
||||
ass3: Assiduite = etuds[2].assiduites.first()
|
||||
|
||||
# Modification de l'état
|
||||
ass1.etat = scu.EtatAssiduite.RETARD
|
||||
db.session.add(ass1)
|
||||
# Modification du moduleimpl
|
||||
ass2.moduleimpl_id = moduleimpls[0].id
|
||||
db.session.add(ass2)
|
||||
db.session.commit()
|
||||
|
||||
# Vérification du changement
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etuds[0].assiduites, "retard").count() == 4
|
||||
), "Edition d'assiduité mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etuds[1].assiduites, moduleimpls[0].id).count() == 2
|
||||
), "Edition d'assiduité mauvais"
|
||||
|
||||
# Suppression d'une assiduité
|
||||
|
||||
db.session.delete(ass3)
|
||||
db.session.commit()
|
||||
|
||||
assert etuds[2].assiduites.count() == 6, "Suppression d'assiduité mauvais"
|
||||
|
||||
|
||||
def ajouter_assiduites(
|
||||
etuds: list[Identite], moduleimpls: list[ModuleImpl], etud_faux: Identite
|
||||
):
|
||||
"""
|
||||
Première partie:
|
||||
- Ajoute 7 assiduités à chaque étudiant
|
||||
- 2 présences (semestres 1 et 2)
|
||||
- 3 retards (semestres 1 et 2)
|
||||
- 2 absences (semestre 1)
|
||||
- Vérifie la création des assiduités
|
||||
"""
|
||||
|
||||
for etud in etuds:
|
||||
obj_assiduites = [
|
||||
{
|
||||
"etat": scu.EtatAssiduite.PRESENT,
|
||||
"deb": "2022-09-03T08:00+01:00",
|
||||
"fin": "2022-09-03T10:00+01:00",
|
||||
"moduleimpl": None,
|
||||
"desc": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.PRESENT,
|
||||
"deb": "2023-01-03T08:00+01:00",
|
||||
"fin": "2023-01-03T10:00+01:00",
|
||||
"moduleimpl": moduleimpls[2],
|
||||
"desc": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.ABSENT,
|
||||
"deb": "2022-09-03T10:00:01+01:00",
|
||||
"fin": "2022-09-03T11:00+01:00",
|
||||
"moduleimpl": moduleimpls[0],
|
||||
"desc": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.ABSENT,
|
||||
"deb": "2022-09-03T14:00:00+01:00",
|
||||
"fin": "2022-09-03T15:00+01:00",
|
||||
"moduleimpl": moduleimpls[1],
|
||||
"desc": "Description",
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.RETARD,
|
||||
"deb": "2023-01-03T11:00:01+01:00",
|
||||
"fin": "2023-01-03T12:00+01:00",
|
||||
"moduleimpl": moduleimpls[3],
|
||||
"desc": None,
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.RETARD,
|
||||
"deb": "2023-01-04T11:00:01+01:00",
|
||||
"fin": "2023-01-04T12:00+01:00",
|
||||
"moduleimpl": moduleimpls[3],
|
||||
"desc": "Description",
|
||||
},
|
||||
{
|
||||
"etat": scu.EtatAssiduite.RETARD,
|
||||
"deb": "2022-11-04T11:00:01+01:00",
|
||||
"fin": "2022-12-05T12:00+01:00",
|
||||
"moduleimpl": None,
|
||||
"desc": "Description",
|
||||
},
|
||||
]
|
||||
|
||||
assiduites = [
|
||||
Assiduite.create_assiduite(
|
||||
etud,
|
||||
scu.is_iso_formated(ass["deb"], True),
|
||||
scu.is_iso_formated(ass["fin"], True),
|
||||
ass["etat"],
|
||||
ass["moduleimpl"],
|
||||
ass["desc"],
|
||||
)
|
||||
for ass in obj_assiduites
|
||||
]
|
||||
|
||||
# Vérification de la création des assiduités
|
||||
assert [
|
||||
ass for ass in assiduites if not isinstance(ass, Assiduite)
|
||||
] == [], "La création des assiduités de base n'est pas OK"
|
||||
|
||||
# Vérification de la gestion des erreurs
|
||||
|
||||
test_assiduite = {
|
||||
"etat": scu.EtatAssiduite.RETARD,
|
||||
"deb": "2023-01-04T11:00:01+01:00",
|
||||
"fin": "2023-01-04T12:00+01:00",
|
||||
"moduleimpl": moduleimpls[3],
|
||||
"desc": "Description",
|
||||
}
|
||||
|
||||
try:
|
||||
Assiduite.create_assiduite(
|
||||
etuds[0],
|
||||
scu.is_iso_formated(test_assiduite["deb"], True),
|
||||
scu.is_iso_formated(test_assiduite["fin"], True),
|
||||
test_assiduite["etat"],
|
||||
test_assiduite["moduleimpl"],
|
||||
test_assiduite["desc"],
|
||||
)
|
||||
except ScoValueError as excp:
|
||||
assert (
|
||||
excp.args[0]
|
||||
== "Duplication des assiduités (la période rentrée rentre en conflit avec une assiduité enregistrée)"
|
||||
)
|
||||
try:
|
||||
Assiduite.create_assiduite(
|
||||
etud_faux,
|
||||
scu.is_iso_formated(test_assiduite["deb"], True),
|
||||
scu.is_iso_formated(test_assiduite["fin"], True),
|
||||
test_assiduite["etat"],
|
||||
test_assiduite["moduleimpl"],
|
||||
test_assiduite["desc"],
|
||||
)
|
||||
except ScoValueError as excp:
|
||||
assert excp.args[0] == "L'étudiant n'est pas inscrit au moduleimpl"
|
||||
|
||||
|
||||
def verifier_comptage_et_filtrage_assiduites(
|
||||
etuds: list[Identite], moduleimpls: list[int], formsemestres: tuple[int]
|
||||
):
|
||||
"""
|
||||
Deuxième partie:
|
||||
- vérifier les valeurs du comptage (compte, heure, journée, demi-journée)
|
||||
- vérifier le filtrage des assiduites (etat, debut, fin, module, formsemestre)
|
||||
|
||||
"""
|
||||
|
||||
etu1, etu2, etu3 = etuds
|
||||
|
||||
mod11, mod12, mod21, mod22 = moduleimpls
|
||||
|
||||
# Vérification du comptage classique
|
||||
comptage = scass.get_assiduites_stats(etu1.assiduites)
|
||||
|
||||
assert comptage["compte"] == 6 + 1, "la métrique 'Comptage' n'est pas bien calculée"
|
||||
assert (
|
||||
comptage["journee"] == 3 + 22
|
||||
), "la métrique 'Journée' n'est pas bien calculée"
|
||||
assert (
|
||||
comptage["demi"] == 4 + 43
|
||||
), "la métrique 'Demi-Journée' n'est pas bien calculée"
|
||||
assert comptage["heure"] == float(
|
||||
8 + 169
|
||||
), "la métrique 'Heure' n'est pas bien calculée"
|
||||
|
||||
# Vérification du filtrage classique
|
||||
|
||||
# Etat
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etu2.assiduites, "present").count() == 2
|
||||
), "Filtrage de l'état 'présent' mauvais"
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etu2.assiduites, "retard").count() == 3
|
||||
), "Filtrage de l'état 'retard' mauvais"
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etu2.assiduites, "absent").count() == 2
|
||||
), "Filtrage de l'état 'absent' mauvais"
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etu2.assiduites, "absent,retard").count() == 5
|
||||
), "Filtrage de l'état 'absent,retard' mauvais"
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(
|
||||
etu2.assiduites, "absent,retard,present"
|
||||
).count()
|
||||
== 7
|
||||
), "Filtrage de l'état 'absent,retard,present' mauvais"
|
||||
assert (
|
||||
scass.filter_assiduites_by_etat(etu2.assiduites, "autre").count() == 0
|
||||
), "Filtrage de l'état 'autre' mauvais"
|
||||
|
||||
# Module
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, mod11.id).count() == 1
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, mod12.id).count() == 1
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, mod21.id).count() == 1
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, mod22.id).count() == 2
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, None).count() == 2
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
assert (
|
||||
scass.filter_by_module_impl(etu3.assiduites, 152).count() == 0
|
||||
), "Filtrage par 'Moduleimpl' mauvais"
|
||||
|
||||
# Formsemestre
|
||||
formsemestres = [
|
||||
FormSemestre.query.filter_by(id=fms["id"]).first() for fms in formsemestres
|
||||
]
|
||||
assert (
|
||||
scass.filter_by_formsemestre(etu1.assiduites, formsemestres[0]).count() == 4
|
||||
), "Filtrage 'Formsemestre' mauvais"
|
||||
assert (
|
||||
scass.filter_by_formsemestre(etu1.assiduites, formsemestres[1]).count() == 3
|
||||
), "Filtrage 'Formsemestre' mauvais"
|
||||
assert (
|
||||
scass.filter_by_formsemestre(etu1.assiduites, formsemestres[2]).count() == 0
|
||||
), "Filtrage 'Formsemestre' mauvais"
|
||||
|
||||
# Date début
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite).count() == 7
|
||||
), "Filtrage 'Date début' mauvais 1"
|
||||
|
||||
date = scu.localize_datetime("2022-09-01T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_deb=date).count() == 7
|
||||
), "Filtrage 'Date début' mauvais 2"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_deb=date).count() == 7
|
||||
), "Filtrage 'Date début' mauvais 3"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T16:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_deb=date).count() == 4
|
||||
), "Filtrage 'Date début' mauvais 4"
|
||||
|
||||
# Date Fin
|
||||
|
||||
date = scu.localize_datetime("2022-09-01T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_fin=date).count() == 0
|
||||
), "Filtrage 'Date fin' mauvais 1"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T10:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_fin=date).count() == 1
|
||||
), "Filtrage 'Date fin' mauvais 2"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T10:00:01+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_fin=date).count() == 2
|
||||
), "Filtrage 'Date fin' mauvais 3"
|
||||
|
||||
date = scu.localize_datetime("2022-09-03T16:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_fin=date).count() == 3
|
||||
), "Filtrage 'Date fin' mauvais 4"
|
||||
|
||||
date = scu.localize_datetime("2023-01-04T16:00+01:00")
|
||||
assert (
|
||||
scass.filter_by_date(etu2.assiduites, Assiduite, date_fin=date).count() == 7
|
||||
), "Filtrage 'Date fin' mauvais 5"
|
@ -21,11 +21,13 @@ from app import models
|
||||
from app.models import departements
|
||||
from app.models import (
|
||||
Absence,
|
||||
Assiduite,
|
||||
Departement,
|
||||
Formation,
|
||||
FormSemestre,
|
||||
FormSemestreEtape,
|
||||
Identite,
|
||||
Justificatif,
|
||||
ModuleImpl,
|
||||
NotesNotes,
|
||||
)
|
||||
@ -37,6 +39,7 @@ from app.scodoc import (
|
||||
sco_groups,
|
||||
)
|
||||
from app.scodoc.sco_permissions import Permission
|
||||
from app.scodoc.sco_utils import localize_datetime
|
||||
from tools.fakeportal.gen_nomprenoms import nomprenom
|
||||
|
||||
random.seed(12345678) # tests reproductibles
|
||||
@ -378,6 +381,56 @@ def create_logos():
|
||||
)
|
||||
|
||||
|
||||
def ajouter_assiduites_justificatifs(formsemestre: FormSemestre):
|
||||
"""
|
||||
Ajoute des assiduités semi-aléatoires à chaque étudiant du semestre
|
||||
"""
|
||||
MODS = [moduleimpl for moduleimpl in formsemestre.modimpls]
|
||||
MODS.append(None)
|
||||
|
||||
for etud in formsemestre.etuds:
|
||||
base_date = datetime.datetime(2022, 9, random.randint(1, 30), 8, 0, 0)
|
||||
base_date = localize_datetime(base_date)
|
||||
|
||||
for i in range(random.randint(1, 5)):
|
||||
etat = random.randint(0, 2)
|
||||
moduleimpl = random.choice(MODS)
|
||||
deb_date = base_date + datetime.timedelta(days=i)
|
||||
fin_date = deb_date + datetime.timedelta(hours=i)
|
||||
|
||||
code = Assiduite.create_assiduite(
|
||||
etud, deb_date, fin_date, etat, moduleimpl
|
||||
)
|
||||
|
||||
assert isinstance(
|
||||
code, Assiduite
|
||||
), "Erreur dans la génération des assiduités"
|
||||
|
||||
db.session.add(code)
|
||||
|
||||
for i in range(random.randint(0, 2)):
|
||||
etat = random.randint(0, 3)
|
||||
deb_date = base_date + datetime.timedelta(days=i)
|
||||
fin_date = deb_date + datetime.timedelta(hours=8)
|
||||
raison = random.choice(["raison", None])
|
||||
|
||||
code = Justificatif.create_justificatif(
|
||||
etud=etud,
|
||||
date_debut=deb_date,
|
||||
date_fin=fin_date,
|
||||
etat=etat,
|
||||
raison=raison,
|
||||
)
|
||||
|
||||
assert isinstance(
|
||||
code, Justificatif
|
||||
), "Erreur dans la génération des justificatifs"
|
||||
|
||||
db.session.add(code)
|
||||
|
||||
db.session.commit()
|
||||
|
||||
|
||||
def init_test_database():
|
||||
"""Appelé par la commande `flask init-test-database`
|
||||
|
||||
@ -398,6 +451,7 @@ def init_test_database():
|
||||
saisie_notes_evaluations(formsemestre, user_lecteur)
|
||||
add_absences(formsemestre)
|
||||
create_etape_apo(formsemestre)
|
||||
ajouter_assiduites_justificatifs(formsemestre)
|
||||
create_logos()
|
||||
# à compléter
|
||||
# - groupes
|
||||
|