forked from ScoDoc/ScoDoc

WIP: ajustements pour upgrade SQLAlchemy

Emmanuel Viennet 2023-04-04 09:57:54 +02:00
parent 2248090248
commit cd24fe53d5
13 changed files with 128 additions and 96 deletions

View File

@@ -3,6 +3,7 @@
import base64
import datetime
import json
import os
import socket
import sys
@@ -12,12 +13,13 @@ import traceback
import logging
from logging.handlers import SMTPHandler, WatchedFileHandler
from threading import Thread
import warnings
import flask
from flask import current_app, g, request
from flask import Flask
from flask import abort, flash, has_request_context, jsonify
from flask import render_template
from flask.json import JSONEncoder
from flask.logging import default_handler
from flask_bootstrap import Bootstrap
@@ -42,6 +44,8 @@ from app.scodoc.sco_exceptions import (
ScoValueError,
APIInvalidParams,
)
from app.scodoc.sco_vdi import ApoEtapeVDI
from config import DevConfig
import sco_version
@@ -140,12 +144,14 @@ def handle_invalid_usage(error):
# JSON ENCODING
class ScoDocJSONEncoder(flask.json.provider.DefaultJSONProvider):
def default(self, o):
if isinstance(o, (datetime.datetime, datetime.date)):
class ScoDocJSONEncoder(JSONEncoder):
def default(self, o): # pylint: disable=E0202
if isinstance(o, (datetime.date, datetime.datetime)):
return o.isoformat()
return super().default(o)
elif isinstance(o, ApoEtapeVDI):
return str(o)
else:
return json.JSONEncoder.default(self, o)
def render_raw_html(template_filename: str, **args) -> str:
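Note: Flask 2.2 deprecated flask.json.JSONEncoder in favour of JSON providers, which is what the rewritten ScoDocJSONEncoder above targets by subclassing flask.json.provider.DefaultJSONProvider. A minimal sketch of how such a provider is typically wired into an app (class and app names are illustrative, not taken from this commit):

    import datetime
    from flask import Flask
    from flask.json.provider import DefaultJSONProvider

    class DateAwareJSONProvider(DefaultJSONProvider):
        def default(self, o):
            # serialize dates as ISO strings, defer everything else to Flask
            if isinstance(o, (datetime.date, datetime.datetime)):
                return o.isoformat()
            return super().default(o)

    app = Flask(__name__)
    app.json = DateAwareJSONProvider(app)  # replaces the default provider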
@@ -258,6 +264,10 @@ def create_app(config_class=DevConfig):
# Evite de logguer toutes les requetes dans notre log
logging.getLogger("werkzeug").disabled = True
app.logger.setLevel(app.config["LOG_LEVEL"])
if app.config["TESTING"] or app.config["DEBUG"]:
# S'arrête sur tous les warnings, sauf
# flask_sqlalchemy/query (pb deprecation du model.get())
warnings.filterwarnings("error", module="flask_sqlalchemy/query")
# Vérifie/crée lien sym pour les URL statiques
link_filename = f"{app.root_path}/static/links/{sco_version.SCOVERSION}"

View File

@@ -4,6 +4,7 @@
"""Matrices d'inscription aux modules d'un semestre
"""
import pandas as pd
import sqlalchemy as sa
from app import db
@@ -12,6 +13,13 @@ from app import db
# sur test debug 116 etuds, 18 modules, on est autour de 250ms.
# On a testé trois approches, ci-dessous (et retenu la 1ere)
#
_load_modimpl_inscr_q = sa.text(
"""SELECT etudid, 1 AS ":moduleimpl_id"
FROM notes_moduleimpl_inscription
WHERE moduleimpl_id=:moduleimpl_id"""
)
def df_load_modimpl_inscr(formsemestre) -> pd.DataFrame:
"""Charge la matrice des inscriptions aux modules du semestre
rows: etudid (inscrits au semestre, avec DEM et DEF)
@@ -22,12 +30,11 @@ def df_load_modimpl_inscr(formsemestre) -> pd.DataFrame:
moduleimpl_ids = [m.id for m in formsemestre.modimpls_sorted]
etudids = [inscr.etudid for inscr in formsemestre.inscriptions]
df = pd.DataFrame(index=etudids, dtype=int)
with db.engine.begin() as connection:
for moduleimpl_id in moduleimpl_ids:
ins_df = pd.read_sql_query(
"""SELECT etudid, 1 AS "%(moduleimpl_id)s"
FROM notes_moduleimpl_inscription
WHERE moduleimpl_id=%(moduleimpl_id)s""",
db.engine,
_load_modimpl_inscr_q,
connection,
params={"moduleimpl_id": moduleimpl_id},
index_col="etudid",
dtype=int,
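Under SQLAlchemy 2.0, textual SQL must go through sa.text() and pandas is handed an active Connection (here from db.engine.begin()) instead of the Engine plus a pyformat string. A self-contained sketch of the same pattern with an in-memory database and a made-up table (names are illustrative, not ScoDoc's schema):

    import pandas as pd
    import sqlalchemy as sa

    engine = sa.create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(sa.text("CREATE TABLE inscr (etudid INTEGER, moduleimpl_id INTEGER)"))
        conn.execute(sa.text("INSERT INTO inscr VALUES (10, 1), (11, 1), (12, 2)"))

    query = sa.text("SELECT etudid, 1 AS inscrit FROM inscr WHERE moduleimpl_id = :moduleimpl_id")
    with engine.begin() as connection:
        df = pd.read_sql_query(
            query, connection, params={"moduleimpl_id": 1}, index_col="etudid", dtype=int
        )
    # df has one row per inscribed etudid, as in df_load_modimpl_inscr above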

View File

@@ -7,6 +7,7 @@
"""Stockage des décisions de jury
"""
import pandas as pd
import sqlalchemy as sa
from app import db
from app.models import FormSemestre, Identite, ScolarFormSemestreValidation, UniteEns
@@ -132,7 +133,8 @@ def formsemestre_get_ue_capitalisees(formsemestre: FormSemestre) -> pd.DataFrame
# Note: pour récupérer aussi les UE validées en CMp ou ADJ, changer une ligne
# and ( SFV.code = 'ADM' or SFV.code = 'ADJ' or SFV.code = 'CMP' )
query = """
query = sa.text(
"""
SELECT DISTINCT SFV.*, ue.ue_code
FROM
notes_ue ue,
@@ -144,21 +146,22 @@ def formsemestre_get_ue_capitalisees(formsemestre: FormSemestre) -> pd.DataFrame
WHERE ue.formation_id = nf.id
and nf.formation_code = nf2.formation_code
and nf2.id=%(formation_id)s
and nf2.id=:formation_id
and ins.etudid = SFV.etudid
and ins.formsemestre_id = %(formsemestre_id)s
and ins.formsemestre_id = :formsemestre_id
and SFV.ue_id = ue.id
and SFV.code = 'ADM'
and ( (sem.id = SFV.formsemestre_id
and sem.date_debut < %(date_debut)s
and sem.semestre_id = %(semestre_id)s )
and sem.date_debut < :date_debut
and sem.semestre_id = :semestre_id )
or (
((SFV.formsemestre_id is NULL) OR (SFV.is_external)) -- les UE externes ou "anterieures"
AND (SFV.semestre_id is NULL OR SFV.semestre_id=%(semestre_id)s)
AND (SFV.semestre_id is NULL OR SFV.semestre_id=:semestre_id)
) )
"""
)
params = {
"formation_id": formsemestre.formation.id,
"formsemestre_id": formsemestre.id,
@@ -166,5 +169,6 @@ def formsemestre_get_ue_capitalisees(formsemestre: FormSemestre) -> pd.DataFrame
"date_debut": formsemestre.date_debut,
}
df = pd.read_sql_query(query, db.engine, params=params, index_col="etudid")
with db.engine.begin() as connection:
df = pd.read_sql_query(query, connection, params=params, index_col="etudid")
return df
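Aside from wrapping the statement in sa.text() and executing it on a connection from db.engine.begin(), the only change to the SQL itself is the placeholder style: psycopg2's pyformat markers (%(formsemestre_id)s) become SQLAlchemy named parameters (:formsemestre_id). A small sketch of the conversion on a made-up table:

    import sqlalchemy as sa

    engine = sa.create_engine("sqlite://")
    with engine.begin() as conn:
        conn.execute(sa.text("CREATE TABLE sem (id INTEGER, titre TEXT)"))
        conn.execute(sa.text("INSERT INTO sem VALUES (1, 'S1')"))
        # before: "SELECT titre FROM sem WHERE id = %(formsemestre_id)s"  (resolved by the DBAPI driver)
        # after: a named parameter resolved through sa.text()
        row = conn.execute(
            sa.text("SELECT titre FROM sem WHERE id = :formsemestre_id"),
            {"formsemestre_id": 1},
        ).fetchone()
    # row == ('S1',)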

View File

@@ -38,6 +38,7 @@ from dataclasses import dataclass
import numpy as np
import pandas as pd
import sqlalchemy as sa
import app
from app import db
@@ -192,18 +193,23 @@ class ModuleImplResults:
evals_notes.columns = pd.Index([int(x) for x in evals_notes.columns], dtype=int)
self.evals_notes = evals_notes
_load_evaluation_notes_q = sa.text(
"""SELECT n.etudid, n.value AS ":evaluation_id"
FROM notes_notes n, notes_moduleimpl_inscription i
WHERE evaluation_id=:evaluation_id
AND n.etudid = i.etudid
AND i.moduleimpl_id = :moduleimpl_id
"""
)
def _load_evaluation_notes(self, evaluation: Evaluation) -> pd.DataFrame:
"""Charge les notes de l'évaluation
Resultat: dataframe, index: etudid ayant une note, valeur: note brute.
"""
with db.engine.begin() as connection:
eval_df = pd.read_sql_query(
"""SELECT n.etudid, n.value AS "%(evaluation_id)s"
FROM notes_notes n, notes_moduleimpl_inscription i
WHERE evaluation_id=%(evaluation_id)s
AND n.etudid = i.etudid
AND i.moduleimpl_id = %(moduleimpl_id)s
""",
db.engine,
self._load_evaluation_notes_q,
connection,
params={
"evaluation_id": evaluation.id,
"moduleimpl_id": evaluation.moduleimpl.id,

View File

@@ -206,20 +206,14 @@ class UniteEns(db.Model):
Si niveau est None, désassocie.
"""
if niveau is not None:
self._check_apc_conflict(niveau.id, self.parcour_id)
# Le niveau est-il dans le parcours ? Sinon, erreur
if self.parcour and niveau.id not in (
n.id
for n in niveau.niveaux_annee_de_parcours(
self.parcour, self.annee(), self.formation.referentiel_competence
if niveau.id == self.niveau_competence_id:
return True # nothing to do
if (niveau is not None) and (self.niveau_competence_id is not None):
ok, error_message = self.check_niveau_unique_dans_parcours(
niveau, self.parcours
)
):
log(
f"set_niveau_competence: niveau {niveau} hors parcours {self.parcour}"
)
return
if not ok:
return ok, error_message
self.niveau_competence = niveau
db.session.add(self)

View File

@@ -40,7 +40,6 @@ Par exemple, la clé '_css_row_class' spécifie le style CSS de la ligne.
"""
from __future__ import print_function
import random
from collections import OrderedDict
from xml.etree import ElementTree
@@ -60,7 +59,7 @@ from app.scodoc import sco_pdf
from app.scodoc import sco_xml
from app.scodoc.sco_exceptions import ScoPDFFormatError
from app.scodoc.sco_pdf import SU
from app import log
from app import log, ScoDocJSONEncoder
def mark_paras(L, tags) -> list[str]:
@@ -647,7 +646,7 @@ class GenTable(object):
# v = str(v)
r[cid] = v
d.append(r)
return json.dumps(d, cls=scu.ScoDocJSONEncoder)
return json.dumps(d, cls=ScoDocJSONEncoder)
def make_page(
self,
@@ -758,7 +757,7 @@ class SeqGenTable(object):
def excel(self):
"""Export des genTables dans un unique fichier excel avec plusieurs feuilles tagguées"""
book = sco_excel.ScoExcelBook() # pylint: disable=no-member
for (_, gt) in self.genTables.items():
for _, gt in self.genTables.items():
gt.excel(wb=book) # Ecrit dans un fichier excel
return book.generate()

View File

@@ -64,7 +64,7 @@ from flask import flash, g, request, url_for
import app.scodoc.sco_utils as scu
from config import Config
from app import log
from app import log, ScoDocJSONEncoder
from app.but import jury_but_pv
from app.comp import res_sem
from app.comp.res_compat import NotesTableCompat
@@ -360,7 +360,7 @@ def do_formsemestre_archive(
# Bulletins en JSON
data = gen_formsemestre_recapcomplet_json(formsemestre_id, xml_with_decisions=True)
data_js = json.dumps(data, indent=1, cls=scu.ScoDocJSONEncoder)
data_js = json.dumps(data, indent=1, cls=ScoDocJSONEncoder)
if data:
PVArchive.store(archive_id, "Bulletins.json", data_js)
# Décisions de jury, en XLS

View File

@@ -33,6 +33,7 @@ import json
from flask import abort
from app import ScoDocJSONEncoder
from app.comp import res_sem
from app.comp.res_compat import NotesTableCompat
from app.models import but_validations
@@ -74,7 +75,7 @@ def make_json_formsemestre_bulletinetud(
version=version,
)
return json.dumps(d, cls=scu.ScoDocJSONEncoder)
return json.dumps(d, cls=ScoDocJSONEncoder)
# (fonction séparée: n'utilise pas formsemestre_bulletinetud_dict()

View File

@@ -111,7 +111,7 @@ get_base_preferences(formsemestre_id)
"""
import flask
from flask import g, request, url_for
from flask import current_app, g, request, url_for
# from flask_login import current_user
@@ -1956,6 +1956,7 @@ class BasePreferences(object):
value = _get_pref_default_value_from_config(name, pref[1])
self.default[name] = value
self.prefs[None][name] = value
if not current_app.testing:
log(f"creating missing preference for {name}={value}")
# add to db table
self._editor.create(
@@ -2310,7 +2311,7 @@ function set_global_pref(el, pref_name) {
self.formsemestre_id, tf[2]["create_local"], cur_value
)
# Modifie valeurs:
for (pref_name, descr) in self.base_prefs.prefs_definition:
for pref_name, descr in self.base_prefs.prefs_definition:
if (
pref_name in tf[2]
and not descr.get("only_global", False)

View File

@@ -152,7 +152,7 @@ def _check_notes(notes: list[(int, float)], evaluation: dict, mod: dict):
absents = [] # etudid absents
tosuppress = [] # etudids avec ancienne note à supprimer
for (etudid, note) in notes:
for etudid, note in notes:
note = str(note).strip().upper()
try:
etudid = int(etudid) #
@@ -536,7 +536,7 @@ def notes_add(
evaluation_id, getallstudents=True, include_demdef=True
)
}
for (etudid, value) in notes:
for etudid, value in notes:
if check_inscription and (etudid not in inscrits):
raise NoteProcessError(f"etudiant {etudid} non inscrit dans ce module")
if (value is not None) and not isinstance(value, float):
@@ -556,7 +556,7 @@ def notes_add(
[]
) # etudids pour lesquels il y a une decision de jury et que la note change
try:
for (etudid, value) in notes:
for etudid, value in notes:
changed = False
if etudid not in notes_db:
# nouvelle note
@@ -657,6 +657,7 @@ def notes_add(
formsemestre_id=M["formsemestre_id"]
) # > modif notes (exception)
sco_cache.EvaluationCache.delete(evaluation_id)
raise # XXX
raise ScoGenError("Erreur enregistrement note: merci de ré-essayer") from exc
if do_it:
cnx.commit()

View File

@@ -56,8 +56,8 @@ from flask import flash, url_for, make_response, jsonify
from werkzeug.http import HTTP_STATUS_CODES
from config import Config
from app import log
from app.scodoc.sco_vdi import ApoEtapeVDI
from app import log, ScoDocJSONEncoder
from app.scodoc.codes_cursus import NOTES_TOLERANCE, CODES_EXPL
from app.scodoc import sco_xml
import sco_version
@@ -690,16 +690,6 @@ def sendPDFFile(data, filename): # DEPRECATED utiliser send_file
return send_file(data, filename=filename, mime=PDF_MIMETYPE, attached=True)
class ScoDocJSONEncoder(flask.json.provider.DefaultJSONProvider):
def default(self, o): # pylint: disable=E0202
if isinstance(o, (datetime.date, datetime.datetime)):
return o.isoformat()
elif isinstance(o, ApoEtapeVDI):
return str(o)
else:
return json.JSONEncoder.default(self, o)
def sendJSON(data, attached=False, filename=None):
js = json.dumps(data, indent=1, cls=ScoDocJSONEncoder)
return send_file(

View File

@@ -1,9 +1,10 @@
from __future__ import with_statement
# Copied 2023-04-03 from
# https://raw.githubusercontent.com/miguelgrinberg/Flask-Migrate/main/src/flask_migrate/templates/flask/env.py
import logging
from logging.config import fileConfig
from flask import current_app
import flask_sqlalchemy
from alembic import context
@@ -14,17 +15,31 @@ config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
logger = logging.getLogger("alembic.env")
def get_engine():
if int(flask_sqlalchemy.__version__[0]) < 3: # <--------- MODIFIED By EMMANUEL
# this works with Flask-SQLAlchemy<3 and Alchemical
return current_app.extensions["migrate"].db.get_engine()
else:
# this works with Flask-SQLAlchemy>=3
return current_app.extensions["migrate"].db.engine
def get_engine_url():
try:
return get_engine().url.render_as_string(hide_password=False).replace("%", "%%")
except AttributeError:
return str(get_engine().url).replace("%", "%%")
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.get_engine().url).replace(
'%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
config.set_main_option("sqlalchemy.url", get_engine_url())
target_db = current_app.extensions["migrate"].db
# other values from the config, defined by the needs of env.py,
# can be acquired:
@@ -32,6 +47,12 @@ target_metadata = current_app.extensions['migrate'].db.metadata
# ... etc.
def get_metadata():
if hasattr(target_db, "metadatas"):
return target_db.metadatas[None]
return target_db.metadata
def run_migrations_offline():
"""Run migrations in 'offline' mode.
@@ -45,9 +66,7 @@ def run_migrations_offline():
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True
)
context.configure(url=url, target_metadata=get_metadata(), literal_binds=True)
with context.begin_transaction():
context.run_migrations()
@@ -65,20 +84,20 @@ def run_migrations_online():
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
if getattr(config.cmd_opts, "autogenerate", False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
logger.info("No changes in schema detected.")
connectable = current_app.extensions['migrate'].db.get_engine()
connectable = get_engine()
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
target_metadata=get_metadata(),
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
**current_app.extensions["migrate"].configure_args
)
with context.begin_transaction():
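The updated env.py template distinguishes Flask-SQLAlchemy 2 from 3 by parsing flask_sqlalchemy.__version__ and then calls db.get_engine() or reads db.engine accordingly. A more defensive variant (a sketch, not part of this commit) reads the installed distribution version instead of the module attribute:

    from importlib.metadata import version

    FSA_MAJOR = int(version("Flask-SQLAlchemy").split(".")[0])

    def get_engine(db):
        # db is the Flask-SQLAlchemy extension object
        return db.engine if FSA_MAJOR >= 3 else db.get_engine()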

View File

@@ -178,7 +178,7 @@ class ScoFake(object):
self,
formation_id=None,
acronyme=None,
numero=None,
numero=0,
titre="",
type=None,
ue_code=None,
@@ -200,7 +200,7 @@ class ScoFake(object):
return oid
@logging_meth
def create_matiere(self, ue_id=None, titre=None, numero=None) -> int:
def create_matiere(self, ue_id=None, titre=None, numero=0) -> int:
oid = sco_edit_matiere.do_matiere_create(locals())
oids = sco_edit_matiere.matiere_list(args={"matiere_id": oid})
if not oids:
@@ -218,7 +218,7 @@ class ScoFake(object):
coefficient=None,
matiere_id=None,
semestre_id=1,
numero=None,
numero=0,
abbrev=None,
ects=None,
code_apogee=None,