Format all Python files (except migrations) with black
Lyanis Souidi 2024-06-05 10:23:21 +02:00
parent 1167c13787
commit d6d6899a53
Signed by: lyanis
GPG Key ID: 202150AA0DAB9FAC
29 changed files with 187 additions and 134 deletions
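
Nearly every hunk below is a mechanical restyle. The 2024 Black style parenthesizes a long conditional expression instead of splitting it bare, wraps the right-hand side of a long assignment in parentheses rather than breaking the subscript on the left, removes the parentheses around tuple targets in for loops, and inserts a blank line after the module docstring (which is all that the many one-line hunks add). The exact invocation is not recorded in the commit; it was presumably something like black . with the migrations directory excluded, for example via --extend-exclude or a pyproject.toml setting (an assumption, not taken from the commit). A minimal, hypothetical sketch of the dominant rewrite, using made-up names rather than ScoDoc code:

    import datetime

    moy_ue = None  # dummy value so the snippet runs
    session = {}

    # Older Black releases split a long conditional expression inside a
    # dict literal without parentheses:
    row = {
        "moy": None
        if moy_ue is None
        else round(moy_ue, 2),
    }

    # Black 24.x parenthesizes the conditional when it has to be split,
    # which is the change repeated throughout the hunks below:
    row = {
        "moy": (
            None
            if moy_ue is None
            else round(moy_ue, 2)
        ),
    }

    # Long assignments get the same treatment: the right-hand side is
    # wrapped in parentheses instead of splitting the subscript:
    session["login_date"] = (
        datetime.datetime.now().isoformat()
    )

These examples are short enough that Black would in fact keep each on a single line; the layout only becomes relevant once the real expressions exceed the 88-character limit, as in the hunks below.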

View File

@@ -1,5 +1,6 @@
 """api.__init__
 """
+
 from flask_json import as_json
 from flask import Blueprint
 from flask import request, g

View File

@@ -35,9 +35,9 @@ def after_cas_login():
     if user.cas_allow_login:
         current_app.logger.info(f"CAS: login {user.user_name}")
         if login_user(user):
-            flask.session[
-                "scodoc_cas_login_date"
-            ] = datetime.datetime.now().isoformat()
+            flask.session["scodoc_cas_login_date"] = (
+                datetime.datetime.now().isoformat()
+            )
             user.cas_last_login = datetime.datetime.utcnow()
             if flask.session.get("CAS_EDT_ID"):
                 # essaie de récupérer l'edt_id s'il est présent
@@ -45,8 +45,10 @@ def after_cas_login():
                 # via l'expression `cas_edt_id_from_xml_regexp`
                 # voir flask_cas.routing
                 edt_id = flask.session.get("CAS_EDT_ID")
-                current_app.logger.info(f"""after_cas_login: storing edt_id for {
-                    user.user_name}: '{edt_id}'""")
+                current_app.logger.info(
+                    f"""after_cas_login: storing edt_id for {
+                    user.user_name}: '{edt_id}'"""
+                )
                 user.edt_id = edt_id
                 db.session.add(user)
                 db.session.commit()

View File

@@ -14,6 +14,7 @@ from app.auth.models import User, is_valid_password
 _ = lambda x: x  # sans babel
 _l = _
+
 # See http://flask.pocoo.org/snippets/63/
 def is_safe_url(target):
     ref_url = urlparse(request.host_url)

View File

@@ -56,16 +56,20 @@ def _get_jury_but_etud_result(
     rcue_dict = {
         "ue_1": {
             "ue_id": rcue.ue_1.id,
-            "moy": None
-            if (dec_ue1.moy_ue is None or np.isnan(dec_ue1.moy_ue))
-            else dec_ue1.moy_ue,
+            "moy": (
+                None
+                if (dec_ue1.moy_ue is None or np.isnan(dec_ue1.moy_ue))
+                else dec_ue1.moy_ue
+            ),
             "code": dec_ue1.code_valide,
         },
         "ue_2": {
             "ue_id": rcue.ue_2.id,
-            "moy": None
-            if (dec_ue2.moy_ue is None or np.isnan(dec_ue2.moy_ue))
-            else dec_ue2.moy_ue,
+            "moy": (
+                None
+                if (dec_ue2.moy_ue is None or np.isnan(dec_ue2.moy_ue))
+                else dec_ue2.moy_ue
+            ),
             "code": dec_ue2.code_valide,
         },
         "moy": rcue.moy_rcue,

View File

@@ -345,9 +345,9 @@ def check_entreprises_import(m):
             adresse=entreprise_data["adresse"],
             ville=entreprise_data["ville"],
             codepostal=entreprise_data["code_postal"],
-            pays=entreprise_data["pays"]
-            if entreprise_data["pays"]
-            else "FRANCE",
+            pays=(
+                entreprise_data["pays"] if entreprise_data["pays"] else "FRANCE"
+            ),
             visible=True,
         )
         entreprises_import.append(entreprise_import)

View File

@@ -1,5 +1,6 @@
 """évènements scolaires dans la vie d'un étudiant(inscription, ...)
 """
+
 from app import db
 from app.models import SHORT_STR_LEN

View File

@@ -3,6 +3,7 @@ Gestion de l'archivage des justificatifs
 Ecrit par Matthias HARTMANN
 """
+
 import os
 from datetime import datetime
 from shutil import rmtree

View File

@@ -60,13 +60,15 @@ def formation_table_recap(formation_id, fmt="html") -> Response:
                 "_sem_order": f"{li:04d}",
                 "code": ue.acronyme,
                 "titre": ue.titre or "",
-                "_titre_target": url_for(
-                    "notes.ue_edit",
-                    scodoc_dept=g.scodoc_dept,
-                    ue_id=ue.id,
-                )
-                if can_edit
-                else None,
+                "_titre_target": (
+                    url_for(
+                        "notes.ue_edit",
+                        scodoc_dept=g.scodoc_dept,
+                        ue_id=ue.id,
+                    )
+                    if can_edit
+                    else None
+                ),
                 "apo": ue.code_apogee or "",
                 "_apo_td_attrs": f""" data-oid="{ue.id}" data-value="{ue.code_apogee or ''}" """,
                 "coef": ue.coefficient or "",
@@ -83,19 +85,23 @@ def formation_table_recap(formation_id, fmt="html") -> Response:
             # le module (ou ressource ou sae)
             T.append(
                 {
-                    "sem": f"S{mod.semestre_id}"
-                    if mod.semestre_id is not None
-                    else "-",
+                    "sem": (
+                        f"S{mod.semestre_id}"
+                        if mod.semestre_id is not None
+                        else "-"
+                    ),
                     "_sem_order": f"{li:04d}",
                     "code": mod.code,
                     "titre": mod.abbrev or mod.titre,
-                    "_titre_target": url_for(
-                        "notes.module_edit",
-                        scodoc_dept=g.scodoc_dept,
-                        module_id=mod.id,
-                    )
-                    if can_edit
-                    else None,
+                    "_titre_target": (
+                        url_for(
+                            "notes.module_edit",
+                            scodoc_dept=g.scodoc_dept,
+                            module_id=mod.id,
+                        )
+                        if can_edit
+                        else None
+                    ),
                     "apo": mod.code_apogee,
                     "_apo_td_attrs": f""" data-oid="{mod.id}" data-value="{mod.code_apogee or ''}" """,
                     "coef": mod.coefficient,

View File

@@ -466,9 +466,9 @@ def etud_add_group_infos(
        etud['groupes'] = "TDB, Gr2, TPB1"
        etud['partitionsgroupes'] = "Groupes TD:TDB, Groupes TP:Gr2 (...)"
    """
-    etud[
-        "partitions"
-    ] = collections.OrderedDict()  # partition_id : group + partition_name
+    etud["partitions"] = (
+        collections.OrderedDict()
+    )  # partition_id : group + partition_name
     if not formsemestre_id:
         etud["groupes"] = ""
         return etud

View File

@@ -32,7 +32,7 @@ def clone_partitions_and_groups(
     # Création des groupes dans les nouvelles partitions:
     for newpart in sco_groups.get_partitions_list(formsemestre_id):
-        for (new_partition_id, list_groups) in list_groups_per_part:
+        for new_partition_id, list_groups in list_groups_per_part:
             if newpart["partition_id"] == new_partition_id:
                 for group in list_groups:
                     new_group = sco_groups.create_group(

View File

@@ -86,9 +86,11 @@ def group_rename(group_id):
                "size": 12,
                "allow_null": False,
                "validator": lambda val, _: len(val) < GROUPNAME_STR_LEN,
-                "explanation": "doit être unique dans cette partition"
-                if group.partition.groups_editable
-                else "groupes non modifiables dans cette partition",
+                "explanation": (
+                    "doit être unique dans cette partition"
+                    if group.partition.groups_editable
+                    else "groupes non modifiables dans cette partition"
+                ),
                "enabled": group.partition.groups_editable,
            },
        ),

View File

@@ -585,8 +585,8 @@ def groups_table(
         etud_info["_nom_disp_order"] = etud_sort_key(etud_info)
         etud_info["_prenom_target"] = fiche_url
-        etud_info["_nom_disp_td_attrs"] = (
-            'id="%s" class="etudinfo"' % (etud_info["etudid"])
-        )
+        etud_info["_nom_disp_td_attrs"] = 'id="%s" class="etudinfo"' % (
+            etud_info["etudid"]
+        )
         etud_info["bourse_str"] = "oui" if etud_info["boursier"] else "non"
         if etud_info["etat"] == "D":

View File

@@ -2291,7 +2291,9 @@ class BasePreferences:
         if "explanation" in descr:
             del descr["explanation"]
         if formsemestre_id:
-            descr["explanation"] = f"""ou <span class="spanlink"
+            descr[
+                "explanation"
+            ] = f"""ou <span class="spanlink"
                 onclick="set_global_pref(this, '{pref_name}');"
                 >utiliser paramètre global</span>"""
         if formsemestre_id and self.is_global(formsemestre_id, pref_name):

View File

@@ -31,9 +31,9 @@ def trombino_doc(groups_infos):
     )
     section = document.sections[0]
     footer = section.footer
-    footer.paragraphs[
-        0
-    ].text = f"Généré par {sco_version.SCONAME} le {scu.timedate_human_repr()}"
+    footer.paragraphs[0].text = (
+        f"Généré par {sco_version.SCONAME} le {scu.timedate_human_repr()}"
+    )
     nb_images = len(groups_infos.members)
     table = document.add_table(rows=2 * (nb_images // N_PER_ROW + 1), cols=N_PER_ROW)

View File

@@ -419,9 +419,11 @@ def _get_dates_from_assi_form(
         ]
     ):
         form.set_error(
-            "La date de début n'appartient à aucun semestre de l'étudiant"
-            if formsemestre is None
-            else "La date de début n'appartient pas au semestre",
+            (
+                "La date de début n'appartient à aucun semestre de l'étudiant"
+                if formsemestre is None
+                else "La date de début n'appartient pas au semestre"
+            ),
             form.date_debut,
         )
@@ -433,9 +435,11 @@ def _get_dates_from_assi_form(
         ]
     ):
         form.set_error(
-            "La date de fin n'appartient à aucun semestre de l'étudiant"
-            if not formsemestre
-            else "La date de fin n'appartient pas au semestre",
+            (
+                "La date de fin n'appartient à aucun semestre de l'étudiant"
+                if not formsemestre
+                else "La date de fin n'appartient pas au semestre"
+            ),
             form.date_fin,
         )

View File

@@ -4,7 +4,6 @@ Tableau de bord utilisateur
 Emmanuel Viennet, 2023
 """
-
 from flask import flash, redirect, render_template, url_for
 from flask import g, request
 from flask_login import login_required

View File

@@ -2,6 +2,7 @@
 Routes for CAS authentication
 Modified for ScoDoc
 """
+
 import re
 import ssl
 from urllib.error import URLError

View File

@@ -7,65 +7,84 @@ Source: http://wikipython.flibuste.net/moin.py/JouerAvecUnicode#head-1213938516c
 """
 _reptable = {}
 def _fill_reptable():
     _corresp = [
-        (u"A", [0x00C0,0x00C1,0x00C2,0x00C3,0x00C4,0x00C5,0x0100,0x0102,0x0104]),
-        (u"AE", [0x00C6]),
-        (u"a", [0x00E0,0x00E1,0x00E2,0x00E3,0x00E4,0x00E5,0x0101,0x0103,0x0105]),
-        (u"ae", [0x00E6]),
-        (u"C", [0x00C7,0x0106,0x0108,0x010A,0x010C]),
-        (u"c", [0x00E7,0x0107,0x0109,0x010B,0x010D]),
-        (u"D", [0x00D0,0x010E,0x0110]),
-        (u"d", [0x00F0,0x010F,0x0111]),
-        (u"E", [0x00C8,0x00C9,0x00CA,0x00CB,0x0112,0x0114,0x0116,0x0118,0x011A]),
-        (u"e", [0x00E8,0x00E9,0x00EA,0x00EB,0x0113,0x0115,0x0117,0x0119,0x011B]),
-        (u"G", [0x011C,0x011E,0x0120,0x0122]),
-        (u"g", [0x011D,0x011F,0x0121,0x0123]),
-        (u"H", [0x0124,0x0126]),
-        (u"h", [0x0125,0x0127]),
-        (u"I", [0x00CC,0x00CD,0x00CE,0x00CF,0x0128,0x012A,0x012C,0x012E,0x0130]),
-        (u"i", [0x00EC,0x00ED,0x00EE,0x00EF,0x0129,0x012B,0x012D,0x012F,0x0131]),
-        (u"IJ", [0x0132]),
-        (u"ij", [0x0133]),
-        (u"J", [0x0134]),
-        (u"j", [0x0135]),
-        (u"K", [0x0136]),
-        (u"k", [0x0137,0x0138]),
-        (u"L", [0x0139,0x013B,0x013D,0x013F,0x0141]),
-        (u"l", [0x013A,0x013C,0x013E,0x0140,0x0142]),
-        (u"N", [0x00D1,0x0143,0x0145,0x0147,0x014A]),
-        (u"n", [0x00F1,0x0144,0x0146,0x0148,0x0149,0x014B]),
-        (u"O", [0x00D2,0x00D3,0x00D4,0x00D5,0x00D6,0x00D8,0x014C,0x014E,0x0150]),
-        (u"o", [0x00F2,0x00F3,0x00F4,0x00F5,0x00F6,0x00F8,0x014D,0x014F,0x0151]),
-        (u"OE", [0x0152]),
-        (u"oe", [0x0153]),
-        (u"R", [0x0154,0x0156,0x0158]),
-        (u"r", [0x0155,0x0157,0x0159]),
-        (u"S", [0x015A,0x015C,0x015E,0x0160]),
-        (u"s", [0x015B,0x015D,0x015F,0x01610,0x017F]),
-        (u"T", [0x0162,0x0164,0x0166]),
-        (u"t", [0x0163,0x0165,0x0167]),
-        (u"U", [0x00D9,0x00DA,0x00DB,0x00DC,0x0168,0x016A,0x016C,0x016E,0x0170,0x172]),
-        (u"u", [0x00F9,0x00FA,0x00FB,0x00FC,0x0169,0x016B,0x016D,0x016F,0x0171]),
-        (u"W", [0x0174]),
-        (u"w", [0x0175]),
-        (u"Y", [0x00DD,0x0176,0x0178]),
-        (u"y", [0x00FD,0x00FF,0x0177]),
-        (u"Z", [0x0179,0x017B,0x017D]),
-        (u"z", [0x017A,0x017C,0x017E]),
-        (u"2", [0x00B2]), # deux exposant
-        (u" ", [0x00A0]), # &nbsp
-        (u"", [0xB0]), # degre
-        (u"", [0xA9]), # copyright
-        (u"1/2", [0xBD]), # 1/2
+        ("A", [0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x0100, 0x0102, 0x0104]),
+        ("AE", [0x00C6]),
+        ("a", [0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x0101, 0x0103, 0x0105]),
+        ("ae", [0x00E6]),
+        ("C", [0x00C7, 0x0106, 0x0108, 0x010A, 0x010C]),
+        ("c", [0x00E7, 0x0107, 0x0109, 0x010B, 0x010D]),
+        ("D", [0x00D0, 0x010E, 0x0110]),
+        ("d", [0x00F0, 0x010F, 0x0111]),
+        ("E", [0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x0112, 0x0114, 0x0116, 0x0118, 0x011A]),
+        ("e", [0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0113, 0x0115, 0x0117, 0x0119, 0x011B]),
+        ("G", [0x011C, 0x011E, 0x0120, 0x0122]),
+        ("g", [0x011D, 0x011F, 0x0121, 0x0123]),
+        ("H", [0x0124, 0x0126]),
+        ("h", [0x0125, 0x0127]),
+        ("I", [0x00CC, 0x00CD, 0x00CE, 0x00CF, 0x0128, 0x012A, 0x012C, 0x012E, 0x0130]),
+        ("i", [0x00EC, 0x00ED, 0x00EE, 0x00EF, 0x0129, 0x012B, 0x012D, 0x012F, 0x0131]),
+        ("IJ", [0x0132]),
+        ("ij", [0x0133]),
+        ("J", [0x0134]),
+        ("j", [0x0135]),
+        ("K", [0x0136]),
+        ("k", [0x0137, 0x0138]),
+        ("L", [0x0139, 0x013B, 0x013D, 0x013F, 0x0141]),
+        ("l", [0x013A, 0x013C, 0x013E, 0x0140, 0x0142]),
+        ("N", [0x00D1, 0x0143, 0x0145, 0x0147, 0x014A]),
+        ("n", [0x00F1, 0x0144, 0x0146, 0x0148, 0x0149, 0x014B]),
+        ("O", [0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D8, 0x014C, 0x014E, 0x0150]),
+        ("o", [0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F8, 0x014D, 0x014F, 0x0151]),
+        ("OE", [0x0152]),
+        ("oe", [0x0153]),
+        ("R", [0x0154, 0x0156, 0x0158]),
+        ("r", [0x0155, 0x0157, 0x0159]),
+        ("S", [0x015A, 0x015C, 0x015E, 0x0160]),
+        ("s", [0x015B, 0x015D, 0x015F, 0x01610, 0x017F]),
+        ("T", [0x0162, 0x0164, 0x0166]),
+        ("t", [0x0163, 0x0165, 0x0167]),
+        (
+            "U",
+            [
+                0x00D9,
+                0x00DA,
+                0x00DB,
+                0x00DC,
+                0x0168,
+                0x016A,
+                0x016C,
+                0x016E,
+                0x0170,
+                0x172,
+            ],
+        ),
+        ("u", [0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x016D, 0x016F, 0x0171]),
+        ("W", [0x0174]),
+        ("w", [0x0175]),
+        ("Y", [0x00DD, 0x0176, 0x0178]),
+        ("y", [0x00FD, 0x00FF, 0x0177]),
+        ("Z", [0x0179, 0x017B, 0x017D]),
+        ("z", [0x017A, 0x017C, 0x017E]),
+        ("2", [0x00B2]),  # deux exposant
+        (" ", [0x00A0]),  # &nbsp
+        ("", [0xB0]),  # degre
+        ("", [0xA9]),  # copyright
+        ("1/2", [0xBD]),  # 1/2
     ]
     global _reptable
-    for repchar,codes in _corresp :
-        for code in codes :
+    for repchar, codes in _corresp:
+        for code in codes:
             _reptable[code] = repchar
 _fill_reptable()
-def suppression_diacritics(s) :
+def suppression_diacritics(s):
     """Suppression des accents et autres marques.
     @param s: le texte à nettoyer.
@@ -73,6 +92,6 @@ def suppression_diacritics(s) :
     @return: le texte nettoyé de ses marques diacritiques.
     @rtype: unicode
     """
-    if isinstance(s,str) :
-        s = unicode(s,"utf8","replace")
+    if isinstance(s, str):
+        s = unicode(s, "utf8", "replace")
     return s.translate(_reptable)

View File

@@ -34,7 +34,7 @@ response = urllib2.urlopen(req)
 # Affiche la liste des formations en format XML
 req = urllib2.Request(BASEURL + "/Notes/formation_list?fmt=xml")
 response = urllib2.urlopen(req)
-print response.read()[:100] # limite aux 100 premiers caracteres...
+print(response.read()[:100])  # limite aux 100 premiers caracteres...
 # Recupere la liste de tous les semestres:
 req = urllib2.Request(BASEURL + "/Notes/formsemestre_list?fmt=json")  # format json
@@ -49,7 +49,7 @@ except:
 data = json.loads(js_data)  # decode la reponse JSON
 if not data:
-    print "Aucun semestre !"
+    print("Aucun semestre !")
 else:
     formsemestre_id = str(data[0]["formsemestre_id"])
     # Obtient la liste des groupes:
@@ -73,7 +73,7 @@ else:
     data = json.loads(js_data)
     # Le code du premier étudiant:
     if not data:
-        print ("pas d'etudiants dans ce semestre !")
+        print("pas d'etudiants dans ce semestre !")
     else:
         etudid = data[0]["etudid"]
         # Récupère bulletin de notes:
@@ -87,8 +87,8 @@ else:
         )  # format XML ici !
         response = urllib2.urlopen(req)
         xml_bulletin = response.read()
-        print "----- Bulletin de notes en XML:"
-        print xml_bulletin
+        print("----- Bulletin de notes en XML:")
+        print(xml_bulletin)
         # Récupère la moyenne générale:
         import xml.dom.minidom
@@ -96,4 +96,4 @@ else:
         moy = doc.getElementsByTagName("note")[0].getAttribute(
             "value"
         )  # une chaine unicode
-        print "\nMoyenne generale: ", moy
+        print("\nMoyenne generale: ", moy)

View File

@@ -35,31 +35,31 @@ Utilise geopy http://www.geopy.org/ et l'API Google
 from geopy import geocoders
 import time
-SOURCE = '../config/etablissements-orig.csv'
-DEST = '../config/etablissements-geocode.csv'
-g = geocoders.Google(domain="maps.google.fr") #, api_key='XXX')
+SOURCE = "../config/etablissements-orig.csv"
+DEST = "../config/etablissements-geocode.csv"
+g = geocoders.Google(domain="maps.google.fr")  # , api_key='XXX')
 inf = open(SOURCE)
-out = open(DEST, 'w')
+out = open(DEST, "w")
 head = inf.readline()
-out.write(head.strip() + ';LAT;LNG'+'\n')
+out.write(head.strip() + ";LAT;LNG" + "\n")
 for line in inf:
-    address = ' '.join(line.split(';')[2:]).strip()
-    print address
+    address = " ".join(line.split(";")[2:]).strip()
+    print(address)
     try:
         place, (lat, lng) = g.geocode(address)
     except:  # multiple possible locations ?
         time.sleep(0.11)
         try:
-            place, (lat, lng) = g.geocode(address + ' France', exactly_one=False)[0]
+            place, (lat, lng) = g.geocode(address + " France", exactly_one=False)[0]
         except:
-            place, (lat, lng) = 'NOT FOUND', (0.,0.)
-    print "%s: %.5f, %.5f" % (address, lat, lng)
-    out.write( line.strip() + ';%s;%s\n' % (lat,lng) )
+            place, (lat, lng) = "NOT FOUND", (0.0, 0.0)
+    print(f"{address}: {lat:.5f}, {lng:.5f}")
+    out.write(line.strip() + ";%s;%s\n" % (lat, lng))
     time.sleep(0.11)  # Google API Rate limit of 10 requests per second.
 inf.close()
 out.close()

View File

@@ -23,6 +23,7 @@ idx_nom = 0
 SCO_ENCODING = "iso8859-15"
+
 # from SuppressAccents import suppression_diacritics
 # XXX a revoir si ce script est utile: en python3, unicodedata.normalize("NFD", s).encode("ascii", "ignore").decode(SCO_ENCODING)
 def suppr_acc_and_ponct(s):
     s = s.replace(" ", "")
@@ -42,7 +43,7 @@ for row in reader:
     if row[0][0] != "#":
         key = make_key(row[idx_nom], row[idx_prenom])
         if noms.has_key(key):
-            raise ValueError, "duplicate key: %s" % key
+            raise ValueError(f"duplicate key: {key}")
         noms[key] = row
 cnx = psycopg.connect(DBCNXSTRING)
@@ -55,16 +56,16 @@ cursor.execute(
 R = cursor.dictfetchall()
 nok = 0
-print "nom,prenom,ine,nip"
+print("nom,prenom,ine,nip")
 for e in R:
     key = make_key(e["nom"], e["prenom"])
     if not noms.has_key(key):
-        print "** no match for %s (%s)" % (key, e["etudid"])
+        print(f"** no match for {key} ({e["etudid"]})")
     else:
         info = noms[key]
-        print "%s,%s,%s,%s" % (e["nom"], e["prenom"], e["code_ine"], e["code_nip"])
+        print(f"{e["nom"]},{e["prenom"]},{e["code_ine"]},{e["code_nip"]}")
         nok += 1
 cnx.commit()
-print "%d etudiants, %d ok" % (len(R), nok)
+print(f"{len(R)} etudiants, {nok} ok")

View File

@@ -14,6 +14,7 @@ Utilisation :
 Lancer :
 pytest tests/api/test_api_formsemestre.py
 """
+
 import requests
 from app.scodoc import sco_utils as scu

View File

@@ -1,5 +1,6 @@
 """Utilitaires pour les tests de l'API
 """
+
 import json

View File

@@ -1,6 +1,7 @@
 """
 Quelques fonctions d'initialisation pour tests unitaires
 """
+
 import datetime
 from app import db, models
@@ -82,9 +83,11 @@ def build_formation_test(
             coefficient=1.0,
             titre=f"module test {i}",
             semestre_id=2,
-            module_type=scu.ModuleType.RESSOURCE
-            if parcours.APC_SAE
-            else scu.ModuleType.STANDARD,
+            module_type=(
+                scu.ModuleType.RESSOURCE
+                if parcours.APC_SAE
+                else scu.ModuleType.STANDARD
+            ),
         )
         module_ids.append(module_id)
     if with_ue_sport:

View File

@@ -4,6 +4,7 @@ Utiliser comme:
 pytest tests/unit/test_bulletin_bonus.py
 """
+
 from app.but.bulletin_but_pdf import BulletinGeneratorStandardBUT

View File

@@ -2,6 +2,7 @@
 Vérif moyennes de modules des bulletins
 et aussi moyennes modules et UE internes (via nt)
 """
+
 import datetime
 import numpy as np
 from flask import g

View File

@@ -3,6 +3,7 @@ Commande permettant de supprimer les assiduités et les justificatifs
 Ecrit par Matthias HARTMANN
 """
+
 import sqlalchemy as sa
 from app import db

View File

@@ -185,7 +185,7 @@ def import_scodoc7_dept(dept_id: str, dept_db_uri=None):
     default_user = get_super_admin()
     #
     t0 = time.time()
-    for (table, id_name) in SCO7_TABLES_ORDONNEES:
+    for table, id_name in SCO7_TABLES_ORDONNEES:
         logging.info(f"{dept.acronym}: converting {table}...")
         klass = get_class_for_table(table)
         t1 = time.time()

View File

@@ -3,6 +3,7 @@ Script de migration des données de la base "absences" -> "assiduites"/"justific
 Ecrit par Matthias HARTMANN
 """
+
 from datetime import date, datetime, time, timedelta
 from json import dump, dumps
 from sqlalchemy import not_