Skip to content

Commit

Permalink
REQ-456 Retire python3.5 to support psqlgraph (#401)
Browse files Browse the repository at this point in the history
* REQ-456 Retire python3.5 to support psqlgraph
  • Loading branch information
stilesj-uchicago committed Apr 13, 2023
1 parent ee19295 commit 4489324
Show file tree
Hide file tree
Showing 22 changed files with 86 additions and 74 deletions.
1 change: 0 additions & 1 deletion .travis.yml
Expand Up @@ -3,7 +3,6 @@ language: python
dist: focal

python:
- 3.5
- 3.6.8
- 3.7
- 3.8
Expand Down
4 changes: 2 additions & 2 deletions bin/update_related_case_caches.py
Expand Up @@ -57,7 +57,7 @@ def update_project_related_case_cache(project):
"""

logger.info("Project: {}".format(project.code))
logger.info(f"Project: {project.code}")
for case in project.cases:
recursive_update_related_case_caches(case, case)

Expand All @@ -83,7 +83,7 @@ def main():
)

args = parser.parse_args()
prompt = "Password for {}:".format(args.user)
prompt = f"Password for {args.user}:"
password = args.password or getpass.getpass(prompt)
g = PsqlGraphDriver(args.host, args.user, password, args.database)

Expand Down
25 changes: 12 additions & 13 deletions dev-requirements.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile
# This file is autogenerated by pip-compile with python 3.6
# To update, run:
#
# pip-compile dev-requirements.in
Expand Down Expand Up @@ -55,6 +55,11 @@ ipykernel==5.5.6
# jupyter-console
# notebook
# qtconsole
ipython==7.9.0
# via
# ipykernel
# ipywidgets
# jupyter-console
ipython-genutils==0.2.0
# via
# ipykernel
Expand All @@ -63,11 +68,6 @@ ipython-genutils==0.2.0
# notebook
# qtconsole
# traitlets
ipython==7.9.0
# via
# ipykernel
# ipywidgets
# jupyter-console
ipywidgets==7.7.2
# via jupyter
jedi==0.17.2
Expand All @@ -80,6 +80,8 @@ jsonschema==3.2.0
# via
# -c requirements.txt
# nbformat
jupyter==1.0.0
# via -r dev-requirements.in
jupyter-client==4.4.0
# via
# -r dev-requirements.in
Expand All @@ -99,8 +101,8 @@ jupyter-core==4.6.3
# nbformat
# notebook
# qtconsole
jupyter==1.0.0
# via -r dev-requirements.in
jupyterlab-widgets==1.1.4
# via ipywidgets
markupsafe==1.1.1
# via jinja2
mistune==0.8.4
Expand Down Expand Up @@ -130,8 +132,6 @@ pandocfilters==1.5.0
# via nbconvert
parso==0.7.1
# via jedi
pathlib2==2.3.7.post1
# via pytest
pexpect==4.8.0
# via ipython
pickleshare==0.7.5
Expand Down Expand Up @@ -161,12 +161,12 @@ pyrsistent==0.16.1
# via
# -c requirements.txt
# jsonschema
pytest-cov==2.8.1
# via -r dev-requirements.in
pytest==4.6.10
# via
# -r dev-requirements.in
# pytest-cov
pytest-cov==2.8.1
# via -r dev-requirements.in
pyyaml==5.3.1
# via
# -c requirements.txt
Expand All @@ -186,7 +186,6 @@ six==1.15.0
# bleach
# cfgv
# jsonschema
# pathlib2
# pre-commit
# prompt-toolkit
# pytest
Expand Down
6 changes: 3 additions & 3 deletions gdcdatamodel/gdc_postgres_admin.py
Expand Up @@ -24,7 +24,7 @@
logger.setLevel(logging.INFO)

name_root = "table_creator_"
app_name = "{}{}".format(name_root, random.randint(1000, 9999))
app_name = f"{name_root}{random.randint(1000, 9999)}"


GRANT_READ_PRIVS_SQL = """
Expand Down Expand Up @@ -106,7 +106,7 @@ def create_graph_tables(engine, timeout, namespace=None):
trans = connection.begin()
logger.info("Setting lock_timeout to %d", timeout)

timeout_str = "{}s".format(int(timeout + 1))
timeout_str = f"{int(timeout + 1)}s"
connection.execute("SET LOCAL lock_timeout = %s;", timeout_str)

orm_base = ext.get_orm_base(namespace) if namespace else ORMBase
Expand Down Expand Up @@ -137,7 +137,7 @@ def create_tables(engine, delay, retries, namespace=None):
raise RuntimeError("Max retries exceeded")

logger.info(
"Trying again in {} seconds ({} retries remaining)".format(delay, retries)
f"Trying again in {delay} seconds ({retries} retries remaining)"
)
time.sleep(delay)

Expand Down
18 changes: 9 additions & 9 deletions gdcdatamodel/models/__init__.py
Expand Up @@ -69,7 +69,7 @@ def remove_spaces(s):
def get_cls_package(package_namespace=None):
cls_package = "gdcdatamodel.models"
if package_namespace:
cls_package = "{}.{}".format(cls_package, package_namespace)
cls_package = f"{cls_package}.{package_namespace}"
return cls_package


Expand Down Expand Up @@ -198,7 +198,7 @@ def _versions(self):
session = self.get_session()
if not session:
raise RuntimeError(
"{} not bound to a session. Try .get_versions(session).".format(self)
f"{self} not bound to a session. Try .get_versions(session)."
)
return self.get_versions(session)

Expand Down Expand Up @@ -509,7 +509,7 @@ def generate_edge_tablename(src_label, label, dst_label):
# truncate each part of the name
if len(tablename) > 40:
oldname = tablename
logger.debug("Edge tablename {} too long, shortening".format(oldname))
logger.debug(f"Edge tablename {oldname} too long, shortening")
tablename = "edge_{}_{}".format(
hashlib.md5(py3_to_bytes(tablename)).hexdigest()[:8],
"{}{}{}".format(
Expand All @@ -518,7 +518,7 @@ def generate_edge_tablename(src_label, label, dst_label):
"".join([a[:2] for a in dst_label.split("_")])[:10],
),
)
logger.debug("Shortening {} -> {}".format(oldname, tablename))
logger.debug(f"Shortening {oldname} -> {tablename}")

return tablename

Expand Down Expand Up @@ -648,7 +648,7 @@ def load_nodes(dictionary, node_cls=None, package_namespace=None):
cls = NodeFactory(_id, subschema, node_cls, package_namespace)
register_class(cls, package_namespace)
except Exception:
print("Unable to load {}".format(name))
print(f"Unable to load {name}")
raise


Expand Down Expand Up @@ -684,7 +684,7 @@ def parse_edge(
edge_name = "".join(map(get_class_name_from_id, [src_label, edge_label, dst_label]))

if edge_cls.is_subclass_loaded(name):
return "_{}_out".format(edge_name)
return f"_{edge_name}_out"

edge = EdgeFactory(
edge_name,
Expand All @@ -698,7 +698,7 @@ def parse_edge(
package_namespace=package_namespace,
)

return "_{}_out".format(edge.__name__)
return f"_{edge.__name__}_out"


def load_edges(dictionary, node_cls=Node, edge_cls=Edge, package_namespace=None):
Expand All @@ -712,7 +712,7 @@ def load_edges(dictionary, node_cls=Node, edge_cls=Edge, package_namespace=None)

src_cls = node_cls.get_subclass(src_label)
if not src_cls:
raise RuntimeError("No source class labeled {}".format(src_label))
raise RuntimeError(f"No source class labeled {src_label}")

for name, link in get_links(subschema).items():
edge_label = link["label"]
Expand Down Expand Up @@ -747,7 +747,7 @@ def load_edges(dictionary, node_cls=Node, edge_cls=Edge, package_namespace=None)
"required": False,
"target_type": "case",
"label": "relates_to",
"backref": "_related_{}".format(src_cls.label),
"backref": f"_related_{src_cls.label}",
}

parse_edge(
Expand Down
4 changes: 2 additions & 2 deletions gdcdatamodel/models/caching.py
Expand Up @@ -53,7 +53,7 @@ def get_related_case_edge_cls_name(node):
"""

return "{}RelatesToCase".format(node.__class__.__name__)
return f"{node.__class__.__name__}RelatesToCase"


def get_edge_src(edge):
Expand Down Expand Up @@ -219,7 +219,7 @@ def update_cache_edges(node, session, correct_cases):

# Get information about the existing edges
edge_name = get_related_case_edge_cls_name(node)
existing_edges = getattr(node, "_{}_out".format(edge_name))
existing_edges = getattr(node, f"_{edge_name}_out")

# Remove edges that should no longer exist
cases_disconnected = [
Expand Down
6 changes: 3 additions & 3 deletions gdcdatamodel/models/indexes.py
Expand Up @@ -32,20 +32,20 @@ def index_name(cls, description):
"""

name = "index_{}_{}".format(cls.__tablename__, description)
name = f"index_{cls.__tablename__}_{description}"

# If the name is too long, prepend it with the first 8 hex digits of its hash
# truncate each part of the name
if len(name) > 40:
oldname = index_name
logger.debug("Edge tablename {} too long, shortening".format(oldname))
logger.debug(f"Edge tablename {oldname} too long, shortening")
name = "index_{}_{}_{}".format(
hashlib.md5(py3_to_bytes(cls.__tablename__)).hexdigest()[:8],
"".join([a[:4] for a in cls.get_label().split("_")])[:20],
"_".join([a[:8] for a in description.split("_")])[:25],
)

logger.debug("Shortening {} -> {}".format(oldname, index_name))
logger.debug(f"Shortening {oldname} -> {index_name}")

return name

Expand Down
4 changes: 2 additions & 2 deletions gdcdatamodel/models/versioning.py
Expand Up @@ -7,7 +7,7 @@
UUID_NAMESPACE_SEED = os.getenv(
"UUID_NAMESPACE_SEED", "86bb916a-24c5-48e4-8a46-5ea73a379d47"
)
UUID_NAMESPACE = uuid.UUID("urn:uuid:{}".format(UUID_NAMESPACE_SEED), version=4)
UUID_NAMESPACE = uuid.UUID(f"urn:uuid:{UUID_NAMESPACE_SEED}", version=4)


class TagKeys:
Expand Down Expand Up @@ -106,7 +106,7 @@ def is_taggable(self, node):

def __generate_hash(seed, label):
namespace = UUID_NAMESPACE
name = "{}-{}".format(seed, label)
name = f"{seed}-{label}"
return str(uuid.uuid5(namespace, name))


Expand Down
14 changes: 7 additions & 7 deletions gdcdatamodel/validators/graph_validators.py
@@ -1,7 +1,7 @@
from gdcdictionary import gdcdictionary


class GDCGraphValidator(object):
class GDCGraphValidator:
"""
Validator that validates entities' relationship with existing nodes in
database.
Expand All @@ -28,7 +28,7 @@ def record_errors(self, graph, entities):
self.optional_validators[validator_name].validate()


class GDCLinksValidator(object):
class GDCLinksValidator:
def validate(self, entities, graph=None):
for entity in entities:
for link in gdcdictionary.schema[entity.node.label]["links"]:
Expand Down Expand Up @@ -57,7 +57,7 @@ def validate_edge_group(self, schema, entity):
if schema.get("required") is True and len(submitted_links) == 0:
names = ", ".join(schema_links[:-2] + [" or ".join(schema_links[-2:])])
entity.record_error(
"Entity is missing a required link to {}".format(names),
f"Entity is missing a required link to {names}",
keys=schema_links,
)

Expand All @@ -70,7 +70,7 @@ def validate_edge_group(self, schema, entity):
keys=schema_links,
)
for edge in entity.node.edges_out:
entity.record_error("{}".format(edge.dst.submitter_id))
entity.record_error(f"{edge.dst.submitter_id}")

result = {"length": num_of_edges, "name": ", ".join(schema_links)}

Expand All @@ -86,7 +86,7 @@ def validate_edge(self, link_sub_schema, entity):
if multi in ["many_to_one", "one_to_one"]:
if len(targets) > 1:
entity.record_error(
"'{}' link has to be {}".format(association, multi),
f"'{association}' link has to be {multi}",
keys=[association],
)

Expand All @@ -108,13 +108,13 @@ def validate_edge(self, link_sub_schema, entity):
else:
if link_sub_schema.get("required") is True:
entity.record_error(
"Entity is missing required link to {}".format(association),
f"Entity is missing required link to {association}",
keys=[association],
)
return result


class GDCUniqueKeysValidator(object):
class GDCUniqueKeysValidator:
def validate(self, entities, graph=None):
for entity in entities:
schema = gdcdictionary.schema[entity.node.label]
Expand Down
2 changes: 1 addition & 1 deletion gdcdatamodel/validators/json_validators.py
Expand Up @@ -22,7 +22,7 @@ def get_keys(error_msg):
return []


class GDCJSONValidator(object):
class GDCJSONValidator:
def __init__(self):
self.schemas = gdcdictionary

Expand Down
2 changes: 1 addition & 1 deletion migrations/update_case_cache_append_only.py
Expand Up @@ -161,7 +161,7 @@ def seed_level_1(graph, cls):
cls_to_case_edge_table=case_edge.__tablename__,
)

print("Seeding {} through {}".format(cls.get_label(), case_edge.__name__))
print(f"Seeding {cls.get_label()} through {case_edge.__name__}")
graph.current_session().execute(statement)


Expand Down
2 changes: 1 addition & 1 deletion migrations/update_legacy_states.py
Expand Up @@ -107,7 +107,7 @@ def print_cls_query_summary(graph):
"%s: %d"
% (
"legacy_stateless_nodes".ljust(40),
sum([query.count() for query in cls_queries.itervalues()]),
sum(query.count() for query in cls_queries.itervalues()),
)
)

Expand Down

0 comments on commit 4489324

Please sign in to comment.