Replace use of the root logger with module-level loggers
candleindark committed Sep 20, 2024
1 parent 6ba1b0b · commit b183708
Showing 16 changed files with 134 additions and 97 deletions.
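Every change follows the same pattern: define one module-level logger, named after the module via __name__, and replace calls made directly on the logging module (which go to the root logger) with calls on that logger. A minimal before/after sketch of the pattern (the function and message data are illustrative, not part of the commit):

import logging

logger = logging.getLogger(__name__)  # e.g. "linkml_runtime.dumpers.rdflib_dumper"


def example() -> None:
    # Before: logging.debug(...) logs through the root logger, so applications
    # embedding the library cannot filter these records apart from their own.
    # After: logger.debug(...) attributes the record to this module by name.
    logger.debug("PREFIXMAP=%s", {"ex": "https://example.org/"})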
10 changes: 6 additions & 4 deletions linkml_runtime/dumpers/rdflib_dumper.py
@@ -15,6 +15,8 @@
from linkml_runtime.utils.schemaview import SchemaView, ElementName, PermissibleValue, PermissibleValueText
from linkml_runtime.utils.yamlutils import YAMLRoot

+logger = logging.getLogger(__name__)


class RDFLibDumper(Dumper):
"""
@@ -44,7 +46,7 @@ def as_rdf_graph(
if isinstance(prefix_map, Converter):
# TODO replace with `prefix_map = prefix_map.bimap` after making minimum requirement on python 3.8
prefix_map = {record.prefix: record.uri_prefix for record in prefix_map.records}
-logging.debug(f'PREFIXMAP={prefix_map}')
+logger.debug(f'PREFIXMAP={prefix_map}')
if prefix_map:
for k, v in prefix_map.items():
if k == "@base":
@@ -80,7 +82,7 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
"""
namespaces = schemaview.namespaces()
slot_name_map = schemaview.slot_name_mappings()
-logging.debug(f'CONVERT: {element} // {type(element)} // {target_type}')
+logger.debug(f'CONVERT: {element} // {type(element)} // {target_type}')
if target_type in schemaview.all_enums():
if isinstance(element, PermissibleValueText):
e = schemaview.get_enum(target_type)
@@ -105,7 +107,7 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
namespaces["xsd"] = XSD
return Literal(element, datatype=namespaces.uri_for(dt_uri))
else:
-logging.warning(f'No datatype specified for : {t.name}, using plain Literal')
+logger.warning(f'No datatype specified for : {t.name}, using plain Literal')
return Literal(element)
element_vars = {k: v for k, v in vars(element).items() if not k.startswith('_')}
if len(element_vars) == 0:
@@ -134,7 +136,7 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
if k in slot_name_map:
k = slot_name_map[k].name
else:
-logging.error(f'Slot {k} not in name map')
+logger.error(f'Slot {k} not in name map')
slot = schemaview.induced_slot(k, cn)
if not slot.identifier:
slot_uri = URIRef(schemaview.get_uri(slot, expand=True))
4 changes: 3 additions & 1 deletion linkml_runtime/index/object_index.py
@@ -16,6 +16,8 @@
from linkml_runtime.utils import eval_utils
from linkml_runtime.utils.yamlutils import YAMLRoot

+logger = logging.getLogger(__name__)


class ObjectIndex:
"""
@@ -257,7 +259,7 @@ def _map(self, obj: Any, in_range: str) -> Any:
module = inspect.getmodule(self._shadowed)
cls_dict = dict(inspect.getmembers(module, inspect.isclass))
if in_range not in cls_dict:
logging.warning(f"Class {in_range} not found in {module}, classes: {cls_dict}")
logger.warning(f"Class {in_range} not found in {module}, classes: {cls_dict}")
return obj
cls = cls_dict[in_range]
return cls(obj)
5 changes: 4 additions & 1 deletion linkml_runtime/loaders/json_loader.py
@@ -8,6 +8,9 @@
from linkml_runtime.utils.yamlutils import YAMLRoot
from pydantic import BaseModel

+logger = logging.getLogger(__name__)


class JSONLoader(Loader):

def load_as_dict(self,
@@ -31,6 +34,6 @@ def load_any(self,
if isinstance(data_as_dict, dict):
typ = data_as_dict.pop('@type', None)
if typ and typ != target_class.__name__:
logging.warning(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}")
logger.warning(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}")

return self._construct_target_class(data_as_dict, target_class)
29 changes: 16 additions & 13 deletions linkml_runtime/loaders/rdflib_loader.py
@@ -18,6 +18,9 @@
from linkml_runtime.utils.yamlutils import YAMLRoot
from pydantic import BaseModel

+logger = logging.getLogger(__name__)


VALID_SUBJECT = Union[URIRef, BNode]
ANYDICT = Dict[str, Any]

@@ -60,7 +63,7 @@ def from_rdf_graph(
if c2.name in schemaview.class_ancestors(cn):
continue
else:
-logging.error(f'Inconsistent URI to class map: {uri} -> {c2.name}, {c.name}')
+logger.error(f'Inconsistent URI to class map: {uri} -> {c2.name}, {c.name}')
uri_to_class_map[uri] = c
# data prefix map: supplements or overrides existing schema prefix map
if isinstance(prefix_map, Converter):
@@ -74,7 +77,7 @@
target_class_uriref: URIRef = target_class.class_class_uri
root_dicts: List[ANYDICT] = []
root_subjects: List[VALID_SUBJECT] = list(graph.subjects(RDF.type, target_class_uriref))
-logging.debug(f'ROOTS = {root_subjects}')
+logger.debug(f'ROOTS = {root_subjects}')
# Step 2: walk RDF graph starting from root subjects, constructing dict tree
node_tuples_to_visit: List[Tuple[VALID_SUBJECT, ClassDefinitionName]] ## nodes and their type still to visit
node_tuples_to_visit = [(subject, target_class.class_name) for subject in root_subjects]
@@ -101,14 +104,14 @@
type_classes = [uri_to_class_map[str(x)] for x in type_vals]
if len(type_classes) > 1:
raise ValueError(f'Ambiguous types for {subject} == {type_classes}')
-logging.info(f'Replacing {subject_class} with {type_classes}')
+logger.info(f'Replacing {subject_class} with {type_classes}')
subject_class = type_classes[0].name
# process all triples for this node
for (_, p, o) in graph.triples((subject, None, None)):
processed_triples.add((subject,p,o))
-logging.debug(f' Processing triple {subject} {p} {o}, subject type = {subject_class}')
+logger.debug(f' Processing triple {subject} {p} {o}, subject type = {subject_class}')
if p == RDF.type:
-logging.debug(f'Ignoring RDF.type for {subject} {o}, we automatically infer this from {subject_class}')
+logger.debug(f'Ignoring RDF.type for {subject} {o}, we automatically infer this from {subject_class}')
elif p not in uri_to_slot:
if ignore_unmapped_predicates:
unmapped_predicates.add(p)
@@ -121,13 +124,13 @@
slot_name = underscore(slot.name)
if isinstance(o, Literal):
if EnumDefinition.class_name in range_applicable_elements:
-logging.debug(f'Assuming no meaning assigned for value {o} for Enum {slot.range}')
+logger.debug(f'Assuming no meaning assigned for value {o} for Enum {slot.range}')
elif TypeDefinition.class_name not in range_applicable_elements:
raise ValueError(f'Cannot map Literal {o} to a slot {slot.name} whose range {slot.range} is not a type;')
v = o.value
elif isinstance(o, BNode):
if not is_inlined:
-logging.error(f'blank nodes should be inlined; {slot_name}={o} in {subject}')
+logger.error(f'blank nodes should be inlined; {slot_name}={o} in {subject}')
v = Pointer(o)
else:
if ClassDefinition.class_name in range_applicable_elements:
@@ -137,7 +140,7 @@
else:
v = namespaces.curie_for(o)
if v is None:
-logging.debug(f'No CURIE for {p}={o} in {subject} [{subject_class}]')
+logger.debug(f'No CURIE for {p}={o} in {subject} [{subject_class}]')
v = str(o)
elif EnumDefinition.class_name in range_applicable_elements:
range_union_elements = schemaview.slot_range_as_union(slot)
@@ -156,7 +159,7 @@
v = namespaces.curie_for(o)
if v is None:
v = str(o)
-logging.debug(f'Casting {o} to string')
+logger.debug(f'Casting {o} to string')
else:
raise ValueError(f'Expected literal value ({range_applicable_elements}) for {slot_name}={o}')
if is_inlined:
@@ -175,13 +178,13 @@
if slot.range in schemaview.all_classes():
node_tuples_to_visit.append((o, ClassDefinitionName(slot.range)))
if unmapped_predicates:
-logging.info(f'Unmapped predicated: {unmapped_predicates}')
+logger.info(f'Unmapped predicated: {unmapped_predicates}')
unprocessed_triples = set(graph.triples((None, None, None))) - processed_triples
-logging.info(f'Triple processed = {len(processed_triples)}, unprocessed = {len(unprocessed_triples)}')
+logger.info(f'Triple processed = {len(processed_triples)}, unprocessed = {len(unprocessed_triples)}')
if len(unprocessed_triples) > 0:
if not allow_unprocessed_triples:
for t in unprocessed_triples:
-logging.warning(f' Unprocessed: {t}')
+logger.warning(f' Unprocessed: {t}')
raise ValueError(f'Unprocessed triples: {len(unprocessed_triples)}')
# Step 2: replace inline pointers with object dicts
def repl(v):
@@ -195,7 +198,7 @@
objs_to_visit: List[ANYDICT] = copy(root_dicts)
while len(objs_to_visit) > 0:
obj = objs_to_visit.pop()
-logging.debug(f'Replacing pointers for {obj}')
+logger.debug(f'Replacing pointers for {obj}')
for k, v in obj.items():
if v is None:
continue
7 changes: 5 additions & 2 deletions linkml_runtime/utils/csvutils.py
@@ -5,6 +5,9 @@
SlotDefinition, ClassDefinition, ClassDefinitionName
from linkml_runtime.utils.schemaview import SchemaView

+logger = logging.getLogger(__name__)


def get_configmap(schemaview: SchemaView, index_slot: SlotDefinitionName) -> CONFIGMAP:
"""
Generates a configuration that specifies mapping between a CSV and a JSON structure
@@ -28,9 +31,9 @@ def get_configmap(schemaview: SchemaView, index_slot: SlotDefinitionName) -> CON
cm[sn] = config
return cm
else:
-logging.warning(f'Index slot range not to class: {slot.range}')
+logger.warning(f'Index slot range not to class: {slot.range}')
else:
-logging.warning(f'Index slot or schema not specified')
+logger.warning(f'Index slot or schema not specified')
return {}

def _get_key_config(schemaview: SchemaView, tgt_cls: ClassDefinitionName, sn: SlotDefinitionName, sep='_'):
5 changes: 3 additions & 2 deletions linkml_runtime/utils/distroutils.py
@@ -6,6 +6,7 @@
from pathlib import PurePath
from typing import List, Type

+logger = logging.getLogger(__name__)


def get_default_paths(file_type: str) -> List[PurePath]:
@@ -41,7 +42,7 @@ def get_default_paths(file_type: str) -> List[PurePath]:
paths.append(rel_dir)
# YAML files may be in the same directory as the python
paths.append(PurePath('.'))
logging.debug(f"Paths to search: {paths}")
logger.debug(f"Paths to search: {paths}")
return paths

def get_packaged_file_as_str(package: str, file_type: str, rel_paths: List[PurePath]=[], encoding="utf-8") -> str:
@@ -65,7 +66,7 @@ def get_packaged_file_as_str(package: str, file_type: str, rel_paths: List[PureP
if data:
break
except FileNotFoundError:
-logging.debug(f'candidate {path} not found')
+logger.debug(f'candidate {path} not found')
if not data:
raise FileNotFoundError(f'package: {package} file: {file_type}')
return data.decode(encoding)
9 changes: 6 additions & 3 deletions linkml_runtime/utils/inference_utils.py
@@ -11,6 +11,9 @@
from linkml_runtime.utils.walker_utils import traverse_object_tree
from linkml_runtime.utils.yamlutils import YAMLRoot

+logger = logging.getLogger(__name__)


RESOLVE_FUNC = Callable[[str, Any], Any]

def obj_as_dict_nonrecursive(obj: YAMLRoot, resolve_function: RESOLVE_FUNC = None) -> Dict[str, Any]:
@@ -73,7 +76,7 @@ def generate_slot_value(obj: YAMLRoot, slot_name: Union[str, SlotDefinitionName]
mapped_slot = schemaview.slot_name_mappings()[slot_name]
slot_name = mapped_slot.name
slot = schemaview.induced_slot(slot_name, class_name)
-logging.debug(f' CONF={config}')
+logger.debug(f' CONF={config}')
if config.use_string_serialization:
if slot.string_serialization:
if isinstance(obj, JsonObj):
@@ -106,7 +109,7 @@ def infer_slot_value(obj: YAMLRoot, slot_name: Union[str, SlotDefinitionName], s
if v is not None and policy == Policy.KEEP:
return v
new_v = generate_slot_value(obj, slot_name, schemaview, class_name=class_name, config=config)
-logging.debug(f'SETTING {slot_name} = {new_v} // current={v}, {policy}')
+logger.debug(f'SETTING {slot_name} = {new_v} // current={v}, {policy}')
if new_v:
# check if new value is different; not str check is necessary as enums may not be converted
if v is not None and new_v != v and str(new_v) != str(v):
@@ -138,7 +141,7 @@ def infer_all_slot_values(obj: YAMLRoot, schemaview: SchemaView,
:return:
"""
def infer(in_obj: YAMLRoot):
-logging.debug(f'INFER={in_obj}')
+logger.debug(f'INFER={in_obj}')
if isinstance(in_obj, YAMLRoot) and not isinstance(in_obj, EnumDefinitionImpl) and not isinstance(in_obj, PermissibleValue):
for k, v in vars(in_obj).items():
#print(f' ISV={k} curr={v} policy={policy} in_obj={type(in_obj)}')
6 changes: 4 additions & 2 deletions linkml_runtime/utils/ruleutils.py
@@ -6,6 +6,8 @@
from linkml_runtime.linkml_model.meta import SchemaDefinition, ClassDefinition, SlotDefinition, Expression, \
ClassExpression, ClassDefinitionName, ClassRule, AnonymousClassExpression, SlotExpression, SlotDefinitionName

+logger = logging.getLogger(__name__)


class AtomicClassExpression:
"""
@@ -58,12 +60,12 @@ def get_range_as_disjunction(slot: SlotExpression) -> Set[ClassDefinitionName]:
if isinstance(slot.range_expression, ClassExpression):
conjs.append(get_disjunction(slot.range_expression))
else:
-logging.warning(f'Expected range_expression for {slot.name} to be a class expression, not {type(slot.range_expression)}')
+logger.warning(f'Expected range_expression for {slot.name} to be a class expression, not {type(slot.range_expression)}')
if len(conjs) == 0:
if slot.range:
conjs.append({slot.range})
else:
-logging.warning(f'No range for {slot.name}')
+logger.warning(f'No range for {slot.name}')
if len(conjs) > 1:
raise Exception(f'Cannot determine range disjunction for {slot}, got conjunctions: {conjs}')
if len(conjs) == 0:
4 changes: 2 additions & 2 deletions linkml_runtime/utils/schemaview.py
@@ -224,7 +224,7 @@ def load_import(self, imp: str, from_schema: SchemaDefinition = None):
base_dir = os.path.dirname(from_schema.source_file)
else:
base_dir = None
-logging.info(f'Importing {imp} as {sname} from source {from_schema.source_file}; base_dir={base_dir}')
+logger.info(f'Importing {imp} as {sname} from source {from_schema.source_file}; base_dir={base_dir}')
schema = load_schema_wrap(sname + '.yaml', base_dir=base_dir)
return schema

@@ -1403,7 +1403,7 @@ def induced_slot(self, slot_name: SLOT_NAME, class_name: CLASS_NAME = None, impo
# )
if not is_empty(v2):
v = v2
-logging.debug(f'{v} takes precedence over {v2} for {induced_slot.name}.{metaslot_name}')
+logger.debug(f'{v} takes precedence over {v2} for {induced_slot.name}.{metaslot_name}')
if v is None:
if metaslot_name == 'range':
v = self.schema.default_range
15 changes: 9 additions & 6 deletions linkml_runtime/utils/schemaview_cli.py
@@ -13,6 +13,9 @@
import click
import yaml

+logger = logging.getLogger(__name__)


DEFAULT_DISPLAY_COLS = [
'name',
'is_a',
@@ -58,8 +61,8 @@ def list(schema, columns, element_type):
"""
schema_view = SchemaView(schema)
-logging.info(f'id={schema_view.schema.id}')
-logging.info(f'name={schema_view.schema.name}')
+logger.info(f'id={schema_view.schema.id}')
+logger.info(f'name={schema_view.schema.name}')
enames = schema_view.all_element()
elements = [schema_view.get_element(ename) for ename in enames]
if element_type is not None:
@@ -79,7 +82,7 @@ def islot(schema, columns, class_names):
"""
schema_view = SchemaView(schema)
for cn in class_names:
-logging.info(f'Class: {cn}')
+logger.info(f'Class: {cn}')
islots = schema_view.class_induced_slots(cn)
_show_elements(islots, columns=columns)

@@ -97,7 +100,7 @@ def ancs(schema, class_names, is_a, mixins):
"""
schema_view = SchemaView(schema)
for cn in class_names:
-logging.info(f'Class: {cn}')
+logger.info(f'Class: {cn}')
ancs = schema_view.class_ancestors(cn, is_a=is_a, mixins=mixins)
for a in ancs:
print(f'{cn}\t{a}')
@@ -116,7 +119,7 @@ def descs(schema, class_names, is_a, mixins):
"""
schema_view = SchemaView(schema)
for cn in class_names:
-logging.info(f'Class: {cn}')
+logger.info(f'Class: {cn}')
ds = schema_view.class_descendants(cn, is_a=is_a, mixins=mixins)
for d in ds:
print(f'{cn}\t{d}')
@@ -130,7 +133,7 @@ def delete(schema, class_names):
"""
schema_view = SchemaView(schema)
for cn in class_names:
-logging.info(f'Class: {cn}')
+logger.info(f'Class: {cn}')
schema_view.delete_class(cn)
print(yaml_dumper.dumps(schema_view.schema))

(6 of 16 changed files not shown)
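Because logger names form a dot-separated hierarchy, an application embedding linkml_runtime can now tune the library's verbosity without reconfiguring the root logger. A usage sketch (the chosen levels are arbitrary examples):

import logging

# Application-side setup; the library itself no longer touches the root logger.
logging.basicConfig(level=logging.INFO)

# Quiet the library as a whole: module loggers such as
# "linkml_runtime.utils.schemaview" propagate to this parent by default.
logging.getLogger("linkml_runtime").setLevel(logging.WARNING)

# Or turn a single module up while debugging.
logging.getLogger("linkml_runtime.utils.schemaview").setLevel(logging.DEBUG)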
