lxml type fixes

mr-c committed Nov 24, 2023
1 parent 34d4e39 commit 261cd69
Showing 18 changed files with 852 additions and 58 deletions.
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/dataproviders/hierarchy.py
@@ -66,7 +66,7 @@ def matches_selector(self, element, selector=None):
# TODO: fails with '#' - browser thinks it's an anchor - use urlencode
# TODO: need removal/replacement of etree namespacing here - then move to string match
Element = getattr(etree, "_Element", etree.Element)
return bool((selector is None) or (isinstance(element, Element) and selector in element.tag))
return bool((selector is None) or (isinstance(element, etree._Element) and selector in element.tag))

def element_as_dict(self, element):
"""
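The one-line change above swaps the looked-up Element alias for lxml's concrete etree._Element class, which both mypy and the runtime can check directly. A minimal standalone sketch of that isinstance guard, assuming lxml is installed and using a made-up document rather than Galaxy's data provider machinery:

from lxml import etree

def matches_selector(element, selector=None):
    # Keep the node when no selector is given, or when it is a real lxml
    # element whose tag name contains the selector string.
    return bool((selector is None) or (isinstance(element, etree._Element) and selector in element.tag))

root = etree.fromstring("<dataset><name>sample</name></dataset>")
print(matches_selector(root, "dataset"))       # True
print(matches_selector(root[0], "dataset"))    # False: tag is "name"
print(matches_selector("not an element", "dataset"))  # False: the isinstance guard rejects it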
10 changes: 7 additions & 3 deletions lib/galaxy/dependencies/__init__.py
@@ -68,9 +68,13 @@ def load_job_config_dict(job_conf_dict):
if ".xml" in job_conf_path:
try:
try:
for plugin in parse_xml(job_conf_path).find("plugins").findall("plugin"):
if "load" in plugin.attrib:
self.job_runners.append(plugin.attrib["load"])
plugins = parse_xml(job_conf_path).find("plugins")
if plugins is not None:
for plugin in plugins.findall("plugin"):
if "load" in plugin.attrib:
self.job_runners.append(plugin.attrib["load"])
else:
pass
except OSError:
pass
try:
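The new code above accounts for find() being typed as returning an Optional element: it really does return None when the child is missing. A self-contained sketch of the same guard, assuming lxml is installed and using an invented job_conf-style snippet with an illustrative runner load string:

from lxml import etree

job_conf = etree.fromstring(
    "<job_conf>"
    "  <plugins>"
    "    <plugin id='local' load='galaxy.jobs.runners.local:LocalJobRunner'/>"
    "    <plugin id='no_load'/>"
    "  </plugins>"
    "</job_conf>"
)

job_runners = []
plugins = job_conf.find("plugins")  # Optional: None when the <plugins> section is absent
if plugins is not None:
    for plugin in plugins.findall("plugin"):
        if "load" in plugin.attrib:
            job_runners.append(plugin.attrib["load"])
print(job_runners)  # ['galaxy.jobs.runners.local:LocalJobRunner']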
7 changes: 3 additions & 4 deletions lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py
@@ -14,7 +14,6 @@
InstallationTarget,
)
from galaxy.util import (
Element,
etree,
parse_xml_string,
xml_to_string,
@@ -34,7 +33,7 @@

class DataManagerHandler:
app: InstallationTarget
root: Optional[Element] = None
root: Optional[etree._Element] = None

def __init__(self, app: InstallationTarget):
self.app = app
@@ -47,7 +46,7 @@ def data_managers_path(self) -> Optional[str]:
return root.get("tool_path", None)
return None

def _data_manager_config_elems_to_xml_file(self, config_elems: List[Element], config_filename: StrPath) -> None:
def _data_manager_config_elems_to_xml_file(self, config_elems: List[etree._Element], config_filename: StrPath) -> None:
"""
Persist the current in-memory list of config_elems to a file named by the value
of config_filename.
@@ -171,7 +170,7 @@ def install_data_managers(
)
if data_manager:
rval.append(data_manager)
elif elem.tag is etree.Comment:
elif isinstance(elem, etree._Comment):
pass
else:
log.warning(f"Encountered unexpected element '{elem.tag}':\n{xml_to_string(elem)}")
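Checking isinstance(elem, etree._Comment) replaces the elem.tag is etree.Comment comparison, which works at runtime but leaves mypy guessing about the tag's type. A small sketch of iterating past comment nodes this way, assuming lxml and an invented config snippet:

from lxml import etree

root = etree.fromstring("<data_managers><!-- disabled for now --><data_manager id='fetch'/></data_managers>")

for elem in root:
    if isinstance(elem, etree._Comment):
        continue  # skip XML comments; only real elements reach the code below
    print(elem.tag, dict(elem.attrib))
# prints: data_manager {'id': 'fetch'}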
@@ -298,7 +298,7 @@ def generate_tool_panel_elem_list(
owner="",
):
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list: List[etree.Element] = []
elem_list: List[etree._Element] = []
tool_elem = None
cleaned_repository_clone_url = remove_protocol_and_user_from_clone_url(repository_clone_url)
if not owner:
@@ -335,6 +335,7 @@
tool,
tool_section if inside_section else None,
)
assert tool_section is not None
if inside_section:
if section_in_elem_list is not None:
elem_list[section_in_elem_list] = tool_section
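The added assert tool_section is not None line is a common way to narrow an Optional value for mypy when the surrounding logic guarantees it is set. A standalone sketch of that narrowing pattern, assuming lxml and an invented toolbox snippet:

from typing import Optional
from lxml import etree

def find_section(root: etree._Element, section_id: str) -> Optional[etree._Element]:
    # find() with an attribute predicate returns None when no such section exists
    return root.find(f"section[@id='{section_id}']")

root = etree.fromstring('<toolbox><section id="filters" name="Filter and Sort"/></toolbox>')
tool_section = find_section(root, "filters")
assert tool_section is not None  # narrows Optional[_Element] to _Element for the type checker
print(tool_section.get("name"))  # Filter and Sort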
2 changes: 1 addition & 1 deletion lib/galaxy/tool_shed/tools/data_table_manager.py
@@ -29,7 +29,7 @@ def __init__(self, app: RequiredAppT):

def generate_repository_info_elem(
self, tool_shed: str, repository_name: str, changeset_revision: str, owner: str, parent_elem=None, **kwd
) -> etree.Element:
) -> etree._Element:
"""Create and return an ElementTree repository info Element."""
if parent_elem is None:
elem = etree.Element("tool_shed_repository")
51 changes: 28 additions & 23 deletions lib/galaxy/tool_util/data/__init__.py
@@ -42,7 +42,7 @@
from galaxy import util
from galaxy.exceptions import MessageException
from galaxy.util import (
Element,
etree,
RW_R__R__,
)
from galaxy.util.compression_utils import decompress_path_to_directory
Expand Down Expand Up @@ -133,12 +133,12 @@ class FileNameInfoT(TypedDict):
filename: str
from_shed_config: bool
tool_data_path: Optional[StrPath]
config_element: Optional[Element]
config_element: Optional[etree._Element]
tool_shed_repository: Optional[Dict[str, Any]]
errors: ErrorListT


LoadInfoT = Tuple[Tuple[Element, Optional[StrPath]], Dict[str, Any]]
LoadInfoT = Tuple[Tuple[etree._Element, Optional[StrPath]], Dict[str, Any]]


class ToolDataTable(Dictifiable):
@@ -162,7 +162,7 @@ def from_dict(cls, d):

def __init__(
self,
config_element: Element,
config_element: etree._Element,
tool_data_path: Optional[StrPath],
tool_data_path_files: ToolDataPathFiles,
from_shed_config: bool = False,
@@ -179,7 +179,7 @@ def __init__(
self.tool_data_path = tool_data_path
self.tool_data_path_files = tool_data_path_files
self.other_config_dict = other_config_dict or {}
self.missing_index_file = None
self.missing_index_file: Optional[str] = None
# increment this variable any time a new entry is added, or when the table is totally reloaded
# This value has no external meaning, and does not represent an abstract version of the underlying data
self._loaded_content_version = 1
@@ -192,7 +192,9 @@ def __init__(
"filename": filename,
},
)
self._merged_load_info: List[Tuple[Type[ToolDataTable], Tuple[Tuple[Element, StrPath], Dict[str, Any]]]] = []
self._merged_load_info: List[
Tuple[Type[ToolDataTable], Tuple[Tuple[etree._Element, StrPath], Dict[str, Any]]]
] = []

def _update_version(self, version: Optional[int] = None) -> int:
if version is not None:
@@ -303,6 +305,7 @@ class TabularToolDataTable(ToolDataTable):
"""

comment_char: str
dict_collection_visible_keys = ["name"]
dict_element_visible_keys = ["name", "fields"]
dict_export_visible_keys = ["name", "data", "largest_index", "columns", "missing_index_file"]
@@ -311,7 +314,7 @@

def __init__(
self,
config_element: Element,
config_element: etree._Element,
tool_data_path: Optional[StrPath],
tool_data_path_files: ToolDataPathFiles,
from_shed_config: bool = False,
@@ -332,7 +335,7 @@ def __init__(

def configure_and_load(
self,
config_element: Element,
config_element: etree._Element,
tool_data_path: Optional[StrPath],
from_shed_config: bool = False,
url_timeout: float = 10,
@@ -349,10 +352,10 @@
repo_elem = config_element.find("tool_shed_repository")
if repo_elem is not None:
repo_info = dict(
tool_shed=repo_elem.find("tool_shed").text,
name=repo_elem.find("repository_name").text,
owner=repo_elem.find("repository_owner").text,
installed_changeset_revision=repo_elem.find("installed_changeset_revision").text,
tool_shed=cast(etree._Element, repo_elem.find("tool_shed")).text,
name=cast(etree._Element, repo_elem.find("repository_name")).text,
owner=cast(etree._Element, repo_elem.find("repository_owner")).text,
installed_changeset_revision=cast(etree._Element, repo_elem.find("installed_changeset_revision")).text,
)
else:
repo_info = None
@@ -378,6 +381,7 @@ def configure_and_load(
filename = file_element.get("from_config", None) or None
if filename:
filename = self.other_config_dict.get(filename, None)
assert filename is not None
filename = file_path = _expand_here_template(filename, here=self.here)
found = False
if file_path is None:
@@ -507,7 +511,7 @@ def get_named_fields_list(self) -> List[Dict[Union[str, int], str]]:
def get_version_fields(self):
return (self._loaded_content_version, self.get_fields())

def parse_column_spec(self, config_element: Element) -> None:
def parse_column_spec(self, config_element: etree._Element) -> None:
"""
Parse column definitions, which can either be a set of 'column' elements
with a name and index (as in dynamic options config), or a shorthand
@@ -528,12 +532,12 @@ def parse_column_spec(self, config_element: Element) -> None:
for column_elem in config_element.findall("column"):
name = column_elem.get("name", None)
assert name is not None, "Required 'name' attribute missing from column def"
index = column_elem.get("index", None)
index2 = column_elem.get("index", None)
assert index is not None, "Required 'index' attribute missing from column def"
index = int(index)
self.columns[name] = index
if index > self.largest_index:
self.largest_index = index
index3 = int(cast(str, index2))
self.columns[name] = index3
if index3 > self.largest_index:
self.largest_index = index3
empty_field_value = column_elem.get("empty_field_value", None)
if empty_field_value is not None:
self.empty_field_values[name] = empty_field_value
@@ -965,7 +969,7 @@ def to_json(self, path: StrPath) -> None:

def load_from_config_file(
self, config_filename: StrPath, tool_data_path: Optional[StrPath], from_shed_config: bool = False
) -> List[Element]:
) -> List[etree._Element]:
"""
This method is called under 3 conditions:
@@ -989,6 +993,7 @@
)
table_elems.append(table_elem)
if table.name not in self.data_tables:
assert table.name is not None
self.data_tables[table.name] = table
log.debug("Loaded tool data table '%s' from file '%s'", table.name, config_filename)
else:
@@ -1005,7 +1010,7 @@

def from_elem(
self,
table_elem: Element,
table_elem: etree._Element,
tool_data_path: Optional[StrPath],
from_shed_config: bool,
filename: StrPath,
@@ -1029,7 +1034,7 @@ def add_new_entries_from_config_file(
tool_data_path: Optional[StrPath],
shed_tool_data_table_config: StrPath,
persist: bool = False,
) -> Tuple[List[Element], str]:
) -> Tuple[List[etree._Element], str]:
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example::
@@ -1070,8 +1075,8 @@ def add_new_entries_from_config_file(
def to_xml_file(
self,
shed_tool_data_table_config: StrPath,
new_elems: Optional[List[Element]] = None,
remove_elems: Optional[List[Element]] = None,
new_elems: Optional[List[etree._Element]] = None,
remove_elems: Optional[List[etree._Element]] = None,
) -> None:
"""
Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk.
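Several hunks above wrap find() results in cast(etree._Element, ...) before reading .text: cast() only informs the type checker and does nothing at runtime, so it suits child elements that the file format guarantees are present. A minimal sketch with invented repository values, assuming lxml:

from typing import cast
from lxml import etree

repo_elem = etree.fromstring(
    "<tool_shed_repository>"
    "<tool_shed>toolshed.example.org</tool_shed>"
    "<repository_name>example_repo</repository_name>"
    "</tool_shed_repository>"
)

# find() is typed Optional[_Element]; cast() tells mypy the child is present
# without adding any runtime check.
tool_shed = cast(etree._Element, repo_elem.find("tool_shed")).text
name = cast(etree._Element, repo_elem.find("repository_name")).text
print(tool_shed, name)  # toolshed.example.org example_repo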
22 changes: 13 additions & 9 deletions lib/galaxy/tool_util/data/bundles/models.py
@@ -17,7 +17,7 @@

from galaxy.util import (
asbool,
Element,
etree,
)

DEFAULT_VALUE_TRANSLATION_TYPE = "template"
@@ -170,22 +170,24 @@ class DataTableBundle(BaseModel):
repo_info: Optional[RepoInfo] = None


def _xml_to_data_table_output_column_move(move_elem: Element) -> DataTableBundleProcessorDataTableOutputColumnMove:
def _xml_to_data_table_output_column_move(
move_elem: etree._Element,
) -> DataTableBundleProcessorDataTableOutputColumnMove:
move_type = move_elem.get("type", "directory")
relativize_symlinks = move_elem.get(
"relativize_symlinks", False
) # TODO: should we instead always relativize links?
source_elem = move_elem.find("source")
if source_elem is None:
source_base = None
source_value = ""
source_value: Optional[str] = ""
else:
source_base = source_elem.get("base", None)
source_value = source_elem.text
target_elem = move_elem.find("target")
if target_elem is None:
target_base = None
target_value = ""
target_value: Optional[str] = ""
else:
target_base = target_elem.get("base", None)
target_value = target_elem.text
@@ -200,7 +202,7 @@ def _xml_to_data_table_output_column_move(move_elem: Element) -> DataTableBundle


def _xml_to_data_table_output_column_translation(
value_translation_elem: Element,
value_translation_elem: etree._Element,
) -> Optional[DataTableBundleProcessorDataTableOutputColumnTranslation]:
value_translation = value_translation_elem.text
if value_translation is not None:
@@ -212,7 +214,7 @@ def _xml_to_data_table_output_column_translation(
return None


def _xml_to_data_table_output_column(column_elem: Element) -> DataTableBundleProcessorDataTableOutputColumn:
def _xml_to_data_table_output_column(column_elem: etree._Element) -> DataTableBundleProcessorDataTableOutputColumn:
column_name = column_elem.get("name", None)
assert column_name is not None, "Name is required for column entry"
data_table_column_name = column_elem.get("data_table_name", column_name)
@@ -239,7 +241,9 @@ def _xml_to_data_table_output_column(column_elem: Element) -> DataTableBundlePro
)


def _xml_to_data_table_output(output_elem: Optional[Element]) -> Optional[DataTableBundleProcessorDataTableOutput]:
def _xml_to_data_table_output(
output_elem: Optional[etree._Element],
) -> Optional[DataTableBundleProcessorDataTableOutput]:
if output_elem is not None:
columns = []
for column_elem in output_elem.findall("column"):
@@ -249,7 +253,7 @@ def _xml_to_data_table_output(output_elem: Optional[Element]) -> Optional[DataTa
return None


def _xml_to_data_table(data_table_elem: Element) -> DataTableBundleProcessorDataTable:
def _xml_to_data_table(data_table_elem: etree._Element) -> DataTableBundleProcessorDataTable:
data_table_name = data_table_elem.get("name")
assert data_table_name is not None, "A name is required for a data table entry"

@@ -258,7 +262,7 @@ def _xml_to_data_table(data_table_elem: Element) -> DataTableBundleProcessorData
return DataTableBundleProcessorDataTable(name=data_table_name, output=output)


def convert_data_tables_xml(elem: Element) -> DataTableBundleProcessorDescription:
def convert_data_tables_xml(elem: etree._Element) -> DataTableBundleProcessorDescription:
undeclared_tables = asbool(elem.get("undeclared_tables", False))
data_tables = []
for data_table_elem in elem.findall("data_table"):
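The source_value and target_value annotations above exist because lxml types _Element.text as Optional[str], so a variable first bound to "" and later to .text needs an Optional[str] annotation to satisfy mypy. A short sketch of the pattern, assuming lxml and an invented <move> snippet:

from typing import Optional
from lxml import etree

move_elem = etree.fromstring("<move type='directory'><source>seqs/all.fasta</source></move>")

source_elem = move_elem.find("source")
if source_elem is None:
    # Annotate as Optional[str]: the other branch assigns .text, which may be None.
    source_value: Optional[str] = ""
else:
    source_value = source_elem.text
print(source_value)  # seqs/all.fasta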
3 changes: 2 additions & 1 deletion lib/galaxy/tool_util/toolbox/base.py
@@ -7,6 +7,7 @@
from collections import namedtuple
from errno import ENOENT
from typing import (
cast,
Any,
Dict,
List,
@@ -660,7 +661,7 @@ def _load_integrated_tool_panel_keys(self):
elif elem.tag == "section":
section = ToolSection(elem)
for section_elem in elem:
section_id = section_elem.get("id")
section_id = cast(str, section_elem.get("id"))
if section_elem.tag == "tool":
section.elems.stub_tool(section_id)
elif section_elem.tag == "workflow":
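Element.get() is typed as Optional[str], so the cast(str, ...) above asserts to mypy that the id attribute is always present in an integrated tool panel file; nothing is checked at runtime. A tiny sketch, assuming lxml and an invented element:

from typing import cast
from lxml import etree

section_elem = etree.fromstring('<tool id="cut1"/>')

# get() returns Optional[str]; cast() drops the Optional for the type
# checker when the attribute is known to be present by convention.
section_id = cast(str, section_elem.get("id"))
print(section_id.upper())  # CUT1, and mypy is happy with str methods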
8 changes: 5 additions & 3 deletions lib/galaxy/tool_util/verify/asserts/__init__.py
@@ -5,6 +5,7 @@
getmembers,
)
from tempfile import NamedTemporaryFile
from typing import List, Optional, Union

from galaxy.util import unicodify
from galaxy.util.compression_utils import get_fileobj
@@ -32,11 +33,12 @@
assertion_functions[member] = value


def verify_assertions(data: bytes, assertion_description_list, decompress=None):
def verify_assertions(data: Union[bytes, str], assertion_description_list: List[str], decompress: Optional[bool] = None) -> None:
"""This function takes a list of assertions and a string to check
these assertions against."""
if decompress:
with NamedTemporaryFile() as tmpfh:
mode = "rb" if isinstance(data, bytes) else "rt"
with NamedTemporaryFile(mode) as tmpfh:
tmpfh.write(data)
tmpfh.flush()
with get_fileobj(tmpfh.name, mode="rb", compressed_formats=None) as fh:
@@ -45,7 +47,7 @@ def verify_assertion(data: bytes, assertion_description):
verify_assertion(data, assertion_description)


def verify_assertion(data: bytes, assertion_description):
def verify_assertion(data: Union[bytes, str], assertion_description) -> None:
tag = assertion_description["tag"]
assert_function_name = "assert_" + tag
assert_function = assertion_functions.get(assert_function_name)
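verify_assertions now accepts Union[bytes, str] and picks a NamedTemporaryFile mode to match the payload. The sketch below shows one way to select a mode for a bytes-or-text payload; the write-mode strings and helper name here are illustrative assumptions, not necessarily the exact choices in the commit:

from tempfile import NamedTemporaryFile
from typing import Union

def write_payload(data: Union[bytes, str]) -> None:
    # Choose a mode that matches the payload type so no manual
    # encode/decode step is needed before handing the file to a reader.
    mode = "w+b" if isinstance(data, bytes) else "w+t"
    with NamedTemporaryFile(mode) as tmpfh:
        tmpfh.write(data)
        tmpfh.flush()
        tmpfh.seek(0)
        print(tmpfh.read())

write_payload(b"ACGT\n")   # b'ACGT\n'
write_payload("ACGT\n")    # ACGT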
0 comments on commit 261cd69