Move debug statements to info
bkieft-usa committed Feb 7, 2025
1 parent 64e5dbb commit c11c5b4
Showing 4 changed files with 12 additions and 12 deletions.
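
In practice, this commit promotes log messages around cloning and database writes from DEBUG to INFO, so they appear under a typical INFO-level logging configuration without turning on debug verbosity. A minimal standard-library sketch of that behavior (illustrative only; the logger name and setup below are assumptions, not metatlas code):

    import logging

    logging.basicConfig(level=logging.INFO)          # a common default for pipelines and notebooks
    logger = logging.getLogger("metatlas.example")   # hypothetical logger name

    logger.debug("Depositing %s to database", "obj-1")  # suppressed at the INFO threshold
    logger.info("Depositing %s to database", "obj-1")   # emitted at the INFO threshold
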
metatlas/datastructures/groups.py (4 changes: 2 additions & 2 deletions)

@@ -63,8 +63,8 @@ def filter_lcmsruns(
         )
     else:
         post_exclude = post_include
-    #for run in post_exclude:
-        #logger.debug("Run: %s", run.name)
+    for run in post_exclude:
+        logger.debug("Run: %s", run.name)
     logger.debug("After filtering, %s LCMS output files remain.", len(post_exclude))
     try:
         if len(post_exclude) == 0:

metatlas/datastructures/metatlas_objects.py (7 changes: 4 additions & 3 deletions)

@@ -116,9 +116,9 @@ def store(objects, **kwargs):
     workspace = Workspace.get_instance()
     if isinstance(objects, list):
         for obj in objects:
-            logger.debug('Depositing %s to database', obj.name)
+            logger.info('Depositing %s to database', obj.name)
     else:
-        logger.debug('Depositing %s to database', objects.name)
+        logger.info('Depositing %s to database', objects.name)
     workspace.save_objects(objects, **kwargs)


@@ -186,7 +186,7 @@ def clone(self, recursive=False):
         obj: MetatlasObject
             Cloned object.
         """
-        logger.debug('Cloning instance of %s with recursive=%s', self.__class__.__name__, recursive)
+        logger.info('Cloning instance of %s with recursive=%s', self.__class__.__name__, recursive)
         obj = self.__class__()
         for (tname, trait) in self.traits().items():
             if tname.startswith('_') or trait.metadata.get('readonly', False):
@@ -200,6 +200,7 @@ def clone(self, recursive=False):
         obj.prev_uid = self.unique_id
         obj.head_id = self.unique_id
         obj.unique_id = uuid.uuid4().hex
+        logger.info('Finished cloning %s object %s to %s', self.__class__.__name__, self.unique_id, obj.unique_id)
         return obj

     def show_diff(self, unique_id=None):

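The second clone() hunk adds an INFO message that records both the source object's unique_id and the newly generated one. A toy sketch (not metatlas code; the class and logging are simplified) of the id bookkeeping that the new log line reports:

    import uuid

    class Obj:
        def __init__(self):
            self.unique_id = uuid.uuid4().hex
            self.prev_uid = ""
            self.head_id = ""

        def clone(self):
            obj = Obj()
            obj.prev_uid = self.unique_id     # clone records the id of its source
            obj.head_id = self.unique_id      # head id is also set to the source id, as in the diff
            obj.unique_id = uuid.uuid4().hex  # fresh id for the clone
            print(f"Finished cloning {self.unique_id} to {obj.unique_id}")
            return obj

    original = Obj()
    copy = original.clone()
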
metatlas/datastructures/object_helpers.py (10 changes: 5 additions & 5 deletions)

@@ -197,7 +197,7 @@ def convert_to_double(self, table, entry):

     def save_objects(self, objects, _override=False):
         """Save objects to the database"""
-        logger.debug('Entering Workspace.save_objects')
+        logger.info('Entering Workspace.save_objects')
         if not isinstance(objects, (list, set)):
             objects = [objects]
         self._seen = dict()
@@ -209,17 +209,17 @@ def save_objects(self, objects, _override=False):
         if self._inserts:
             logger.debug('Workspace._inserts=%s', self._inserts)
         if self._updates:
-            logger.debug('Workspace._updates=%s', self._updates)
+            logger.info('Workspace._updates=%s', self._updates)
         if self._link_updates:
-            logger.debug('Workspace._link_updates=%s', self._link_updates)
+            logger.info('Workspace._link_updates=%s', self._link_updates)
         db = self.get_connection()
         db.begin()
         try:
             for (table_name, updates) in self._link_updates.items():
                 if table_name not in db:
                     continue
                 for (uid, prev_uid) in updates:
-                    logger.debug('QUERY: update `%s` set source_id = "%s" where source_id = "%s"' %
+                    logger.info('QUERY: update `%s` set source_id = "%s" where source_id = "%s"' %
                                  (table_name, prev_uid, uid))
                     db.query('update `%s` set source_id = "%s" where source_id = "%s"' %
                              (table_name, prev_uid, uid))
@@ -229,7 +229,7 @@ def save_objects(self, objects, _override=False):
                 if 'sqlite' not in self.path:
                     self.fix_table(table_name)
                 for (uid, prev_uid) in updates:
-                    logger.debug('QUERY: update `%s` set unique_id = "%s" where unique_id = "%s"' %
+                    logger.info('QUERY: update `%s` set unique_id = "%s" where unique_id = "%s"' %
                                  (table_name, prev_uid, uid))
                     db.query('update `%s` set unique_id = "%s" where unique_id = "%s"' %
                              (table_name, prev_uid, uid))

metatlas/targeted/rt_alignment.py (3 changes: 1 addition & 2 deletions)

@@ -129,7 +129,7 @@ def generate_rt_alignment_models(
     return (linear, poly, offset)


-def generate_outputs(data: MetatlasDataset, workflow: Workflow, set_parameters: dict) -> list:
+def generate_outputs(data: MetatlasDataset, workflow: Workflow, set_parameters: dict) -> None:
     """
     Generate the RT alignment models, associated atlases with relative RT values, follow up notebooks
     set_parameters contains the parameters set in the RT-Alignment notebook, before config file processing
@@ -161,7 +161,6 @@ def generate_outputs(data: MetatlasDataset, workflow: Workflow, set_parameters:
     papermill.execute_notebook(in_file_name, out_file_name, {}, kernel_name="papermill")
     copy_outputs_to_google_drive(ids)
     logger.info("RT-Alignment notebook complete. Switch to an analysis notebook to continue.")
-    return atlases

 def get_rts(metatlas_dataset: MetatlasDataset, include_atlas_rt_peak: bool = True) -> pd.DataFrame:
     """Returns RT values in DataFrame format"""
