Skip to content

Commit

Permalink
Merge branch 'master' into development
Browse files Browse the repository at this point in the history
  • Loading branch information
vsoch authored Dec 11, 2017
2 parents a9d4883 + 0ea6534 commit 44b727a
Show file tree
Hide file tree
Showing 6 changed files with 23 additions and 5 deletions.
2 changes: 1 addition & 1 deletion sendit/apps/api/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def metrics_view(request):
'QUEUE':Batch.objects.filter(status="QUEUE").count()}

response = {"timestamp":timestamp,
"data_base": base,
"data_root": base,
"data_total": len(glob("%s/*" %(base))),
"batches": batchlog}

Expand Down
2 changes: 2 additions & 0 deletions sendit/apps/main/tasks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,5 @@
from .update import (
replace_identifiers
)

from .finish import upload_storage
1 change: 0 additions & 1 deletion sendit/apps/main/tasks/finish.py
Original file line number Diff line number Diff line change
Expand Up @@ -274,4 +274,3 @@ def get_client(bucket_name, project_name):

return Client(bucket_name=bucket_name,
project=project_name)

3 changes: 1 addition & 2 deletions sendit/apps/main/tasks/get.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@
)

from .update import replace_identifiers
from .finish import upload_storage

from som.api.identifiers import Client

Expand Down Expand Up @@ -303,6 +302,7 @@ def get_identifiers(bid,study=None,run_replace_identifiers=True):
batch_ids.response = result['results']
batch_ids.ids = ids
batch_ids.save()
batch.qa['DasherFinishTime'] = time.time()
if run_replace_identifiers is True:
return replace_identifiers(bid=bid)
else:
Expand All @@ -315,7 +315,6 @@ def get_identifiers(bid,study=None,run_replace_identifiers=True):
bot.debug("Restful de-identification skipped [ANONYMIZE_RESTFUL is False]")
change_status(batch,"DONEPROCESSING")
change_status(batch.image_set.all(),"DONEPROCESSING")
return upload_storage(bid=bid)



Expand Down
3 changes: 3 additions & 0 deletions sendit/apps/main/tasks/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,9 @@ def replace_identifiers(bid, run_upload_storage=False):
batch.qa['ProcessFinishTime'] = time.time()

# We don't get here if the call above failed
change_status(batch,"DONEPROCESSING")
batch.save()

if run_upload_storage is True:
return upload_storage(bid=bid)
else:
Expand Down
17 changes: 16 additions & 1 deletion sendit/apps/main/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,7 @@ def start_queue(subfolder=None, max_count=None):
conditions of multiple workers trying to grab a job at the same time.
'''
from sendit.apps.main.tasks import import_dicomdir

contenders = Batch.objects.filter(status="QUEUE")
if len(contenders) == 0:
update_cached(subfolder)
Expand All @@ -156,7 +157,6 @@ def start_queue(subfolder=None, max_count=None):
dicom_dir = batch.logs.get('DICOM_DIR')
if dicom_dir is not None:
import_dicomdir.apply_async(kwargs={"dicom_dir":dicom_dir})
# If user supplies a count, only start first N
started +=1
if max_count is not None:
if started >= max_count:
Expand All @@ -165,6 +165,21 @@ def start_queue(subfolder=None, max_count=None):
print("Added %s tasks to the active queue." %started)


def upload_finished(batches=False, chunk_size=1000):
    '''upload finished will upload datasets with status DONEPROCESSING
    to google storage. We do this with one worker to reduce the number
    of concurrent API calls. In the future, this will be better optimized.

    Parameters
    ==========
    batches: if False (default), dispatch a single upload_storage task to
             sweep everything; if True, dispatch one task per chunk of
             batch ids so work is split across smaller jobs.
    chunk_size: number of batch ids per dispatched task when batches is True.
    '''
    from sendit.apps.main.tasks import upload_storage
    from sendit.apps.main.tasks.utils import chunks

    if batches is False:
        upload_storage.apply_async()
    else:
        # values_list fetches only the id column instead of hydrating
        # full Batch instances we would immediately discard
        batch_ids = list(Batch.objects.filter(status="DONEPROCESSING")
                                      .values_list("id", flat=True))
        for subset in chunks(batch_ids, chunk_size):
            upload_storage.apply_async(kwargs={"batch_ids": subset})


def get_contenders(base,current=None, filters=None):
''' get contenders will return a full set of contender folders from
Expand Down

0 comments on commit 44b727a

Please sign in to comment.