no copyfile
antgonza committed Dec 20, 2024
1 parent 56eb4f3 commit e83913d
Showing 1 changed file with 23 additions and 14 deletions.
37 changes: 23 additions & 14 deletions qiita_pet/handlers/download.py
@@ -38,7 +38,6 @@
from tempfile import mkdtemp
from zipfile import ZipFile
from io import BytesIO
from shutil import copyfile


class BaseHandlerDownload(BaseHandler):
@@ -383,6 +382,10 @@ class DownloadDataReleaseFromPrep(BaseHandlerDownload):
@coroutine
@execute_as_transaction
def get(self, prep_template_id):
""" This method constructs an on the fly ZIP with all the files
required for a data-prep release/data-delivery. Mainly sample, prep
info, bioms and coverage
"""
user = self.current_user
if user.level not in ('admin', 'web-lab admin'):
raise HTTPError(403, reason="%s doesn't have access to download "
Expand Down Expand Up @@ -411,6 +414,7 @@ def get(self, prep_template_id):
'',
]

# helper dict mapping biom filenames to user/human friendly names
human_names = {
'ec.biom': 'KEGG Enzyme (EC)',
'per-gene.biom': 'Per gene Predictions',
@@ -421,18 +425,21 @@
'rna_copy_counts.biom': 'RNA copy counts'
}

# sample-info creation
fn = join(td, f'sample_information_from_prep_{pid}.tsv')
readme.append(f'Sample information: {basename(fn)}')
files.append(fn)
files.append([fn, basename(fn)])
st.to_dataframe(samples=list(pt)).to_csv(fn, sep='\t')

# prep-info creation
fn = join(td, f'prep_information_{pid}.tsv')
readme.append(f'Prep information: {basename(fn)}')
files.append(fn)
files.append([fn, basename(fn)])
pt.to_dataframe().to_csv(fn, sep='\t')

readme.append('')

# finding the bioms to be added
bioms = dict()
coverages = None
for a in Study(sid).artifacts(artifact_type='BIOM'):
@@ -447,25 +454,27 @@
if biom is None:
continue
biom_fn = basename(biom['fp'])
# there is a small but real chance that the same prep has artifacts
# with the same biom filename, so keep the latest one
if biom_fn not in bioms:
bioms[biom_fn] = [a, biom]
else:
if getctime(biom['fp']) > getctime(bioms[biom_fn][1]['fp']):
bioms[biom_fn] = [a, biom]

# once we have all the bioms, add them to the list of files to zip
# and record in the readme the biom details and all the processing
for fn, (a, fp) in bioms.items():
aname = basename(fp["fp"])
nname = f'{a.id}_{aname}'
nfile = join(td, nname)
copyfile(fp['fp'], nfile)
files.append(nfile)
files.append([fp['fp'], nname])

hname = ''
if aname in human_names:
hname = human_names[aname]
readme.append(f'{nname}\t{hname}')

for an in a.ancestors.nodes():
for an in set(a.ancestors.nodes()):
p = an.processing_parameters
if p is not None:
c = p.command
@@ -476,22 +485,22 @@
pd = p.dump()
readme.append(f'\t{cn}\t{sn}\t{sv}\t{pd}')

# if a coverage was found, add it to the list of files
if coverages is not None:
aname = basename(coverages)
nfile = join(td, aname)
copyfile(coverages, nfile)
files.append(nfile)
fn = basename(coverages)
readme.append(f'{fn}\tcoverage files')
files.append([coverages, fn])

fn = join(td, 'README.txt')
with open(fn, 'w') as fp:
fp.write('\n'.join(readme))
files.append(fn)
files.append([fn, basename(fn)])

zp_fn = f'data_release_{pid}_{date}.zip'
zp = BytesIO()
with ZipFile(zp, 'w') as zipf:
for fp in files:
zipf.write(fp, basename(fp))
for fp, fn in files:
zipf.write(fp, fn)

self.set_header('Content-Type', 'application/zip')
self.set_header("Content-Disposition", f"attachment; filename={zp_fn}")

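For context, the net effect of the change is that files are now tracked as [source_path, archive_name] pairs and written straight into the in-memory ZIP, instead of first being copied into the temporary directory with shutil.copyfile. Below is a minimal sketch of that pattern (hypothetical helper names and placeholder paths, not Qiita's actual API), including the getctime comparison the handler uses to keep only the newest file when two artifacts share a biom filename:

from io import BytesIO
from os.path import basename, getctime
from zipfile import ZipFile


def newest_per_name(paths):
    # keep only the most recently created file for each basename,
    # mirroring the duplicate-biom handling in the diff above
    chosen = {}
    for fp in paths:
        name = basename(fp)
        if name not in chosen or getctime(fp) > getctime(chosen[name]):
            chosen[name] = fp
    return chosen


def build_zip(pairs):
    # pairs is a list of [source_path, archive_name]; each file is read
    # from its original location and never copied to a temp dir
    buf = BytesIO()
    with ZipFile(buf, 'w') as zipf:
        for src, arcname in pairs:
            zipf.write(src, arcname)
    return buf.getvalue()

In the handler, the bytes returned by a sketch like build_zip would be what gets written to the response after the Content-Type and Content-Disposition headers are set.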