# HG changeset patch
# User galaxytrakr
# Date 1774555532 0
# Node ID 4f93334e2a83887bbb582e3ab4587dbe8fbcaa0b
planemo upload commit 494109a0410b49a6d05b5b0aa61639b16893bd04
diff -r 000000000000 -r 4f93334e2a83 data_manager/data_manager_humann2_download.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager_humann2_download.py Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+#
+# Data manager for reference data for the 'humann2' Galaxy tools
+import datetime
+import json
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+
# Map each downloadable HUMAnN2 build identifier to the human-readable
# display name used in the Galaxy data table entries.
HUMANN2_REFERENCE_DATA = dict(
    full="Full",
    DEMO="Demo",
    uniref50_diamond="Full UniRef50",
    uniref50_ec_filtered_diamond="EC-filtered UniRef50",
    uniref50_GO_filtered_rapsearch2="GO filtered UniRef50 for rapsearch2",
    uniref90_diamond="Full UniRef90",
    uniref90_ec_filtered_diamond="EC-filtered UniRef90",
    DEMO_diamond="Demo",
)
+
+
+# Utility functions for interacting with Galaxy JSON
def read_input_json(jsonfile):
    """Load the JSON file supplied by the data manager tool.

    Returns a tuple ``(param_dict, extra_files_path)``:
    ``param_dict`` is the arbitrary dictionary of parameters that were
    input into the tool, and ``extra_files_path`` is the directory where
    output files must be placed for the receiving data manager to pick
    them up.

    NB the directory named by ``extra_files_path`` does not exist
    initially; it is the caller's job to create it if necessary.
    """
    with open(jsonfile) as fh:
        config = json.load(fh)
    param_dict = config['param_dict']
    extra_files_path = config['output_data'][0]['extra_files_path']
    return (param_dict, extra_files_path)
+
+
+# Utility functions for creating data table dictionaries
+#
+# Example usage:
+# >>> d = create_data_tables_dict()
+# >>> add_data_table(d,'my_data')
+# >>> add_data_table_entry(d,'my_data',dict(dbkey='hg19',value='human'))
+# >>> add_data_table_entry(d,'my_data',dict(dbkey='mm9',value='mouse'))
+# >>> print(json.dumps(d))
def create_data_tables_dict():
    """Return a fresh dictionary for collecting data table information.

    The returned dictionary is intended to be populated with
    'add_data_table' and 'add_data_table_entry' and then converted to
    JSON to be sent back to the data manager.
    """
    return {'data_tables': {}}
+
+
def add_data_table(d, table):
    """Register an empty data table named 'table' in 'd'.

    Creates (or resets) the placeholder list for entries belonging to
    the data table called 'table'.
    """
    tables = d['data_tables']
    tables[table] = []
+
+
def add_data_table_entry(d, table, entry):
    """Append 'entry' to the data table 'table' inside 'd'.

    'entry' should be a dictionary whose keys are the column names of
    the data table.

    Raises an exception if the named data table doesn't exist.
    """
    try:
        entries = d['data_tables'][table]
    except KeyError:
        raise Exception("add_data_table_entry: no table '%s'" % table)
    entries.append(entry)
+
+
def download_humann2_db(data_tables, table_name, database, build, target_dir):
    """Download a HUMAnN2 database and record it in a data table.

    Runs 'humann2_databases' to fetch the requested database build,
    relocates the downloaded data into a per-build directory under
    'target_dir', and appends a corresponding entry to the named data
    table.

    The 'data_tables' dictionary should have been created using the
    'create_data_tables_dict' and 'add_data_table' functions.

    Arguments:
      data_tables: a dictionary containing the data table info
      table_name: name of the table
      database: database to download (chocophlan or uniref)
      build: build of the database to download
      target_dir: directory to put copy or link to the data file

    Raises:
      subprocess.CalledProcessError: if the download command fails.
    """
    # Unique row value includes today's date so repeat downloads of the
    # same build produce distinguishable entries.
    value = "{}-{}-{}".format(database, build, datetime.date.today().isoformat())
    db_target_dir = os.path.join(target_dir, database)
    build_target_dir = os.path.join(db_target_dir, build)
    # Pass an argument vector with shell=False so the database/build/path
    # values are handed to the command verbatim and cannot be mangled
    # (or abused) by shell interpretation.
    cmd = [
        "humann2_databases",
        "--download", database, build, db_target_dir,
        "--update-config", "no",
    ]
    subprocess.check_call(cmd)
    # humann2_databases unpacks into <db_target_dir>/<database>; move that
    # into a per-build directory so multiple builds can coexist.
    shutil.move(os.path.join(db_target_dir, database), build_target_dir)
    add_data_table_entry(
        data_tables,
        table_name,
        dict(
            dbkey=build,
            value=value,
            # Fall back to the raw build id for builds that are not in
            # the known-names map instead of failing with a KeyError.
            name=HUMANN2_REFERENCE_DATA.get(build, build),
            path=build_target_dir))
+
+
if __name__ == "__main__":
    print("Starting...")

    # Read command line: the single positional argument is the Galaxy
    # data manager JSON file (read for parameters, then overwritten with
    # the resulting data table entries).
    parser = optparse.OptionParser(description='Download HUMAnN2 database')
    parser.add_option('--database', help="Database name")
    parser.add_option('--build', help="Build of the database")
    options, args = parser.parse_args()
    print("args : %s" % args)

    # Check for JSON file
    if len(args) != 1:
        # Fix: terminate the error message with a newline so it does not
        # run into subsequent terminal output.
        sys.stderr.write("Need to supply JSON file name\n")
        sys.exit(1)

    jsonfile = args[0]

    # Read the input JSON
    params, target_dir = read_input_json(jsonfile)

    # Make the target directory (per read_input_json, the
    # extra_files_path directory does not exist initially)
    print("Making %s" % target_dir)
    os.mkdir(target_dir)

    # Set up data tables dictionary
    data_tables = create_data_tables_dict()

    # 'chocophlan' is the nucleotide database; anything else (e.g.
    # 'uniref') is recorded as a protein database.
    if options.database == "chocophlan":
        table_name = 'humann2_nucleotide_database'
    else:
        table_name = 'humann2_protein_database'
    add_data_table(data_tables, table_name)

    # Fetch data from specified data sources
    download_humann2_db(
        data_tables,
        table_name,
        options.database,
        options.build,
        target_dir)

    # Write output JSON back over the input file for Galaxy to consume
    print("Outputting JSON")
    with open(jsonfile, 'w') as fh:
        json.dump(data_tables, fh, sort_keys=True)
    print("Done.")
\ No newline at end of file
diff -r 000000000000 -r 4f93334e2a83 data_manager/data_manager_humann2_download.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager_humann2_download.xml Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,56 @@
+
+ Download HUMAnN2 database
+
+ 0.11.1
+
+
+ quay.io/galaxytrakr/humann2:0.11.1
+
+
+
+
+
+
+ python '$__tool_directory__/data_manager_humann2_download.py'
+ --database '$db.database'
+ --build $db.build
+ '${out_file}'
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+This tool downloads the HUMAnN2 databases.
+
+Read more about the tool at http://huttenhower.sph.harvard.edu/humann2/manual .
+
+
+ 10.1371/journal.pcbi.1003153
+
+
\ No newline at end of file
diff -r 000000000000 -r 4f93334e2a83 data_manager_conf.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,33 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff -r 000000000000 -r 4f93334e2a83 tool-data/humann2_nucleotide_database.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/humann2_nucleotide_database.loc.sample Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,4 @@
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of metagenomics files.
+#file has this format (white space characters are TAB characters)
+#02_16_2014 ChocoPhlAn chocophlan /path/to/data
\ No newline at end of file
diff -r 000000000000 -r 4f93334e2a83 tool-data/humann2_protein_database.loc.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool-data/humann2_protein_database.loc.sample Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,9 @@
+
+#This is a sample file distributed with Galaxy that enables tools
+#to use a directory of metagenomics files.
+#file has this format (white space characters are TAB characters)
+#02_16_2014 Full UniRef50 uniref50_diamond /path/to/data
+#02_16_2014 EC-filtered UniRef50 uniref50_ec_filtered_diamond /path/to/data
+#02_16_2014 GO filtered UniRef50 for rapsearch2 uniref50_GO_filtered_rapsearch2 /path/to/data
+#02_16_2014 Full UniRef90 uniref90_diamond /path/to/data
+#02_16_2014 EC-filtered UniRef90 uniref90_ec_filtered_diamond /path/to/data
\ No newline at end of file
diff -r 000000000000 -r 4f93334e2a83 tool_data_table_conf.xml.sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample Thu Mar 26 20:05:32 2026 +0000
@@ -0,0 +1,10 @@
+
+