#!/usr/bin/env python3

# Kranti Konganti

import argparse
import inspect
import logging
import os
import pickle
import pprint
import re
from collections import defaultdict


# Multiple inheritance for pretty printing of help text.
class MultiArgFormatClasses(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
    pass


# Main
def main() -> None:
    """
    This script works only in the context of the `cronology_db` Nextflow workflow.
    It takes a UNIX path to a directory containing the following files:
    1. PDG metadata file (Ex: `PDG000000043.204.metadata.tsv`)
    2. PDG SNP Cluster metadata file (Ex: `PDG000000043.204.reference_target.cluster_list.tsv`)
    3. A list of possibly downloadable assembly accessions (one per line) from the metadata file.
    It then generates a pickled file with the relevant metadata columns mentioned with the -cols option.
    """

    # Set logging.
    logging.basicConfig(
        format="\n" + "=" * 55 + "\n%(asctime)s - %(levelname)s\n" + "=" * 55 + "\n%(message)s\n\n",
        level=logging.DEBUG,
    )

    # Debug print.
    ppp = pprint.PrettyPrinter(width=55)
    prog_name = os.path.basename(inspect.stack()[0].filename)

    parser = argparse.ArgumentParser(
        prog=prog_name, description=main.__doc__, formatter_class=MultiArgFormatClasses
    )

    required = parser.add_argument_group("required arguments")

    required.add_argument(
        "-pdg_dir",
        dest="pdg_dir",
        default=False,
        required=True,
        help="Absolute UNIX path to the directory containing the following files.\nEx:"
        + "\n1. PDG000000043.204.metadata.tsv"
        + "\n2. PDG000000043.204.reference_target.cluster_list.tsv"
        + "\n3. A file of assembly accessions, one per line, parsed out from"
        + "\n   the metadata file.",
    )
    parser.add_argument(
        "-mlst",
        dest="mlst_results",
        required=False,
        help="Absolute UNIX path to the MLST results file.\nIf MLST results exist for an accession, they"
        + "\nwill be included in the index.",
    )
    parser.add_argument(
        "-pdg_meta_pat",
        dest="pdg_meta_pat",
        default=r"PDG\d+\.\d+\.metadata\.tsv",
        required=False,
        help="The pattern to be used to search for the PDG metadata\nfile.",
    )
    parser.add_argument(
        "-pdg_snp_meta_pat",
        dest="pdg_snp_meta_pat",
        default=r"PDG\d+\.\d+\.reference_target\.cluster_list\.tsv",
        required=False,
        help="The pattern to be used to search for the PDG SNP Cluster metadata\nfile.",
    )
    parser.add_argument(
        "-pdg_accs_filename_pat",
        dest="pdg_accs_fn_pat",
        default="accs_all.txt",
        required=False,
        help="The filename to look for where all the parsed GC[AF] accessions are stored,\n"
        + "one per line.",
    )
    parser.add_argument(
        "-cols",
        dest="metadata_cols",
        default="epi_type,collected_by,collection_date,host,"
        + "\nhost_disease,isolation_source,outbreak,sample_name,scientific_name,serovar,"
        + "\nsource_type,strain,computed_types,target_acc",
        required=False,
        help="The data in these metadata columns will be indexed for each\nisolate.",
    )
    parser.add_argument(
        "-fs",
        dest="force_write_pick",
        action="store_true",
        required=False,
        help="By default, the pickle file named *.IDXD_PDG_METAD.pickle"
        + "\nis written to the CWD. If the file exists, the program will not overwrite"
        + "\nit and will exit. Use the -fs option to overwrite.",
    )
    parser.add_argument(
        "-op",
        dest="out_prefix",
        default="IDXD_PDG_METAD",
        help="Set the output file prefix for the indexed PDG metadata.",
    )
    parser.add_argument(
        "-pfs",
        dest="pdg_meta_fs",
        default="\t",
        help="Change the field separator of the PDG metadata file.",
    )

    args = parser.parse_args()
    pdg_dir = os.path.abspath(args.pdg_dir)
    mcols = args.metadata_cols
    f_write_pick = args.force_write_pick
    out_prefix = args.out_prefix
    pdg_meta_fs = args.pdg_meta_fs
    mlst_res = args.mlst_results
    acc_pat = re.compile(r"^GC[AF]_\d+\.?\d*")
    mcols_pat = re.compile(r"^[\w,\n]+$")
    pdg_meta_pat = re.compile(args.pdg_meta_pat)
    pdg_snp_meta_pat = re.compile(args.pdg_snp_meta_pat)
    pdg_accs_fn_pat = re.compile(args.pdg_accs_fn_pat)
    target_acc_col = 41
    acc_col = 9
    num_accs_check = list()
    mlst_sts = dict()
    acceptable_num_mlst_cols = 10
    mlst_st_col = 2
    mlst_acc_col = 0
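
    # NOTE (assumed layout, not verified against every PDG release): the
    # indices above are 0-based column positions. `acc_col` points at the
    # assembly accession (GCA_/GCF_) column and `target_acc_col` at the
    # `target_acc` column of the PDG metadata file, while the `mlst_*`
    # indices refer to the tab-separated MLST results file.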

    # Basic checks

    if os.path.exists(pdg_dir) and os.path.isdir(pdg_dir):
        pdg_meta_file = [f for f in os.listdir(pdg_dir) if pdg_meta_pat.match(f)]
        pdg_snp_meta_file = [f for f in os.listdir(pdg_dir) if pdg_snp_meta_pat.match(f)]
        pdg_acc_all = [f for f in os.listdir(pdg_dir) if pdg_accs_fn_pat.match(f)]
        req_files = [len(pdg_meta_file), len(pdg_snp_meta_file), len(pdg_acc_all)]
        if any(x > 1 for x in req_files):
            logging.error(
                f"Directory {os.path.basename(pdg_dir)} contains"
                + "\nmultiple files matching the search pattern."
            )
            exit(1)
        elif any(x == 0 for x in req_files):
            logging.error(
                f"Directory {os.path.basename(pdg_dir)} does not contain"
                + "\nany files matching the following file patterns:"
                + f"\n{pdg_meta_pat.pattern}"
                + f"\n{pdg_snp_meta_pat.pattern}"
                + f"\n{pdg_accs_fn_pat.pattern}"
            )
            exit(1)
        pdg_meta_file = os.path.join(pdg_dir, "".join(pdg_meta_file))
        pdg_snp_meta_file = os.path.join(pdg_dir, "".join(pdg_snp_meta_file))
        pdg_acc_all = os.path.join(pdg_dir, "".join(pdg_acc_all))
    else:
        logging.error(f"Directory path {pdg_dir} does not exist.")
        exit(1)

    if mlst_res and not (os.path.exists(mlst_res) and os.path.getsize(mlst_res) > 0):
        logging.error(
            f"Requested to index MLST results, but the file {os.path.basename(mlst_res)}"
            + "\ndoes not exist or the file is empty."
        )
        exit(1)
    elif mlst_res:
        with open(mlst_res, "r") as mlst_res_fh:
            header = mlst_res_fh.readline()
            mlst_res_has_10_cols = False

            for line in mlst_res_fh:
                cols = line.strip().split("\t")
                acc = acc_pat.findall(cols[mlst_acc_col])
                if len(acc) > 1:
                    logging.error(f"Found more than 1 accession in column:\n{cols[mlst_acc_col]}\n")
                    exit(1)
                else:
                    acc = "".join(acc)
                if len(cols) == acceptable_num_mlst_cols and re.match(r"\d+|-", cols[mlst_st_col]):
                    mlst_res_has_10_cols = True
                    if re.match(r"-", cols[mlst_st_col]):
                        mlst_sts[acc] = "NULL"
                    else:
                        mlst_sts[acc] = cols[mlst_st_col]

            if not mlst_res_has_10_cols:
                logging.error(
                    "Requested to incorporate MLST STs, but the file"
                    + f"\n{os.path.basename(mlst_res)}"
                    + "\ndoes not have 10 columns in all rows."
                )
                exit(1)
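
    # The MLST parsing above assumes a tab-separated results layout of
    # FILE, SCHEME, ST, plus seven allele columns (10 columns total), in
    # the style of Torsten Seemann's `mlst` tool. Illustrative row
    # (hypothetical accession and loci):
    #
    #   GCA_000000000.1.fasta  cronobacter  4  locus1(1)  ...  locus7(2)
    #
    # An ST of "-" is recorded as "NULL" in the index.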

    with open(pdg_acc_all, "r") as pdg_acc_all_fh:
        for a in pdg_acc_all_fh:
            num_accs_check.append(a.strip())

    if not mcols_pat.match(mcols):
        logging.error(
            "Supplied column names should only be"
            + "\nalphanumeric (including _) separated by a comma."
        )
        exit(1)
    else:
        mcols = re.sub("\n", "", mcols).split(",")

    if (
        pdg_snp_meta_file
        and os.path.exists(pdg_snp_meta_file)
        and os.path.getsize(pdg_snp_meta_file) > 0
    ):
        acc2snp = defaultdict()
        acc2meta = defaultdict(defaultdict)
        init_pickled_sero = os.path.join(os.getcwd(), out_prefix + ".pickle")

        if (
            os.path.exists(init_pickled_sero)
            and os.path.getsize(init_pickled_sero)
            and not f_write_pick
        ):
            logging.error(
                f"File {os.path.basename(init_pickled_sero)} already exists in\n{os.getcwd()}\n"
                + "Use -fs to force overwrite it."
            )
            exit(1)
        with open(pdg_snp_meta_file, "r") as snp_meta:
            header = snp_meta.readline()
            skipped_acc2snp = set()
            for line in snp_meta:
                cols = line.strip().split(pdg_meta_fs)
                if len(cols) != 4:
                    logging.error(
                        f"The metadata file {pdg_snp_meta_file} is malformed.\n"
                        + f"Expected 4 columns. Got {len(cols)} columns.\n"
                    )
                    exit(1)

                if re.match("NULL", cols[3]):
                    skipped_acc2snp.add(f"Isolate {cols[1]} has no genome accession: {cols[3]}")
                elif not acc_pat.match(cols[3]):
                    logging.error(
                        "Did not find an accession in either field number 4\n"
                        + "or field number 10 of column 4."
                        + f"\nLine: {line}"
                    )
                    exit(1)
                elif not re.match("NULL", cols[3]):
                    acc2snp[cols[3]] = cols[0]

            if len(skipped_acc2snp) > 0:
                logging.info(
                    f"While indexing\n{os.path.basename(pdg_snp_meta_file)},"
                    + "\nthese isolates were skipped:\n\n"
                    + "\n".join(skipped_acc2snp)
                )
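
        # At this point, acc2snp maps an assembly accession to its SNP
        # Cluster ID, e.g. (illustrative values):
        #
        #   acc2snp["GCA_000000000.1"] == "PDS000000000.1"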

        with open(pdg_meta_file, "r") as pdg_meta:
            header = pdg_meta.readline().strip().split(pdg_meta_fs)
            user_req_cols = [mcol_i for mcol_i, mcol in enumerate(header) if mcol in mcols]
            cols_not_found = [mcol for mcol in mcols if mcol not in header]
            null_wgs_accs = set()
            if len(cols_not_found) > 0:
                logging.error(
                    "The following columns do not exist in the"
                    + f"\nmetadata file [ {os.path.basename(pdg_meta_file)} ]:\n"
                    + "\n".join(cols_not_found)
                )
                exit(1)

            for line in pdg_meta:
                cols = line.strip().split(pdg_meta_fs)
                pdg_assm_acc = cols[acc_col]
                if not acc_pat.match(pdg_assm_acc):
                    null_wgs_accs.add(
                        f"Isolate {cols[target_acc_col]} has no genome accession: {pdg_assm_acc}"
                    )
                    continue

                if pdg_assm_acc in mlst_sts:
                    acc2meta[pdg_assm_acc].setdefault("mlst_sequence_type", []).append(
                        str(mlst_sts[pdg_assm_acc])
                    )

                for col in user_req_cols:
                    acc2meta[pdg_assm_acc].setdefault(header[col], []).append(str(cols[col]))

            if len(null_wgs_accs) > 0:
                logging.info(
                    f"While indexing\n{os.path.basename(pdg_meta_file)},"
                    + "\nthese isolates were skipped:\n\n"
                    + "\n".join(null_wgs_accs)
                )

        with open(init_pickled_sero, "wb") as write_pickled_sero:
            pickle.dump(file=write_pickled_sero, obj=acc2meta)
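
        # A downstream consumer can restore the index later (a minimal
        # sketch; the actual consumer lives elsewhere in the cronology_db
        # workflow):
        #
        #   with open("IDXD_PDG_METAD.pickle", "rb") as fh:
        #       acc2meta = pickle.load(fh)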

        if len(num_accs_check) != len(acc2meta):
            logging.error(
                "Failed the accession count check."
                + f"\nExpected {len(num_accs_check)} accessions but got {len(acc2meta)}."
            )
            exit(1)
        else:
            logging.info(
                f"Number of valid accessions: {len(num_accs_check)}"
                + f"\nNumber of accessions indexed: {len(acc2meta)}"
                + f"\nNumber of accessions participating in any of the SNP Clusters: {len(acc2snp)}"
                + f"\n\nCreated the pickle file for\n{os.path.basename(pdg_meta_file)}."
                + "\nThis was the only requested function."
            )

        exit(0)
    elif pdg_meta_file and not (
        os.path.exists(pdg_meta_file) and os.path.getsize(pdg_meta_file) > 0
    ):
        logging.error(
            "Requested to create a pickle from metadata, but\n"
            + f"the file {os.path.basename(pdg_meta_file)} is empty or\ndoes not exist!"
        )
        exit(1)


if __name__ == "__main__":
    main()