diff snp-cache.py @ 41:9022b00a9198
planemo upload commit 0399c6bd696435a7a99d8d8b4c237e5a78ee5856-dirty
author | jpayne
---|---
date | Thu, 01 Mar 2018 15:37:04 -0500
parents | 6adaecff5f2b
children | 11296a86e01b
```diff
--- a/snp-cache.py	Wed Feb 14 13:10:23 2018 -0500
+++ b/snp-cache.py	Thu Mar 01 15:37:04 2018 -0500
@@ -95,17 +95,17 @@
 def main(table, id, command=None, output=None, *a, **k):
     id = id.strip()
     table = table.strip()
-    name = f"{table}/{id}"
+    name = "{table}/{id}".format(**locals())
     with open(output, 'wb') as output_f:
         #lookup ID in table and get a FH to the resource
         try:
             api_key = os.environ.get('AWS_API_KEY', '')
             s3 = boto3.resource('s3').Bucket(CACHE_NAMESPACE)
             s3.download_fileobj(name, output_f)
-            logging.getLogger('snp-cache.cache').info(f"cache hit on {name}, retrieved.")
+            logging.getLogger('snp-cache.cache').info("cache hit on {name}, retrieved.".format(**locals()))
         except (DataNotFoundError, NoCredentialsError, BotoCoreError, ClientError) as e:
             if type(e) is DataNotFoundError:
-                logging.getLogger('snp-cache.cache').info(f"cache miss on {name}")
+                logging.getLogger('snp-cache.cache').info("cache miss on {name}".format(**locals()))
             else:
                 logging.getLogger('snp-cache.cache').error(e)
             #if we couldn't find the data, we need to run the command to generate it
```
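The hunk replaces Python 3.6+ f-strings with equivalent `str.format(**locals())` calls, presumably so the script also runs on interpreters older than 3.6 that lack f-string support (the diff itself does not state the motivation). Below is a minimal, self-contained sketch of the S3 cache lookup this hunk touches, written in the post-change formatting style; the bucket name is a placeholder rather than the repository's real `CACHE_NAMESPACE`, and the `fetch_from_cache` helper is illustrative, not a function from snp-cache.py.

```python
# A minimal sketch of the S3-backed cache lookup that the hunk modifies.
# Assumes boto3/botocore are installed and AWS credentials are configured.
import logging

import boto3
from botocore.exceptions import (BotoCoreError, ClientError,
                                 DataNotFoundError, NoCredentialsError)

# Placeholder value; the real CACHE_NAMESPACE is defined elsewhere in snp-cache.py.
CACHE_NAMESPACE = "example-snp-cache"

log = logging.getLogger("snp-cache.cache")


def fetch_from_cache(table, id, output):
    """Copy the cached object named '<table>/<id>' into the output file.

    Returns True on a cache hit, False on a miss or any other S3 error.
    (Illustrative helper, not part of snp-cache.py.)
    """
    # str.format(**locals()) works on interpreters older than 3.6,
    # unlike the f-string it replaces in the diff.
    name = "{table}/{id}".format(**locals())
    with open(output, "wb") as output_f:
        try:
            bucket = boto3.resource("s3").Bucket(CACHE_NAMESPACE)
            bucket.download_fileobj(name, output_f)
            log.info("cache hit on {name}, retrieved.".format(**locals()))
            return True
        except (DataNotFoundError, NoCredentialsError, BotoCoreError, ClientError) as e:
            # Mirrors the original's handling; note that a missing S3 key
            # normally surfaces as a ClientError rather than DataNotFoundError.
            if type(e) is DataNotFoundError:
                log.info("cache miss on {name}".format(**locals()))
            else:
                log.error(e)
            return False
```

One trade-off of `str.format(**locals())` is that it passes every local variable to the call; naming only the fields actually used, e.g. `"{table}/{id}".format(table=table, id=id)`, is a slightly more explicit alternative with the same output.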