changeset 64:b5cf2ec0c540 tip

planemo upload
author jpayne
date Sat, 29 Jun 2024 06:56:11 -0400
parents fb44b003e29b
children
files snp-cache.py
diffstat 1 files changed, 41 insertions(+), 41 deletions(-)
--- a/snp-cache.py	Fri Jun 28 23:03:53 2024 -0400
+++ b/snp-cache.py	Sat Jun 29 06:56:11 2024 -0400
@@ -23,22 +23,22 @@
 class NoCacheNoCommandException(Exception):
 	pass
 
-@contextlib.contextmanager
-def open(filename=None, mode='r'):
-	"basically a wrapper to make sys.stdout usable where there's a contextmanager"
-	writer = sys.stdout.buffer
-	try:
-		if filename:
-			writer = io.FileIO(filename, mode)
-			if 'r' in mode:
-				writer = io.BufferedReader(writer)
-			elif 'w' in mode:
-				writer = io.BufferedWriter(writer)
-		yield writer
-		writer.flush()
-	finally:
-		if filename:
-			writer.close()
+# @contextlib.contextmanager
+# def open(filename=None, mode='r'):
+# 	"basically a wrapper to make sys.stdout usable where there's a contextmanager"
+# 	writer = sys.stdout.buffer
+# 	try:
+# 		if filename:
+# 			writer = io.FileIO(filename, mode)
+# 			if 'r' in mode:
+# 				writer = io.BufferedReader(writer)
+# 			elif 'w' in mode:
+# 				writer = io.BufferedWriter(writer)
+# 		yield writer
+# 		writer.flush()
+# 	finally:
+# 		if filename:
+# 			writer.close()
 
 # class stream_over(io.IOBase):
 # 	"a file-like object that works as a tee, for API's that accept a file-like"
@@ -85,11 +85,11 @@
 # 	multistream.flush()
 # 	multistream.close()
 
-def stream_to(input_stream, output_stream):
-	for i, line in enumerate(input_stream.readlines()):
-		if i < 8:
-			logging.getLogger('strm').info(str(line[:70]))
-		output_stream.write(line)
+# def stream_to(input_stream, output_stream):
+# 	for i, line in enumerate(input_stream.readlines()):
+# 		if i < 8:
+# 			logging.getLogger('strm').info(str(line[:70]))
+# 		output_stream.write(line)
 		
 
 
@@ -98,29 +98,29 @@
 	table = table.strip()
 	name = "{table}/{id}".format(**locals())
 	with open(output, 'wb') as output_f:
-		#lookup ID in table and get a FH to the resource
-		try:
-			import boto3
-			api_key = os.environ.get('AWS_API_KEY', '')
-			s3 = boto3.resource('s3').Bucket(CACHE_NAMESPACE)
-			s3.download_fileobj(name, output_f)
-			logging.getLogger('snp-cache.cache').info("cache hit on {name}, retrieved.".format(**locals()))
-		except Exception as e:
-			if type(e) is not ImportError:
-				logging.getLogger('snp-cache.cache').info("cache miss on {name}".format(**locals()))
-			else:
-				logging.getLogger('snp-cache.cache').error(e)
-			#if we couldn't find the data, we need to run the command to generate it
-			if not command:
-				raise NoCacheNoCommandException("No cached result for this id, and no command given to generate.")
-			logging.getLogger('snp-cache.cmd').info(command)
-			# sub = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-			# cached, err = sub.communicate()
-			# cached, err = io.BytesIO(cached), io.BytesIO(err)
+		# #lookup ID in table and get a FH to the resource
+		# try:
+		# 	import boto3
+		# 	api_key = os.environ.get('AWS_API_KEY', '')
+		# 	s3 = boto3.resource('s3').Bucket(CACHE_NAMESPACE)
+		# 	s3.download_fileobj(name, output_f)
+		# 	logging.getLogger('snp-cache.cache').info("cache hit on {name}, retrieved.".format(**locals()))
+		# except Exception as e:
+		# 	if type(e) is not ImportError:
+		# 		logging.getLogger('snp-cache.cache').info("cache miss on {name}".format(**locals()))
+		# 	else:
+		# 		logging.getLogger('snp-cache.cache').error(e)
+		# 	#if we couldn't find the data, we need to run the command to generate it
+		# 	if not command:
+		# 		raise NoCacheNoCommandException("No cached result for this id, and no command given to generate.")
+		# 	logging.getLogger('snp-cache.cmd').info(command)
+		# 	# sub = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+		# 	# cached, err = sub.communicate()
+		# 	# cached, err = io.BytesIO(cached), io.BytesIO(err)
 			try:
 				cached = subprocess.check_output(command, shell=True)
 				try:
-					s3.upload_fileobj(BytesIO(cached), name)
+					pass # s3.upload_fileobj(BytesIO(cached), name)
 				except Exception as e:
 					logging.getLogger('snp-cache.cache').error('Error writing to cache:')
 					logging.getLogger('snp-cache.cache').error(e)
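
Note (not part of the changeset): with the boto3 lookup commented out, the cache check above now always falls through to running the command. For reference, a minimal sketch of the cache-or-generate pattern the disabled code implemented, assuming a boto3 Bucket resource is passed in; the function name and bucket argument are illustrative, not taken from snp-cache.py:

import logging
import subprocess
from io import BytesIO

class NoCacheNoCommandException(Exception):
	pass

def cache_or_generate(bucket, name, command, output_path):
	# Try the S3 cache first; on a miss, run the command and repopulate the cache.
	log = logging.getLogger('snp-cache.cache')
	with open(output_path, 'wb') as output_f:
		try:
			bucket.download_fileobj(name, output_f)
			log.info("cache hit on %s, retrieved.", name)
			return
		except Exception:
			log.info("cache miss on %s", name)
		if not command:
			raise NoCacheNoCommandException("No cached result for this id, and no command given to generate.")
		cached = subprocess.check_output(command, shell=True)
		output_f.write(cached)
		try:
			bucket.upload_fileobj(BytesIO(cached), name)
		except Exception as e:
			log.error("Error writing to cache:")
			log.error(e)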