jpayne@13
|
1 #! /usr/bin/env python
|
jpayne@0
|
2
|
jpayne@42
|
3 from __future__ import print_function
|
jpayne@42
|
4
|
jpayne@60
|
5
|
jpayne@0
|
6
|
jpayne@0
|
7 import argparse
|
jpayne@0
|
8 import subprocess
|
jpayne@0
|
9 import contextlib
|
jpayne@0
|
10 import logging
|
jpayne@0
|
11 import io
|
jpayne@0
|
12 import shutil
|
jpayne@0
|
13 import os, sys
|
jpayne@63
|
14 # from builtins import open as _open
|
jpayne@0
|
15 from copy import copy
|
jpayne@0
|
16 from functools import partial
|
jpayne@0
|
17 from itertools import tee
|
jpayne@0
|
18 from io import BytesIO
|
jpayne@0
|
19 from threading import Thread
|
jpayne@0
|
20
|
jpayne@0
|
21 CACHE_NAMESPACE = 'cfsan-galaxytrakr-cache'
|
jpayne@0
|
22
|
jpayne@0
|
class NoCacheNoCommandException(Exception):
    """Raised when an id has no cached result and no command was given
    to generate one (see main())."""
|
jpayne@0
|
25
|
jpayne@64
|
26 # @contextlib.contextmanager
|
jpayne@64
|
27 # def open(filename=None, mode='r'):
|
jpayne@64
|
28 # "basically a wrapper to make sys.stdout usable where there's a contextmanager"
|
jpayne@64
|
29 # writer = sys.stdout.buffer
|
jpayne@64
|
30 # try:
|
jpayne@64
|
31 # if filename:
|
jpayne@64
|
32 # writer = io.FileIO(filename, mode)
|
jpayne@64
|
33 # if 'r' in mode:
|
jpayne@64
|
34 # writer = io.BufferedReader(writer)
|
jpayne@64
|
35 # elif 'w' in mode:
|
jpayne@64
|
36 # writer = io.BufferedWriter(writer)
|
jpayne@64
|
37 # yield writer
|
jpayne@64
|
38 # writer.flush()
|
jpayne@64
|
39 # finally:
|
jpayne@64
|
40 # if filename:
|
jpayne@64
|
41 # writer.close()
|
jpayne@0
|
42
|
jpayne@0
|
43 # class stream_over(io.IOBase):
|
jpayne@0
|
44 # "a file-like object that works as a tee, for API's that accept a file-like"
|
jpayne@0
|
45 # def __init__(self, output_streams, input_stream=None):
|
jpayne@0
|
46 # self.streams = output_streams
|
jpayne@0
|
47 # self.input = input_stream
|
jpayne@0
|
48
|
jpayne@0
|
49 # def writable(self, *a, **k):
|
jpayne@0
|
50 # return all([s.writable(*a, **k) for s in self.streams])
|
jpayne@0
|
51
|
jpayne@0
|
52 # def write(self, *a, **k):
|
jpayne@0
|
53 # [s.write(*a, **k) for s in self.streams]
|
jpayne@0
|
54
|
jpayne@0
|
55 # def writelines(self, *a, **k):
|
jpayne@0
|
56 # [s.writelines(*a, **k) for s in self.streams]
|
jpayne@0
|
57
|
jpayne@0
|
58 # def flush(self, *a, **k):
|
jpayne@0
|
59 # [s.flush(*a, **k) for s in self.streams]
|
jpayne@0
|
60
|
jpayne@0
|
61 # def close(self, *a, **k):
|
jpayne@0
|
62 # if self.input:
|
jpayne@0
|
63 # self.input.close()
|
jpayne@0
|
64 # [s.close(*a, **k) for s in self.streams]
|
jpayne@0
|
65
|
jpayne@0
|
66 # def read(self, *a, **k):
|
jpayne@0
|
67 # if self.input:
|
jpayne@0
|
68 # bts = self.input.read(*a, **k)
|
jpayne@0
|
69 # self.write(bts)
|
jpayne@0
|
70 # return bts
|
jpayne@0
|
71 # raise ValueError("Not created with a readable stream; read ops not supported.")
|
jpayne@0
|
72
|
jpayne@0
|
73 # def readlines(self, *a, **k):
|
jpayne@0
|
74 # if self.input:
|
jpayne@0
|
75 # return self.input.readlines(*a, **k)
|
jpayne@0
|
76 # raise ValueError("Not created with a readable stream; read ops not supported.")
|
jpayne@0
|
77
|
jpayne@0
|
78 # def seekable(self):
|
jpayne@0
|
79 # return False
|
jpayne@0
|
80
|
jpayne@0
|
81 # @contextlib.contextmanager
|
jpayne@0
|
82 # def multiwrite(*streams):
|
jpayne@0
|
83 # multistream = stream_over(streams)
|
jpayne@0
|
84 # yield multistream
|
jpayne@0
|
85 # multistream.flush()
|
jpayne@0
|
86 # multistream.close()
|
jpayne@0
|
87
|
jpayne@64
|
88 # def stream_to(input_stream, output_stream):
|
jpayne@64
|
89 # for i, line in enumerate(input_stream.readlines()):
|
jpayne@64
|
90 # if i < 8:
|
jpayne@64
|
91 # logging.getLogger('strm').info(str(line[:70]))
|
jpayne@64
|
92 # output_stream.write(line)
|
jpayne@0
|
93
|
jpayne@0
|
94
|
jpayne@0
|
95
|
jpayne@0
|
def main(table, id, command=None, output=None, *a, **k):
    """Fetch the cached result for ``table``/``id``, or compute and store it.

    The S3-backed cache lookup/write-back is currently disabled (see the
    commented-out code above), so the live path always runs ``command`` in
    a shell and writes its stdout to ``output``.

    :param table: cache table name (whitespace is stripped).
    :param id: record identifier within the table (whitespace is stripped).
        NOTE: shadows the ``id`` builtin, but the name is part of the CLI
        interface (``argparse`` dest) and must be kept.
    :param command: shell command line whose stdout is the result.
    :param output: path of the file to receive the result bytes.
    :returns: 0 on success, or the command's exit status on failure.
    :raises NoCacheNoCommandException: if no command was supplied.
    """
    id = id.strip()
    table = table.strip()
    # Cache key; only used by the (disabled) S3 code paths above.
    name = "{table}/{id}".format(**locals())
    if not command:
        # Fail before opening (and truncating) the output file: with the
        # cache disabled there is no other way to produce a result.
        raise NoCacheNoCommandException(
            "No cached result for this id, and no command given to generate."
        )
    with open(output, 'wb') as output_f:
        try:
            cached = subprocess.check_output(command, shell=True)
            # Cache write-back (s3.upload_fileobj(BytesIO(cached), name))
            # is disabled; just deliver the computed result.
            output_f.write(cached)
        except subprocess.CalledProcessError as e:
            # Surface the failed command's output, and propagate its
            # exit status to the caller.
            print(e.output, file=sys.stderr)
            return e.returncode
    return 0
|
jpayne@0
|
134
|
jpayne@0
|
135
|
jpayne@0
|
136
|
jpayne@0
|
137
|
jpayne@0
|
if __name__ == '__main__':
    # CLI entry point: parse arguments, configure logging, and exit with
    # main()'s return code.
    parser = argparse.ArgumentParser(description="lookup result for file in data table, or compute and install")
    parser.add_argument('table', type=str)
    parser.add_argument('id', type=str)
    parser.add_argument('-c', dest='command')
    parser.add_argument('-o', dest='output')
    # Log file path; defaults to discarding log output entirely.
    parser.add_argument('-l', dest='logging', default='/dev/null')
    params = parser.parse_args()

    logging.basicConfig(filename=params.logging, level=logging.INFO)

    # sys.exit(), not quit(): quit() is a site-module convenience intended
    # only for the interactive interpreter and may be absent (python -S).
    sys.exit(main(**vars(params)))