13 | 13 | from delphi.epidata.client.delphi_epidata import Epidata |
14 | 14 | import delphi.operations.secrets as secrets |
15 | 15 | import delphi.epidata.acquisition.covidcast.database as live |
16 | | -from delphi.epidata.acquisition.covidcast.covidcast_meta_cache_updater import main |
| 16 | +from delphi.epidata.acquisition.covidcast.covidcast_meta_cache_updater import ( |
| 17 | + main, |
| 18 | +) |
17 | 19 |
18 | 20 | # py3tester coverage target (equivalent to `import *`) |
19 | 21 | __test_target__ = ( |
20 | | - 'delphi.epidata.acquisition.covidcast.' |
21 | | - 'covidcast_meta_cache_updater' |
| 22 | + "delphi.epidata.acquisition.covidcast." "covidcast_meta_cache_updater" |
22 | 23 | ) |
23 | 24 |
24 | | -# use the local instance of the Epidata API |
25 | | -BASE_URL = 'http://delphi_web_epidata/epidata/api.php' |
26 | | -AUTH = ('epidata', 'key') |
27 | | - |
28 | 25 |
29 | 26 | class CovidcastMetaCacheTests(unittest.TestCase): |
30 | | - """Tests covidcast metadata caching.""" |
31 | | - |
32 | | - def setUp(self): |
33 | | - """Perform per-test setup.""" |
34 | | - |
35 | | - # connect to the `epidata` database |
36 | | - cnx = mysql.connector.connect( |
37 | | - user='user', |
38 | | - password='pass', |
39 | | - host='delphi_database_epidata', |
40 | | - database='covid') |
41 | | - cur = cnx.cursor() |
42 | | - |
43 | | - # clear all tables |
44 | | - cur.execute("truncate table epimetric_load") |
45 | | - cur.execute("truncate table epimetric_full") |
46 | | - cur.execute("truncate table epimetric_latest") |
47 | | - cur.execute("truncate table geo_dim") |
48 | | - cur.execute("truncate table signal_dim") |
49 | | - # reset the `covidcast_meta_cache` table (it should always have one row) |
50 | | - cur.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') |
51 | | - cnx.commit() |
52 | | - cur.close() |
53 | | - |
54 | | - # make connection and cursor available to test cases |
55 | | - self.cnx = cnx |
56 | | - self.cur = cnx.cursor() |
57 | | - |
58 | | - # use the local instance of the epidata database |
59 | | - secrets.db.host = 'delphi_database_epidata' |
60 | | - secrets.db.epi = ('user', 'pass') |
61 | | - |
62 | | - # use the local instance of the Epidata API |
63 | | - Epidata.BASE_URL = BASE_URL |
64 | | - Epidata.auth = AUTH |
65 | | - |
66 | | - def tearDown(self): |
67 | | - """Perform per-test teardown.""" |
68 | | - self.cur.close() |
69 | | - self.cnx.close() |
70 | | - |
71 | | - def test_caching(self): |
72 | | - """Populate, query, cache, query, and verify the cache.""" |
73 | | - |
74 | | - # insert dummy data |
75 | | - self.cur.execute(f''' |
| 27 | + """Tests covidcast metadata caching.""" |
| 28 | + |
| 29 | + def setUp(self): |
| 30 | + """Perform per-test setup.""" |
| 31 | + |
| 32 | + # connect to the `epidata` database |
| 33 | + cnx = mysql.connector.connect( |
| 34 | + user="user", |
| 35 | + password="pass", |
| 36 | + host="delphi_database_epidata", |
| 37 | + database="covid", |
| 38 | + ) |
| 39 | + cur = cnx.cursor() |
| 40 | + |
| 41 | + # clear all tables |
| 42 | + cur.execute("truncate table epimetric_load") |
| 43 | + cur.execute("truncate table epimetric_full") |
| 44 | + cur.execute("truncate table epimetric_latest") |
| 45 | + cur.execute("truncate table geo_dim") |
| 46 | + cur.execute("truncate table signal_dim") |
| 47 | + # reset the `covidcast_meta_cache` table (it should always have one row) |
| 48 | + cur.execute( |
| 49 | + 'update covidcast_meta_cache set timestamp = 0, epidata = "[]"' |
| 50 | + ) |
| 51 | + cnx.commit() |
| 52 | + cur.close() |
| 53 | + |
| 54 | + # make connection and cursor available to test cases |
| 55 | + self.cnx = cnx |
| 56 | + self.cur = cnx.cursor() |
| 57 | + |
| 58 | + # use the local instance of the epidata database |
| 59 | + secrets.db.host = "delphi_database_epidata" |
| 60 | + secrets.db.epi = ("user", "pass") |
| 61 | + |
| 62 | + # use the local instance of the Epidata API |
| 63 | + Epidata.BASE_URL = "http://delphi_web_epidata/epidata/api.php" |
| 64 | + Epidata.auth = ("epidata", "key") |
| 65 | + |
| 66 | + def tearDown(self): |
| 67 | + """Perform per-test teardown.""" |
| 68 | + self.cur.close() |
| 69 | + self.cnx.close() |
| 70 | + |
| 71 | + def test_caching(self): |
| 72 | + """Populate, query, cache, query, and verify the cache.""" |
| 73 | + |
| 74 | + # insert dummy data |
| 75 | + self.cur.execute( |
| 76 | + """ |
76 | 77 | INSERT INTO `signal_dim` (`signal_key_id`, `source`, `signal`) |
77 | 78 | VALUES |
78 | 79 | (42, 'src', 'sig'); |
79 | | - ''') |
80 | | - self.cur.execute(f''' |
| 80 | + """ |
| 81 | + ) |
| 82 | + self.cur.execute( |
| 83 | + """ |
81 | 84 | INSERT INTO `geo_dim` (`geo_key_id`, `geo_type`, `geo_value`) |
82 | 85 | VALUES |
83 | | - (96, 'state', 'pa'), |
| 86 | + (96, 'state', 'pa'), |
84 | 87 | (97, 'state', 'wa'); |
85 | | - ''') |
86 | | - self.cur.execute(f''' |
| 88 | + """ |
| 89 | + ) |
| 90 | + self.cur.execute( |
| 91 | + f""" |
87 | 92 | INSERT INTO |
88 | 93 | `epimetric_latest` (`epimetric_id`, `signal_key_id`, `geo_key_id`, `time_type`, |
89 | | - `time_value`, `value_updated_timestamp`, |
| 94 | + `time_value`, `value_updated_timestamp`, |
90 | 95 | `value`, `stderr`, `sample_size`, |
91 | 96 | `issue`, `lag`, `missing_value`, |
92 | 97 | `missing_stderr`,`missing_sample_size`) |
93 | 98 | VALUES |
94 | 99 | (15, 42, 96, 'day', 20200422, |
95 | 100 | 123, 1, 2, 3, 20200422, 0, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}), |
96 | 101 | (16, 42, 97, 'day', 20200422, |
97 | | - 789, 1, 2, 3, 20200423, 1, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}) |
98 | | - ''') |
99 | | - self.cnx.commit() |
100 | | - |
101 | | - # make sure the live utility is serving something sensible |
102 | | - cvc_database = live.Database() |
103 | | - cvc_database.connect() |
104 | | - epidata1 = cvc_database.compute_covidcast_meta() |
105 | | - cvc_database.disconnect(False) |
106 | | - self.assertEqual(len(epidata1),1) |
107 | | - self.assertEqual(epidata1, [ |
108 | | - { |
109 | | - 'data_source': 'src', |
110 | | - 'signal': 'sig', |
111 | | - 'time_type': 'day', |
112 | | - 'geo_type': 'state', |
113 | | - 'min_time': 20200422, |
114 | | - 'max_time': 20200422, |
115 | | - 'num_locations': 2, |
116 | | - 'last_update': 789, |
117 | | - 'min_value': 1, |
118 | | - 'max_value': 1, |
119 | | - 'mean_value': 1, |
120 | | - 'stdev_value': 0, |
121 | | - 'max_issue': 20200423, |
122 | | - 'min_lag': 0, |
123 | | - 'max_lag': 1, |
124 | | - } |
125 | | - ]) |
126 | | - epidata1={'result':1, 'message':'success', 'epidata':epidata1} |
127 | | - |
128 | | - # make sure the API covidcast_meta is still blank, since it only serves |
129 | | - # the cached version and we haven't cached anything yet |
130 | | - epidata2 = Epidata.covidcast_meta() |
131 | | - self.assertEqual(epidata2['result'], -2, json.dumps(epidata2)) |
132 | | - |
133 | | - # update the cache |
134 | | - args = None |
135 | | - main(args) |
136 | | - |
137 | | - # fetch the cached version |
138 | | - epidata3 = Epidata.covidcast_meta() |
139 | | - |
140 | | - # cached version should now equal live version |
141 | | - self.assertEqual(epidata1, epidata3) |
142 | | - |
143 | | - # insert dummy data timestamped as of now |
144 | | - self.cur.execute(''' |
| 102 | + 789, 1, 2, 3, 20200423, 1, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}, {Nans.NOT_MISSING})
| 103 | + """ |
| 104 | + ) |
| 105 | + self.cnx.commit() |
| 106 | + |
| 107 | + # make sure the live utility is serving something sensible |
| 108 | + cvc_database = live.Database() |
| 109 | + cvc_database.connect() |
| 110 | + epidata1 = cvc_database.compute_covidcast_meta() |
| 111 | + cvc_database.disconnect(False) |
| 112 | + self.assertEqual(len(epidata1), 1) |
| 113 | + self.assertEqual( |
| 114 | + epidata1, |
| 115 | + [ |
| 116 | + { |
| 117 | + "data_source": "src", |
| 118 | + "signal": "sig", |
| 119 | + "time_type": "day", |
| 120 | + "geo_type": "state", |
| 121 | + "min_time": 20200422, |
| 122 | + "max_time": 20200422, |
| 123 | + "num_locations": 2, |
| 124 | + "last_update": 789, |
| 125 | + "min_value": 1, |
| 126 | + "max_value": 1, |
| 127 | + "mean_value": 1, |
| 128 | + "stdev_value": 0, |
| 129 | + "max_issue": 20200423, |
| 130 | + "min_lag": 0, |
| 131 | + "max_lag": 1, |
| 132 | + } |
| 133 | + ], |
| 134 | + ) |
| 135 | + epidata1 = {"result": 1, "message": "success", "epidata": epidata1} |
| 136 | + |
| 137 | + # make sure the API covidcast_meta is still blank, since it only serves |
| 138 | + # the cached version and we haven't cached anything yet |
| 139 | + epidata2 = Epidata.covidcast_meta() |
| 140 | + self.assertEqual(epidata2["result"], -2, json.dumps(epidata2)) |
| 141 | + |
| 142 | + # update the cache |
| 143 | + args = None |
| 144 | + main(args) |
| 145 | + |
| 146 | + # fetch the cached version |
| 147 | + epidata3 = Epidata.covidcast_meta() |
| 148 | + |
| 149 | + # cached version should now equal live version |
| 150 | + self.assertEqual(epidata1, epidata3) |
| 151 | + |
| 152 | + # insert dummy data timestamped as of now |
| 153 | + self.cur.execute( |
| 154 | + """ |
145 | 155 | update covidcast_meta_cache set |
146 | 156 | timestamp = UNIX_TIMESTAMP(NOW()), |
147 | 157 | epidata = '[{"hello": "world"}]' |
148 | | - ''') |
149 | | - self.cnx.commit() |
150 | | - |
151 | | - # fetch the cached version (manually) |
152 | | - params = {'endpoint': 'covidcast_meta', 'cached': 'true'} |
153 | | - response = requests.get(BASE_URL, params=params, auth=AUTH) |
154 | | - response.raise_for_status() |
155 | | - epidata4 = response.json() |
156 | | - |
157 | | - # make sure the cache was actually served |
158 | | - self.assertEqual(epidata4, { |
159 | | - 'result': 1, |
160 | | - 'epidata': [{ |
161 | | - 'hello': 'world', |
162 | | - }], |
163 | | - 'message': 'success', |
164 | | - }) |
165 | | - |
166 | | - # insert dummy data timestamped as 2 hours old |
167 | | - self.cur.execute(''' |
| 158 | + """ |
| 159 | + ) |
| 160 | + self.cnx.commit() |
| 161 | + |
| 162 | + # fetch the cached version (manually) |
| 163 | + params = {"endpoint": "covidcast_meta", "cached": "true"} |
| 164 | + response = requests.get(Epidata.BASE_URL, params=params, auth=Epidata.auth) |
| 165 | + response.raise_for_status() |
| 166 | + epidata4 = response.json() |
| 167 | + |
| 168 | + # make sure the cache was actually served |
| 169 | + self.assertEqual( |
| 170 | + epidata4, |
| 171 | + { |
| 172 | + "result": 1, |
| 173 | + "epidata": [ |
| 174 | + { |
| 175 | + "hello": "world", |
| 176 | + } |
| 177 | + ], |
| 178 | + "message": "success", |
| 179 | + }, |
| 180 | + ) |
| 181 | + |
| 182 | + # insert dummy data timestamped as 2 hours old |
| 183 | + self.cur.execute( |
| 184 | + """ |
168 | 185 | update covidcast_meta_cache set |
169 | 186 | timestamp = UNIX_TIMESTAMP(NOW()) - 3600 * 2, |
170 | 187 | epidata = '[{"hello": "world"}]' |
171 | | - ''') |
172 | | - self.cnx.commit() |
173 | | - |
174 | | - # fetch the cached version (manually) |
175 | | - params = {'endpoint': 'covidcast_meta', 'cached': 'true'} |
176 | | - response = requests.get(BASE_URL, params=params, auth=AUTH) |
177 | | - response.raise_for_status() |
178 | | - epidata5 = response.json() |
179 | | - |
180 | | - # make sure the cache was returned anyhow |
181 | | - self.assertEqual(epidata4, epidata5) |
| 188 | + """ |
| 189 | + ) |
| 190 | + self.cnx.commit() |
| 191 | + |
| 192 | + # fetch the cached version (manually) |
| 193 | + params = {"endpoint": "covidcast_meta", "cached": "true"} |
| 194 | + response = requests.get(Epidata.BASE_URL, params=params, auth=Epidata.auth) |
| 195 | + response.raise_for_status() |
| 196 | + epidata5 = response.json() |
| 197 | + |
| 198 | + # make sure the cache was returned anyhow |
| 199 | + self.assertEqual(epidata4, epidata5) |