Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions ansible/templates/changehc-params-prod.json.j2
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
"start_date": null,
"end_date": null,
"drop_date": null,
"generate_backfill_files": true,
"backfill_dir": "/common/backfill/chng",
"backfill_merge_day": 0,
"n_backfill_days": 60,
Expand Down
1 change: 1 addition & 0 deletions ansible/templates/claims_hosp-params-prod.json.j2
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"start_date": "2020-02-01",
"end_date": null,
"drop_date": null,
"generate_backfill_files": true,
"backfill_dir": "/common/backfill/claims_hosp",
"backfill_merge_day": 0,
"n_backfill_days": 70,
Expand Down
1 change: 1 addition & 0 deletions ansible/templates/quidel_covidtest-params-prod.json.j2
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"export_end_date": "",
"pull_start_date": "2020-05-26",
"pull_end_date":"",
"generate_backfill_files": true,
"backfill_dir": "/common/backfill/quidel_covidtest",
"backfill_merge_day": 0,
"export_day_range":40,
Expand Down
30 changes: 18 additions & 12 deletions changehc/delphi_changehc/load_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ def load_chng_data(filepath, dropdate, base_geo,


def load_combined_data(denom_filepath, covid_filepath, base_geo,
backfill_dir, geo, weekday, numtype, backfill_merge_day):
backfill_dir, geo, weekday, numtype,
generate_backfill_files, backfill_merge_day):
"""Load in denominator and covid data, and combine them.

Args:
Expand Down Expand Up @@ -114,15 +115,17 @@ def load_combined_data(denom_filepath, covid_filepath, base_geo,
data = data[["num", "den"]]

# Store for backfill
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
if generate_backfill_files:
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
return data


def load_cli_data(denom_filepath, flu_filepath, mixed_filepath, flu_like_filepath,
covid_like_filepath, base_geo,
backfill_dir, geo, weekday, numtype, backfill_merge_day):
backfill_dir, geo, weekday, numtype,
generate_backfill_files, backfill_merge_day):
"""Load in denominator and covid-like data, and combine them.

Args:
Expand Down Expand Up @@ -172,14 +175,16 @@ def load_cli_data(denom_filepath, flu_filepath, mixed_filepath, flu_like_filepat
data = data[["num", "den"]]

# Store for backfill
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
if generate_backfill_files:
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
return data


def load_flu_data(denom_filepath, flu_filepath, base_geo,
backfill_dir, geo, weekday, numtype, backfill_merge_day):
backfill_dir, geo, weekday, numtype,
generate_backfill_files, backfill_merge_day):
"""Load in denominator and flu data, and combine them.

Args:
Expand Down Expand Up @@ -215,7 +220,8 @@ def load_flu_data(denom_filepath, flu_filepath, base_geo,
data = data[["num", "den"]]

# Store for backfill
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
if generate_backfill_files:
merge_backfill_file(backfill_dir, numtype, geo, weekday, backfill_merge_day,
issue_date, test_mode=False, check_nd=25)
store_backfill_file(data, issue_date, backfill_dir, numtype, geo, weekday)
return data
29 changes: 15 additions & 14 deletions changehc/delphi_changehc/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,7 @@

# first party
from .download_ftp_files import download_counts
from .load_data import (load_combined_data, load_cli_data, load_flu_data,
store_backfill_file, merge_backfill_file)
from .load_data import (load_combined_data, load_cli_data, load_flu_data)
from .update_sensor import CHCSensorUpdater


Expand Down Expand Up @@ -134,18 +133,19 @@ def run_module(params: Dict[str, Dict[str, Any]]):
# range of estimates to produce
n_backfill_days = params["indicator"]["n_backfill_days"] # produce estimates for n_backfill_days
n_waiting_days = params["indicator"]["n_waiting_days"] # most recent n_waiting_days won't be est
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]

generate_backfill_files = params["indicator"].get("generate_backfill_files", True)
backfill_dir = ""
backfill_merge_day = 0
if generate_backfill_files:
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]

enddate_dt = dropdate_dt - timedelta(days=n_waiting_days)
startdate_dt = enddate_dt - timedelta(days=n_backfill_days)
enddate = str(enddate_dt.date())
startdate = str(startdate_dt.date())

# now allow manual overrides
if params["indicator"]["end_date"] is not None:
enddate = params["indicator"]["end_date"]
if params["indicator"]["start_date"] is not None:
startdate = params["indicator"]["start_date"]
enddate = params["indicator"].get("end_date", str(enddate_dt.date()))
startdate = params["indicator"].get("start_date", str(startdate_dt.date()))

logger.info("generating signal and exporting to CSV",
first_sensor_date = startdate,
Expand Down Expand Up @@ -185,15 +185,16 @@ def run_module(params: Dict[str, Dict[str, Any]]):
data = load_combined_data(file_dict["denom"],
file_dict["covid"], "fips",
backfill_dir, geo, weekday, numtype,
backfill_merge_day)
generate_backfill_files, backfill_merge_day)
elif numtype == "cli":
data = load_cli_data(file_dict["denom"],file_dict["flu"],file_dict["mixed"],
file_dict["flu_like"],file_dict["covid_like"], "fips",
backfill_dir, geo, weekday, numtype, backfill_merge_day)
backfill_dir, geo, weekday, numtype,
generate_backfill_files, backfill_merge_day)
elif numtype == "flu":
data = load_flu_data(file_dict["denom"],file_dict["flu"],
"fips",backfill_dir, geo, weekday,
numtype, backfill_merge_day)
numtype, generate_backfill_files, backfill_merge_day)
more_stats = su_inst.update_sensor(
data,
params["common"]["export_dir"],
Expand Down
1 change: 1 addition & 0 deletions changehc/params.json.template
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"start_date": null,
"end_date": null,
"drop_date": null,
"generate_backfill_files": false,
"backfill_dir": "./backfill",
"backfill_merge_day": 0,
"n_backfill_days": 60,
Expand Down
2 changes: 1 addition & 1 deletion changehc/tests/test_backfill.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@

combined_data = load_combined_data(DENOM_FILEPATH, COVID_FILEPATH,
"fips", backfill_dir, geo, weekday, "covid",
backfill_merge_day)
True, backfill_merge_day)

class TestBackfill:

Expand Down
8 changes: 4 additions & 4 deletions changehc/tests/test_load_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ class TestLoadData:
Config.COVID_COLS, Config.COVID_DTYPES, Config.COVID_COL)
combined_data = load_combined_data(DENOM_FILEPATH, COVID_FILEPATH,
"fips", backfill_dir, geo, weekday, "covid",
backfill_merge_day)
True, backfill_merge_day)
flu_data = load_flu_data(DENOM_FILEPATH, FLU_FILEPATH, "fips",
backfill_dir, geo, weekday, "flu", backfill_merge_day)
backfill_dir, geo, weekday, "flu", True, backfill_merge_day)
gmpr = GeoMapper()

def test_base_unit(self):
Expand All @@ -55,11 +55,11 @@ def test_base_unit(self):

with pytest.raises(AssertionError):
load_combined_data(DENOM_FILEPATH, COVID_FILEPATH, "foo",
backfill_dir, geo, weekday, "covid", backfill_merge_day)
backfill_dir, geo, weekday, "covid", True, backfill_merge_day)

with pytest.raises(AssertionError):
load_flu_data(DENOM_FILEPATH, FLU_FILEPATH, "foo",
backfill_dir, geo, weekday, "covid", backfill_merge_day)
backfill_dir, geo, weekday, "covid", True, backfill_merge_day)

def test_denom_columns(self):
assert "fips" in self.denom_data.index.names
Expand Down
2 changes: 1 addition & 1 deletion changehc/tests/test_sensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
class TestLoadData:
combined_data = load_combined_data(DENOM_FILEPATH, COVID_FILEPATH,
"fips", backfill_dir, geo, weekday, "covid",
backfill_merge_day)
True, backfill_merge_day)

def test_backfill(self):
num0 = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8], dtype=float).reshape(-1, 1)
Expand Down
9 changes: 5 additions & 4 deletions claims_hosp/delphi_claims_hosp/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,10 +91,11 @@ def run_module(params):
startdate = params["indicator"]['start_date']

# Store backfill data
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]
merge_backfill_file(backfill_dir, backfill_merge_day, datetime.today())
store_backfill_file(claims_file, dropdate_dt, backfill_dir)
if params["indicator"].get("generate_backfill_files", True):
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]
merge_backfill_file(backfill_dir, backfill_merge_day, datetime.today())
store_backfill_file(claims_file, dropdate_dt, backfill_dir)

# print out information
logger.info("Loaded params",
Expand Down
1 change: 1 addition & 0 deletions claims_hosp/params.json.template
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
"end_date": null,
"drop_date": null,
"n_backfill_days": 70,
"generate_backfill_files": false,
"backfill_dir": "./backfill",
"backfill_merge_day": 0,
"n_waiting_days": 3,
Expand Down
28 changes: 17 additions & 11 deletions quidel_covidtest/delphi_quidel_covidtest/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,24 +86,30 @@ def run_module(params: Dict[str, Any]):
stats = []
atexit.register(log_exit, start_time, stats, logger)
cache_dir = params["indicator"]["input_cache_dir"]
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]
export_dir = params["common"]["export_dir"]
export_start_date = params["indicator"]["export_start_date"]
export_end_date = params["indicator"]["export_end_date"]
export_day_range = params["indicator"]["export_day_range"]

# Pull data and update export date
df, _end_date = pull_quidel_covidtest(params["indicator"], logger)
# Merge 4 weeks' data into one file to save runtime
# Notice that here we don't check the _end_date(receive date)
# since we always want such merging happens on a certain day of a week
merge_backfill_file(backfill_dir, backfill_merge_day, datetime.today())
if _end_date is None:
logger.info("The data is up-to-date. Currently, no new data to be ingested.")
return
# Store the backfill intermediate file
store_backfill_file(df, _end_date, backfill_dir)

# Allow user to turn backfill file generation on or off. Defaults to True
# (generate files).
if params["indicator"].get("generate_backfill_files", True):
backfill_dir = params["indicator"]["backfill_dir"]
backfill_merge_day = params["indicator"]["backfill_merge_day"]

# Merge 4 weeks' data into one file to save runtime
# Notice that here we don't check the _end_date (receive date)
# since we always want such merging to happen on a certain day of the week
merge_backfill_file(backfill_dir, backfill_merge_day, datetime.today())
if _end_date is None:
logger.info("The data is up-to-date. Currently, no new data to be ingested.")
return
# Store the backfill intermediate file
store_backfill_file(df, _end_date, backfill_dir)

export_end_date = check_export_end_date(
export_end_date, _end_date, END_FROM_TODAY_MINUS)
export_start_date = check_export_start_date(
Expand Down
1 change: 1 addition & 0 deletions quidel_covidtest/params.json.template
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
"indicator": {
"static_file_dir": "./static",
"input_cache_dir": "./cache",
"generate_backfill_files": false,
"backfill_dir": "./backfill",
"backfill_merge_day": 0,
"export_start_date": "2020-05-26",
Expand Down