Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 12 additions & 9 deletions test/integration/elasticsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,20 +261,23 @@ def reset(self, es_dir):
self._enable_xpack(es_dir)

@staticmethod
def is_listening(password=None):
def cluster_name(password=None):
auth = ("elastic", password) if password else None
try:
req = requests.get("http://localhost:%s" % Elasticsearch.ES_PORT, auth=auth, timeout=.5)
resp = requests.get("http://localhost:%s" % Elasticsearch.ES_PORT, auth=auth, timeout=.5)
except (requests.Timeout, requests.ConnectionError):
return False
if req.status_code != 200:
return None
if resp.status_code != 200:
if password:
raise Exception("unexpected ES response code received: %s" % req.status_code)
raise Exception("unexpected ES response code received: %s" % resp.status_code)
else:
return True
if "You Know, for Search" not in req.text:
raise Exception("unexpected ES answer received: %s" % req.text)
return True
return ""
if "cluster_name" not in resp.json():
raise Exception("unexpected ES answer received: %s" % resp.text)
return resp.json().get("cluster_name")

@staticmethod
def is_listening(password=None):
return Elasticsearch.cluster_name(password) is not None

# vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=118 :
20 changes: 14 additions & 6 deletions test/integration/ites.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,13 @@ def ites(args):
# run the tests
if not args.skip_tests:
assert(data is not None)
tests = Testing(data, args.dsn)
tests.perform()
cluster_name = es.cluster_name(Elasticsearch.AUTH_PASSWORD)
assert(len(cluster_name))
if args.dsn:
Testing(data, cluster_name, args.dsn).perform()
else:
Testing(data, cluster_name, "Packing=JSON;").perform()
Testing(data, cluster_name, "Packing=CBOR;").perform()

def main():
parser = argparse.ArgumentParser(description='Integration Testing with Elasticsearch.')
Expand All @@ -78,11 +83,11 @@ def main():
stage_grp.add_argument("-p", "--pre-staged", help="Use a pre-staged and running Elasticsearch instance",
action="store_true", default=False)

driver_grp = parser.add_mutually_exclusive_group()
driver_grp.add_argument("-d", "--driver", help="The path to the driver file to test; if not provided, the driver "
parser.add_argument("-d", "--driver", help="The path to the driver file to test; if not provided, the driver "
"is assumed to have been installed.")
driver_grp.add_argument("-c", "--dsn", help="The connection string to use with a preinstalled driver; the DSN must"
" contain the name under which the driver to test is registered.")
parser.add_argument("-c", "--dsn", help="The full or partial connection string to use with a preinstalled "
"driver; if the provided string contains the name under which the driver to test is registered, it will "
"be used as such; otherwise it will be appended as additional parameters to a pre-configured DSN.")
parser.add_argument("-o", "--offline_dir", help="The directory path holding the files to copy the test data from, "
"as opposed to downloading them.")
parser.add_argument("-e", "--ephemeral", help="Remove the staged Elasticsearch and installed driver after testing"
Expand All @@ -104,6 +109,9 @@ def main():
if not (args.driver or args.version or args.es_reset or args.pre_staged):
parser.error("don't know what Elasticsearch version to test against.")

if args.driver and args.dsn and "Driver=" in args.dsn:
parser.error("driver specified both by -d/--driver and -c/--dsn arguments")

try:
started_at = time.time()

Expand Down
101 changes: 72 additions & 29 deletions test/integration/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,19 +16,26 @@

UID = "elastic"
CONNECT_STRING = 'Driver={Elasticsearch Driver};UID=%s;PWD=%s;Secure=0;' % (UID, Elasticsearch.AUTH_PASSWORD)
CATALOG = "elasticsearch" # nightly built
#CATALOG = "distribution_run" # source built
CATALOG = "distribution_run" # source built, "elasticsearch": nightly builds

class Testing(unittest.TestCase):

_data = None
_dsn = None
_pyodbc = None
_catalog = None

def __init__(self, test_data, dsn=None):
def __init__(self, test_data, catalog=CATALOG, dsn=None):
super().__init__()
self._data = test_data
self._dsn = dsn if dsn else CONNECT_STRING
self._catalog = catalog
if dsn:
if "Driver=" not in dsn:
self._dsn = CONNECT_STRING + dsn
else:
self._dsn = dsn
else:
self._dsn = CONNECT_STRING
print("Using DSN: '%s'." % self._dsn)

# only import pyODBC if running tests (vs. for instance only loading test data in ES)
Expand Down Expand Up @@ -120,8 +127,8 @@ def _catalog_tables(self, no_table_type_as=""):
res = curs.tables("", "%", "", no_table_type_as).fetchall()
self.assertEqual(len(res), 1)
for i in range(0,10):
self.assertEqual(res[0][i], None if i else CATALOG)
#self.assertEqual(res, [tuple([CATALOG] + [None for i in range(9)])]) # XXX?
self.assertEqual(res[0][i], None if i else self._catalog)
#self.assertEqual(res, [tuple([self._catalog] + [None for i in range(9)])]) # XXX?

# enumerate table types
res = curs.tables("", "", "", "%").fetchall()
Expand All @@ -146,11 +153,12 @@ def _catalog_columns(self, use_catalog=False, use_surrogate=True):
cnxn.autocommit = True
curs = cnxn.cursor()
if not use_surrogate:
res = curs.columns(table=TestData.BATTERS_INDEX, catalog=CATALOG if use_catalog else None).fetchall()
res = curs.columns(table=TestData.BATTERS_INDEX, \
catalog=self._catalog if use_catalog else None).fetchall()
else:
if use_catalog:
stmt = "SYS COLUMNS CATALOG '%s' TABLE LIKE '%s' ESCAPE '\\' LIKE '%%' ESCAPE '\\'" % \
(CATALOG, TestData.BATTERS_INDEX)
(self._catalog, TestData.BATTERS_INDEX)
else:
stmt = "SYS COLUMNS TABLE LIKE '%s' ESCAPE '\\' LIKE '%%' ESCAPE '\\'" % TestData.BATTERS_INDEX
res = curs.execute(stmt)
Expand Down Expand Up @@ -207,6 +215,8 @@ def _type_to_instance(self, data_type, data_val):
instance = float(data_val)
elif data_type == "float":
instance = float(data_val.strip("fF"))
# reduce precision, py's float is a double
instance = ctypes.c_float(instance).value
elif data_type in ["datetime", "date", "time"]:
fmt = "%H:%M:%S"
fmt = "%Y-%m-%dT" + fmt
Expand Down Expand Up @@ -257,32 +267,65 @@ def _proto_tests(self):
for t in tests:
(query, col_name, data_type, data_val, cli_val, disp_size) = t
# print("T: %s, %s, %s, %s, %s, %s" % (query, col_name, data_type, data_val, cli_val, disp_size))
with cnxn.execute(query) as curs:
self.assertEqual(curs.rowcount, 1)
res = curs.fetchone()[0]

if data_val != cli_val: # INTERVAL tests
assert(query.lower().startswith("select interval"))
                                # extract the literal value (`INTERVAL -'1 1'` -> `-1 1`)
expect = re.match("[^-]*(-?\s*'[^']*').*", query).groups()[0]
expect = expect.replace("'", "")
# filter out tests with fractional seconds:
# https://github.com/elastic/elasticsearch/issues/41635
if re.search("\d*\.\d+", expect):
continue
else: # non-INTERVAL tests
assert(data_type.lower() == data_type)
# Change the value read in the tests to type and format of the result expected to be
# returned by driver.
expect = self._type_to_instance(data_type, data_val)

self.assertEqual(res, expect)

if data_val != cli_val: # INTERVAL tests
assert(query.lower().startswith("select interval"))
                                # extract the literal value (`INTERVAL -'1 1'` -> `-1 1`)
expect = re.match("[^-]*(-?\s*'[^']*').*", query).groups()[0]
expect = expect.replace("'", "")
# filter out tests with fractional seconds:
# https://github.com/elastic/elasticsearch/issues/41635
if re.search("\d*\.\d+", expect):
continue
# intervals not supported as params; PyODBC has no interval type support
# https://github.com/elastic/elasticsearch/issues/45915
params = []
else: # non-INTERVAL tests
assert(data_type.lower() == data_type)
# Change the value read in the tests to type and format of the result expected to be
# returned by driver.
expect = self._type_to_instance(data_type, data_val)

if data_type.lower() == "null":
query += " WHERE ? IS NULL"
params = [expect]
else:
if data_type.lower() == "time":
if col_name.find("+") <= 0:
# ODBC's TIME_STRUCT lacks fractional component -> strip it away
col_name = re.sub(r"(\d{2})\.\d+", "\\1", col_name)
query += " WHERE %s = ?" % col_name
params = [expect]
else: # it's a time with offset
                                        # TIME_STRUCT lacks offset component -> perform the simple SELECT
params = []
else:
query += " WHERE %s = ?" % col_name
params = [expect]
# print("Query: %s" % query)

last_ex = None
with cnxn.execute(query, *params) as curs:
try:
self.assertEqual(curs.rowcount, 1)
res = curs.fetchone()[0]
if data_type == "float":
# PyODBC will fetch a REAL/float as a double => reduce precision
res = ctypes.c_float(res).value
self.assertEqual(res, expect)
except Exception as e:
print(e)
last_ex = e

if last_ex:
raise last_ex

finally:
cnxn.clear_output_converters()

def perform(self):
self._check_info(self._pyodbc.SQL_USER_NAME, UID)
self._check_info(self._pyodbc.SQL_DATABASE_NAME, CATALOG)
self._check_info(self._pyodbc.SQL_DATABASE_NAME, self._catalog)

        # simulate catalog querying as apps in ES/GH#40775 do
self._catalog_tables(no_table_type_as = "")
Expand Down