diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..9a6b956
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,20 @@
+FROM python:3.11-slim-bullseye as builder
+RUN pip install future
+
+WORKDIR /bld
+COPY . .
+RUN python3 dev_setup.py
+RUN python3 build.py
+RUN python3 build.py build manylinux1_x86_64
+RUN apt-get update && apt-get install -y bzip2 wget
+RUN wget https://github.com/microsoft/go-sqlcmd/releases/download/v0.10.0/sqlcmd-v0.10.0-linux-x64.tar.bz2
+RUN tar -xvf sqlcmd-v0.10.0-linux-x64.tar.bz2
+
+
+FROM python:3.11-slim-bullseye
+COPY --from=builder /bld/sqlcmd /usr/bin/sqlcmd
+COPY --from=builder /bld/dist/mssql_scripter-1.0.0a23-py2.py3-none-manylinux1_x86_64.whl /tmp
+RUN pip install /tmp/mssql_scripter-1.0.0a23-py2.py3-none-manylinux1_x86_64.whl
+RUN rm -rf /tmp/mssql_scripter-1.0.0a23-py2.py3-none-manylinux1_x86_64.whl
+RUN apt-get update && apt-get install -y libicu67
+CMD ["python3", "-m","sqlscripter"]
diff --git a/build.py b/build.py
index 833a756..9cfd4b7 100644
--- a/build.py
+++ b/build.py
@@ -6,7 +6,6 @@
# --------------------------------------------------------------------------------------------
from __future__ import print_function
-from azure.storage.blob import BlockBlobService, ContentSettings
import os
import sys
import utility
@@ -54,47 +53,6 @@ def build(platform_names):
mssqltoolsservice.clean_up_sqltoolsservice()
-def _upload_index_file(service, blob_name, title, links):
- print('Uploading index file {}'.format(blob_name))
- service.create_blob_from_text(
- container_name=BLOB_CONTAINER_NAME,
- blob_name=blob_name,
-        text="<html><head><title>{0}</title></head><body><h1>{0}</h1>{1}</body></html>"
-        .format(title, '\n'.join(
-            ['<a href="{0}">{0}</a><br/>'.format(link) for link in links])),
- content_settings=ContentSettings(
- content_type='text/html',
- content_disposition=None,
- content_encoding=None,
- content_language=None,
- content_md5=None,
- cache_control=None
- )
- )
-
-
-def _gen_pkg_index_html(service, pkg_name):
- links = []
- index_file_name = pkg_name+'/'
- for blob in list(service.list_blobs(BLOB_CONTAINER_NAME, prefix=index_file_name)):
- if blob.name == index_file_name:
- # Exclude the index file from being added to the list
- continue
- links.append(blob.name.replace(index_file_name, ''))
- _upload_index_file(service, index_file_name, 'Links for {}'.format(pkg_name), links)
- UPLOADED_PACKAGE_LINKS.append(index_file_name)
-
-
-def _upload_package(service, file_path, pkg_name):
- print('Uploading {}'.format(file_path))
- file_name = os.path.basename(file_path)
- blob_name = '{}/{}'.format(pkg_name, file_name)
- service.create_blob_from_path(
- container_name=BLOB_CONTAINER_NAME,
- blob_name=blob_name,
- file_path=file_path
- )
-
def validate_package(platform_names):
"""
@@ -113,40 +71,6 @@ def validate_package(platform_names):
root_dir, continue_on_error=False)
-def publish_daily(platforms_names):
- """
- Publish mssql-scripter wheel package to daily storage account.
- """
- print('Publishing to simple container within storage account.')
- assert AZURE_STORAGE_CONNECTION_STRING, 'Set AZURE_STORAGE_CONNECTION_STRING environment variable'
-
- blob_service = BlockBlobService(connection_string=AZURE_STORAGE_CONNECTION_STRING)
-
- print_heading('Uploading packages to blob storage ')
- for pkg in os.listdir(utility.MSSQLSCRIPTER_DIST_DIRECTORY):
- pkg_path = os.path.join(utility.MSSQLSCRIPTER_DIST_DIRECTORY, pkg)
- print('Uploading package {}'.format(pkg_path))
- _upload_package(blob_service, pkg_path, 'mssql-scripter')
-
- # Upload index files
- _gen_pkg_index_html(blob_service, 'mssql-scripter')
- _upload_index_file(blob_service, 'index.html', 'Simple Index', UPLOADED_PACKAGE_LINKS)
-
-
-def publish_official(platforms_names):
- """
- Publish mssql-scripter wheel package to PyPi.
- """
- mssqlscripter_wheel_dir = os.listdir(utility.MSSQLSCRIPTER_DIST_DIRECTORY)
- # Run twine action for mssqlscripter.
- # Only authorized users with credentials will be able to upload this package.
- # Credentials will be stored in a .pypirc file.
- for mssqlscripter_wheel_name in mssqlscripter_wheel_dir:
- utility.exec_command(
- 'twine upload {}'.format(mssqlscripter_wheel_name),
- utility.MSSQLSCRIPTER_DIST_DIRECTORY)
-
-
if __name__ == '__main__':
action = 'build'
supported_platforms = [
@@ -158,8 +82,6 @@ def publish_official(platforms_names):
targets = {
'build': build,
'validate_package': validate_package,
- 'publish_daily': publish_daily,
- 'publish_official': publish_official
}
if len(sys.argv) > 1:
diff --git a/dev_requirements.txt b/dev_requirements.txt
index bb69fee..862cf47 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -12,4 +12,5 @@ pytest >= 3.0.7
pytest-cov >= 2.5.1
readme_renderer >= 17.2
docutils >= 0.13.1
-azure-storage >= 0.33.0
\ No newline at end of file
+azure-storage-blob >= 12.14.1
+
diff --git a/mssqlscripter/jsonrpc/tests/test_jsonrpcclient.py b/mssqlscripter/jsonrpc/tests/test_jsonrpcclient.py
index 167eed5..0d39930 100644
--- a/mssqlscripter/jsonrpc/tests/test_jsonrpcclient.py
+++ b/mssqlscripter/jsonrpc/tests/test_jsonrpcclient.py
@@ -59,8 +59,8 @@ def test_response_dequeued(self):
self.assertEqual(response, baseline)
self.shutdown_background_threads(test_client)
# All background threads should be shut down.
- self.assertFalse(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
def test_submit_simple_request(self):
"""
@@ -75,8 +75,8 @@ def test_submit_simple_request(self):
test_client.start()
time.sleep(.5)
# Verify threads are alive and running.
- self.assertTrue(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
test_client.submit_request(
u'scriptingService/ScriptDatabase', {u'ScriptDatabaseOptions': u'True'})
@@ -88,8 +88,8 @@ def test_submit_simple_request(self):
expected = b'Content-Length: 120\r\n\r\n{"id": null, "jsonrpc": "2.0", "method": "scriptingService/ScriptDatabase", "params": {"ScriptDatabaseOptions": "True"}}'
self.assertEqual(input_stream.getvalue(), expected)
- self.assertFalse(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
def test_send_multiple_request(self):
"""
@@ -105,8 +105,8 @@ def test_send_multiple_request(self):
time.sleep(.5)
# request thread is alive.
# response thread is dead due to reaching EOF.
- self.assertTrue(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
test_client.submit_request(
u'scriptingService/ScriptDatabase', {u'ScriptDatabaseOptions': u'True'})
@@ -129,8 +129,8 @@ def test_send_multiple_request(self):
b'Content-Length: 113\r\n\r\n{"id": null, "jsonrpc": "2.0", "method": "scriptingService/ScriptDatabase", "params": {"ScriptDefaults": "True"}}'
self.assertEqual(input_stream.getvalue(), expected)
- self.assertFalse(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
def test_normal_shutdown(self):
"""
@@ -145,15 +145,15 @@ def test_normal_shutdown(self):
test_client.start()
time.sleep(.5)
# Verify threads alive.
- self.assertTrue(test_client.request_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
# Response thread is dead due to EOF.
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.response_thread.is_alive())
test_client.shutdown()
- self.assertFalse(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
def test_send_invalid_request(self):
"""
@@ -187,10 +187,10 @@ def test_receive_invalid_response_exception(self):
str(exception),
u'Content-Length was not found in headers received.')
# Lookup exception for invalid content length spelling.
- self.assertTrue(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
test_client.shutdown()
- self.assertFalse(test_client.request_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
def test_response_stream_closed_exception(self):
"""
@@ -211,10 +211,10 @@ def test_response_stream_closed_exception(self):
self.assertEqual(
str(exception), u'I/O operation on closed file.')
- self.assertTrue(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
test_client.shutdown()
- self.assertFalse(test_client.request_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
@unittest.skip("Disabling until scenario is valid")
def test_stream_has_no_response(self):
@@ -230,11 +230,11 @@ def test_stream_has_no_response(self):
response = test_client.get_response()
self.assertEqual(response, None)
- self.assertTrue(test_client.request_thread.isAlive())
- self.assertTrue(test_client.response_thread.isAlive())
+ self.assertTrue(test_client.request_thread.is_alive())
+ self.assertTrue(test_client.response_thread.is_alive())
test_client.shutdown()
- self.assertFalse(test_client.request_thread.isAlive())
- self.assertFalse(test_client.response_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
+ self.assertFalse(test_client.response_thread.is_alive())
def test_stream_closed_during_process(self):
"""
@@ -260,7 +260,7 @@ def test_stream_closed_during_process(self):
self.assertEqual(
str(exception), u'I/O operation on closed file.')
# Verify response thread is dead.
- self.assertFalse(test_client.request_thread.isAlive())
+ self.assertFalse(test_client.request_thread.is_alive())
test_client.shutdown()
def test_get_response_with_id(self):
diff --git a/mssqlscripter/mssqltoolsservice/external.py b/mssqlscripter/mssqltoolsservice/external.py
index d59f3bd..be0fccb 100644
--- a/mssqlscripter/mssqltoolsservice/external.py
+++ b/mssqlscripter/mssqltoolsservice/external.py
@@ -18,7 +18,7 @@
# Supported platform key's must match those in mssqlscript's setup.py.
SUPPORTED_PLATFORMS = {
- 'manylinux1_x86_64': SQLTOOLSSERVICE_BASE + 'manylinux1/' + 'Microsoft.SqlTools.ServiceLayer-linux-x64-netcoreapp2.1.tar.gz',
+ 'manylinux1_x86_64': SQLTOOLSSERVICE_BASE + 'manylinux1/' + 'Microsoft.SqlTools.ServiceLayer-rhel-x64-net6.0.tar.gz',
'macosx_10_11_intel': SQLTOOLSSERVICE_BASE + 'macosx_10_11_intel/' + 'Microsoft.SqlTools.ServiceLayer-osx-x64-netcoreapp2.1.tar.gz',
'win_amd64': SQLTOOLSSERVICE_BASE + 'win_amd64/' + 'Microsoft.SqlTools.ServiceLayer-win-x64-netcoreapp2.1.zip',
'win32': SQLTOOLSSERVICE_BASE + 'win32/' + 'Microsoft.SqlTools.ServiceLayer-win-x86-netcoreapp2.1.zip'
@@ -54,7 +54,7 @@ def copy_sqltoolsservice(platform):
print(u'Bin placing sqltoolsservice for this platform: {}.'.format(platform))
print(u'Extracting files from {}'.format(copy_file_path))
compressed_file.extractall(TARGET_DIRECTORY)
-
+
def clean_up_sqltoolsservice():
utility.clean_up(directory=TARGET_DIRECTORY)
diff --git a/sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-linux-x64-netcoreapp2.1.tar.gz b/sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-rhel-x64-net6.0.tar.gz
similarity index 51%
rename from sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-linux-x64-netcoreapp2.1.tar.gz
rename to sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-rhel-x64-net6.0.tar.gz
index e905b48..4823667 100644
Binary files a/sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-linux-x64-netcoreapp2.1.tar.gz and b/sqltoolsservice/manylinux1/Microsoft.SqlTools.ServiceLayer-rhel-x64-net6.0.tar.gz differ
diff --git a/tox.ini b/tox.ini
index fba45fc..c543071 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,7 @@
# It is recommended to install both python 2.7 and python 3.6, but we
# won't fail the test run if only one is found.
skip_missing_interpreters=True
-envlist = py27,py36
+envlist = py27,py36,py311
# We will build the wheel ourselves dynamically.
skipsdist=True