Commit 18663ee

[Monitor] Apply black formatting (#38129)
Signed-off-by: Paul Van Eck <[email protected]>
1 parent ca890b8 commit 18663ee

39 files changed: +547 -587 lines

sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/_operations/_operations.py

Lines changed: 2 additions & 4 deletions
@@ -81,8 +81,7 @@ def _upload( # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     def _upload( # pylint: disable=inconsistent-return-statements
@@ -94,8 +93,7 @@ def _upload( # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @distributed_trace
     def _upload( # pylint: disable=inconsistent-return-statements

sdk/monitor/azure-monitor-ingestion/azure/monitor/ingestion/aio/_operations/_operations.py

Lines changed: 2 additions & 4 deletions
@@ -46,8 +46,7 @@ async def _upload( # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @overload
     async def _upload( # pylint: disable=inconsistent-return-statements
@@ -59,8 +58,7 @@ async def _upload( # pylint: disable=inconsistent-return-statements
         content_encoding: Optional[str] = None,
         content_type: str = "application/json",
         **kwargs: Any
-    ) -> None:
-        ...
+    ) -> None: ...
 
     @distributed_trace_async
     async def _upload( # pylint: disable=inconsistent-return-statements
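
Both the sync and async diffs apply the same Black rule: an overload stub whose body is a bare ellipsis is collapsed onto the signature line. A minimal, self-contained sketch of that pattern, using a hypothetical upload function rather than the SDK's _upload:

from typing import Union, overload


@overload
def upload(logs: str) -> None: ...


@overload
def upload(logs: bytes) -> None: ...


def upload(logs: Union[str, bytes]) -> None:
    # Single runtime implementation; the overloads above only describe the accepted input types.
    size = len(logs.encode("utf-8") if isinstance(logs, str) else logs)
    print(f"Uploading {size} bytes")


upload("example log line")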

sdk/monitor/azure-monitor-ingestion/samples/async_samples/sample_custom_error_callback_async.py

Lines changed: 4 additions & 3 deletions
@@ -59,9 +59,10 @@ async def on_error_pass(_) -> None:
     # Sample callback that raises the error if it corresponds to a specific HTTP error code.
     # This aborts the rest of the upload.
     async def on_error_abort(error: LogsUploadError) -> None:
-        if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403):
-            print("Aborting upload...")
-            raise error.error
+        if isinstance(error.error, HttpResponseError):
+            if cast(HttpResponseError, error.error).status_code in (400, 401, 403):
+                print("Aborting upload...")
+                raise error.error
 
     client = LogsIngestionClient(endpoint=endpoint, credential=credential, logging_enable=True)
     async with client:

sdk/monitor/azure-monitor-ingestion/samples/sample_custom_error_callback.py

Lines changed: 6 additions & 4 deletions
@@ -45,7 +45,8 @@
     {"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer2", "AdditionalContext": "context"},
 ]
 
-failed_logs: List[MutableMapping[str, str]] = []
+failed_logs: List[MutableMapping[str, str]] = []
+
 
 # Sample callback that stores the logs that failed to upload.
 def on_error_save(error: LogsUploadError) -> None:
@@ -61,9 +62,10 @@ def on_error_pass(_) -> None:
 # Sample callback that raises the error if it corresponds to a specific HTTP error code.
 # This aborts the rest of the upload.
 def on_error_abort(error: LogsUploadError) -> None:
-    if isinstance(error.error, HttpResponseError) and cast(HttpResponseError, error.error).status_code in (400, 401, 403):
-        print("Aborting upload...")
-        raise error.error
+    if isinstance(error.error, HttpResponseError):
+        if cast(HttpResponseError, error.error).status_code in (400, 401, 403):
+            print("Aborting upload...")
+            raise error.error
 
 
 client.upload(rule_id=rule_id, stream_name=os.environ["LOGS_DCR_STREAM_NAME"], logs=body, on_error=on_error_save)
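
For context, a rough sketch of how an on_error callback like the one above is wired into an upload call. The LOGS_DCE and LOGS_DCR_RULE_ID environment variable names here are illustrative placeholders; only LOGS_DCR_STREAM_NAME appears in the sample itself:

import os
from typing import cast

from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient, LogsUploadError


def on_error_abort(error: LogsUploadError) -> None:
    # Re-raise only for selected HTTP status codes; any other failure is skipped.
    if isinstance(error.error, HttpResponseError):
        if cast(HttpResponseError, error.error).status_code in (400, 401, 403):
            print("Aborting upload...")
            raise error.error


client = LogsIngestionClient(endpoint=os.environ["LOGS_DCE"], credential=DefaultAzureCredential())
client.upload(
    rule_id=os.environ["LOGS_DCR_RULE_ID"],
    stream_name=os.environ["LOGS_DCR_STREAM_NAME"],
    logs=[{"Time": "2021-12-08T23:51:14.1104269Z", "Computer": "Computer1", "AdditionalContext": "context"}],
    on_error=on_error_abort,
)
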
sdk/monitor/azure-monitor-ingestion/setup.py

Lines changed: 44 additions & 42 deletions
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 
-#-------------------------------------------------------------------------
+# -------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 
 import re
 import os.path
@@ -16,76 +16,78 @@
 PACKAGE_PPRINT_NAME = "Azure Monitor Ingestion"
 
 # a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace('-', '/')
+package_folder_path = PACKAGE_NAME.replace("-", "/")
 # a-b-c => a.b.c
-namespace_name = PACKAGE_NAME.replace('-', '.')
+namespace_name = PACKAGE_NAME.replace("-", ".")
 
 # azure v0.x is not compatible with this package
 # azure v0.x used to have a __version__ attribute (newer versions don't)
 try:
     import azure
+
     try:
         ver = azure.__version__
         raise Exception(
-            'This package is incompatible with azure=={}. '.format(ver) +
-            'Uninstall it with "pip uninstall azure".'
+            "This package is incompatible with azure=={}. ".format(ver) + 'Uninstall it with "pip uninstall azure".'
        )
     except AttributeError:
         pass
 except ImportError:
     pass
 
 # Version extraction inspired from 'requests'
-with open(os.path.join(package_folder_path, 'version.py')
-          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
-          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
-    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
-                        fd.read(), re.MULTILINE).group(1)
+with open(
+    (
+        os.path.join(package_folder_path, "version.py")
+        if os.path.exists(os.path.join(package_folder_path, "version.py"))
+        else os.path.join(package_folder_path, "_version.py")
+    ),
+    "r",
+) as fd:
+    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1)
 
 if not version:
-    raise RuntimeError('Cannot find version information')
+    raise RuntimeError("Cannot find version information")
 
-with open('README.md', encoding='utf-8') as f:
+with open("README.md", encoding="utf-8") as f:
     readme = f.read()
-with open('CHANGELOG.md', encoding='utf-8') as f:
+with open("CHANGELOG.md", encoding="utf-8") as f:
     changelog = f.read()
 
 setup(
     name=PACKAGE_NAME,
     version=version,
-    description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
-    long_description=readme + '\n\n' + changelog,
-    long_description_content_type='text/markdown',
-    license='MIT License',
-    author='Microsoft Corporation',
-    author_email='[email protected]',
-    url='https://github.com/Azure/azure-sdk-for-python',
+    description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME),
+    long_description=readme + "\n\n" + changelog,
+    long_description_content_type="text/markdown",
+    license="MIT License",
+    author="Microsoft Corporation",
+    author_email="[email protected]",
+    url="https://github.com/Azure/azure-sdk-for-python",
     keywords="azure, azure sdk",
     classifiers=[
         "Development Status :: 5 - Production/Stable",
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 3 :: Only',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-        'Programming Language :: Python :: 3.12',
-        'License :: OSI Approved :: MIT License',
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "License :: OSI Approved :: MIT License",
     ],
     python_requires=">=3.8",
     zip_safe=False,
-    packages=find_packages(exclude=[
-        'tests',
-        'samples',
-        # Exclude packages that will be covered by PEP420 or nspkg
-        'azure',
-        'azure.monitor',
-    ]),
+    packages=find_packages(
+        exclude=[
+            "tests",
+            "samples",
+            # Exclude packages that will be covered by PEP420 or nspkg
+            "azure",
+            "azure.monitor",
+        ]
+    ),
     include_package_data=True,
-    install_requires=[
-        'azure-core>=1.28.0',
-        'isodate>=0.6.0',
-        "typing-extensions>=4.0.1"
-    ]
+    install_requires=["azure-core>=1.28.0", "isodate>=0.6.0", "typing-extensions>=4.0.1"],
 )

sdk/monitor/azure-monitor-ingestion/tests/base_testcase.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
 AUDIENCE_MAP = {
     "AzureCloud": "https://monitor.azure.com",
     "AzureChinaCloud": "https://monitor.azure.cn",
-    "AzureUSGovernment": "https://monitor.azure.us"
+    "AzureUSGovernment": "https://monitor.azure.us",
 }
 
 
sdk/monitor/azure-monitor-ingestion/tests/conftest.py

Lines changed: 6 additions & 10 deletions
@@ -32,7 +32,7 @@
     add_general_regex_sanitizer,
     add_header_regex_sanitizer,
     set_custom_default_matcher,
-    add_oauth_response_sanitizer
+    add_oauth_response_sanitizer,
 )
 
 
@@ -60,15 +60,14 @@ def add_sanitizers(test_proxy, environment_variables):
         ENV_CLIENT_SECRET: TEST_ID,
         ENV_DCE: TEST_DCE,
         ENV_STREAM_NAME: TEST_STREAM_NAME,
-        ENV_DCR_ID: TEST_ID
+        ENV_DCR_ID: TEST_ID,
     }
     environment_variables.sanitize_batch(sanitization_mapping)
     set_custom_default_matcher(
         compare_bodies=False, excluded_headers="Authorization,Content-Length,x-ms-client-request-id,x-ms-request-id"
     )
     add_general_regex_sanitizer(
-        value="fakeresource",
-        regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)"
+        value="fakeresource", regex="(?<=\\/\\/)[a-z-]+(?=\\.westus2-1\\.ingest\\.monitor\\.azure\\.com)"
     )
     add_body_key_sanitizer(json_path="access_token", value="fakekey")
     add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
@@ -80,19 +79,16 @@ def monitor_info(environment_variables):
     yield {
         "stream_name": environment_variables.get(ENV_STREAM_NAME),
         "dce": environment_variables.get(ENV_DCE),
-        "dcr_id": environment_variables.get(ENV_DCR_ID)
+        "dcr_id": environment_variables.get(ENV_DCR_ID),
     }
 
 
 @pytest.fixture(scope="session")
 def large_data():
     logs = []
-    content = "a" * (1024 * 100) # 100 KiB string
+    content = "a" * (1024 * 100)  # 100 KiB string
 
     # Ensure total size is > 2 MiB data
     for i in range(24):
-        logs.append({
-            "Time": datetime.now().isoformat(),
-            "AdditionalContext": content
-        })
+        logs.append({"Time": datetime.now().isoformat(), "AdditionalContext": content})
     return logs
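
The large_data fixture builds a little over 2 MiB of log content, which is typically enough for the client to split the upload into multiple service requests. A hedged sketch of how a test might consume it; the fixture names come from the hunk above, while the test function itself is illustrative rather than the repository's actual test:

from azure.identity import DefaultAzureCredential
from azure.monitor.ingestion import LogsIngestionClient


def test_upload_large_batch(monitor_info, large_data):
    # monitor_info supplies the data collection endpoint, stream name, and DCR id;
    # large_data supplies > 2 MiB of log entries (see fixtures above).
    client = LogsIngestionClient(endpoint=monitor_info["dce"], credential=DefaultAzureCredential())
    with client:
        # upload() batches an oversized payload into multiple requests; failures
        # surface through an on_error callback or a raised exception.
        client.upload(
            rule_id=monitor_info["dcr_id"],
            stream_name=monitor_info["stream_name"],
            logs=large_data,
        )
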
Lines changed: 34 additions & 30 deletions
@@ -1,8 +1,8 @@
-#-------------------------------------------------------------------------
+# -------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 from datetime import datetime
 import json
 import random
@@ -17,8 +17,9 @@
 
 ALPHANUMERIC_CHARACTERS = string.ascii_letters + string.digits
 
+
 def _get_random_string(length: int):
-    return ''.join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length))
+    return "".join(random.choice(ALPHANUMERIC_CHARACTERS) for _ in range(length))
 
 
 def _get_repeating_string(length: int):
@@ -37,13 +38,9 @@ def __init__(self, arguments):
         self.async_credential = AsyncDefaultAzureCredential()
 
         # Create clients
-        self.client = LogsIngestionClient(
-            endpoint=self.data_collection_endpoint,
-            credential=self.credential
-        )
+        self.client = LogsIngestionClient(endpoint=self.data_collection_endpoint, credential=self.credential)
         self.async_client = AsyncLogsIngestionClient(
-            endpoint=self.data_collection_endpoint,
-            credential=self.async_credential
+            endpoint=self.data_collection_endpoint, credential=self.async_credential
         )
 
     async def close(self):
@@ -58,36 +55,43 @@ async def setup(self):
         # Create log entries to upload
         self.logs = []
         for i in range(self.args.num_logs):
-            content = _get_random_string(self.args.log_content_length) if self.args.random_log_content \
+            content = (
+                _get_random_string(self.args.log_content_length)
+                if self.args.random_log_content
                 else _get_repeating_string(self.args.log_content_length)
-            self.logs.append({
-                "Time": datetime.now().isoformat(),
-                "Computer": f"Computer {i}",
-                "AdditionalContext": content
-            })
-        print(f'{len(json.dumps(self.logs))} bytes of logs to be uploaded.')
+            )
+            self.logs.append(
+                {"Time": datetime.now().isoformat(), "Computer": f"Computer {i}", "AdditionalContext": content}
+            )
+        print(f"{len(json.dumps(self.logs))} bytes of logs to be uploaded.")
 
     @staticmethod
     def add_arguments(parser):
         super(UploadLogsTest, UploadLogsTest).add_arguments(parser)
-        parser.add_argument("-n", "--num-logs", nargs="?", type=int,
-                            help="Number of logs to be uploaded. Defaults to 100", default=100)
-        parser.add_argument("-l", "--log-content-length", nargs="?", type=int,
-                            help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20", default=20)
-        parser.add_argument("-r", "--random-log-content", action="store_true",
+        parser.add_argument(
+            "-n", "--num-logs", nargs="?", type=int, help="Number of logs to be uploaded. Defaults to 100", default=100
+        )
+        parser.add_argument(
+            "-l",
+            "--log-content-length",
+            nargs="?",
+            type=int,
+            help="Length of the 'AdditionalContext' value for each log entry. Defaults to 20",
+            default=20,
+        )
+        parser.add_argument(
+            "-r",
+            "--random-log-content",
+            action="store_true",
            help="Whether to use a random alphanumeric string for each 'AdditionalContext' value. "
-            "If False, uses a repeating 'a' character. Defaults to False", default=False)
+            "If False, uses a repeating 'a' character. Defaults to False",
+            default=False,
+        )
 
     def run_sync(self):
-        self.client.upload(
-            rule_id=self.data_collection_rule_id,
-            stream_name=self.stream_name,
-            logs=self.logs
-        )
+        self.client.upload(rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs)
 
     async def run_async(self):
         await self.async_client.upload(
-            rule_id=self.data_collection_rule_id,
-            stream_name=self.stream_name,
-            logs=self.logs
+            rule_id=self.data_collection_rule_id, stream_name=self.stream_name, logs=self.logs
         )
