diff --git a/.flake8 b/.flake8
index f7f571c..74043ff 100644
--- a/.flake8
+++ b/.flake8
@@ -14,6 +14,8 @@ per-file-ignores =
# supress some docstring requirements in tests
tests/unit_tests/*.py: D
tests/unit_tests/**/*.py: D
+ tests/it_tests/*.py: D
+ tests/it_tests/**/*.py: D
# these files are from OTEL so should use OTEL license.
*/deep/api/types.py: NCF102
*/deep/api/resource/__init__.py: NCF102
diff --git a/.github/workflows/it_tests.yaml b/.github/workflows/it_tests.yaml
new file mode 100644
index 0000000..4660f50
--- /dev/null
+++ b/.github/workflows/it_tests.yaml
@@ -0,0 +1,26 @@
+name: IT Tests
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: '0 0 * * *'
+
+jobs:
+ itTests:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+
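+      # Count the commits from the last 24 hours so the scheduled run can be skipped when nothing has changed.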
+ - run: |
+ echo "${{ github.event_name }}"
+ NEW_COMMIT_COUNT=$(git log --oneline --since '24 hours ago' | wc -l)
+ echo "$NEW_COMMIT_COUNT"
+ echo "CHANGE_COUNT=$NEW_COMMIT_COUNT" >> $GITHUB_ENV
+
+ - name: Setup Python # Set Python version
+ uses: actions/setup-python@v4
+ with:
+ python-version: 3.11
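+      # Assumption: the IT tests use the same Python dependencies as the unit tests (mirrors on_push.yaml).
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+          pip install -r dev-requirements.txt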
+ - name: Run IT Tests
+ if: ${{ github.event_name == 'workflow_dispatch' || env.CHANGE_COUNT > 0 }}
+ run: make it-test
\ No newline at end of file
diff --git a/.github/workflows/on_push.yaml b/.github/workflows/on_push.yaml
index b392648..6f1aca3 100644
--- a/.github/workflows/on_push.yaml
+++ b/.github/workflows/on_push.yaml
@@ -44,7 +44,6 @@ jobs:
make coverage
tests:
-
runs-on: ubuntu-latest
strategy:
matrix:
@@ -62,7 +61,6 @@ jobs:
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r dev-requirements.txt
- pip install .
- name: Test with pytest
run: pytest tests/unit_tests --doctest-modules --junitxml=junit/test-results-${{ matrix.python-version }}.xml
- name: Upload pytest test results
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8eb8867..73ccbbd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@
- **[CHANGE]**: change(build): add doc string check to flake8 [#14](https://github.com/intergral/deep/pull/14) [@Umaaz](https://github.com/Umaaz)
- **[FEATURE]**: feat(logging): initial implementation of log points [#3](https://github.com/intergral/deep/pull/3) [@Umaaz](https://github.com/Umaaz)
+- **[ENHANCEMENT]**: enhancement(trigger): change tracepoint handling to use triggers [#16](https://github.com/intergral/deep/pull/16) [@Umaaz](https://github.com/Umaaz)
- **[BUGFIX]**: feat(api): add api function to register tracepoint directly [#8](https://github.com/intergral/deep/pull/8) [@Umaaz](https://github.com/Umaaz)
# 1.0.1 (22/06/2023)
diff --git a/Makefile b/Makefile
index 2eadb9c..f40da87 100644
--- a/Makefile
+++ b/Makefile
@@ -18,9 +18,13 @@ endif
test:
pytest tests/unit_tests
+.PHONY: it-test
+it-test:
+ pytest tests/it_tests
+
.PHONY: coverage
coverage:
- pytest tests/unit_tests --cov=deep --cov-report term --cov-fail-under=77 --cov-report html --cov-branch
+ pytest tests/unit_tests --cov=deep --cov-report term --cov-fail-under=82 --cov-report html --cov-branch
.PHONY: lint
lint:
@@ -60,4 +64,7 @@ docs:
.PHONY: clean
clean:
- rm -Rf _site docs/apidocs .pytest_cache test/.pytest_cache
\ No newline at end of file
+ rm -Rf _site docs/apidocs .pytest_cache test/.pytest_cache
+
+.PHONY: precommit
+precommit: lint test coverage
\ No newline at end of file
diff --git a/deep-python-client.iml b/deep-python-client.iml
index 7db90e3..b2c6f49 100644
--- a/deep-python-client.iml
+++ b/deep-python-client.iml
@@ -8,6 +8,7 @@
+
diff --git a/dev/test-server/src/test_server/__init__.py b/dev/test-server/src/test_server/__init__.py
new file mode 100644
index 0000000..759367f
--- /dev/null
+++ b/dev/test-server/src/test_server/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+"""This is a basic example of setting up a GRPC server to consume Deep protobuf messages."""
diff --git a/dev/test-server/src/test-server/server.py b/dev/test-server/src/test_server/server.py
similarity index 100%
rename from dev/test-server/src/test-server/server.py
rename to dev/test-server/src/test_server/server.py
diff --git a/docs/config/config.md b/docs/config/config.md
index 090a153..35dbbbc 100644
--- a/docs/config/config.md
+++ b/docs/config/config.md
@@ -4,15 +4,15 @@ A list of the possible config values for the deep python agent. They can be set
Note: When setting as environment variable prefix the key with 'DEEP_'. e.g. DEEP_SERVICE_URL
-| Key | Default | Description |
-|-----------------------|------------|----------------------------------------------------------------------------------------------------------------------------------------------------------|
-| SERVICE_URL | deep:43315 | The url (hostname:port) of the deep service to connect to. |
-| SERVICE_SECURE | True | Can be set to False if the service doesn't support secure connections. |
-| LOGGING_CONF | None | Can be used to override the python logging config used by the agent. |
-| POLL_TIMER | 10 | The time (in seconds) of the interval between polls. |
-| SERVICE_AUTH_PROVIDER | None | The auth provider to use, each provider can have their own config, see available [auth providers](../auth/providers.md) for details. |
-| IN_APP_INCLUDE | None | A string of comma (,) seperated values that indicate a package is part of the app. |
-| IN_APP_EXCLUDE | None | A string of comma (,) seperated values that indicate a package is not part of the app. |
+| Key | Default | Description |
+|-----------------------|------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| SERVICE_URL | deep:43315 | The url (hostname:port) of the deep service to connect to. |
+| SERVICE_SECURE | True | Can be set to False if the service doesn't support secure connections. |
+| LOGGING_CONF | None | Can be used to override the python logging config used by the agent. |
+| POLL_TIMER | 10 | The time (in seconds) of the interval between polls. |
+| SERVICE_AUTH_PROVIDER | None | The auth provider to use, each provider can have their own config, see available [auth providers](../auth/providers.md) for details. |
+| IN_APP_INCLUDE        | None       | A string of comma (,) separated values that indicate a package is part of the app.                                                                          |
+| IN_APP_EXCLUDE        | None       | A string of comma (,) separated values that indicate a package is not part of the app.                                                                      |
| APP_ROOT | Calculated | This is the root folder in which the application is running. If not set it is calculated as the directory in which the file that calls `Deep.start` is in. |
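+
+For example, to connect to a local Deep server without TLS, the values can also be passed directly to `deep.start` (the hostname and port below are illustrative):
+
+```python
+import deep
+
+deep.start({
+    'SERVICE_URL': 'localhost:43315',   # hostname:port of the Deep service
+    'SERVICE_SECURE': 'False',          # plain-text connection for local development
+})
+```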
diff --git a/docs/index.md b/docs/index.md
index e92b0d8..5437e33 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -2,7 +2,7 @@
This is the python client for Deep, a dynamic monitor and debugging tool.
# Getting started
-You will need to have a running version of the [DEEP server](#) to connect this client to.
+You will need to have a running version of the [DEEP server](https://github.com/intergral/deep) to connect this client to.
## Install Agent
To install the python agent just add the dependency 'deep-agent' to your project.
@@ -12,7 +12,7 @@ pip install deep-agent
```
## Setup
-Once installed you need to setup the agent. At the earliest part of the code you should add the following code:
+Once installed you need to set up the agent. As early as possible in your application code, add the following:
```python
import deep
diff --git a/examples/simple-app-metrics/src/simple-app/base_test.py b/examples/simple-app-metrics/src/simple-app/base_test.py
new file mode 100644
index 0000000..20d06e9
--- /dev/null
+++ b/examples/simple-app-metrics/src/simple-app/base_test.py
@@ -0,0 +1,43 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""A simple test object for examples."""
+
+import random
+import uuid
+
+
+class BaseTest:
+ """A basic test that is used in examples."""
+
+ def new_id(self):
+ """Create new id."""
+ return str(uuid.uuid4())
+
+ def next_max(self):
+ """Create new random max."""
+ return random.randint(1, 101)
+
+ def make_char_count_map(self, in_str):
+ """Create char count map."""
+ res = {}
+
+ for i in range(0, len(in_str)):
+ c = in_str[i]
+ if c not in res:
+ res[c] = 0
+ else:
+ res[c] = res[c] + 1
+ return res
diff --git a/examples/simple-app-metrics/src/simple-app/main.py b/examples/simple-app-metrics/src/simple-app/main.py
new file mode 100644
index 0000000..70a9f99
--- /dev/null
+++ b/examples/simple-app-metrics/src/simple-app/main.py
@@ -0,0 +1,91 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Simple example showing usage with prometheus metrics."""
+
+import signal
+import time
+
+from prometheus_client import Summary, start_http_server
+
+import deep
+from simple_test import SimpleTest
+
+
+class GracefulKiller:
+ """Ensure clean shutdown."""
+
+ kill_now = False
+
+ def __init__(self):
+ """Crate new killer."""
+ signal.signal(signal.SIGINT, self.exit_gracefully)
+ signal.signal(signal.SIGTERM, self.exit_gracefully)
+
+ def exit_gracefully(self, *args):
+ """Exit example."""
+ self.kill_now = True
+
+
+def main():
+ """Run the example."""
+ killer = GracefulKiller()
+ ts = SimpleTest("This is a test")
+ while not killer.kill_now:
+ try:
+ ts.message(ts.new_id())
+ except BaseException as e:
+ print(e)
+ ts.reset()
+
+ time.sleep(0.1)
+
+
+# Create a metric to track time spent and requests made.
+REQUEST_TIME = Summary('request_processing_seconds', 'Time spent processing request')
+
+
+# Decorate function with metric.
+@REQUEST_TIME.time()
+def process_request(t):
+ """
+ Sleep.
+
+ A dummy function that takes some time.
+ """
+ time.sleep(t)
+
+
+if __name__ == '__main__':
+ start_http_server(8000)
+ d = deep.start({
+ 'SERVICE_URL': 'localhost:43315',
+ 'SERVICE_SECURE': 'False',
+ })
+
+ d.register_tracepoint("simple_test.py", 31)
+
+ print("app running")
+ main()
diff --git a/examples/simple-app-metrics/src/simple-app/simple_test.py b/examples/simple-app-metrics/src/simple-app/simple_test.py
new file mode 100644
index 0000000..5811e00
--- /dev/null
+++ b/examples/simple-app-metrics/src/simple-app/simple_test.py
@@ -0,0 +1,73 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""A simple test object for examples."""
+
+import time
+
+from base_test import BaseTest
+
+
+class SimpleTest(BaseTest):
+ """A basic test that is used in examples."""
+
+ def __init__(self, test_name):
+ """Create new test object."""
+ super().__init__()
+ self._started_at = round(time.time() * 1000)
+ self.__cnt = 0
+ self.char_counter = {}
+ self.test_name = test_name
+ self.max_executions = self.next_max()
+
+ def message(self, uuid):
+ """Print message to console."""
+ print("%s:%s" % (self.__cnt, uuid))
+ self.__cnt += 1
+ self.check_end(self.__cnt, self.max_executions)
+
+ info = self.make_char_count_map(uuid)
+ self.merge(self.char_counter, info)
+ if self.__cnt % 30 == 0:
+ self.dump()
+
+ def merge(self, char_counter, new_info):
+ """Merge captured data."""
+ for key in new_info:
+ new_val = new_info[key]
+
+ if key not in char_counter:
+ char_counter[key] = new_val
+ else:
+ char_counter[key] = new_val + char_counter[key]
+
+ def dump(self):
+ """Dump message to console."""
+ print(self.char_counter)
+ self.char_counter = {}
+
+ def check_end(self, value, max_executions):
+ """Check if we are at end."""
+ if value > max_executions:
+ raise Exception("Hit max executions %s %s " % (value, max_executions))
+
+ def __str__(self) -> str:
+ """Represent this as a string."""
+ return self.__class__.__name__ + ":" + self.test_name + ":" + str(self._started_at)
+
+ def reset(self):
+ """Reset the count."""
+ self.__cnt = 0
+ self.max_executions = self.next_max()
diff --git a/examples/simple-app-otel/src/simple-app/base_test.py b/examples/simple-app-otel/src/simple-app/base_test.py
new file mode 100644
index 0000000..20d06e9
--- /dev/null
+++ b/examples/simple-app-otel/src/simple-app/base_test.py
@@ -0,0 +1,43 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""A simple test object for examples."""
+
+import random
+import uuid
+
+
+class BaseTest:
+ """A basic test that is used in examples."""
+
+ def new_id(self):
+ """Create new id."""
+ return str(uuid.uuid4())
+
+ def next_max(self):
+ """Create new random max."""
+ return random.randint(1, 101)
+
+ def make_char_count_map(self, in_str):
+ """Create char count map."""
+ res = {}
+
+ for i in range(0, len(in_str)):
+ c = in_str[i]
+ if c not in res:
+ res[c] = 0
+ else:
+ res[c] = res[c] + 1
+ return res
diff --git a/examples/simple-app-otel/src/simple-app/main.py b/examples/simple-app-otel/src/simple-app/main.py
new file mode 100644
index 0000000..d02f4fb
--- /dev/null
+++ b/examples/simple-app-otel/src/simple-app/main.py
@@ -0,0 +1,78 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Simple example showing usage with OTEL."""
+
+import signal
+import time
+
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.sdk.resources import SERVICE_NAME, Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+
+import deep
+from simple_test import SimpleTest
+
+
+class GracefulKiller:
+ """Ensure clean shutdown."""
+
+ kill_now = False
+
+ def __init__(self):
+ """Crate new killer."""
+ signal.signal(signal.SIGINT, self.exit_gracefully)
+ signal.signal(signal.SIGTERM, self.exit_gracefully)
+
+ def exit_gracefully(self, *args):
+ """Exit example."""
+ self.kill_now = True
+
+
+def main():
+ """Run the example."""
+ killer = GracefulKiller()
+ ts = SimpleTest("This is a test")
+ while not killer.kill_now:
+ with trace.get_tracer(__name__).start_as_current_span("loop"):
+ with trace.get_tracer(__name__).start_as_current_span("loop-inner"):
+ try:
+ ts.message(ts.new_id())
+ except BaseException as e:
+ print(e)
+ ts.reset()
+
+ time.sleep(0.1)
+
+
+if __name__ == '__main__':
+ resource = Resource(attributes={
+ SERVICE_NAME: "your-service-name"
+ })
+ provider = TracerProvider(resource=resource)
+ processor = BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4317/api/traces"))
+ provider.add_span_processor(processor)
+ # Sets the global default tracer provider
+ trace.set_tracer_provider(provider)
+
+ deep.start({
+ 'SERVICE_URL': 'localhost:43315',
+ 'SERVICE_SECURE': 'False',
+ })
+
+ print("app running")
+ main()
diff --git a/examples/simple-app-otel/src/simple-app/simple_test.py b/examples/simple-app-otel/src/simple-app/simple_test.py
new file mode 100644
index 0000000..5811e00
--- /dev/null
+++ b/examples/simple-app-otel/src/simple-app/simple_test.py
@@ -0,0 +1,73 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""A simple test object for examples."""
+
+import time
+
+from base_test import BaseTest
+
+
+class SimpleTest(BaseTest):
+ """A basic test that is used in examples."""
+
+ def __init__(self, test_name):
+ """Create new test object."""
+ super().__init__()
+ self._started_at = round(time.time() * 1000)
+ self.__cnt = 0
+ self.char_counter = {}
+ self.test_name = test_name
+ self.max_executions = self.next_max()
+
+ def message(self, uuid):
+ """Print message to console."""
+ print("%s:%s" % (self.__cnt, uuid))
+ self.__cnt += 1
+ self.check_end(self.__cnt, self.max_executions)
+
+ info = self.make_char_count_map(uuid)
+ self.merge(self.char_counter, info)
+ if self.__cnt % 30 == 0:
+ self.dump()
+
+ def merge(self, char_counter, new_info):
+ """Merge captured data."""
+ for key in new_info:
+ new_val = new_info[key]
+
+ if key not in char_counter:
+ char_counter[key] = new_val
+ else:
+ char_counter[key] = new_val + char_counter[key]
+
+ def dump(self):
+ """Dump message to console."""
+ print(self.char_counter)
+ self.char_counter = {}
+
+ def check_end(self, value, max_executions):
+ """Check if we are at end."""
+ if value > max_executions:
+ raise Exception("Hit max executions %s %s " % (value, max_executions))
+
+ def __str__(self) -> str:
+ """Represent this as a string."""
+ return self.__class__.__name__ + ":" + self.test_name + ":" + str(self._started_at)
+
+ def reset(self):
+ """Reset the count."""
+ self.__cnt = 0
+ self.max_executions = self.next_max()
diff --git a/examples/simple-app/src/simple-app/main.py b/examples/simple-app/src/simple-app/main.py
index 6b6c515..c67a26a 100644
--- a/examples/simple-app/src/simple-app/main.py
+++ b/examples/simple-app/src/simple-app/main.py
@@ -18,13 +18,8 @@
import signal
import time
-from opentelemetry import trace
-from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
-from opentelemetry.sdk.resources import SERVICE_NAME, Resource
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
-
import deep
+from deep.api.tracepoint.constants import FIRE_COUNT
from simple_test import SimpleTest
@@ -48,31 +43,21 @@ def main():
killer = GracefulKiller()
ts = SimpleTest("This is a test")
while not killer.kill_now:
- with trace.get_tracer(__name__).start_as_current_span("loop"):
- with trace.get_tracer(__name__).start_as_current_span("loop-inner"):
- try:
- ts.message(ts.new_id())
- except BaseException as e:
- print(e)
- ts.reset()
+ try:
+ ts.message(ts.new_id())
+ except BaseException as e:
+ print(e)
+ ts.reset()
- time.sleep(0.1)
+ time.sleep(0.1)
if __name__ == '__main__':
- resource = Resource(attributes={
- SERVICE_NAME: "your-service-name"
- })
- provider = TracerProvider(resource=resource)
- processor = BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4317/api/traces"))
- provider.add_span_processor(processor)
- # Sets the global default tracer provider
- trace.set_tracer_provider(provider)
-
- deep.start({
+ _deep = deep.start({
'SERVICE_URL': 'localhost:43315',
'SERVICE_SECURE': 'False',
})
+ _deep.register_tracepoint("simple_test.py", 35, {FIRE_COUNT: '-1'})
print("app running")
main()
diff --git a/pyproject.toml b/pyproject.toml
index 46d7f69..fcd34db 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,7 +40,7 @@ include = [
[tool.hatch.build.targets.wheel]
packages = ["src/deep"]
-
+# read version from version.py file
[tool.hatch.version]
path = "src/deep/version.py"
@@ -58,4 +58,4 @@ pythonpath = [
exclude_lines = [
"if TYPE_CHECKING:",
"@abc.abstractmethod"
-]
\ No newline at end of file
+]
diff --git a/requirements.txt b/requirements.txt
index 3c1ea48..2792d22 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,3 @@
grpcio>=1.51.3
-deep-proto>=1.0.2
+deep-proto>=1.0.5
protobuf>=3.20.3
diff --git a/scripts/gendocs.py b/scripts/gendocs.py
index c4ae52a..1fbaf51 100644
--- a/scripts/gendocs.py
+++ b/scripts/gendocs.py
@@ -25,13 +25,13 @@
import yaml
-def dump_nav(nav, depth=0):
+def dump_nav(_nav, depth=0):
keys = []
- for key in nav.keys():
+ for key in _nav.keys():
keys.append(key)
keys.sort()
for k in keys:
- val = nav[k]
+ val = _nav[k]
if type(val) is dict:
print("%s - %s:" % (' ' * (depth * 2), k))
dump_nav(val, depth + 1)
@@ -44,24 +44,24 @@ def covert_nav(new_nav):
for k in new_nav:
val = new_nav[k]
if type(val) is dict:
- nav = covert_nav(val)
- as_list.append({k: nav})
+ _nav = covert_nav(val)
+ as_list.append({k: _nav})
else:
as_list.append({k: val})
# sort the nav alphabetically (each list item is a single element dict, so use first key to sort)
return sorted(as_list, key=lambda x: list(x.keys())[0])
-def update_nav(project_root, new_nav):
+def update_nav(_project_root, new_nav):
loaded = None
- with open("%s/mkdocs.yml" % project_root, 'r') as mkdocs:
+ with open("%s/mkdocs.yml" % _project_root, 'r') as mkdocs:
read = mkdocs.read()
loaded = yaml.load(read, Loader=yaml.Loader)
if loaded is None:
print("Cannot load mkdocs.yml")
exit()
loaded['nav'].append({'apidocs': covert_nav(new_nav)})
- with open("%s/mkdocs-mod.yml" % project_root, 'w') as mkdocs:
+ with open("%s/mkdocs-mod.yml" % _project_root, 'w') as mkdocs:
yaml.dump(loaded, mkdocs)
diff --git a/src/deep/__init__.py b/src/deep/__init__.py
index 054b31a..bc346fb 100644
--- a/src/deep/__init__.py
+++ b/src/deep/__init__.py
@@ -22,7 +22,7 @@
from deep.api import Deep
-def start(config=None):
+def start(config=None) -> 'Deep':
"""
Start DEEP.
diff --git a/src/deep/api/attributes/__init__.py b/src/deep/api/attributes/__init__.py
index 5818b91..7da6e96 100644
--- a/src/deep/api/attributes/__init__.py
+++ b/src/deep/api/attributes/__init__.py
@@ -129,7 +129,7 @@ class BoundedAttributes(MutableMapping):
def __init__(
self,
- maxlen: Optional[int] = None,
+ max_length: Optional[int] = None,
attributes: types.Attributes = None,
immutable: bool = True,
max_value_len: Optional[int] = None,
@@ -137,17 +137,17 @@ def __init__(
"""
Create new attributes.
- :param maxlen: max number of attributes
+ :param max_length: max number of attributes
:param attributes: existing attributes to copy
:param immutable: are these attributes immutable
:param max_value_len: max length of the attribute values
"""
- if maxlen is not None:
- if not isinstance(maxlen, int) or maxlen < 0:
+ if max_length is not None:
+ if not isinstance(max_length, int) or max_length < 0:
raise ValueError(
- "maxlen must be valid int greater or equal to 0"
+ "max_length must be valid int greater or equal to 0"
)
- self.maxlen = maxlen
+ self.max_length = max_length
self.dropped = 0
self.max_value_len = max_value_len
self._dict = OrderedDict() # type: OrderedDict
@@ -160,7 +160,7 @@ def __init__(
def __repr__(self):
"""Represent this as a string."""
return (
- f"{type(self).__name__}({dict(self._dict)}, maxlen={self.maxlen})"
+ f"{type(self).__name__}({dict(self._dict)}, max_length={self.max_length})"
)
def __getitem__(self, key):
@@ -172,7 +172,7 @@ def __setitem__(self, key, value):
if getattr(self, "_immutable", False):
raise TypeError
with self._lock:
- if self.maxlen is not None and self.maxlen == 0:
+ if self.max_length is not None and self.max_length == 0:
self.dropped += 1
return
@@ -181,7 +181,7 @@ def __setitem__(self, key, value):
if key in self._dict:
del self._dict[key]
elif (
- self.maxlen is not None and len(self._dict) == self.maxlen
+ self.max_length is not None and len(self._dict) == self.max_length
):
self._dict.popitem(last=False)
self.dropped += 1
diff --git a/src/deep/api/deep.py b/src/deep/api/deep.py
index 1fbf685..5a8f2db 100644
--- a/src/deep/api/deep.py
+++ b/src/deep/api/deep.py
@@ -14,16 +14,16 @@
# along with this program. If not, see .from typing import Dict, List
"""The main services for Deep."""
+
from typing import Dict, List
from deep.api.plugin import load_plugins
from deep.api.resource import Resource
-from deep.api.tracepoint import TracePointConfig
from deep.config import ConfigService
from deep.config.tracepoint_config import TracepointConfigService
from deep.grpc import GRPCService
from deep.poll import LongPoll
-from deep.processor import TriggerHandler
+from deep.processor.trigger_handler import TriggerHandler
from deep.push import PushService
from deep.task import TaskHandler
@@ -67,12 +67,13 @@ def shutdown(self):
"""Shutdown deep."""
if not self.started:
return
+ self.trigger_handler.shutdown()
self.task_handler.flush()
self.poll.shutdown()
self.started = False
def register_tracepoint(self, path: str, line: int, args: Dict[str, str] = None,
- watches: List[str] = None) -> 'TracepointRegistration':
+ watches: List[str] = None, metrics=None) -> 'TracepointRegistration':
"""
Register a new tracepoint.
@@ -80,33 +81,32 @@ def register_tracepoint(self, path: str, line: int, args: Dict[str, str] = None,
:param line: the line number
:param args: the args
:param watches: the watches
+ :param metrics: the metrics
:return: the new registration
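+
+        Example (a sketch; the path, line number and fire count are illustrative,
+        FIRE_COUNT comes from deep.api.tracepoint.constants)::
+
+            _deep = deep.start()
+            registration = _deep.register_tracepoint("simple_test.py", 35, {FIRE_COUNT: '-1'})
+            # ... later, remove the tracepoint again
+            registration.unregister()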
"""
+ if metrics is None:
+ metrics = []
if watches is None:
watches = []
if args is None:
args = {}
- tp_config = self.config.tracepoints.add_custom(path, line, args, watches)
- return TracepointRegistration(tp_config, self.config.tracepoints)
+ tp_id = self.config.tracepoints.add_custom(path, line, args, watches, metrics)
+ return TracepointRegistration(tp_id, self.config.tracepoints)
class TracepointRegistration:
"""Registration of a new tracepoint."""
- def __init__(self, cfg: TracePointConfig, tracepoints: TracepointConfigService):
+ def __init__(self, _id: str, tracepoints: TracepointConfigService):
"""
Create a new registration.
- :param cfg: the created config
+ :param _id: the created config id
:param tracepoints: the config service
"""
- self._cfg = cfg
- self._tpServ = tracepoints
-
- def get(self) -> TracePointConfig:
- """Get the created tracepoint."""
- return self._cfg
+ self.__id: str = _id
+ self.__tpServ: TracepointConfigService = tracepoints
def unregister(self):
"""Remove this custom tracepoint."""
- self._tpServ.remove_custom(self._cfg)
+ self.__tpServ.remove_custom(self.__id)
diff --git a/src/deep/api/plugin/__init__.py b/src/deep/api/plugin/__init__.py
index 40bafc7..666f0e7 100644
--- a/src/deep/api/plugin/__init__.py
+++ b/src/deep/api/plugin/__init__.py
@@ -49,7 +49,7 @@ def load_plugins(custom=None) -> 'Tuple[list[Plugin], BoundedAttributes]':
"""
Load all the deep plugins.
- Attempt to load each plugin, if successful merge a attributes list of each plugin.
+ Attempt to load each plugin and, if successful, merge the attributes from each plugin.
:return: the loaded plugins and attributes.
"""
diff --git a/src/deep/api/plugin/otel.py b/src/deep/api/plugin/otel.py
index b229b10..43264af 100644
--- a/src/deep/api/plugin/otel.py
+++ b/src/deep/api/plugin/otel.py
@@ -22,6 +22,7 @@
try:
from opentelemetry import trace
+ # noinspection PyProtectedMember
from opentelemetry.sdk.trace import _Span, TracerProvider
except ImportError as e:
raise DidNotEnable("opentelemetry is not installed", e)
diff --git a/src/deep/api/resource/__init__.py b/src/deep/api/resource/__init__.py
index 845bbf7..3c41a08 100644
--- a/src/deep/api/resource/__init__.py
+++ b/src/deep/api/resource/__init__.py
@@ -261,7 +261,7 @@ def detect(self) -> "Resource":
"""
Create a resource.
- :return: the created resrouce
+ :return: the created resource
"""
raise NotImplementedError()
diff --git a/src/deep/api/tracepoint/constants.py b/src/deep/api/tracepoint/constants.py
new file mode 100644
index 0000000..471825c
--- /dev/null
+++ b/src/deep/api/tracepoint/constants.py
@@ -0,0 +1,104 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Constant values used in tracepoint args."""
+
+# Below are constants used in the configuration of a tracepoint
+
+FIRE_COUNT = "fire_count"
+"""The number of times this tracepoint should fire"""
+
+WINDOW_START = "window_start"
+"""The start of the time period this tracepoint can fire in"""
+
+WINDOW_END = "window_end"
+"""The end of the time period this tracepoint can fire in"""
+
+FIRE_PERIOD = "fire_period"
+"""The minimum time between successive triggers, in ms"""
+
+CONDITION = "condition"
+"""The condition that has to be 'truthy' for this tracepoint to fire"""
+
+FRAME_TYPE = 'frame_type'
+"""This is the key to indicate the frame collection type"""
+
+STACK_TYPE = 'stack_type'
+"""This is the key to indicate the stack collection type"""
+
+SINGLE_FRAME_TYPE = 'single_frame'
+"""Collect only the frame we are on"""
+
+ALL_FRAME_TYPE = 'all_frame'
+"""Collect from all available frames"""
+
+NO_FRAME_TYPE = 'no_frame'
+"""Collect on frame data"""
+
+STACK = 'stack'
+"""Collect the full stack"""
+
+NO_STACK = 'no_stack'
+"""Do not collect the stack data"""
+
+LOG_MSG = 'log_msg'
+"""The log message to interpolate at position of tracepoint"""
+
+METHOD_NAME = "method_name"
+"""This is the key for the arg that defines a method tracepoint."""
+
+SPAN = "span"
+"""This is the key for the arg that defines a span type."""
+
+LINE = "line"
+"""This is used for SPAN type. This type means we should wrap the method the tracepoint is in."""
+
+METHOD = "method"
+"""This is used for SPAN type. This type means we should only wrap the line the tracepoint is on."""
+
+SNAPSHOT = "snapshot"
+"""This is the key to determine the collection state of the snapshot."""
+
+COLLECT = "collect"
+"""This is the default collection type and tells Deep to collect and send the snapshot."""
+
+NO_COLLECT = "no_collect"
+"""This type tells Deep to not collect any data and not to send the snapshot."""
+
+STAGE = "stage"
+"""The stage this tracepoint is configured to trigger at."""
+
+LINE_START = "line_start"
+"""Line start stage"""
+LINE_END = "line_end"
+"""Line end stage"""
+LINE_CAPTURE = "line_capture"
+"""Line capture stage"""
+
+LINE_STAGES = [LINE_CAPTURE, LINE_START, LINE_END]
+"""All stages linked to lines."""
+
+METHOD_START = "method_start"
+"""Method start stage"""
+METHOD_END = "method_end"
+"""Method end stage"""
+METHOD_CAPTURE = "method_capture"
+"""Method capture stage"""
+
+METHOD_STAGES = [METHOD_START, METHOD_CAPTURE, METHOD_END]
+"""All stages linked to methods."""
+
+WATCHES = "watches"
+"""Key for watch config"""
diff --git a/src/deep/api/tracepoint/eventsnapshot.py b/src/deep/api/tracepoint/eventsnapshot.py
index 62bfd74..58ed8aa 100644
--- a/src/deep/api/tracepoint/eventsnapshot.py
+++ b/src/deep/api/tracepoint/eventsnapshot.py
@@ -289,7 +289,7 @@ def type(self):
@property
def value(self):
- """The string value of variable.."""
+ """The string value of variable."""
return self._value
@property
diff --git a/src/deep/api/tracepoint/tracepoint_config.py b/src/deep/api/tracepoint/tracepoint_config.py
index 2b16342..e246bc7 100644
--- a/src/deep/api/tracepoint/tracepoint_config.py
+++ b/src/deep/api/tracepoint/tracepoint_config.py
@@ -15,48 +15,10 @@
"""Internal type for configured tracepoints."""
-from typing import List
+from typing import List, Optional
-# Below are constants used in the configuration of a tracepoint
-
-FIRE_COUNT = "fire_count"
-"""The number of times this tracepoint should fire"""
-
-WINDOW_START = "window_start"
-"""The start of the time period this tracepoint can fire in"""
-
-WINDOW_END = "window_end"
-"""The end of the time period this tracepoint can fire in"""
-
-FIRE_PERIOD = "fire_period"
-"""The minimum time between successive triggers, in ms"""
-
-CONDITION = "condition"
-"""The condition that has to be 'truthy' for this tracepoint to fire"""
-
-FRAME_TYPE = 'frame_type'
-"""This is the key to indicate the frame collection type"""
-
-STACK_TYPE = 'stack_type'
-"""This is the key to indicate the stack collection type"""
-
-SINGLE_FRAME_TYPE = 'single_frame'
-"""Collect only the frame we are on"""
-
-ALL_FRAME_TYPE = 'all_frame'
-"""Collect from all available frames"""
-
-NO_FRAME_TYPE = 'no_frame'
-"""Collect on frame data"""
-
-STACK = 'stack'
-"""Collect the full stack"""
-
-NO_STACK = 'no_stack'
-"""Do not collect the stack data"""
-
-LOG_MSG = 'log_msg'
-"""The log message to interpolate at position of tracepoint"""
+from deep.api.tracepoint.constants import SINGLE_FRAME_TYPE, ALL_FRAME_TYPE, NO_FRAME_TYPE, FRAME_TYPE, STACK_TYPE, \
+ STACK, FIRE_COUNT, CONDITION
def frame_type_ordinal(frame_type) -> int:
@@ -114,6 +76,62 @@ def in_window(self, ts):
return self._start <= ts <= self._end
+class LabelExpression:
+ """A metric label expression."""
+
+ def __init__(self, key: str, static: Optional[any], expression: Optional[str]):
+ """
+ Create a new label expression.
+
+ :param key: the label key
+ :param static: the label static value
+ :param expression: the label expression
+ """
+ self.__key = key
+ self.__static = static
+ self.__expression = expression
+
+ @property
+ def key(self):
+ """The label key."""
+ return self.__key
+
+ @property
+ def static(self):
+ """The label static value."""
+ return self.__static
+
+ @property
+ def expression(self):
+ """The label expression."""
+ return self.__expression
+
+
+class MetricDefinition:
+ """The definition of a metric to collect."""
+
+ def __init__(self, name: str, labels: List[LabelExpression], type_p: str, expression: Optional[str],
+ namespace: Optional[str], help_p: Optional[str], unit: Optional[str]):
+ """
+ Create a new metric definition.
+
+ :param name: the metric name
+ :param labels: the metric labels
+ :param type_p: the metrics type
+ :param expression: the metrics expression
+ :param namespace: the metric namespace
+ :param help_p: the metric help info
+ :param unit: the metric unit
+ """
+ self.__name = name
+ self.__labels = labels
+ self.__type = type_p
+ self.__expression = expression
+ self.__namespace = namespace
+ self.__help = help_p
+ self.__unit = unit
+
+
class TracePointConfig:
"""
This represents the configuration of a single tracepoint.
@@ -121,7 +139,8 @@ class TracePointConfig:
This is a python version of the GRPC data collected from the LongPoll.
"""
- def __init__(self, tp_id: str, path: str, line_no: int, args: dict, watches: List[str]):
+ def __init__(self, tp_id: str, path: str, line_no: int, args: dict, watches: List[str],
+ metrics: List[MetricDefinition]):
"""
Create a new tracepoint config.
@@ -136,8 +155,6 @@ def __init__(self, tp_id: str, path: str, line_no: int, args: dict, watches: Lis
self._line_no = line_no
self._args = args
self._watches = watches
- self._window = TracepointWindow(self.get_arg(WINDOW_START, 0), self.get_arg(WINDOW_END, 0))
- self._stats = TracepointExecutionStats()
@property
def id(self):
@@ -213,42 +230,6 @@ def get_arg_int(self, name: str, default_value: int):
except ValueError:
return default_value
- def can_trigger(self, ts):
- """
- Check if the tracepoint can trigger.
-
- This is to check the config. e.g. fire count, fire windows etc
-
- :param ts: the time the tracepoint has been triggered
- :return: true, if we should collect data; else false
- """
- # Have we exceeded the fire count?
- if self.fire_count != -1 and self.fire_count <= self._stats.fire_count:
- return False
-
- # Are we in the time window?
- if not self._window.in_window(ts):
- return False
-
- # Have we fired too quickly?
- last_fire = self._stats.last_fire
- if last_fire != 0:
- time_since_last = ts - last_fire
- if time_since_last < self.get_arg_int(FIRE_PERIOD, 1000):
- return False
-
- return True
-
- def record_triggered(self, ts):
- """
- Record a fire.
-
- Call this to record this tracepoint being triggered.
-
- :param ts: the time in nanoseconds
- """
- self._stats.fire(ts)
-
def __str__(self) -> str:
"""Represent this object as a string."""
return str({'id': self._id, 'path': self._path, 'line_no': self._line_no, 'args': self._args,
diff --git a/src/deep/api/tracepoint/trigger.py b/src/deep/api/tracepoint/trigger.py
new file mode 100644
index 0000000..7250f33
--- /dev/null
+++ b/src/deep/api/tracepoint/trigger.py
@@ -0,0 +1,591 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Handlers for triggers and action configs."""
+
+import abc
+from enum import Enum
+
+from typing import Optional, Dict, List
+
+from deep.api.tracepoint.constants import WINDOW_START, WINDOW_END, FIRE_COUNT, FIRE_PERIOD, LOG_MSG, WATCHES, \
+ LINE_START, METHOD_START, METHOD_END, LINE_END, LINE_CAPTURE, METHOD_CAPTURE, NO_COLLECT, SNAPSHOT, CONDITION, \
+ FRAME_TYPE, STACK_TYPE, SINGLE_FRAME_TYPE, STACK, SPAN, STAGE, METHOD_NAME, LINE_STAGES, METHOD_STAGES
+from deep.api.tracepoint.tracepoint_config import TracepointWindow, TracepointExecutionStats, MetricDefinition, \
+ TracePointConfig
+
+
+class LocationAction(object):
+ """
+ This defines an action to perform. This action can be any action that is configured via a tracepoint.
+
+ Supported actions are:
+ - snapshot: collect local variable data and stack frames at location
+ - log: evaluate a log message at the location
+ - metric: evaluate a metric and process via provider
+ - span: create a span at this location
+ """
+
+ class ActionType(Enum):
+ """The type of action."""
+
+ Snapshot = 1
+ Log = 2
+ Metric = 3
+ Span = 4
+
+ def __str__(self):
+ """Represent this as a string."""
+ return self.name
+
+ def __repr__(self):
+ """Represent this as a string."""
+ return self.name
+
+ def __init__(self, tp_id: str, condition: Optional[str], config: Dict[str, any], action_type: ActionType):
+ """
+ Create a new location action.
+
+ :param tp_id: the tracepoint id
+ :param condition: the condition
+ :param config: the config
+ :param action_type: the action type
+ """
+ self.__id = tp_id
+ self.__condition = condition
+ self.__config = config
+ self.__window = TracepointWindow(self.__config.get(WINDOW_START, 0), self.__config.get(WINDOW_END, 0))
+ self.__stats = TracepointExecutionStats()
+ self.__action_type = action_type
+ self.__location: 'Location | None' = None
+
+ @property
+ def id(self) -> str:
+ """
+ The id of the tracepoint that created this action.
+
+ :return: the tracepoint id
+ """
+ return self.__id
+
+ @property
+ def condition(self) -> Optional[str]:
+ """
+ The condition that is set on the tracepoint.
+
+ :return: the condition if set
+ """
+ return self.__condition
+
+ @property
+ def config(self) -> Dict[str, any]:
+ """
+ The config for this action.
+
+ :return: the full action config.
+ """
+ return self.__config
+
+ @property
+ def fire_count(self):
+ """
+ Get the allowed number of triggers.
+
+ :return: the configured number of triggers, or -1 for unlimited triggers
+ """
+ return self.__get_int(FIRE_COUNT, 1)
+
+ @property
+ def fire_period(self):
+ """
+ Get the minimum amount of time that has to have elapsed before this can trigger again.
+
+ :return: the time in ms
+ """
+ return self.__get_int(FIRE_PERIOD, 1000)
+
+ @property
+ def action_type(self) -> ActionType:
+ """Get the action type."""
+ return self.__action_type
+
+ @property
+ def tracepoint(self) -> TracePointConfig:
+ """Get the tracepoint config for this trigger."""
+ args = dict(self.__config)
+ if WATCHES in args:
+ del args[WATCHES]
+ if LOG_MSG in args and args[LOG_MSG] is None:
+ del args[LOG_MSG]
+ return TracePointConfig(self.id, self.__location.path, self.__location.line, args,
+ self.__config.get(WATCHES, []), [])
+
+ def __fire_period_ns(self):
+ return self.fire_period * 1_000_000
+
+ def can_trigger(self, ts):
+ """
+ Check if the tracepoint can trigger.
+
+ This is to check the config. e.g. fire count, fire windows etc.
+ :param ts: the time the tracepoint has been triggered
+ :return: true, if we should collect data; else false
+ """
+ # Have we exceeded the fire count?
+ if self.fire_count != -1 and self.fire_count <= self.__stats.fire_count:
+ return False
+
+ # Are we in the time window?
+ if not self.__window.in_window(ts):
+ return False
+
+ # Have we fired too quickly?
+ last_fire = self.__stats.last_fire
+ if last_fire != 0:
+ time_since_last = ts - last_fire
+ if time_since_last < self.__fire_period_ns():
+ return False
+
+ return True
+
+ def record_triggered(self, ts):
+ """
+ Record a fire.
+
+ Call this to record this tracepoint being triggered.
+
+ :param ts: the time in nanoseconds
+ """
+ self.__stats.fire(ts)
+
+ def __get_int(self, name: str, default_value: int):
+ try:
+ return int(self.__config.get(name, default_value))
+ except ValueError:
+ return default_value
+
+ def __str__(self):
+ """Represent this as a string."""
+ return str({
+ 'id': self.__id,
+ 'condition': self.__condition,
+ 'config': self.__config,
+ 'type': self.__action_type
+ })
+
+ def __repr__(self):
+ """Represent this as a string."""
+ return self.__str__()
+
+ def __eq__(self, __value):
+ """Check if this is equal to another."""
+ if self.__id == __value.__id and self.__condition == __value.__condition and self.__config == __value.__config:
+ return True
+ return False
+
+ def with_location(self, location: 'Location') -> 'LocationAction':
+ """
+ Attach the location to this action.
+
+ It is sometimes required to get the location information from an action. So we attach them here.
+
+ :param location: the location we are attached to.
+ :return: self
+ """
+ self.__location = location
+ return self
+
+
+class Location(abc.ABC):
+ """A location is the line or method at which actions should be performed."""
+
+ class Position(Enum):
+ """Position lets the location be at the start, end or capture."""
+
+ START = 1
+ END = 2
+ CAPTURE = 3
+
+ @classmethod
+ def from_stage(cls, stage_: str):
+ """
+ Get the stage enum from a string.
+
+ :param (str) stage_: the input string
+ :return: the appropriate stage enum
+ """
+ if stage_ in [LINE_START, METHOD_START]:
+ return Location.Position.START
+ if stage_ in [LINE_END, METHOD_END]:
+ return Location.Position.END
+ if stage_ in [LINE_CAPTURE, METHOD_CAPTURE]:
+ return Location.Position.CAPTURE
+ return Location.Position.START
+
+ def __init__(self, position: Position = None):
+ """
+ Create a new location.
+
+ :param position: the position of this location
+ """
+ self.position = position
+
+ @abc.abstractmethod
+ def at_location(self, event: str, file: str, line: int, method: str) -> bool:
+ """
+ Check if we are at the location defined by this location.
+
+ :param event: the trigger event
+ :param file: the file path
+ :param line: the line number
+ :param method: the method name
+ :return: True, if we are at this location we expect, else False.
+ """
+ pass
+
+ @property
+ @abc.abstractmethod
+ def id(self) -> str:
+ """The location id."""
+ pass
+
+ @property
+ @abc.abstractmethod
+ def path(self) -> str:
+ """The source file path."""
+ pass
+
+ @property
+ @abc.abstractmethod
+ def line(self) -> int:
+ """The line number."""
+ pass
+
+
+class Trigger(Location):
+ """A trigger is a location with action."""
+
+ def __init__(self, location: Location, actions: List[LocationAction]):
+ """
+ Create new trigger.
+
+ :param location: the underlying location
+ :param actions: the actions
+ """
+ super().__init__()
+ self.__location = location
+ self.__actions = actions
+
+ def at_location(self, event: str, file: str, line: int, method: str) -> bool:
+ """
+ Check if we are at the location defined by this location.
+
+ :param event: the trigger event
+ :param file: the file path
+ :param line: the line number
+ :param method: the method name
+ :return: True, if we are at this location we expect, else False.
+ """
+ return self.__location.at_location(event, file, line, method)
+
+ @property
+ def actions(self) -> List[LocationAction]:
+ """The actions that are attached to this location."""
+ return [action.with_location(self) for action in self.__actions]
+
+ @property
+ def id(self):
+ """The location id."""
+ return self.__location.id
+
+ @property
+ def path(self):
+ """The source file path."""
+ return self.__location.path
+
+ @property
+ def line(self):
+ """The line number."""
+ return self.__location.line
+
+ def __str__(self):
+ """Represent this as a string."""
+ return str({
+ 'location': self.__location,
+ 'actions': self.__actions
+ })
+
+ def __repr__(self):
+ """Represent this as a string."""
+ return self.__str__()
+
+ def __eq__(self, __value):
+ """Check if this is equal to another."""
+ if self.__location == __value.__location and self.__actions == __value.__actions:
+ return True
+ return False
+
+ def merge_actions(self, actions: List[LocationAction]):
+ """Merge more actions into this location."""
+ self.__actions += actions
+
+
+class LineLocation(Location):
+ """A location for a line entry/exit/capture point."""
+
+ def __init__(self, path: str, line: int, position: Location.Position):
+ """
+ Create new line location.
+
+ :param path: the source file path
+ :param line: the line number
+ :param position: the position
+ """
+ super().__init__(position)
+ self.__path = path
+ self.__line = line
+
+ def at_location(self, event: str, file: str, line: int, method: str):
+ """
+ Check if we are at the location defined by this location.
+
+ :param event: the trigger event
+ :param file: the file path
+ :param line: the line number
+ :param method: the method name
+ :return: True, if we are at this location we expect, else False.
+ """
+ if event == "line" and file == self.path and line == self.line:
+ return True
+ return False
+
+ @property
+ def id(self):
+ """The location id."""
+ return "%s#%s" % (self.path, self.line)
+
+ @property
+ def path(self):
+ """The source file path."""
+ return self.__path
+
+ @property
+ def line(self):
+ """The line number."""
+ return self.__line
+
+ def __str__(self):
+ """Represent this as a string."""
+ return str(self.__dict__)
+
+ def __repr__(self):
+ """Represent this as a string."""
+ return self.__str__()
+
+ def __eq__(self, __value):
+ """Check if this is equal to another."""
+ if self.path == __value.path and self.line == __value.line:
+ return True
+ return False
+
+
+class MethodLocation(Location):
+ """A location for a method entry/exit/capture point."""
+
+ def __init__(self, path: str, method: str, position: Location.Position):
+ """
+ Create a new method location.
+
+ :param path: the source file path
+ :param method: the method name
+ :param position: the position
+ """
+ super().__init__(position)
+ self.method = method
+ self.__path = path
+
+ def at_location(self, event: str, file: str, line: int, method: str):
+ """
+ Check if we are at the location defined by this location.
+
+ :param event: the trigger event
+ :param file: the file path
+ :param line: the line number
+ :param method: the method name
+ :return: True, if we are at this location we expect, else False.
+ """
+ if event == "CALL" and method == self.method and file == self.path:
+ return True
+ return False
+
+ @property
+ def id(self):
+ """The location id."""
+ return "%s#%s" % (self.path, self.method)
+
+ @property
+ def path(self):
+ """The source file path."""
+ return self.__path
+
+ @property
+ def line(self):
+ """The method location always has a line of -1."""
+ return -1
+
+ def __str__(self):
+ """Represent this as a string."""
+ return str(self.__dict__)
+
+ def __repr__(self):
+ """Represent this as a string."""
+ return self.__str__()
+
+ def __eq__(self, __value):
+ """Check if this is equal to another."""
+ if self.path == __value.path and self.method == __value.method:
+ return True
+ return False
+
+
+def build_snapshot_action(tp_id: str, args: Dict[str, str], watches: List[str]) -> Optional[LocationAction]:
+ """
+ Create an action to create a snapshot.
+
+ :param tp_id: the tracepoint id
+ :param args: the args
+ :param watches: the watch expressions
+ :return: the location action
+ """
+ if SNAPSHOT in args:
+ if args[SNAPSHOT] == NO_COLLECT:
+ return None
+
+ condition = args[CONDITION] if CONDITION in args else None
+ return LocationAction(tp_id, condition, {
+ WATCHES: watches,
+ FRAME_TYPE: args.get(FRAME_TYPE, SINGLE_FRAME_TYPE),
+ STACK_TYPE: args.get(STACK_TYPE, STACK),
+ FIRE_COUNT: args.get(FIRE_COUNT, '1'),
+ FIRE_PERIOD: args.get(FIRE_PERIOD, '1000'),
+ LOG_MSG: args.get(LOG_MSG, None),
+ }, LocationAction.ActionType.Snapshot)
+
+
+def build_log_action(tp_id: str, args: Dict[str, str]) -> Optional[LocationAction]:
+ """
+ Create a log action from the tracepoint arguments.
+
+ :param str tp_id: the tracepoint id
+ :param Dict[str, str] args: the tracepoint arguments
+ :return: the new action, or None
+ """
+ if LOG_MSG not in args:
+ return None
+ if SNAPSHOT not in args or args[SNAPSHOT] != NO_COLLECT:
+ return None
+
+ condition = args[CONDITION] if CONDITION in args else None
+ return LocationAction(tp_id, condition, {
+ LOG_MSG: args[LOG_MSG],
+ FIRE_COUNT: args.get(FIRE_COUNT, '1'),
+ FIRE_PERIOD: args.get(FIRE_PERIOD, '1000'),
+ }, LocationAction.ActionType.Log)
+
+
+def build_metric_action(tp_id: str, args: Dict[str, str], metrics: List[MetricDefinition]) -> Optional[LocationAction]:
+ """
+ Create an action to create a metric.
+
+ :param tp_id: the tracepoint id
+ :param args: the args
+ :param metrics: the metric definitions to evaluate
+ :return: the location action
+ """
+ if metrics is None or len(metrics) == 0:
+ return None
+
+ condition = args[CONDITION] if CONDITION in args else None
+ return LocationAction(tp_id, condition, {
+ 'metrics': metrics,
+ FIRE_COUNT: args.get(FIRE_COUNT, '1'),
+ FIRE_PERIOD: args.get(FIRE_PERIOD, '1000'),
+ }, LocationAction.ActionType.Metric)
+
+
+def build_span_action(tp_id: str, args: Dict[str, str]) -> Optional[LocationAction]:
+ """
+ Create an action to create a span.
+
+ :param tp_id: the tracepoint id
+ :param args: the args
+ :return: the location action
+ """
+ if SPAN not in args:
+ return None
+
+ condition = args[CONDITION] if CONDITION in args else None
+ return LocationAction(tp_id, condition, {
+ SPAN: args[SPAN],
+ FIRE_COUNT: args.get(FIRE_COUNT, '1'),
+ FIRE_PERIOD: args.get(FIRE_PERIOD, '1000'),
+ }, LocationAction.ActionType.Span)
+
+
+def build_trigger(tp_id: str, path: str, line_no: int, args: Dict[str, str], watches: List[str],
+ metrics: List[MetricDefinition]) -> Optional[Trigger]:
+ """
+ Build a trigger definition.
+
+ :param tp_id: the tracepoint id
+ :param path: the source file path
+ :param line_no: the line number
+ :param args: the tracepoint args
+ :param watches: the watch configs
+ :param metrics: the metric configs
+ :return: the trigger with the actions.
+ """
+ stage_ = METHOD_START if METHOD_NAME in args else LINE_START
+ if STAGE in args:
+ stage_ = args[STAGE]
+
+ if stage_ in LINE_STAGES:
+ location = LineLocation(path, line_no, Location.Position.from_stage(stage_))
+ elif stage_ in METHOD_STAGES:
+ location = MethodLocation(path, args[METHOD_NAME], Location.Position.from_stage(stage_))
+ else:
+ return None
+
+ snap_action = build_snapshot_action(tp_id, args, watches)
+ log_action = build_log_action(tp_id, args)
+ metric_action = build_metric_action(tp_id, args, metrics)
+ span_action = build_span_action(tp_id, args)
+
+ actions = [action for action in [snap_action, log_action, metric_action, span_action] if
+ action is not None]
+
+ return Trigger(location, actions)
diff --git a/src/deep/config/__init__.py b/src/deep/config/__init__.py
index 4d27d80..d9e0701 100644
--- a/src/deep/config/__init__.py
+++ b/src/deep/config/__init__.py
@@ -40,6 +40,9 @@
SERVICE_AUTH_PROVIDER = os.getenv('DEEP_SERVICE_AUTH_PROVIDER', None)
"""The Auth provider to use for the service (default: None)"""
+APP_ROOT = ""
+"""App root sets the prefix that can be removed to generate shorter file names. This value is calculated."""
+
# noinspection PyPep8Naming
def IN_APP_INCLUDE():
diff --git a/src/deep/config/config_service.py b/src/deep/config/config_service.py
index 412b94e..21f3332 100644
--- a/src/deep/config/config_service.py
+++ b/src/deep/config/config_service.py
@@ -16,7 +16,7 @@
"""Service for handling deep config."""
import os
-from typing import Any, List
+from typing import Any, List, Dict, Tuple, Optional
from deep import logging
from deep.api.plugin import Plugin
@@ -28,7 +28,7 @@
class ConfigService:
"""This is the main service that handles config for DEEP."""
- def __init__(self, custom=None, tracepoints=TracepointConfigService()):
+ def __init__(self, custom: Dict[str, any] = None, tracepoints=TracepointConfigService()):
"""
Create a new config object.
@@ -149,3 +149,26 @@ def log_tracepoint(self, log_msg: str, tp_id: str, snap_id: str):
:param (str) snap_id: the is of the snapshot that was created by this tracepoint
"""
self._tracepoint_logger.log_tracepoint(log_msg, tp_id, snap_id)
+
+ def is_app_frame(self, filename: str) -> Tuple[bool, Optional[str]]:
+ """
+ Check if the current frame is a user application frame.
+
+ :param filename: the frame file name
+ :return: Tuple of (True if this is an application frame, the matched path prefix or None)
+ """
+ in_app_include = self.IN_APP_INCLUDE
+ in_app_exclude = self.IN_APP_EXCLUDE
+
+ for path in in_app_exclude:
+ if filename.startswith(path):
+ return False, path
+
+ for path in in_app_include:
+ if filename.startswith(path):
+ return True, path
+
+ if filename.startswith(self.APP_ROOT):
+ return True, self.APP_ROOT
+
+ return False, None
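
The precedence here is worth spelling out: an exclude match always wins, then an include match, and APP_ROOT is the fallback. A standalone sketch of that rule (the paths are made up for illustration):

    from typing import List, Optional, Tuple

    def is_app_frame(filename: str, include: List[str], exclude: List[str],
                     app_root: str) -> Tuple[bool, Optional[str]]:
        # exclusions win over inclusions; APP_ROOT is only the fallback
        for path in exclude:
            if filename.startswith(path):
                return False, path
        for path in include:
            if filename.startswith(path):
                return True, path
        if filename.startswith(app_root):
            return True, app_root
        return False, None

    print(is_app_frame("/srv/app/api/handler.py", [], ["/srv/app/vendor"], "/srv/app"))
    # -> (True, '/srv/app'): not excluded, no include matched, but it sits under APP_ROOT
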
diff --git a/src/deep/config/tracepoint_config.py b/src/deep/config/tracepoint_config.py
index 5262389..e976b5b 100644
--- a/src/deep/config/tracepoint_config.py
+++ b/src/deep/config/tracepoint_config.py
@@ -18,9 +18,32 @@
import abc
import logging
import uuid
-from typing import Dict, List
+from typing import Dict, List, TYPE_CHECKING
-from deep.api.tracepoint import TracePointConfig
+from deep.api.tracepoint.tracepoint_config import MetricDefinition
+
+from deep.api.tracepoint.trigger import build_trigger
+
+if TYPE_CHECKING:
+ from deep.api.tracepoint.trigger import Trigger
+
+
+class ConfigUpdateListener(abc.ABC):
+ """Class to describe a config listener."""
+
+ @abc.abstractmethod
+ def config_change(self, ts: int, old_hash: str, current_hash: str, old_config: List['Trigger'],
+ new_config: List['Trigger']):
+ """
+ Process an update to the tracepoint config.
+
+ :param ts: the ts of the new config
+ :param old_hash: the old config hash
+ :param current_hash: the new config hash
+ :param old_config: the old config
+ :param new_config: the new config
+ """
+ raise NotImplementedError
class TracepointConfigService:
@@ -28,8 +51,8 @@ class TracepointConfigService:
def __init__(self) -> None:
"""Create new tracepoint config service."""
- self._custom = []
- self._tracepoint_config = []
+ self._custom: List['Trigger'] = []
+ self._tracepoint_config: List['Trigger'] = []
self._current_hash = None
self._last_update = 0
self._task_handler = None
@@ -45,7 +68,7 @@ def update_no_change(self, ts):
"""
self._last_update = ts
- def update_new_config(self, ts, new_hash, new_config):
+ def update_new_config(self, ts: int, new_hash: str, new_config: List['Trigger']):
"""
Update to the new config.
@@ -77,7 +100,8 @@ def set_task_handler(self, task_handler):
"""
self._task_handler = task_handler
- def update_listeners(self, ts, old_hash, current_hash, old_config, new_config):
+ def update_listeners(self, ts: int, old_hash: str, current_hash: str, old_config: List['Trigger'],
+ new_config: List['Trigger']):
"""
Update the registered listeners.
@@ -96,7 +120,7 @@ def update_listeners(self, ts, old_hash, current_hash, old_config, new_config):
except Exception:
logging.exception("Error updating listener %s", listeners)
- def add_listener(self, listener: 'ConfigUpdateListener'):
+ def add_listener(self, listener: ConfigUpdateListener):
"""
Add a new listener to the config.
@@ -105,7 +129,7 @@ def add_listener(self, listener: 'ConfigUpdateListener'):
self._listeners.append(listener)
@property
- def current_config(self):
+ def current_config(self) -> List['Trigger']:
"""
The current tracepoint config.
@@ -114,7 +138,7 @@ def current_config(self):
return self._tracepoint_config
@property
- def current_hash(self):
+ def current_hash(self) -> str:
"""
The current hash.
@@ -125,7 +149,8 @@ def current_hash(self):
"""
return self._current_hash
- def add_custom(self, path: str, line: int, args: Dict[str, str], watches: List[str]) -> TracePointConfig:
+ def add_custom(self, path: str, line: int, args: Dict[str, str], watches: List[str],
+ metrics: List[MetricDefinition]) -> str:
"""
Crate a new tracepoint from the input.
@@ -133,38 +158,22 @@ def add_custom(self, path: str, line: int, args: Dict[str, str], watches: List[s
:param line: the source line number
:param args: the tracepoint args
:param watches: the tracepoint watches
+ :param metrics: the tracepoint metrics
- :return: the new TracePointConfig
+ :return: the id of the new trigger
"""
- config = TracePointConfig(str(uuid.uuid4()), path, line, args, watches)
+ config = build_trigger(str(uuid.uuid4()), path, line, args, watches, metrics)
self._custom.append(config)
self.__trigger_update(None, None)
- return config
+ return config.id
- def remove_custom(self, config: TracePointConfig):
+ def remove_custom(self, _id: str):
"""
Remove a custom tracepoint config.
- :param config: the config to remove
+ :param _id: the id of the config to remove
"""
for idx, cfg in enumerate(self._custom):
- if cfg.id == config.id:
+ if cfg.id == _id:
del self._custom[idx]
self.__trigger_update(None, None)
return
-
-
-class ConfigUpdateListener(abc.ABC):
- """Class to describe a config listener."""
-
- @abc.abstractmethod
- def config_change(self, ts, old_hash, current_hash, old_config, new_config):
- """
- Process an update to the tracepoint config.
-
- :param ts: the ts of the new config
- :param old_hash: the old config hash
- :param current_hash: the new config hash
- :param old_config: the old config
- :param new_config: the new config
- """
- raise NotImplementedError
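
With ConfigUpdateListener now typed against Trigger lists, a custom listener looks like the sketch below (illustrative only; PrintingListener is not part of this patch). It would be registered through TracepointConfigService.add_listener as shown in the hunk above.

    from deep.config.tracepoint_config import ConfigUpdateListener, TracepointConfigService

    class PrintingListener(ConfigUpdateListener):
        """Illustrative listener that reports how many triggers each update carries."""

        def config_change(self, ts, old_hash, current_hash, old_config, new_config):
            print("config %s -> %s: %d trigger(s)" % (old_hash, current_hash, len(new_config)))

    service = TracepointConfigService()
    service.add_listener(PrintingListener())
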
diff --git a/src/deep/grpc/__init__.py b/src/deep/grpc/__init__.py
index 93faa7e..5bb1b5c 100644
--- a/src/deep/grpc/__init__.py
+++ b/src/deep/grpc/__init__.py
@@ -20,6 +20,7 @@
have type definitions that work in IDE. It also makes it easier to deal with agent functionality by
having local types we can modify.
"""
+from typing import List, Dict
# noinspection PyUnresolvedReferences
from deepproto.proto.common.v1.common_pb2 import KeyValue, AnyValue, ArrayValue, KeyValueList
@@ -27,7 +28,8 @@
from deepproto.proto.resource.v1.resource_pb2 import Resource
from .grpc_service import GRPCService
-from ..api.tracepoint.tracepoint_config import TracePointConfig
+from ..api.tracepoint.tracepoint_config import LabelExpression, MetricDefinition
+from ..api.tracepoint.trigger import build_trigger, Trigger
def convert_value(value):
@@ -66,7 +68,7 @@ def __value_as_list(value):
def convert_resource(resource):
"""
- Convert a internal resource to GRPC type.
+ Convert an internal resource to GRPC type.
:param resource: the resource to convert
:return: the converted type as GRPC.
@@ -79,11 +81,50 @@ def __convert_attributes(attributes):
attributes=[KeyValue(key=k, value=convert_value(v)) for k, v in attributes.items()])
-def convert_response(response):
+def __convert_static_value(value):
+ static_value = value.static
+ set_field = static_value.WhichOneof("value")
+ if set_field is None:
+ return None
+ return getattr(static_value, set_field)
+
+
+def convert_label_expressions(label_expressions) -> List[LabelExpression]:
+ """
+ Convert a label expression.
+
+ :param label_expressions: the expression to convert.
+ :return: the converted expression
+ """
+ return [LabelExpression(label.key, __convert_static_value(label), label.expression) for
+ label in label_expressions]
+
+
+def __convert_metric_definition(metrics):
+ return [MetricDefinition(m.name, convert_label_expressions(m.labelExpressions), m.type, m.expression, m.namespace,
+ m.help, m.unit) for m in metrics]
+
+
+def convert_response(response) -> List[Trigger]:
"""
Convert a response from GRPC to internal types.
- :param response: the grpc response.
- :return: the internal types for tracepoints
+ This function should create a list of Triggers from the incoming configuration. The Trigger should be a
+ location with one or more actions to perform at that location.
+
+ :param response: the response from the poll request
+ :return: a list of trigger locations with the appropriate actions
"""
- return [TracePointConfig(r.ID, r.path, r.line_number, dict(r.args), [w for w in r.watches]) for r in response]
+ all_triggers: Dict[str, Trigger] = {}
+ for r in response:
+ # from the incoming tracepoints create a Trigger with actions
+ trigger = build_trigger(r.ID, r.path, r.line_number, dict(r.args), [w for w in r.watches],
+ __convert_metric_definition(r.metrics))
+ location_id = trigger.id
+ # if we already have a trigger for this location then merge the new actions into it
+ if location_id in all_triggers:
+ all_triggers[location_id].merge_actions(trigger.actions)
+ else:
+ all_triggers[location_id] = trigger
+
+ return list(all_triggers.values())
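
A hedged sketch of the merge behaviour convert_response now implements. The attribute names mirror the fields read above (ID, path, line_number, args, watches, metrics); using SimpleNamespace in place of the real protobuf messages, and LOG_MSG as the args key, are assumptions for illustration only.

    from types import SimpleNamespace
    from deep.api.tracepoint.constants import LOG_MSG
    from deep.grpc import convert_response

    a = SimpleNamespace(ID="tp-1", path="/app/svc/handler.py", line_number=42,
                        args={LOG_MSG: "hit {user}"}, watches=[], metrics=[])
    b = SimpleNamespace(ID="tp-2", path="/app/svc/handler.py", line_number=42,
                        args={}, watches=["user"], metrics=[])

    triggers = convert_response([a, b])
    # both tracepoints target the same file/line, so their actions should be merged
    # into a single Trigger for that location
    print(len(triggers), len(triggers[0].actions))
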
diff --git a/src/deep/logging/__init__.py b/src/deep/logging/__init__.py
index bea71ae..69f0d6b 100644
--- a/src/deep/logging/__init__.py
+++ b/src/deep/logging/__init__.py
@@ -70,6 +70,7 @@ def exception(msg, *args, exc_info=True, **kwargs):
:param msg: the message to log
:param args: the args for the log
+ :param exc_info: include exc info in log
:param kwargs: the kwargs
"""
logging.getLogger("deep").exception(msg, *args, exc_info=exc_info, **kwargs)
diff --git a/src/deep/logging/tracepoint_logger.py b/src/deep/logging/tracepoint_logger.py
index f1fbc31..ac45a13 100644
--- a/src/deep/logging/tracepoint_logger.py
+++ b/src/deep/logging/tracepoint_logger.py
@@ -29,13 +29,13 @@ class TracepointLogger(abc.ABC):
"""
@abc.abstractmethod
- def log_tracepoint(self, log_msg: str, tp_id: str, snap_id: str):
+ def log_tracepoint(self, log_msg: str, tp_id: str, ctx_id: str):
"""
Log the dynamic log message.
:param (str) log_msg: the log message to log
:param (str) tp_id: the id of the tracepoint that generated this log
- :param (str) snap_id: the is of the snapshot that was created by this tracepoint
+ :param (str) ctx_id: the id of the context that was created by this tracepoint
"""
pass
@@ -43,12 +43,12 @@ def log_tracepoint(self, log_msg: str, tp_id: str, snap_id: str):
class DefaultLogger(TracepointLogger):
"""The default tracepoint logger used by Deep."""
- def log_tracepoint(self, log_msg: str, tp_id: str, snap_id: str):
+ def log_tracepoint(self, log_msg: str, tp_id: str, ctx_id: str):
"""
Log the dynamic log message.
:param (str) log_msg: the log message to log
:param (str) tp_id: the id of the tracepoint that generated this log
- :param (str) snap_id: the is of the snapshot that was created by this tracepoint
+ :param (str) ctx_id: the id of the context that was created by this tracepoint
"""
- logging.info(log_msg + " snapshot=%s tracepoint=%s" % (snap_id, tp_id))
+ logging.info(log_msg + " ctx=%s tracepoint=%s" % (ctx_id, tp_id))
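
Since the interface now receives a context id rather than a snapshot id, a custom logger only needs to forward that id. The sketch below is illustrative; how it gets wired into the client (the ConfigService's tracepoint_logger property) is outside this diff.

    from deep.logging.tracepoint_logger import TracepointLogger

    class StructuredTracepointLogger(TracepointLogger):
        """Illustrative logger that forwards dynamic logs to an existing app logger."""

        def __init__(self, app_logger):
            self._logger = app_logger

        def log_tracepoint(self, log_msg: str, tp_id: str, ctx_id: str):
            self._logger.info(log_msg, extra={"tracepoint": tp_id, "ctx": ctx_id})
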
diff --git a/src/deep/processor/__init__.py b/src/deep/processor/__init__.py
index 85505af..6e5f613 100644
--- a/src/deep/processor/__init__.py
+++ b/src/deep/processor/__init__.py
@@ -15,6 +15,4 @@
"""Handlers for processing tracepoint hits."""
-from .trigger_handler import TriggerHandler
-
-__all__ = [TriggerHandler.__name__]
+__all__ = []
diff --git a/src/deep/processor/bfs/__init__.py b/src/deep/processor/bfs/__init__.py
index ed6edb0..13b44f9 100644
--- a/src/deep/processor/bfs/__init__.py
+++ b/src/deep/processor/bfs/__init__.py
@@ -40,7 +40,7 @@ def __init__(self, value: 'NodeValue' = None, children: List['Node'] = None, par
if children is None:
children = []
self._value: 'NodeValue' = value
- self._children: list['Node'] = children
+ self._children: List['Node'] = children
self._parent: 'ParentNode' = parent
self._depth = 0
@@ -109,7 +109,7 @@ def __init__(self, name: str, value: any, original_name=None):
Create a new node value.
It is possible to rename variables by providing an original name. This is used when dealing with
- 'private' variables in calsses.
+ 'private' variables in classes.
e.g. A variable called _NodeValue__name is used by python to represent the private variable __name. This
is not known by devs, so we rename the variable to __name, and keep the original name as _NodeValue__name,
diff --git a/src/deep/processor/context/__init__.py b/src/deep/processor/context/__init__.py
new file mode 100644
index 0000000..acc59aa
--- /dev/null
+++ b/src/deep/processor/context/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Context allow for process fo different action."""
diff --git a/src/deep/processor/context/action_context.py b/src/deep/processor/context/action_context.py
new file mode 100644
index 0000000..d928318
--- /dev/null
+++ b/src/deep/processor/context/action_context.py
@@ -0,0 +1,139 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Handling for action context."""
+
+import abc
+from typing import Tuple, TYPE_CHECKING, Dict
+
+from deep import logging
+from deep.api.tracepoint import WatchResult, Variable
+from deep.processor.variable_set_processor import VariableSetProcessor
+from deep.utils import str2bool
+
+if TYPE_CHECKING:
+ from deep.processor.context.trigger_context import TriggerContext
+ from deep.api.tracepoint.trigger import LocationAction
+
+
+class ActionContext(abc.ABC):
+ """A context for the processing of an action."""
+
+ def __init__(self, parent: 'TriggerContext', action: 'LocationAction'):
+ """
+ Create a new action context.
+
+ :param parent: the parent trigger
+ :param action: the action config
+ """
+ self._parent: 'TriggerContext' = parent
+ self._action: 'LocationAction' = action
+ self._triggered = False
+
+ def __enter__(self):
+ """Enter and open the context."""
+ return self
+
+ def __exit__(self, exception_type, exception_value, exception_traceback):
+ """Exit and close the context."""
+ if self.has_triggered():
+ self._action.record_triggered(self._parent.ts)
+
+ def eval_watch(self, watch: str) -> Tuple[WatchResult, Dict[str, Variable], str]:
+ """
+ Evaluate an expression in the current frame.
+
+ :param watch: The watch expression to evaluate.
+ :return: Tuple with WatchResult, collected variables, and the log string for the expression
+ """
+ var_processor = VariableSetProcessor({}, self._parent.var_cache)
+
+ try:
+ result = self._parent.evaluate_expression(watch)
+ variable_id, log_str = var_processor.process_variable(watch, result)
+
+ return WatchResult(watch, variable_id), var_processor.var_lookup, log_str
+ except BaseException as e:
+ logging.exception("Error evaluating watch %s", watch)
+ return WatchResult(watch, None, str(e)), {}, str(e)
+
+ def process(self):
+ """Process the action."""
+ try:
+ return self._process_action()
+ finally:
+ self._triggered = True
+
+ @abc.abstractmethod
+ def _process_action(self):
+ pass
+
+ def has_triggered(self):
+ """
+ Check if we have triggered during this context.
+
+ :return: True, if the trigger has been fired.
+ """
+ return self._triggered
+
+ def can_trigger(self) -> bool:
+ """
+ Check if the action can trigger.
+
+ Combine checks for rate limits, windows and condition.
+ :return: True, if the trigger can be triggered.
+ """
+ if not self._action.can_trigger(self._parent.ts):
+ return False
+ if self._action.condition is None:
+ return True
+ result = self._parent.evaluate_expression(self._action.condition)
+ return str2bool(str(result))
+
+
+class MetricActionContext(ActionContext):
+ """Action for metrics."""
+
+ def _process_action(self):
+ print("metric action")
+ pass
+
+
+class SpanActionContext(ActionContext):
+ """Action for spans."""
+
+ def _process_action(self):
+ print("span action")
+ pass
+
+
+class NoActionContext(ActionContext):
+ """Default context if no action can be determined."""
+
+ def _process_action(self):
+ print("Unsupported action type: %s" % self._action)
diff --git a/src/deep/processor/context/action_results.py b/src/deep/processor/context/action_results.py
new file mode 100644
index 0000000..1adb963
--- /dev/null
+++ b/src/deep/processor/context/action_results.py
@@ -0,0 +1,84 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Handler results of actions."""
+
+import abc
+from typing import Optional
+
+from deep.logging.tracepoint_logger import TracepointLogger
+from deep.push import PushService
+
+
+class ActionCallback:
+ """A call back to 'close' an action."""
+
+ def process(self, frame, event) -> bool:
+ """
+ Process a callback.
+
+ :param frame: the frame data
+ :param event: the event
+ :return: True, to keep this callback until next match.
+ """
+ pass
+
+
+class ActionResult(abc.ABC):
+ """
+ ActionResult represents the result of a trigger action.
+
+ This could be the snapshot to ship, logs to process or a span to close.
+ """
+
+ @abc.abstractmethod
+ def process(self, ctx_id: str, logger: TracepointLogger, service: PushService) -> Optional[ActionCallback]:
+ """
+ Process this result.
+
+ Either log or ship the collected data to an endpoint.
+
+ :param ctx_id: the triggering context id
+ :param logger: the log service
+ :param service: the push service
+ :return: an action callback if we need to do something at the 'end', or None
+ """
+ pass
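
A new action type therefore only needs to attach an ActionResult; the TriggerContext takes care of calling process once the hit completes. A minimal illustrative implementation (PrintResult is not part of this patch):

    from typing import Optional

    from deep.processor.context.action_results import ActionCallback, ActionResult

    class PrintResult(ActionResult):
        """Illustrative result that prints instead of shipping anything."""

        def __init__(self, message: str):
            self.message = message

        def process(self, ctx_id, logger, service) -> Optional[ActionCallback]:
            print("[%s] %s" % (ctx_id, self.message))
            return None  # nothing needs to happen at the end of the trigger
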
diff --git a/src/deep/processor/context/log_action.py b/src/deep/processor/context/log_action.py
new file mode 100644
index 0000000..ea9e35e
--- /dev/null
+++ b/src/deep/processor/context/log_action.py
@@ -0,0 +1,123 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Handling for log actions."""
+
+from typing import TYPE_CHECKING, List, Dict, Optional, Tuple
+
+from .action_context import ActionContext
+from .action_results import ActionResult, ActionCallback
+from ...api.tracepoint.constants import LOG_MSG
+from ...api.tracepoint.trigger import LocationAction
+from ...logging.tracepoint_logger import TracepointLogger
+from ...push import PushService
+
+if TYPE_CHECKING:
+ from ...api.tracepoint import WatchResult, Variable
+
+
+class LogActionContext(ActionContext):
+ """The context for processing a log action."""
+
+ def _process_action(self):
+ log_msg = self._action.config.get(LOG_MSG)
+ log, watches, vars_ = self.process_log(log_msg)
+ self._parent.attach_result(LogActionResult(self._action, log))
+
+ def process_log(self, log_msg) -> Tuple[str, List['WatchResult'], Dict[str, 'Variable']]:
+ """
+ Process the log message.
+
+ :param log_msg: the configured log message
+
+ :returns:
+ (str) log_msg: the processed log message
+ (list) watches: the watch results from the expressions
+ (dict) vars: the collected variables
+ """
+ ctx_self = self
+ watch_results = []
+ _var_lookup = {}
+
+ class FormatDict(dict):
+ """This type is used in the log process to ensure that missing values are formatted don't error."""
+
+ def __missing__(self, key):
+ return "{%s}" % key
+
+ import string
+
+ class FormatExtractor(string.Formatter):
+ """
+ Allows logs to be formatted correctly.
+
+ This type allows us to use watches within log strings and collect the watch
+ as well as interpolate the values.
+ """
+
+ def get_field(self, field_name, args, kwargs):
+ # evaluate watch
+ watch, var_lookup, log_str = ctx_self.eval_watch(field_name)
+ # collect data
+ watch_results.append(watch)
+ _var_lookup.update(var_lookup)
+
+ return log_str, field_name
+
+ log_msg = "[deep] %s" % FormatExtractor().vformat(log_msg, (), FormatDict(self._parent.locals))
+ return log_msg, watch_results, _var_lookup
+
+
+class LogActionResult(ActionResult):
+ """The result of a successful log action."""
+
+ def __init__(self, action: 'LocationAction', log: str):
+ """
+ Create a new result of a log action.
+
+ :param action: the source action
+ :param log: the log result.
+ """
+ self.action = action
+ self.log = log
+
+ def process(self, ctx_id: str, logger: TracepointLogger, service: PushService) -> Optional[ActionCallback]:
+ """
+ Process this result.
+
+ Either log or ship the collected data to an endpoint.
+
+ :param ctx_id: the triggering context id
+ :param logger: the log service
+ :param service: the push service
+ :return: an action callback if we need to do something at the 'end', or None
+ """
+ logger.log_tracepoint(self.log, self.action.id, ctx_id)
+ return None
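
The interesting mechanism here is the string.Formatter subclass: get_field is overridden so every '{...}' placeholder is treated as a watch expression, collected, and replaced with its evaluated value, while FormatDict keeps unknown placeholders from raising. A standalone sketch of that technique (kwargs.get stands in for the real eval_watch call):

    import string

    class FormatDict(dict):
        """Missing keys render as '{key}' instead of raising KeyError."""

        def __missing__(self, key):
            return "{%s}" % key

    class WatchExtractor(string.Formatter):
        """Record every field referenced in the template while formatting it."""

        def __init__(self):
            super().__init__()
            self.watches = []

        def get_field(self, field_name, args, kwargs):
            self.watches.append(field_name)
            # the real LogActionContext evaluates field_name against the frame locals here
            return kwargs.get(field_name, "{%s}" % field_name), field_name

    extractor = WatchExtractor()
    out = extractor.vformat("user={user_id} order={order_total}", (), FormatDict({"user_id": 42}))
    print(out)                 # user=42 order={order_total}
    print(extractor.watches)   # ['user_id', 'order_total']
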
diff --git a/src/deep/processor/context/snapshot_action.py b/src/deep/processor/context/snapshot_action.py
new file mode 100644
index 0000000..55bb122
--- /dev/null
+++ b/src/deep/processor/context/snapshot_action.py
@@ -0,0 +1,159 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Handling for snapshot actions."""
+
+from typing import Tuple, Optional
+
+from deep.api.attributes import BoundedAttributes
+from deep.api.tracepoint import EventSnapshot
+from deep.api.tracepoint.constants import FRAME_TYPE, SINGLE_FRAME_TYPE, NO_FRAME_TYPE, ALL_FRAME_TYPE
+from deep.api.tracepoint.trigger import LocationAction
+from deep.logging.tracepoint_logger import TracepointLogger
+from deep.processor.context.action_context import ActionContext
+from deep.processor.context.action_results import ActionResult, ActionCallback
+from deep.processor.context.log_action import LOG_MSG, LogActionContext, LogActionResult
+from deep.processor.frame_collector import FrameCollectorContext, FrameCollector
+from deep.processor.variable_set_processor import VariableProcessorConfig
+from deep.push import PushService
+
+
+class SnapshotActionContext(FrameCollectorContext, ActionContext):
+ """The context to use when capturing a snapshot."""
+
+ @property
+ def max_tp_process_time(self) -> int:
+ """The max time to spend processing a tracepoint."""
+ return self._action.config.get('MAX_TP_PROCESS_TIME', 100)
+
+ @property
+ def collection_config(self) -> VariableProcessorConfig:
+ """The variable processing config."""
+ config = VariableProcessorConfig()
+ config.max_string_length = self._action.config.get('MAX_STRING_LENGTH', config.DEFAULT_MAX_STRING_LENGTH)
+ config.max_collection_size = self._action.config.get('MAX_COLLECTION_SIZE', config.DEFAULT_MAX_COLLECTION_SIZE)
+ config.max_variables = self._action.config.get('MAX_VARIABLES', config.DEFAULT_MAX_VARIABLES)
+ config.max_var_depth = self._action.config.get('MAX_VAR_DEPTH', config.DEFAULT_MAX_VAR_DEPTH)
+ return config
+
+ @property
+ def ts(self) -> int:
+ """The timestamp in nanoseconds for this trigger."""
+ return self._parent.ts
+
+ def should_collect_vars(self, current_frame_index: int) -> bool:
+ """
+ Check if we can collect data for a frame.
+
+ Frame indexes start from 0 (as the current frame) and increase as we go back up the stack.
+
+ :param (int) current_frame_index: the current frame index.
+ :return (bool): if we should collect the frame vars.
+ """
+ config_type = self._action.config.get(FRAME_TYPE, SINGLE_FRAME_TYPE)
+ if config_type == NO_FRAME_TYPE:
+ return False
+ if config_type == ALL_FRAME_TYPE:
+ return True
+ return current_frame_index == 0
+
+ def is_app_frame(self, filename: str) -> Tuple[bool, str]:
+ """
+ Check if the current frame is a user application frame.
+
+ :param filename: the frame file name
+ :return: Tuple of (True if this is an application frame, the matched path prefix or None)
+ """
+ return self._parent.config.is_app_frame(filename)
+
+ @property
+ def watches(self):
+ """The configured watches."""
+ return self._action.config.get("watches", [])
+
+ @property
+ def log_msg(self):
+ """The configured log message on the tracepoint."""
+ return self._action.config.get(LOG_MSG, None)
+
+ def _process_action(self):
+ collector = FrameCollector(self, self._parent.frame)
+
+ frames, variables = collector.collect(self._parent.vars, self._parent.var_cache)
+
+ snapshot = EventSnapshot(self._action.tracepoint, self._parent.ts, self._parent.resource, frames, variables)
+
+ # process the snapshot watches
+ for watch in self.watches:
+ result, watch_lookup, _ = self.eval_watch(watch)
+ snapshot.add_watch_result(result)
+ snapshot.merge_var_lookup(watch_lookup)
+
+ log_msg = self.log_msg
+ if log_msg is not None:
+ # create and process the log message
+ context = LogActionContext(self._parent, LocationAction(self._action.id, None, {
+ LOG_MSG: log_msg,
+ }, LocationAction.ActionType.Log))
+ log, watches, log_vars = context.process_log(log_msg)
+ snapshot.log_msg = log
+ for watch in watches:
+ snapshot.add_watch_result(watch)
+ snapshot.merge_var_lookup(log_vars)
+ self._parent.attach_result(LogActionResult(context._action, log))
+
+ self._parent.attach_result(SendSnapshotActionResult(self._action, snapshot))
+
+
+class SendSnapshotActionResult(ActionResult):
+ """The result of a successful snapshot action."""
+
+ def __init__(self, action: LocationAction, snapshot: EventSnapshot):
+ """
+ Create a new snapshot action result.
+
+ :param action: the action that created this result
+ :param snapshot: the snapshot result
+ """
+ self.action = action
+ self.snapshot = snapshot
+
+ def process(self, ctx_id: str, logger: TracepointLogger, service: PushService) -> Optional[ActionCallback]:
+ """
+ Process this result.
+
+ Either log or ship the collected data to an endpoint.
+
+ :param ctx_id: the triggering context id
+ :param logger: the log service
+ :param service: the push service
+ :return: an action callback if we need to do something at the 'end', or None
+ """
+ self.snapshot.attributes.merge_in(BoundedAttributes(attributes={'ctx_id': ctx_id}))
+ service.push_snapshot(self.snapshot)
+ return None
diff --git a/src/deep/processor/context/trigger_context.py b/src/deep/processor/context/trigger_context.py
new file mode 100644
index 0000000..aa0d076
--- /dev/null
+++ b/src/deep/processor/context/trigger_context.py
@@ -0,0 +1,151 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""A context for the handling of a trigger."""
+
+import uuid
+from types import FrameType
+from typing import Dict, Optional, List
+
+from deep.api.tracepoint import Variable
+from deep.api.tracepoint.trigger import LocationAction
+from deep.config import ConfigService
+from deep.logging.tracepoint_logger import TracepointLogger
+from deep.processor.context.action_context import MetricActionContext, SpanActionContext, NoActionContext, ActionContext
+from deep.processor.context.action_results import ActionResult, ActionCallback
+from deep.processor.context.log_action import LogActionContext
+from deep.processor.context.snapshot_action import SnapshotActionContext
+from deep.processor.frame_collector import FrameCollector
+from deep.processor.variable_set_processor import VariableCacheProvider
+from deep.push import PushService
+from deep.utils import time_ns
+
+
+class TriggerContext:
+ """
+ Context for a trigger.
+
+ A context is created when a valid location is triggered. This context is then used to process all the actions,
+ collect the data and ship off the results.
+ """
+
+ def __init__(self, config: ConfigService, push_service: PushService, frame: FrameType, event: str):
+ """
+ Create a new trigger context.
+
+ :param config: the config service
+ :param push_service: the push service
+ :param frame: the frame data
+ :param event: the trigger event
+ """
+ self.__push_service = push_service
+ self.__event = event
+ self.__frame = frame
+ self.__config = config
+ self.__results: List[ActionResult] = []
+ self.__ts: int = time_ns()
+ self.__id: str = str(uuid.uuid4())
+ self.__frame_collector: Optional[FrameCollector] = None
+ self.var_cache = VariableCacheProvider()
+ self.callbacks: List[ActionCallback] = []
+ self.vars: Dict[str, Variable] = {}
+
+ def __enter__(self):
+ """Start the 'with' statement and open this context."""
+ return self
+
+ def __exit__(self, exception_type, exception_value, exception_traceback):
+ """Complete the 'with' statement, and close this context."""
+ for result in self.__results:
+ new_callback = result.process(self.__id, self.tracepoint_logger, self.push_service)
+ if new_callback is not None:
+ self.callbacks.append(new_callback)
+
+ @property
+ def file_name(self):
+ """The trigger location source file name."""
+ return self.__frame.f_code.co_filename
+
+ @property
+ def locals(self) -> Dict[str, any]:
+ """The local frame variables."""
+ return self.__frame.f_locals
+
+ @property
+ def ts(self):
+ """The timestamp in nanoseconds for this trigger."""
+ return self.__ts
+
+ @property
+ def resource(self):
+ """The client resource information."""
+ return self.__config.resource
+
+ @property
+ def frame(self):
+ """The raw frame data."""
+ return self.__frame
+
+ @property
+ def config(self):
+ """The config service."""
+ return self.__config
+
+ def action_context(self, action: 'LocationAction') -> 'ActionContext':
+ """
+ Create an action context from this context, for the provided action.
+
+ :param action: the action
+ :return: the new action context.
+ """
+ if action.action_type == LocationAction.ActionType.Snapshot:
+ return SnapshotActionContext(self, action)
+ if action.action_type == LocationAction.ActionType.Log:
+ return LogActionContext(self, action)
+ if action.action_type == LocationAction.ActionType.Metric:
+ return MetricActionContext(self, action)
+ if action.action_type == LocationAction.ActionType.Span:
+ return SpanActionContext(self, action)
+ return NoActionContext(self, action)
+
+ def evaluate_expression(self, expression: str) -> any:
+ """
+ Evaluate an expression to a value.
+
+ :param expression: the expression
+ :return: the result of the expression, or the exception that was raised.
+ """
+ try:
+ return eval(expression, None, self.__frame.f_locals)
+ except BaseException as e:
+ return e
+
+ def attach_result(self, result: ActionResult):
+ """
+ Attach a result for this context.
+
+ :param result: the new result
+ """
+ self.__results.append(result)
+
+ @property
+ def tracepoint_logger(self) -> TracepointLogger:
+ """The tracepoint logger service."""
+ return self.__config.tracepoint_logger
+
+ @property
+ def push_service(self) -> PushService:
+ """The push service."""
+ return self.__push_service
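
The pattern here is collect-then-flush: actions attach ActionResults while the context is open, and __exit__ processes them once and keeps any callbacks that need to run at the end of the frame event. A standalone miniature of that shape (not the real class):

    class MiniContext:
        """Sketch of the TriggerContext flush-on-exit pattern."""

        def __init__(self):
            self._results = []
            self.callbacks = []

        def attach_result(self, result):
            self._results.append(result)

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, tb):
            for result in self._results:
                callback = result()  # stand-in for result.process(ctx_id, logger, service)
                if callback is not None:
                    self.callbacks.append(callback)

    with MiniContext() as ctx:
        ctx.attach_result(lambda: None)                # e.g. a log result, nothing to close
        ctx.attach_result(lambda: "close-span-later")  # e.g. a result that defers work

    print(ctx.callbacks)  # ['close-span-later']
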
diff --git a/src/deep/processor/frame_collector.py b/src/deep/processor/frame_collector.py
index 2b120ee..e7850bb 100644
--- a/src/deep/processor/frame_collector.py
+++ b/src/deep/processor/frame_collector.py
@@ -16,148 +16,122 @@
"""Processing for frame collection."""
import abc
-from typing import Dict, Tuple, List, Optional
+from types import FrameType
+from typing import Tuple, Dict, List
-from deep import logging
-from deep.api.tracepoint import StackFrame, WatchResult, Variable, VariableId
-from deep.processor.bfs import Node, NodeValue, breadth_first_search, ParentNode
+from deep.api.tracepoint import StackFrame, Variable
from deep.utils import time_ns
-from .frame_config import FrameProcessorConfig
-from .variable_processor import process_variable, process_child_nodes, variable_to_string, truncate_string, Collector
-from ..config import ConfigService
+from .variable_set_processor import VariableCacheProvider, VariableSetProcessor, VariableProcessorConfig
-class FrameCollector(Collector):
- """This deals with collecting data from the paused frames."""
-
- def __init__(self, frame, config: ConfigService):
- """
- Create a new collector.
-
- :param frame: the frame data
- :param config: the deep config service
- """
- self._var_cache: Dict[str, str] = {}
- self._config = config
- self._has_time_exceeded = False
- self._ts = time_ns()
- self._frame_config = FrameProcessorConfig()
- self._frame = frame
- self._var_lookup: Dict[str, Variable] = {}
- self._var_id = 0
+class FrameCollectorContext(abc.ABC):
+ """The context that is used to wrap a collection event."""
@property
- def frame_config(self) -> FrameProcessorConfig:
- """
- The frame config.
+ @abc.abstractmethod
+ def max_tp_process_time(self) -> int:
+ """The max time to spend processing a tracepoint."""
+ pass
- :return: the frame config
- """
- return self._frame_config
+ @property
+ @abc.abstractmethod
+ def collection_config(self) -> VariableProcessorConfig:
+ """The variable processing config."""
+ pass
+ @property
@abc.abstractmethod
- def configure_self(self):
- """Process the filtered tracepoints to configure this processor."""
+ def ts(self) -> int:
+ """The timestamp in nanoseconds for this trigger."""
pass
- def add_child_to_lookup(self, parent_id: str, child: VariableId):
+ @abc.abstractmethod
+ def should_collect_vars(self, current_frame_index: int) -> bool:
"""
- Add a child variable to the var lookup parent.
+ Check if we can collect data for a frame.
- :param parent_id: the internal id of the parent
- :param child: the child VariableId to append
- :return:
- """
- self._var_lookup[parent_id].children.append(child)
+ Frame indexes start from 0 (as the current frame) and increase as we go back up the stack.
- def log_tracepoint(self, log_msg: str, tp_id: str, snap_id: str):
- """Send the processed log to the log handler."""
- self._config.log_tracepoint(log_msg, tp_id, snap_id)
+ :param (int) current_frame_index: the current frame index.
+ :return (bool): if we should collect the frame vars.
+ """
+ pass
- def process_log(self, tp, log_msg) -> Tuple[str, List[WatchResult], Dict[str, Variable]]:
+ @abc.abstractmethod
+ def is_app_frame(self, filename: str) -> Tuple[bool, str]:
"""
- Process a log message.
+ Check if the current frame is a user application frame.
- :param tp: the tracepoint config
- :param log_msg: the log message
- :returns:
- (str) log_msg: the processed log message
- (list) watches: the watch results from the log
- (dict) vars: the collected vars for the watches
+ :param filename: the frame file name
+ :return: Tuple of (True if this is an application frame, the matched path prefix or None)
"""
- frame_col = self
- watch_results = []
- _var_lookup = {}
+ pass
- class FormatDict(dict):
- """This type is used in the log process to ensure that missing values are formatted don't error."""
- def __missing__(self, key):
- return "{%s}" % key
+class FrameCollector:
+ """This deals with collecting data from the paused frames."""
- import string
+ def __init__(self, source: FrameCollectorContext, frame: FrameType):
+ """
+ Create a new collector.
- class FormatExtractor(string.Formatter):
- """
- Allows logs to be formatted correctly.
+ :param source: the collector context
+ :param frame: the frame data
+ """
+ self.__has_time_exceeded = False
+ self.__source = source
+ self.__frame = frame
- This type allows us to use watches within log strings and collect the watch
- as well as interpolate the values.
- """
+ def __time_exceeded(self) -> bool:
+ if self.__has_time_exceeded:
+ return self.__has_time_exceeded
- def get_field(self, field_name, args, kwargs):
- # evaluate watch
- watch, var_lookup, log_str = frame_col.eval_watch(field_name)
- # collect data
- watch_results.append(watch)
- _var_lookup.update(var_lookup)
+ duration = (time_ns() - self.__source.ts) / 1000000 # make duration ms not ns
+ self.__has_time_exceeded = duration > self.__source.max_tp_process_time
+ return self.__has_time_exceeded
- return log_str, field_name
+ def parse_short_name(self, filename) -> Tuple[str, bool]:
+ """
+ Process a file name into a shorter version.
- log_msg = "[deep] %s" % FormatExtractor().vformat(log_msg, (), FormatDict(self._frame.f_locals))
- return log_msg, watch_results, _var_lookup
+ By default, the file names in python are the absolute path to the file on disk. These can be quite long,
+ so we try to shorten the names by looking at the APP_ROOT and converting the file name into a relative path.
- def eval_watch(self, watch: str) -> Tuple[WatchResult, Dict[str, Variable], str]:
- """
- Evaluate an expression in the current frame.
+ e.g. if the file name is '/dev/python/custom_service/api/handler.py' and the APP_ROOT is
+ '/dev/python/custom_service' then we shorten the path to 'custom_service/api/handler.py'.
- :param watch: The watch expression to evaluate.
- :return: Tuple with WatchResult, collected variables, and the log string for the expression
+ :param (str) filename: the file name
+ :returns:
+ (str) filename: the new file name
+ (bool) is_app_frame: True if the file is an application frame file
"""
- # reset var lookup - var cache is still used to reduce duplicates
- self._var_lookup = {}
-
- try:
- result = eval(watch, None, self._frame.f_locals)
- watch_var, var_lookup, log_str = self.__process_watch_result_breadth_first(watch, result)
- # again we reset the local version of the var lookup.
- self._var_lookup = {}
- return WatchResult(watch, watch_var), var_lookup, log_str
- except BaseException as e:
- logging.exception("Error evaluating watch %s", watch)
- return WatchResult(watch, None, str(e)), {}, str(e)
+ is_app_frame, match = self.__source.is_app_frame(filename)
+ if match is not None:
+ return filename[len(match):], is_app_frame
+ return filename, is_app_frame
- def process_frame(self):
+ def collect(self, var_lookup: Dict[str, Variable], var_cache: VariableCacheProvider) \
+ -> Tuple[List[StackFrame], Dict[str, Variable]]:
"""
- Start processing the frame.
+ Collect the data from the current frame.
- :return: Tuple of collected frames and variables
+ :param var_lookup: the var lookup to use
+ :param var_cache: the var cache to use
+ :return: Tuple of the collected frames and the variable lookup
"""
- current_frame = self._frame
+ current_frame = self.__frame
collected_frames = []
# while we still have frames process them
while current_frame is not None:
# process the current frame
- frame = self._process_frame(current_frame, self._frame_config.should_collect_vars(len(collected_frames)))
+ frame = self._process_frame(var_lookup, var_cache, current_frame,
+ self.__source.should_collect_vars(len(collected_frames)))
collected_frames.append(frame)
current_frame = current_frame.f_back
- # We want to clear the local collected var lookup now that we have processed the frame
- # this is, so we can process watches later while maintaining independence between tracepoints
- _vars = self._var_lookup
- self._var_lookup = {}
- return collected_frames, _vars
+ return collected_frames, var_lookup
- def _process_frame(self, frame, process_vars):
+ def _process_frame(self, var_lookup: Dict[str, Variable], var_cache: VariableCacheProvider,
+ frame: FrameType, collect_vars: bool) -> StackFrame:
# process the current frame info
lineno = frame.f_lineno
filename = frame.f_code.co_filename
@@ -166,182 +140,20 @@ def _process_frame(self, frame, process_vars):
f_locals = frame.f_locals
_self = f_locals.get('self', None)
class_name = None
- if _self is not None:
+ if _self is not None and hasattr(_self, '__class__'):
class_name = _self.__class__.__name__
var_ids = []
# only process vars if we are under the time limit
- if process_vars and not self.__time_exceeded():
- var_ids = self.process_frame_variables_breadth_first(f_locals)
+ if collect_vars and not self.__time_exceeded():
+ processor = VariableSetProcessor(var_lookup, var_cache, self.__source.collection_config)
+ # we process the vars as a single dict of 'locals'
+ variable, log_str = processor.process_variable("locals", f_locals)
+ # now we 'unwrap' the locals, so they are on the frame directly.
+ if variable.vid in var_lookup:
+ variable_val = var_lookup[variable.vid]
+ del var_lookup[variable.vid]
+ var_ids = variable_val.children
short_path, app_frame = self.parse_short_name(filename)
return StackFrame(filename, short_path, func_name, lineno, var_ids, class_name,
app_frame=app_frame)
-
- def __time_exceeded(self):
- if self._has_time_exceeded:
- return self._has_time_exceeded
-
- duration = (time_ns() - self._ts) / 1000000 # make duration ms not ns
- self._has_time_exceeded = duration > self._frame_config.max_tp_process_time
- return self._has_time_exceeded
-
- def __is_app_frame(self, filename: str) -> Tuple[bool, Optional[str]]:
- in_app_include = self._config.IN_APP_INCLUDE
- in_app_exclude = self._config.IN_APP_EXCLUDE
-
- for path in in_app_exclude:
- if filename.startswith(path):
- return False, path
-
- for path in in_app_include:
- if filename.startswith(path):
- return True, path
-
- if filename.startswith(self._config.APP_ROOT):
- return True, self._config.APP_ROOT
-
- return False, None
-
- def process_frame_variables_breadth_first(self, f_locals):
- """
- Process the variables on a frame.
-
- :param f_locals: the frame locals.
- :return: the list of var ids for the frame.
- """
- var_ids = []
-
- class FrameParent(ParentNode):
-
- def add_child(self, child):
- var_ids.append(child)
-
- root_parent = FrameParent()
-
- initial_nodes = [Node(NodeValue(k, v), parent=root_parent) for k, v in f_locals.items()]
- breadth_first_search(Node(None, initial_nodes, root_parent), self.search_function)
-
- return var_ids
-
- def search_function(self, node: Node) -> bool:
- """
- Process a node using breadth first approach.
-
- :param node: the current node we are process
- :return: True, if we want to continue with the nodes children
- """
- if not self.check_var_count():
- # we have exceeded the var count, so do not continue
- return False
-
- node_value = node.value
- if node_value is None:
- # this node has no value, continue with children
- return True
-
- # process this node variable
- process_result = process_variable(self, node_value)
- var_id = process_result.variable_id
- # add the result to the parent - this maintains the hierarchy in the var look up
- node.parent.add_child(var_id)
-
- # some variables do not want the children processed (e.g. strings)
- if process_result.process_children:
- # process children and add to node
- child_nodes = process_child_nodes(self, var_id.vid, node_value.value, node.depth)
- node.add_children(child_nodes)
- return True
-
- def check_var_count(self):
- """
- Check if we have exceeded our var count.
-
- :return: True, if we should continue.
- """
- if len(self._var_cache) > self._frame_config.max_variables:
- return False
- return True
-
- def __process_watch_result_breadth_first(self, watch: str, result: any) -> (
- Tuple)[VariableId, Dict[str, Variable], str]:
-
- identity_hash_id = str(id(result))
- check_id = self.check_id(identity_hash_id)
- if check_id is not None:
- # this means the watch result is already in the var_lookup
- return VariableId(check_id, watch), {}, str(result)
-
- # else this is an unknown value so process breadth first
- var_ids = []
-
- class FrameParent(ParentNode):
-
- def add_child(self, child):
- var_ids.append(child)
-
- root_parent = FrameParent()
-
- initial_nodes = [Node(NodeValue(watch, result), parent=root_parent)]
- breadth_first_search(Node(None, initial_nodes, root_parent), self.search_function)
-
- var_id = self.check_id(identity_hash_id)
-
- variable_type = type(result)
- variable_value_str, truncated = truncate_string(variable_to_string(variable_type, result),
- self.frame_config.max_string_length)
-
- self._var_lookup[var_id] = Variable(str(variable_type.__name__), variable_value_str, identity_hash_id, [],
- truncated)
- return VariableId(var_id, watch), self._var_lookup, str(result)
-
- def check_id(self, identity_hash_id):
- """
- Check if the identity_hash_id is known to us, and return the lookup id.
-
- :param identity_hash_id: the id of the object
- :return: the lookup id used
- """
- if identity_hash_id in self._var_cache:
- return self._var_cache[identity_hash_id]
- return None
-
- def new_var_id(self, identity_hash_id: str) -> str:
- """
- Create a new cache id for the lookup.
-
- :param identity_hash_id: the id of the object
- :return: the new lookup id
- """
- var_count = len(self._var_cache)
- new_id = str(var_count + 1)
- self._var_cache[identity_hash_id] = new_id
- return new_id
-
- def append_variable(self, var_id, variable):
- """
- Append a variable to var lookup using the var id.
-
- :param var_id: the internal variable id
- :param variable: the variable data to append
- """
- self._var_lookup[var_id] = variable
-
- def parse_short_name(self, filename: str) -> Tuple[str, bool]:
- """
- Process a file name into a shorter version.
-
- By default, the file names in python are the absolute path to the file on disk. These can be quite long,
- so we try to shorten the names by looking at the APP_ROOT and converting the file name into a relative path.
-
- e.g. if the file name is '/dev/python/custom_service/api/handler.py' and the APP_ROOT is
- '/dev/python/custom_service' then we shorten the path to 'custom_service/api/handler.py'.
-
- :param (str) filename: the file name
- :returns:
- (str) filename: the new file name
- (bool) is_app_frame: True if the file is an application frame file
- """
- is_app_frame, match = self.__is_app_frame(filename)
- if match is not None:
- return filename[len(match):], is_app_frame
- return filename, is_app_frame
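
The shortening rule is simply "strip whatever prefix matched" (an exclude/include path or APP_ROOT). A standalone sketch with a made-up prefix:

    def parse_short_name(filename, is_app_frame):
        """Strip the matched prefix from the absolute file name, if there was one."""
        app_frame, match = is_app_frame(filename)
        if match is not None:
            return filename[len(match):], app_frame
        return filename, app_frame

    checker = lambda f: (True, "/srv/") if f.startswith("/srv/") else (False, None)
    print(parse_short_name("/srv/app/api/handler.py", checker))
    # -> ('app/api/handler.py', True)
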
diff --git a/src/deep/processor/frame_processor.py b/src/deep/processor/frame_processor.py
deleted file mode 100644
index bdfaf1b..0000000
--- a/src/deep/processor/frame_processor.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright (C) 2023 Intergral GmbH
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see .
-
-"""
-Handle Frame data processing.
-
-When processing a frame we need to ensure that the matched tracepoints can fire and that we collect
-the appropriate information. We need to process the conditions and fire rates of the tracepoints, and check the
-configs to collect the smallest amount of data possible.
-"""
-
-from types import FrameType
-from typing import List
-
-from deep import logging
-from deep.api.attributes import BoundedAttributes
-from deep.api.tracepoint import TracePointConfig, EventSnapshot
-from deep.api.tracepoint.tracepoint_config import LOG_MSG
-from deep.config import ConfigService
-from deep.processor.frame_collector import FrameCollector
-
-
-class FrameProcessor(FrameCollector):
- """This handles a 'hit' and starts the process of collecting the data."""
-
- _filtered_tracepoints: List[TracePointConfig]
-
- def __init__(self, tracepoints: List[TracePointConfig], frame: FrameType, config: ConfigService):
- """
- Create a new processor.
-
- :param tracepoints: the tracepoints for the triggering event
- :param frame: the frame data
- :param config: the deep config service
- """
- super().__init__(frame, config)
- self._tracepoints = tracepoints
- self._filtered_tracepoints = []
-
- def collect(self) -> List[EventSnapshot]:
- """
- Collect the snapshot data for the available tracepoints.
-
- :return: list of completed snapshots
- """
- snapshots = []
- # process the frame to a stack and var list
- stack, variables = self.process_frame()
- # iterate the tracepoints
- for tp in self._filtered_tracepoints:
- # crete a snapshot
- snapshot = EventSnapshot(tp, self._ts, self._config.resource, stack, variables)
- # process the snapshot watches
- for watch in tp.watches:
- result, watch_lookup, _ = self.eval_watch(watch)
- snapshot.add_watch_result(result)
- snapshot.merge_var_lookup(watch_lookup)
-
- log_msg = tp.get_arg(LOG_MSG, None)
- if log_msg is not None:
- processed_log, watch_results, watch_lookup = self.process_log(tp, log_msg)
- snapshot.log_msg = processed_log
- for watch in watch_results:
- snapshot.add_watch_result(watch)
- snapshot.merge_var_lookup(watch_lookup)
- self.log_tracepoint(processed_log, tp.id, format(snapshot.id, "016x"))
-
- # process the snapshot attributes
- attributes = self.process_attributes(tp)
- snapshot.attributes.merge_in(attributes)
- # save the snapshot
- snapshots.append(snapshot)
- # mark tp as triggered
- tp.record_triggered(self._ts)
-
- return snapshots
-
- def can_collect(self):
- """
- Check if we can collect data.
-
- Check if the tracepoints can fire given their configs. Checking time windows, fire rates etc.
-
- :return: True, if any tracepoint can fire
- """
- for tp in self._tracepoints:
- if tp.can_trigger(self._ts) and self.condition_passes(tp):
- # store the filtered tracepoints in a new list
- self._filtered_tracepoints.append(tp)
-
- return len(self._filtered_tracepoints) > 0
-
- def condition_passes(self, tp: TracePointConfig) -> bool:
- """
- Check if the tracepoint condition passes.
-
- :param (TracePointConfig) tp: the tracepoint to check
- :return: True, if the condition passes
- """
- condition = tp.condition
- if condition is None or condition == "":
- # There is no condition so return True
- return True
- logging.debug("Executing condition evaluation: %s", condition)
- try:
- result = eval(condition, None, self._frame.f_locals)
- logging.debug("Condition result: %s", result)
- if result:
- return True
- return False
- except Exception:
- logging.exception("Error evaluating condition %s", condition)
- return False
-
- def configure_self(self):
- """Process the filtered tracepoints to configure this processor."""
- for tp in self._filtered_tracepoints:
- self._frame_config.process_tracepoint(tp)
- self._frame_config.close()
-
- def process_attributes(self, tp: TracePointConfig) -> BoundedAttributes:
- """
- Process the attributes for a tracepoint.
-
- :param (TracePointConfig) tp: the tracepoint to process.
- :return (BoundedAttributes): the attributes for the tracepoint
- """
- attributes = {
- "tracepoint": tp.id,
- "path": tp.path,
- "line": tp.line_no,
- "stack": tp.stack_type,
- "frame": tp.frame_type
- }
- if len(tp.watches) != 0:
- attributes["has_watches"] = True
- if tp.condition is not None:
- attributes["has_condition"] = True
- return BoundedAttributes(attributes=attributes)
diff --git a/src/deep/processor/trigger_handler.py b/src/deep/processor/trigger_handler.py
index bd8fa06..34ba5a5 100644
--- a/src/deep/processor/trigger_handler.py
+++ b/src/deep/processor/trigger_handler.py
@@ -22,15 +22,24 @@
other supported action.
"""
-import logging
import os
import sys
import threading
+from collections import deque
+from types import FrameType
+from typing import Tuple, TYPE_CHECKING, List, Deque, Optional
+from deep import logging
+from deep.api.tracepoint.trigger import Trigger
from deep.config import ConfigService
from deep.config.tracepoint_config import ConfigUpdateListener
-from deep.processor.frame_processor import FrameProcessor
+from deep.processor.context.action_results import ActionCallback
+from deep.processor.context.trigger_context import TriggerContext
from deep.push import PushService
+from deep.thread_local import ThreadLocal
+
+if TYPE_CHECKING:
+ from deep.processor.context.action_context import ActionContext
class TracepointHandlerUpdateListener(ConfigUpdateListener):
@@ -60,15 +69,7 @@ def config_change(self, ts, old_hash, current_hash, old_config, new_config):
:param old_config: the old config
:param new_config: the new config
"""
- sorted_config = {}
- for tracepoint in new_config:
- path = os.path.basename(tracepoint.path)
- line_no = tracepoint.line_no
- by_file = self.__add_or_get(sorted_config, path, {})
- by_line = self.__add_or_get(by_file, line_no, [])
- by_line.append(tracepoint)
-
- self._handler.new_config(sorted_config)
+ self._handler.new_config(new_config)
class TriggerHandler:
@@ -78,6 +79,8 @@ class TriggerHandler:
This is where we 'listen' for a hit, and determine if we should collect data.
"""
+ __callbacks: ThreadLocal[Deque[List[ActionCallback]]] = ThreadLocal(lambda: deque())
+
def __init__(self, config: ConfigService, push_service: PushService):
"""
Create a new trigger handler.
@@ -85,8 +88,10 @@ def __init__(self, config: ConfigService, push_service: PushService):
:param config: the config service
:param push_service: the push service
"""
+ self.__old_thread_trace = None
+ self.__old_sys_trace = None
self._push_service = push_service
- self._tp_config = []
+ self._tp_config: List[Trigger] = []
self._config = config
self._config.add_listener(TracepointHandlerUpdateListener(self))
@@ -96,10 +101,14 @@ def start(self):
# so we allow the settrace to be disabled, so we can at least debug around it
if self._config.NO_TRACE:
return
+ self.__old_sys_trace = sys.gettrace()
+ # gettrace was added in 3.10, so use it if we can, else try to get from property
+ # noinspection PyUnresolvedReferences,PyProtectedMember
+ self.__old_thread_trace = threading.gettrace() if hasattr(threading, 'gettrace') else threading._trace_hook
sys.settrace(self.trace_call)
threading.settrace(self.trace_call)
- def new_config(self, new_config):
+ def new_config(self, new_config: List['Trigger']):
"""
Process a new tracepoint config.
@@ -109,61 +118,98 @@ def new_config(self, new_config):
"""
self._tp_config = new_config
- def trace_call(self, frame, event, arg):
+ def trace_call(self, frame: FrameType, event: str, arg):
"""
Process the data for a trace call.
This is called by the python engine when an event is about to be called.
+ This is called by Python with the current frame data.
+ The events are as follows:
+ - line: a line is being executed
+ - call: a function is being called
+ - return: a function is about to return
+ - exception: an exception is being raised
+
:param frame: the current frame
:param event: the event 'line', 'call', etc. That we are processing.
:param arg: the args
:return: None to ignore other calls, or our self to continue
"""
+ if event in ["line", "return", "exception"] and self.__callbacks.is_set:
+ self.__process_call_backs(frame, event)
+
# return if we do not have any tracepoints
if len(self._tp_config) == 0:
return None
- tracepoints_for_file, tracepoints_for_line = self.__tracepoints_for(os.path.basename(frame.f_code.co_filename),
- frame.f_lineno)
-
- # return if this is not a 'line' event
- if event != 'line':
- if len(tracepoints_for_file) == 0:
- return None
+ event, file, line, function = self.location_from_event(event, frame)
+ actions = self.__actions_for_location(event, file, line, function)
+ if len(actions) == 0:
return self.trace_call
- if len(tracepoints_for_line) > 0:
- self.process_tracepoints(tracepoints_for_line, frame)
+ trigger_context = TriggerContext(self._config, self._push_service, frame, event)
+ try:
+ with trigger_context:
+ for action in actions:
+ try:
+ ctx: ActionContext
+ with trigger_context.action_context(action) as ctx:
+ if ctx.can_trigger():
+ ctx.process()
+ except BaseException:
+ logging.exception("Cannot process action %s", action)
+ except BaseException:
+ logging.exception("Cannot trigger at %s#%s %s", file, line, function)
+
+ # store the callbacks under this thread so later events can process them
+ callbacks = self.__callbacks.get()
+ callbacks.append(trigger_context.callbacks)
+ self.__callbacks.set(callbacks)
+
return self.trace_call
- def __tracepoints_for(self, filename, lineno):
- if filename in self._tp_config:
- filename_ = self._tp_config[filename]
- if lineno in filename_:
- return filename_, filename_[lineno]
- return filename_, []
- return [], []
+ def __actions_for_location(self, event, file, line, function):
+ actions = []
+ for trigger in self._tp_config:
+ if trigger.at_location(event, file, line, function):
+ actions += trigger.actions
+ return actions
+
+ def __process_call_backs(self, frame: FrameType, event: str):
+ callbacks = self.__callbacks.value.pop()
+ remaining: List[ActionCallback] = []
+ for callback in callbacks:
+ if callback.process(frame, event):
+ remaining.append(callback)
+
+ self.__callbacks.value.append(remaining)
+
+ @staticmethod
+ def location_from_event(event: str, frame: FrameType) -> Tuple[str, str, int, Optional[str]]:
+ """
+ Convert an event into a location.
+
+ The events are as follows:
+ - line: a line is being executed
+ - call: a function is being called
+ - return: a function is about to return
+ - exception: an exception is being raised
+ :param event: the event type ('line', 'call', 'return', 'exception')
+ :param frame: the frame data for the event
+ :returns:
+ - event
+ - file path
+ - line number
+ - function name
+ """
+ filename = os.path.basename(frame.f_code.co_filename)
+ line = frame.f_lineno
+ function = frame.f_code.co_name
+ return event, filename, line, function
- def process_tracepoints(self, tracepoints_for, frame):
+ def shutdown(self):
"""
- We have some tracepoints, now check if we can collect.
+ Shutdown this handler.
- :param tracepoints_for: tracepoints for the file/line
- :param frame: the frame data
+ Reset the settrace to the previous values.
"""
- # create a new frame processor with the config
- processor = FrameProcessor(tracepoints_for, frame, self._config)
- # check if we can collect anything
- can_collect = processor.can_collect()
- if can_collect:
- # we can proceed so have the processor configure from active tracepoints
- processor.configure_self()
- try:
- # collect the data - this can be more than one result
- snapshots = processor.collect()
- for snapshot in snapshots:
- # push each result to services - this is async to allow the program to resume
- self._push_service.push_snapshot(snapshot)
- except Exception:
- logging.exception("Failed to collect snapshot")
+ sys.settrace(self.__old_sys_trace)
+ threading.settrace(self.__old_thread_trace)
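For reference, trace_call follows the standard sys.settrace/threading.settrace contract: the hook receives a 'call' event for each new frame, and it only keeps receiving 'line', 'return' and 'exception' events for that frame if it returns a trace function (here self.trace_call). A minimal, stand-alone sketch of that contract (plain CPython behaviour, not code from this repository):

    import sys
    import threading


    def trace(frame, event, arg):
        # 'call' fires when a new frame is entered; returning the function
        # opts this frame in to 'line', 'return' and 'exception' events.
        if event == "call":
            print("entering", frame.f_code.co_name)
        elif event == "line":
            print("line", frame.f_lineno, "locals:", list(frame.f_locals))
        return trace


    def target():
        x = 1
        return x + 1


    sys.settrace(trace)        # hook the current thread
    threading.settrace(trace)  # and any threads started afterwards
    target()
    sys.settrace(None)         # always restore, as shutdown() above does
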
diff --git a/src/deep/processor/variable_processor.py b/src/deep/processor/variable_processor.py
index e6ab7c2..c632090 100644
--- a/src/deep/processor/variable_processor.py
+++ b/src/deep/processor/variable_processor.py
@@ -27,7 +27,7 @@
from deep import logging
from deep.api.tracepoint import VariableId, Variable
from .bfs import Node, ParentNode, NodeValue
-from .frame_config import FrameProcessorConfig
+
NO_CHILD_TYPES = [
'str',
@@ -66,22 +66,31 @@ class Collector(abc.ABC):
@property
@abc.abstractmethod
- def frame_config(self) -> FrameProcessorConfig:
+ def max_string_length(self) -> int:
"""
- The frame config.
+ Get the max length of a string.
- :return: the frame config
+ :return int: the configured value
"""
pass
+ @property
+ @abc.abstractmethod
+ def max_collection_size(self) -> int:
+ """
+ Get the max size of a collection.
+
+ :return int: the configured value
+ """
+ pass
+
+ @property
@abc.abstractmethod
- def add_child_to_lookup(self, parent_id: str, child: VariableId):
+ def max_var_depth(self) -> int:
"""
- Add a child variable to the var lookup parent.
+ Get the max depth to process.
- :param parent_id: the internal id of the parent
- :param child: the child VariableId to append
- :return:
+ :return int: the configured value
"""
pass
@@ -108,10 +117,25 @@ def new_var_id(self, identity_hash_id: str) -> str:
@abc.abstractmethod
def append_variable(self, var_id: str, variable: Variable):
"""
- Append a variable to var lookup using the var id.
+ Append a variable to the var lookup.
+
+ This is called when a variable has been processed
+
+ :param var_id: the internal id of the variable
+ :param variable: the internal value of the variable
+ """
+ pass
+
+ @abc.abstractmethod
+ def append_child(self, variable_id: str, child: VariableId):
+ """
+ Append a child to an existing variable.
+
+ This is called when a child variable has been processed and the result should be attached to a
+ variable that has already been processed.
- :param var_id: the internal variable id
- :param variable: the variable data to append
+ :param str variable_id: the internal variable id of the parent variable
+ :param VariableId child: the internal variable id value to attach to the parent
"""
pass
@@ -131,7 +155,7 @@ def variable_id(self):
@property
def process_children(self):
- """Can we process the children of the value."""
+ """Continue with the child nodes."""
return self.__process_children
@@ -168,18 +192,18 @@ def variable_to_string(variable_type, var_value):
elif variable_type is dict \
or variable_type.__name__ in LIST_LIKE_TYPES:
# if we are a collection then we do not want to use built in string as this can be very
- # large, and quite pointless, in stead we just get the size of the collection
+ # large, and quite pointless, instead we just get the size of the collection
return 'Size: %s' % len(var_value)
else:
# everything else just gets a string value
return str(var_value)
-def process_variable(frame_collector: Collector, node: NodeValue) -> VariableResponse:
+def process_variable(var_collector: Collector, node: NodeValue) -> VariableResponse:
"""
Process the variable into a serializable type.
- :param frame_collector: the collector being used
+ :param var_collector: the collector being used
:param node: the variable node to process
:return: a response to determine if we continue
"""
@@ -188,7 +212,7 @@ def process_variable(frame_collector: Collector, node: NodeValue) -> VariableRes
# guess the modifiers
modifiers = var_modifiers(node.name)
# check the collector cache for this id
- cache_id = frame_collector.check_id(identity_hash_id)
+ cache_id = var_collector.check_id(identity_hash_id)
# if we have a cache_id, then this variable has already been processed, so we just return
# a variable id and do not process children. This prevents us from processing the same value over and over. We
# also do not count this towards the max_vars, so we can increase the data we send.
@@ -197,7 +221,7 @@ def process_variable(frame_collector: Collector, node: NodeValue) -> VariableRes
return VariableResponse(VariableId(cache_id, node.name, modifiers, node.original_name), process_children=False)
# if we do not have a cache_id - then create one
- var_id = frame_collector.new_var_id(identity_hash_id)
+ var_id = var_collector.new_var_id(identity_hash_id)
# create the variable id to use
variable_id = VariableId(var_id, node.name, modifiers, node.original_name)
@@ -205,12 +229,12 @@ def process_variable(frame_collector: Collector, node: NodeValue) -> VariableRes
variable_type = type(node.value)
# create a string value of the variable
variable_value_str, truncated = truncate_string(variable_to_string(variable_type, node.value),
- frame_collector.frame_config.max_string_length)
+ var_collector.max_string_length)
# create a variable for the lookup
variable = Variable(str(variable_type.__name__), variable_value_str, identity_hash_id, [], truncated)
# add to lookup
- frame_collector.append_variable(var_id, variable)
+ var_collector.append_variable(var_id, variable)
# return result - and expand children
return VariableResponse(variable_id, process_children=True)
@@ -227,7 +251,7 @@ def truncate_string(string, max_length):
def process_child_nodes(
- frame_collector: Collector,
+ var_collector: Collector,
variable_id: str,
var_value: any,
frame_depth: int
@@ -237,7 +261,7 @@ def process_child_nodes(
Child node collection is performed via a variety of functions based on the type of the variable we are processing.
- :param frame_collector: the collector we are using
+ :param var_collector: the collector we are using
:param variable_id: the variable id to attach children to
:param var_value: the value we are looking at for children
:param frame_depth: the current depth we are at
@@ -249,17 +273,17 @@ def process_child_nodes(
return []
# if the depth is more than we are configured - return empty
- if frame_depth + 1 >= frame_collector.frame_config.max_var_depth:
+ if frame_depth + 1 >= var_collector.max_var_depth:
return []
class VariableParent(ParentNode):
def add_child(self, child: VariableId):
# look for the child in the lookup and add this id to it
- frame_collector.add_child_to_lookup(variable_id, child)
+ var_collector.append_child(variable_id, child)
# scan the child based on type
- return find_children_for_parent(frame_collector, VariableParent(), var_value, variable_type)
+ return find_children_for_parent(var_collector, VariableParent(), var_value, variable_type)
def correct_names(name, val):
@@ -276,12 +300,12 @@ def correct_names(name, val):
return val
-def find_children_for_parent(frame_collector: Collector, parent_node: ParentNode, value: any,
+def find_children_for_parent(var_collector: Collector, parent_node: ParentNode, value: any,
variable_type: type):
"""
Scan the parent for children based on the type.
- :param frame_collector: the collector we are using
+ :param var_collector: the collector we are using
:param parent_node: the parent node
:param value: the variable value we are processing
:param variable_type: the type of the variable
@@ -290,9 +314,9 @@ def find_children_for_parent(frame_collector: Collector, parent_node: ParentNode
if variable_type is dict:
return process_dict_breadth_first(parent_node, variable_type.__name__, value)
elif variable_type.__name__ in LIST_LIKE_TYPES:
- return process_list_breadth_first(frame_collector, parent_node, value)
+ return process_list_breadth_first(var_collector, parent_node, value)
elif isinstance(value, Exception):
- return process_list_breadth_first(frame_collector, parent_node, value.args)
+ return process_list_breadth_first(var_collector, parent_node, value.args)
elif hasattr(value, '__class__'):
return process_dict_breadth_first(parent_node, variable_type.__name__, value.__dict__, correct_names)
elif hasattr(value, '__dict__'):
@@ -322,14 +346,14 @@ def process_dict_breadth_first(parent_node, type_name, value, func=lambda x, y:
key in value]
-def process_list_breadth_first(frame_collector: Collector, parent_node: ParentNode, value) -> List[Node]:
+def process_list_breadth_first(var_collector: Collector, parent_node: ParentNode, value) -> List[Node]:
"""
Process a list value.
Take a list and collect all the child nodes for the list. Returned list is
limited by the config 'max_collection_size'.
- :param (Collector) frame_collector: the collector that is managing this collection
+ :param (Collector) var_collector: the collector that is managing this collection
:param (ParentNode) parent_node: the node that represents the list, to be used as the parent for the returned nodes
:param (any) value: the list value to process
:return (list): the collected child nodes
@@ -337,7 +361,7 @@ def process_list_breadth_first(frame_collector: Collector, parent_node: ParentNo
nodes = []
total = 0
for val_ in tuple(value):
- if total >= frame_collector.frame_config.max_collection_size:
+ if total >= var_collector.max_collection_size:
break
nodes.append(Node(value=NodeValue(str(total), val_), parent=parent_node))
total += 1
diff --git a/src/deep/processor/variable_set_processor.py b/src/deep/processor/variable_set_processor.py
new file mode 100644
index 0000000..537804f
--- /dev/null
+++ b/src/deep/processor/variable_set_processor.py
@@ -0,0 +1,261 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Handle the processing of variables sets."""
+
+from typing import Tuple, Optional, Dict
+
+from deep.api.tracepoint import Variable, VariableId
+from deep.processor.bfs import ParentNode, Node, NodeValue, breadth_first_search
+from deep.processor.variable_processor import process_variable, \
+ process_child_nodes, Collector
+
+
+class VariableCacheProvider:
+ """
+ Variable cache provider.
+
+ Manage the caching of variables for a trigger context.
+ """
+
+ __cache: Dict[str, str]
+
+ def __init__(self):
+ """Create new cache."""
+ self.__cache = {}
+
+ def check_id(self, identity_hash_id) -> Optional[str]:
+ """
+ Check if id is in the cache.
+
+ :param identity_hash_id: the identity hash to check
+ :return: the internal id for this hash, or None if not set
+ """
+ if identity_hash_id in self.__cache:
+ return self.__cache[identity_hash_id]
+ return None
+
+ @property
+ def size(self):
+ """The number of variables we have cached."""
+ return len(self.__cache)
+
+ def new_var_id(self, identity_hash_id):
+ """
+ Create a new variable id from the hash id.
+
+ :param identity_hash_id: the hash id to map the new id to.
+ :return: the new id
+ """
+ var_count = self.size
+ new_id = str(var_count + 1)
+ self.__cache[identity_hash_id] = new_id
+ return new_id
+
+
+class VariableProcessorConfig:
+ """Variable process config."""
+
+ DEFAULT_MAX_VAR_DEPTH = 5
+ DEFAULT_MAX_VARIABLES = 1000
+ DEFAULT_MAX_COLLECTION_SIZE = 10
+ DEFAULT_MAX_STRING_LENGTH = 1024
+ DEFAULT_MAX_WATCH_VARS = 100
+
+ def __init__(self, max_string_length=DEFAULT_MAX_STRING_LENGTH, max_variables=DEFAULT_MAX_VARIABLES,
+ max_collection_size=DEFAULT_MAX_COLLECTION_SIZE, max_var_depth=DEFAULT_MAX_VAR_DEPTH):
+ """
+ Create a new config for the variable processing.
+
+ :param max_string_length: the max length of a string
+ :param max_variables: the max number of variables
+ :param max_collection_size: the max size of a collection
+ :param max_var_depth: the max depth to process
+ """
+ self.max_var_depth = max_var_depth
+ self.max_collection_size = max_collection_size
+ self.max_variables = max_variables
+ self.max_string_length = max_string_length
+
+
+class VariableSetProcessor(Collector):
+ """Handle the processing of variables."""
+
+ def __init__(self, var_lookup: Dict[str, 'Variable'], var_cache: VariableCacheProvider,
+ config: VariableProcessorConfig = VariableProcessorConfig()):
+ """
+ Create a new variable set processor.
+
+ :param var_lookup: the var lookup to use
+ :param var_cache: the var cache to use
+ :param config: the var process config to use
+ """
+ self.__var_lookup = var_lookup
+ self.__var_cache = var_cache
+ self.__config = config
+
+ def process_variable(self, name: str, value: any) -> Tuple[VariableId, str]:
+ """
+ Process a variable name and value.
+
+ :param name: the variable name
+ :param value: the variable value
+ :return: the variable id for the processed value and the string representation of the value
+ """
+ identity_hash_id = str(id(value))
+ check_id = self.__var_cache.check_id(identity_hash_id)
+ if check_id is not None:
+ # this means the watch result is already in the var_lookup
+ return VariableId(check_id, name), str(value)
+
+ # else this is an unknown value so process breadth first
+ var_ids = []
+
+ class FrameParent(ParentNode):
+
+ def add_child(self, child):
+ var_ids.append(child)
+
+ root_parent = FrameParent()
+
+ initial_nodes = [Node(NodeValue(name, value), parent=root_parent)]
+ breadth_first_search(Node(None, initial_nodes, root_parent), self.search_function)
+
+ var_id = self.__var_cache.check_id(identity_hash_id)
+
+ return VariableId(var_id, name), str(value)
+
+ def search_function(self, node: Node) -> bool:
+ """
+ Search for child variables using BFS.
+
+ This is the search function to use during BFS.
+
+ :param node: the current node we are processing
+ :return: True, if we want to continue with the nodes children
+ """
+ if not self.check_var_count():
+ # we have exceeded the var count, so do not continue
+ return False
+
+ node_value = node.value
+ if node_value is None:
+ # this node has no value, continue with children
+ return True
+
+ # process this node variable
+ process_result = process_variable(self, node_value)
+ var_id = process_result.variable_id
+ # add the result to the parent - this maintains the hierarchy in the var lookup
+ node.parent.add_child(var_id)
+
+ # some variables do not want the children processed (e.g. strings)
+ if process_result.process_children:
+ # process children and add to node
+ child_nodes = process_child_nodes(self, var_id.vid, node_value.value, node.depth)
+ node.add_children(child_nodes)
+ return True
+
+ def check_var_count(self):
+ """Check if we have processed our max set of variables."""
+ if self.__var_cache.size > self.__config.max_variables:
+ return False
+ return True
+
+ @property
+ def var_lookup(self):
+ """Get var look up."""
+ return self.__var_lookup
+
+ @property
+ def max_string_length(self) -> int:
+ """
+ Get the max length of a string.
+
+ :return int: the configured value
+ """
+ return self.__config.max_string_length
+
+ @property
+ def max_collection_size(self) -> int:
+ """
+ Get the max size of a collection.
+
+ :return int: the configured value
+ """
+ return self.__config.max_collection_size
+
+ @property
+ def max_var_depth(self) -> int:
+ """
+ Get the max depth to process.
+
+ :return int: the configured value
+ """
+ return self.__config.max_var_depth
+
+ def append_child(self, variable_id, child):
+ """
+ Append a child to an existing variable.
+
+ This is called when a child variable has been processed and the result should be attached to a
+ variable that has already been processed.
+
+ :param str variable_id: the internal variable id of the parent variable
+ :param VariableId child: the internal variable id value to attach to the parent
+ """
+ self.__var_lookup[variable_id].children.append(child)
+
+ def check_id(self, identity_hash_id: str) -> str:
+ """
+ Check if the identity_hash_id is known to us, and return the lookup id.
+
+ :param identity_hash_id: the id of the object
+ :return: the lookup id used
+ """
+ return self.__var_cache.check_id(identity_hash_id)
+
+ def new_var_id(self, identity_hash_id: str) -> str:
+ """
+ Create a new cache id for the lookup.
+
+ :param identity_hash_id: the id of the object
+ :return: the new lookup id
+ """
+ return self.__var_cache.new_var_id(identity_hash_id)
+
+ def append_variable(self, var_id, variable):
+ """
+ Append a variable to the var lookup.
+
+ This is called when a variable has been processed
+
+ :param var_id: the internal id of the variable
+ :param variable: the internal value of the variable
+ """
+ self.__var_lookup[var_id] = variable
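As a rough illustration of how the new variable set processing fits together (a hypothetical sketch; the literal dict, the max_var_depth override and the printed fields are illustrative, not taken from the tests):

    from deep.processor.variable_set_processor import (
        VariableCacheProvider, VariableProcessorConfig, VariableSetProcessor)

    var_lookup = {}  # shared lookup that a snapshot would later reference
    processor = VariableSetProcessor(var_lookup, VariableCacheProvider(),
                                     VariableProcessorConfig(max_var_depth=3))

    # returns the VariableId pointing into var_lookup and str(value)
    var_id, value_str = processor.process_variable("person", {"name": "bob"})
    print(var_id.vid, value_str, len(var_lookup))
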
diff --git a/src/deep/push/__init__.py b/src/deep/push/__init__.py
index 817d543..15b4e5e 100644
--- a/src/deep/push/__init__.py
+++ b/src/deep/push/__init__.py
@@ -97,5 +97,6 @@ def convert_snapshot(snapshot: EventSnapshot) -> Snapshot:
snapshot.resource.attributes.items()],
log_msg=snapshot.log_msg)
except Exception:
+ # todo should this return None?
logging.exception("Error converting to protobuf")
return None
diff --git a/src/deep/thread_local.py b/src/deep/thread_local.py
new file mode 100644
index 0000000..fe7981d
--- /dev/null
+++ b/src/deep/thread_local.py
@@ -0,0 +1,88 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Provide type to store data based on the calling thread."""
+
+import threading
+from typing import TypeVar, Generic, Callable
+
+
+T = TypeVar('T')
+
+
+class ThreadLocal(Generic[T]):
+ """This type offers the ability to store a value based on the thread that accessed the value."""
+
+ __store = {}
+
+ def __init__(self, default_provider: Callable[[], T] = lambda: None):
+ """
+ Create a new ThreadLocal value.
+
+ :param default_provider: a provider that will produce a default value
+ """
+ self.__default_provider = default_provider
+
+ def get(self) -> T:
+ """
+ Get the value stored for the calling thread.
+
+ :return: the stored value, or the value from the default_provider
+ """
+ current_thread = threading.current_thread()
+ return self.__store.get(current_thread.ident, self.__default_provider())
+
+ def set(self, val: T):
+ """
+ Set the value to get stored.
+
+ :param val: the value to store
+ """
+ current_thread = threading.current_thread()
+ self.__store[current_thread.ident] = val
+
+ def clear(self):
+ """Remove the value for this thread."""
+ current_thread = threading.current_thread()
+ if current_thread.ident in self.__store:
+ del self.__store[current_thread.ident]
+
+ @property
+ def is_set(self):
+ """
+ Check if the value is set for this thread.
+
+ :return: True if there is a value for this thread
+ """
+ current_thread = threading.current_thread()
+ return current_thread.ident in self.__store
+
+ @property
+ def value(self):
+ """
+ Get the value stored for the calling thread.
+
+ :return: the stored value, or the value from the default_provider
+ """
+ return self.get()
+
+ @value.setter
+ def value(self, value):
+ """
+ Set the value to get stored.
+
+ :param value: the value to store
+ """
+ self.set(value)
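The new ThreadLocal type keys values by the calling thread's ident, so each thread sees only what it stored, while unset threads fall back to the default_provider. A small illustrative example (the worker function and names are made up for this sketch):

    from collections import deque
    from threading import Thread

    from deep.thread_local import ThreadLocal

    callbacks: ThreadLocal[deque] = ThreadLocal(lambda: deque())


    def worker(name):
        callbacks.set(deque([name]))        # stored under this thread's ident
        print(name, list(callbacks.value))  # each thread sees only its own value


    threads = [Thread(target=worker, args=(n,)) for n in ("a", "b")]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    # the main thread never called set(), so the default_provider
    # supplies a fresh empty deque here
    print(list(callbacks.get()))
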
diff --git a/src/deep/utils.py b/src/deep/utils.py
index 8edb4ed..9927abe 100644
--- a/src/deep/utils.py
+++ b/src/deep/utils.py
@@ -40,7 +40,7 @@ def str2bool(string):
Convert a string to a boolean.
:param string: the string to convert
- :return: True, if string is yes, true, t or 1. (case insensitive)
+ :return: True, if string is yes, true, t or 1. (case-insensitive)
"""
return string.lower() in ("yes", "true", "t", "1", "y")
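For clarity, the documented behaviour amounts to (illustrative only):

    from deep.utils import str2bool

    assert str2bool("Yes") is True   # case-insensitive: yes/true/t/1/y
    assert str2bool("1") is True
    assert str2bool("no") is False   # anything else is False
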
diff --git a/tests/it_tests/__init__.py b/tests/it_tests/__init__.py
new file mode 100644
index 0000000..5af5165
--- /dev/null
+++ b/tests/it_tests/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""A collection of IT tests."""
diff --git a/tests/it_tests/it_utils.py b/tests/it_tests/it_utils.py
new file mode 100644
index 0000000..70b075b
--- /dev/null
+++ b/tests/it_tests/it_utils.py
@@ -0,0 +1,179 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Provide GRPC services to use in IT tests."""
+
+import uuid
+from concurrent import futures
+from threading import Thread, Condition
+
+import deepproto
+import grpc
+# noinspection PyUnresolvedReferences
+from deepproto.proto.poll.v1.poll_pb2 import PollResponse, ResponseType
+from deepproto.proto.poll.v1.poll_pb2_grpc import PollConfigServicer
+# noinspection PyUnresolvedReferences
+from deepproto.proto.tracepoint.v1.tracepoint_pb2 import TracePointConfig, SnapshotResponse
+from deepproto.proto.tracepoint.v1.tracepoint_pb2_grpc import SnapshotServiceServicer
+
+
+def start_server() -> 'MockServer':
+ """Create a new MockServer."""
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+ return MockServer(server)
+
+
+class MockServer:
+ """Create a GRPC service that we can connect to during IT tests."""
+
+ def __init__(self, server):
+ """Create a new MockServer."""
+ self.__thread = None
+ self.__poll_service = PollServicer()
+ self.__snapshot_service = SnapshotServicer()
+ self.__server = server
+
+ def __enter__(self):
+ """Start server in thread when 'with' statement starts."""
+ deepproto.proto.poll.v1.poll_pb2_grpc.add_PollConfigServicer_to_server(
+ self.__poll_service, self.__server)
+ deepproto.proto.tracepoint.v1.tracepoint_pb2_grpc.add_SnapshotServiceServicer_to_server(self.__snapshot_service,
+ self.__server)
+ self.__server.add_insecure_port('[::]:43315')
+ self.__server.start()
+ self.__thread = Thread(target=self.__await)
+ self.__thread.start()
+ return self
+
+ def __await(self):
+ self.__server.wait_for_termination()
+
+ def __exit__(self, exception_type, exception_value, exception_traceback):
+ """Stop and shutdown GRPC service when 'with' statement completes."""
+ self.__server.stop(10)
+ self.__thread.join()
+
+ def config(self, custom=None):
+ """Get the config for deep to connect to this service."""
+ if custom is None:
+ custom = {}
+ custom['SERVICE_URL'] = "127.0.0.1:43315"
+ custom['SERVICE_SECURE'] = 'False'
+ return custom
+
+ @property
+ def snapshot(self):
+ """Get the last received snapshot."""
+ return self.__snapshot_service.snapshot
+
+ def add_tp(self, path, line, args=None, watches=None, metrics=None):
+ """Add a new Tracepoint to the next poll."""
+ if metrics is None:
+ metrics = []
+ if watches is None:
+ watches = []
+ if args is None:
+ args = {}
+ self.__poll_service.tps.append(TracePointConfig(ID=str(uuid.uuid4()),
+ path=path,
+ line_number=line,
+ args=args,
+ watches=watches,
+ metrics=metrics))
+ self.__poll_service.hash = str(uuid.uuid4())
+
+ def await_poll(self):
+ """Wait for the next poll to be received. Times out after 10 seconds."""
+ with self.__poll_service.condition:
+ self.__poll_service.condition.wait(10)
+
+ def await_snapshot(self):
+ """Wait for the next snapshot to be received. Times out after 10 seconds."""
+ with self.__snapshot_service.condition:
+ self.__snapshot_service.condition.wait(10)
+ return self.__snapshot_service.snapshot
+
+
+class PollServicer(PollConfigServicer):
+ """Class for handling poll requests during IT tests."""
+
+ def __init__(self):
+ """Create a new service."""
+ self.__tps = []
+ self.__hash = str(uuid.uuid4())
+ self.__condition = Condition()
+
+ def poll(self, request, context):
+ """Handle a poll request."""
+ try:
+ return PollResponse(ts_nanos=request.ts_nanos, current_hash=self.__hash, response=self.__tps,
+ response_type=ResponseType.NO_CHANGE if request.current_hash == self.__hash
+ else ResponseType.UPDATE)
+ finally:
+ with self.__condition:
+ self.__condition.notify_all()
+
+ @property
+ def condition(self):
+ """Get the condition used to control this service."""
+ return self.__condition
+
+ @property
+ def tps(self):
+ """The current config."""
+ return self.__tps
+
+ @tps.setter
+ def tps(self, value):
+ """Update current config."""
+ self.__tps = value
+
+ @property
+ def hash(self):
+ """The current config hash."""
+ return self.__hash
+
+ @hash.setter
+ def hash(self, value):
+ """Update current config hash."""
+ self.__hash = value
+
+
+class SnapshotServicer(SnapshotServiceServicer):
+ """Class for handling snapshots during IT tests."""
+
+ def __init__(self):
+ """Create a new service."""
+ self.__snapshot = None
+ self.__condition = Condition()
+
+ def send(self, request, context):
+ """Handle a snapshot send event."""
+ if request.ByteSize() == 0:
+ return SnapshotResponse()
+ self.__snapshot = request
+ with self.__condition:
+ self.__condition.notify_all()
+ return SnapshotResponse()
+
+ @property
+ def snapshot(self):
+ """Get the last received snapshot."""
+ return self.__snapshot
+
+ @property
+ def condition(self):
+ """Get the condition used to control this service."""
+ return self.__condition
diff --git a/tests/it_tests/test_it_basic.py b/tests/it_tests/test_it_basic.py
new file mode 100644
index 0000000..bb381b0
--- /dev/null
+++ b/tests/it_tests/test_it_basic.py
@@ -0,0 +1,58 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""A collection of IT tests that simulate user usage of Deep."""
+
+import unittest
+
+import deep
+import it_tests
+from it_tests.it_utils import start_server, MockServer
+from it_tests.test_target import BPTargetTest
+from test_utils import find_var_in_snap_by_name, find_var_in_snap_by_path
+
+
+class BasicITTest(unittest.TestCase):
+ """These tests are intended to simulate a real user installing and using Deep."""
+
+ def test_simple_it(self):
+ server: MockServer
+ with start_server() as server:
+ server.add_tp("test_target.py", 40, {}, [], [])
+ _deep = deep.start(server.config({}))
+ server.await_poll()
+ test = BPTargetTest("name", 123)
+ _ = test.name
+ snapshot = server.await_snapshot()
+ _deep.shutdown()
+ self.assertIsNotNone(snapshot)
+ frames = snapshot.frames
+ self.assertEqual(it_tests.test_target.__file__, frames[0].file_name)
+ self.assertEqual("/it_tests/test_target.py", frames[0].short_path)
+ self.assertEqual(40, frames[0].line_number)
+ self.assertEqual(4, len(frames[0].variables))
+ self.assertEqual(6, len(snapshot.var_lookup))
+
+ var_name = find_var_in_snap_by_name(snapshot, "name")
+ self.assertIsNotNone(var_name)
+
+ var_i = find_var_in_snap_by_name(snapshot, "i")
+ self.assertIsNotNone(var_i)
+
+ var_self = find_var_in_snap_by_name(snapshot, "self")
+ self.assertIsNotNone(var_self)
+
+ var_not_on_super = find_var_in_snap_by_path(snapshot, "self._BPSuperClass__not_on_super")
+ self.assertIsNotNone(var_not_on_super)
diff --git a/tests/it_tests/test_target.py b/tests/it_tests/test_target.py
new file mode 100644
index 0000000..ee01edf
--- /dev/null
+++ b/tests/it_tests/test_target.py
@@ -0,0 +1,77 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""
+Provide target for tests.
+
+NOTE: the line numbers in this file are used by other tests when installing tracepoints. It is therefore important
+that lines in this file are changed carefully, as changes can result in many test failures.
+"""
+
+import random
+
+
+class BPSuperClass:
+ """This is used to test the discovery of variables in super classes."""
+
+ def __init__(self, name):
+ self.__name = name
+ self.__not_on_super = 11
+
+
+class BPTargetTest(BPSuperClass):
+ """This is a test class that is used by other tests as the target for tracepoints."""
+
+ def __init__(self, name, i):
+ super().__init__("i am a name" + name)
+ self.__name = name
+ self.__i = i
+
+ @property
+ def name(self):
+ return self.__name
+
+ @name.setter
+ def name(self, value):
+ self.__name = value
+
+ def call_something(self, val):
+ return self.name + val
+
+ def error_something(self, val):
+ return len(val)
+
+ def throw_something(self, val):
+ raise Exception(val)
+
+ def catch_something(self, val):
+ try:
+ raise Exception(val)
+ except Exception as e:
+ return str(e)
+
+ def finally_something(self, val):
+ try:
+ raise Exception(val)
+ except Exception as e:
+ return str(e)
+ finally:
+ print("finally_something")
+
+ def some_func_with_body(self, some_arg):
+ name = self.__name
+ new_name = name + some_arg
+ i = random.randint(3, 9)
+ return i + new_name
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 0000000..bc6da74
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,48 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+"""Collection of utils for testing."""
+
+
+def find_var_by_name(grpc_snapshot, _vars, name):
+ """Find a variable by name in a list of variables."""
+ var_id = None
+ for var in _vars:
+ if var.name == name:
+ var_id = var
+ break
+ if var_id is None:
+ return None
+ return grpc_snapshot.var_lookup[var_id.ID]
+
+
+def find_var_in_snap_by_name(grpc_snapshot, name):
+ """Find a variable in the snapshot by name."""
+ _vars = grpc_snapshot.frames[0].variables
+ return find_var_by_name(grpc_snapshot, _vars, name)
+
+
+def find_var_in_snap_by_path(grpc_snapshot, path):
+ """Find a variable in a snapshot by using the path."""
+ _vars = grpc_snapshot.frames[0].variables
+ parts = path.split('.')
+ var = None
+ for part in parts:
+ var = find_var_by_name(grpc_snapshot, _vars, part)
+ if var is None:
+ return None
+ else:
+ _vars = var.children
+ return var
diff --git a/tests/unit_tests/api/__init__.py b/tests/unit_tests/api/__init__.py
index 962577d..53e9b3b 100644
--- a/tests/unit_tests/api/__init__.py
+++ b/tests/unit_tests/api/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2024 Intergral GmbH
+# Copyright (C) 2023 Intergral GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
diff --git a/tests/unit_tests/api/attributes/test_attributes.py b/tests/unit_tests/api/attributes/test_attributes.py
index a5a5c8c..ad948ce 100644
--- a/tests/unit_tests/api/attributes/test_attributes.py
+++ b/tests/unit_tests/api/attributes/test_attributes.py
@@ -16,6 +16,7 @@
import unittest
from typing import MutableSequence
+# noinspection PyProtectedMember
from deep.api.attributes import _clean_attribute, BoundedAttributes
@@ -97,7 +98,7 @@ class TestBoundedAttributes(unittest.TestCase):
]
)
- def test_negative_maxlen(self):
+ def test_negative_max_length(self):
with self.assertRaises(ValueError):
BoundedAttributes(-1)
@@ -170,7 +171,7 @@ def test_bounded_dict(self):
_ = bdict["new-name"]
def test_no_limit_code(self):
- bdict = BoundedAttributes(maxlen=None, immutable=False)
+ bdict = BoundedAttributes(max_length=None, immutable=False)
for num in range(100):
bdict[str(num)] = num
diff --git a/tests/unit_tests/api/resource/test_resource.py b/tests/unit_tests/api/resource/test_resource.py
index c07720c..2edebbc 100644
--- a/tests/unit_tests/api/resource/test_resource.py
+++ b/tests/unit_tests/api/resource/test_resource.py
@@ -20,6 +20,7 @@
from deep import version, logging
from deep.api.attributes import BoundedAttributes
+# noinspection PyProtectedMember
from deep.api.resource import Resource, TELEMETRY_SDK_NAME, TELEMETRY_SDK_LANGUAGE, TELEMETRY_SDK_VERSION, \
SERVICE_NAME, DEEP_SERVICE_NAME, DEEP_RESOURCE_ATTRIBUTES, ResourceDetector, _DEFAULT_RESOURCE, \
get_aggregated_resources, _DEEP_SDK_VERSION, _EMPTY_RESOURCE, PROCESS_EXECUTABLE_NAME
@@ -148,10 +149,9 @@ def test_resource_merge(self):
self.assertIn(schema_urls[1], log_entry.output[0])
def test_resource_merge_empty_string(self):
- """Verify Resource.merge behavior with the empty string.
+ """Verify Resource#merge behavior with the empty string.
- Attributes from the source Resource take precedence, with
- the exception of the empty string.
+ Attributes from the source Resource take precedence, except the empty string.
"""
left = Resource({"service": "ui", "host": ""})
@@ -190,6 +190,7 @@ def test_immutability(self):
self.assertEqual(resource.attributes, attributes_copy)
with self.assertRaises(AttributeError):
+ # noinspection PyPropertyAccess
resource.schema_url = "bug"
self.assertEqual(resource.schema_url, "")
diff --git a/tests/unit_tests/config/test_tracepoint_config.py b/tests/unit_tests/config/test_tracepoint_config.py
index db7449a..1f272cc 100644
--- a/tests/unit_tests/config/test_tracepoint_config.py
+++ b/tests/unit_tests/config/test_tracepoint_config.py
@@ -94,7 +94,7 @@ def config_change(me, ts, old_hash, current_hash, old_config, new_config):
service.add_listener(TestListener())
- service.add_custom("path", 123, {}, [])
+ service.add_custom("path", 123, {}, [], [])
handler.flush()
@@ -114,7 +114,7 @@ def config_change(me, ts, old_hash, current_hash, old_config, new_config):
service.add_listener(TestListener())
- service.add_custom("path", 123, {}, [])
+ service.add_custom("path", 123, {}, [], [])
handler.flush()
@@ -134,7 +134,7 @@ def config_change(me, ts, old_hash, current_hash, old_config, new_config):
service.add_listener(TestListener())
- custom = service.add_custom("path", 123, {}, [])
+ custom = service.add_custom("path", 123, {}, [], [])
handler.flush()
diff --git a/tests/unit_tests/grpc/test_grpc.py b/tests/unit_tests/grpc/test_grpc.py
index efeebd2..0733cc7 100644
--- a/tests/unit_tests/grpc/test_grpc.py
+++ b/tests/unit_tests/grpc/test_grpc.py
@@ -15,7 +15,9 @@
import unittest
-from deep.grpc import convert_value
+from deep.grpc import convert_value, convert_label_expressions
+# noinspection PyUnresolvedReferences
+from deepproto.proto.tracepoint.v1.tracepoint_pb2 import LabelExpression
class GRPCPackage(unittest.TestCase):
@@ -68,3 +70,15 @@ def test_convert_dict(self):
value = convert_value({'some': 'string'})
self.assertEqual(value.kvlist_value.values[0].key, "some")
self.assertEqual(value.kvlist_value.values[0].value.string_value, "string")
+
+ def test_convert_label_expression(self):
+ expression = LabelExpression(key="test", expression="a thing")
+ value = convert_label_expressions([expression])
+ self.assertEqual("test", value[0].key)
+ self.assertEqual("a thing", value[0].expression)
+
+ def test_convert_label_static(self):
+ expression = LabelExpression(key="test", static=convert_value("a string"))
+ value = convert_label_expressions([expression])
+ self.assertEqual("test", value[0].key)
+ self.assertEqual("a string", value[0].static)
diff --git a/tests/unit_tests/poll/test_poll.py b/tests/unit_tests/poll/test_poll.py
index 3dc189a..2a5828a 100644
--- a/tests/unit_tests/poll/test_poll.py
+++ b/tests/unit_tests/poll/test_poll.py
@@ -43,6 +43,7 @@ def test_can_poll(self):
self.poll_request = None
+ # noinspection PyUnusedLocal
def mock_poll(request, **kwargs):
self.poll_request = request
return PollResponse(response_type=ResponseType.NO_CHANGE)
@@ -68,6 +69,7 @@ def test_can_poll_new_cfg(self):
self.poll_request = None
+ # noinspection PyUnusedLocal
def mock_poll(request, **kwargs):
self.poll_request = request
return PollResponse(response_type=ResponseType.UPDATE)
diff --git a/tests/unit_tests/processor/__init__.py b/tests/unit_tests/processor/__init__.py
index b6573b1..87f6605 100644
--- a/tests/unit_tests/processor/__init__.py
+++ b/tests/unit_tests/processor/__init__.py
@@ -14,7 +14,7 @@
# along with this program. If not, see .
class MockFrame:
- """A Frame used during testing to Mock a debug Frame."""
+ """Create a mock frame that can be used in tests."""
def __init__(self, _locals=None):
if _locals is None:
diff --git a/tests/unit_tests/processor/context/__init__.py b/tests/unit_tests/processor/context/__init__.py
new file mode 100644
index 0000000..962577d
--- /dev/null
+++ b/tests/unit_tests/processor/context/__init__.py
@@ -0,0 +1,14 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
diff --git a/tests/unit_tests/processor/context/test_log_action.py b/tests/unit_tests/processor/context/test_log_action.py
new file mode 100644
index 0000000..2f7f9d5
--- /dev/null
+++ b/tests/unit_tests/processor/context/test_log_action.py
@@ -0,0 +1,71 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import unittest
+
+from parameterized import parameterized
+
+from deep.processor.context.log_action import LogActionContext
+from deep.processor.context.trigger_context import TriggerContext
+from unit_tests.processor import MockFrame
+
+
+class TestLogMessages(unittest.TestCase):
+ @parameterized.expand([
+ ["some log message", "[deep] some log message", {}, []],
+ ["some log message: {name}", "[deep] some log message: bob", {'name': 'bob'}, ['bob']],
+ ["some log message: {len(name)}", "[deep] some log message: 3", {'name': 'bob'}, ['3']],
+ ["some log message: {person}", "[deep] some log message: {'name': 'bob'}",
+ {'person': {'name': 'bob'}}, ["Size: 1"]],
+ ["some log message: {person.name}", "[deep] some log message: 'dict' object has no attribute 'name'",
+ {'person': {'name': 'bob'}}, ["'dict' object has no attribute 'name'"]],
+ ["some log message: {person['name']}", "[deep] some log message: bob", {'person': {'name': 'bob'}}, ["bob"]],
+ ])
+ def test_simple_log_interpolation(self, log_msg, expected_msg, _locals, expected_watches):
+ context = LogActionContext(TriggerContext(None, None, MockFrame(_locals), "test"), None)
+ log, watches, _vars = context.process_log(log_msg)
+ self.assertEqual(expected_msg, log)
+ self.assertEqual(len(expected_watches), len(watches))
+ for i, watch in enumerate(watches):
+ if watch.error is None:
+ self.assertEqual(_vars[watch.result.vid].value, expected_watches[i])
+ else:
+ self.assertEqual(watch.error, expected_watches[i])
diff --git a/tests/unit_tests/processor/test_log_messages.py b/tests/unit_tests/processor/test_log_messages.py
index dd8416b..761f3da 100644
--- a/tests/unit_tests/processor/test_log_messages.py
+++ b/tests/unit_tests/processor/test_log_messages.py
@@ -25,12 +25,13 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see .
+
import unittest
from parameterized import parameterized
-from deep.config import ConfigService
-from deep.processor.frame_processor import FrameProcessor
+from deep.processor.context.log_action import LogActionContext
+from deep.processor.context.trigger_context import TriggerContext
from unit_tests.processor import MockFrame
@@ -46,11 +47,9 @@ class TestLogMessages(unittest.TestCase):
["some log message: {person['name']}", "[deep] some log message: bob", {'person': {'name': 'bob'}}, ["bob"]],
])
def test_simple_log_interpolation(self, log_msg, expected_msg, _locals, expected_watches):
- # noinspection PyTypeChecker
- # Frame type is final, so we cannot check the type here
- processor = FrameProcessor([], MockFrame(_locals), ConfigService({}))
- processor.configure_self()
- log, watches, _vars = processor.process_log({}, log_msg)
+ context = LogActionContext(TriggerContext(None, None, MockFrame(_locals), "test"), None)
+ log, watches, _vars = context.process_log(log_msg)
+
self.assertEqual(expected_msg, log)
self.assertEqual(len(expected_watches), len(watches))
for i, watch in enumerate(watches):
diff --git a/tests/unit_tests/processor/test_trigger_handler.py b/tests/unit_tests/processor/test_trigger_handler.py
new file mode 100644
index 0000000..bf48dd2
--- /dev/null
+++ b/tests/unit_tests/processor/test_trigger_handler.py
@@ -0,0 +1,200 @@
+# Copyright (C) 2024 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+
+import threading
+import unittest
+from threading import Thread
+from typing import List
+
+from deep import logging
+from deep.api.resource import Resource
+from deep.api.tracepoint.constants import LOG_MSG, WATCHES
+from deep.api.tracepoint.eventsnapshot import EventSnapshot
+
+from deep.api.tracepoint.trigger import Location, LocationAction, LineLocation, Trigger
+from deep.config import ConfigService
+from deep.logging.tracepoint_logger import TracepointLogger
+from deep.processor.trigger_handler import TriggerHandler
+from deep.push.push_service import PushService
+from unit_tests.test_target import some_test_function
+
+
+class MockPushService(PushService):
+ def __init__(self, config, grpc, task_handler):
+ super().__init__(config, grpc, task_handler)
+ self.pushed: List[EventSnapshot] = []
+
+ def push_snapshot(self, snapshot: EventSnapshot):
+ self.pushed.append(snapshot)
+
+
+class MockTracepointLogger(TracepointLogger):
+
+ def __init__(self):
+ self.logged = []
+
+ def log_tracepoint(self, log_msg: str, tp_id: str, ctx_id: str):
+ self.logged.append(log_msg)
+
+
+class MockConfigService(ConfigService):
+ def __init__(self, custom):
+ super().__init__(custom)
+ self.logger = MockTracepointLogger()
+
+ @property
+ def tracepoint_logger(self) -> 'TracepointLogger':
+ return self.logger
+
+ @property
+ def resource(self) -> Resource:
+ return Resource.get_empty()
+
+
+class TraceCallCapture:
+
+ def __init__(self):
+ self.captured_frame = None
+ self.captured_event = None
+ self.captured_args = None
+
+ def capture_trace_call(self, location: Location):
+ def trace_call(frame, event, args):
+ event, file, line, function = TriggerHandler.location_from_event(event, frame)
+ if location.at_location(event, file, line, function):
+ self.captured_frame = frame
+ self.captured_event = event
+ self.captured_args = args
+ return trace_call
+
+ return trace_call
+
+
+logging.init(MockConfigService({}))
+
+
+class TestTriggerHandler(unittest.TestCase):
+
+ def call_and_capture(self, location, func, args, capture):
+ # here we execute the real code using a mock trace call that captures the args passed to trace_call
+ # we cannot debug this section of the code
+
+ # we use _trace_hook and not gettrace(), as gettrace() is not available in all tested versions of python
+ # noinspection PyUnresolvedReferences
+ current = threading._trace_hook
+ threading.settrace(capture.capture_trace_call(location))
+ thread = Thread(target=func, args=args)
+ thread.start()
+ thread.join(10)
+
+ # reset the set trace to the original one
+ threading.settrace(current)
+
+ if capture.captured_frame is None:
+ self.fail("Did not capture")
+
+ def test_log_action(self):
+ capture = TraceCallCapture()
+ config = MockConfigService({})
+ push = MockPushService(None, None, None)
+ handler = TriggerHandler(config, push)
+
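+ # line 27 of tests/unit_tests/test_target.py is the 'return val' line of some_test_function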
+ location = LineLocation('test_target.py', 27, Location.Position.START)
+ handler.new_config(
+ [Trigger(location, [LocationAction("tp_id", None, {LOG_MSG: "some log"}, LocationAction.ActionType.Log)])])
+
+ self.call_and_capture(location, some_test_function, ['args'], capture)
+
+ handler.trace_call(capture.captured_frame, capture.captured_event, capture.captured_args)
+
+ logged = config.logger.logged
+ self.assertEqual(1, len(logged))
+ self.assertEqual("[deep] some log", logged[0])
+
+ def test_log_action_with_watch(self):
+ capture = TraceCallCapture()
+ config = MockConfigService({})
+ push = MockPushService(None, None, None)
+ handler = TriggerHandler(config, push)
+
+ location = LineLocation('test_target.py', 27, Location.Position.START)
+ handler.new_config([Trigger(location, [
+ LocationAction("tp_id", None, {LOG_MSG: "some log {val}"}, LocationAction.ActionType.Log)])])
+
+ self.call_and_capture(location, some_test_function, ['input'], capture)
+
+ handler.trace_call(capture.captured_frame, capture.captured_event, capture.captured_args)
+
+ logged = config.logger.logged
+ self.assertEqual(1, len(logged))
+ self.assertEqual("[deep] some log inputsomething", logged[0])
+
+ def test_snapshot_action(self):
+ capture = TraceCallCapture()
+ config = MockConfigService({})
+ push = MockPushService(None, None, None)
+ handler = TriggerHandler(config, push)
+
+ location = LineLocation('test_target.py', 27, Location.Position.START)
+ handler.new_config([Trigger(location, [
+ LocationAction("tp_id", None, {WATCHES: ['arg']}, LocationAction.ActionType.Snapshot)])])
+
+ self.call_and_capture(location, some_test_function, ['input'], capture)
+
+ handler.trace_call(capture.captured_frame, capture.captured_event, capture.captured_args)
+
+ logged = config.logger.logged
+ self.assertEqual(0, len(logged))
+ pushed = push.pushed
+ self.assertEqual(1, len(pushed))
+ self.assertEqual(2, len(pushed[0].var_lookup))
+ self.assertEqual(2, len(pushed[0].frames[0].variables))
+
+ self.assertEqual(1, len(pushed[0].watches))
+ self.assertEqual("arg", pushed[0].watches[0].expression)
+ self.assertEqual("arg", pushed[0].watches[0].result.name)
+ self.assertEqual("input", pushed[0].var_lookup[pushed[0].watches[0].result.vid].value)
+
+ def test_snapshot_action_with_condition(self):
+ capture = TraceCallCapture()
+ config = MockConfigService({})
+ push = MockPushService(None, None, None)
+ handler = TriggerHandler(config, push)
+
+ location = LineLocation('test_target.py', 27, Location.Position.START)
+ handler.new_config([Trigger(location, [
+ LocationAction("tp_id", "arg == None", {}, LocationAction.ActionType.Snapshot)])])
+
+ self.call_and_capture(location, some_test_function, ['input'], capture)
+
+ handler.trace_call(capture.captured_frame, capture.captured_event, capture.captured_args)
+
+ logged = config.logger.logged
+ self.assertEqual(0, len(logged))
+ pushed = push.pushed
+ self.assertEqual(0, len(pushed))
diff --git a/tests/unit_tests/processor/test_variable_processor.py b/tests/unit_tests/processor/test_variable_processor.py
index 215b0d9..8628d78 100644
--- a/tests/unit_tests/processor/test_variable_processor.py
+++ b/tests/unit_tests/processor/test_variable_processor.py
@@ -54,6 +54,18 @@ def __eq__(self, o: object) -> bool:
class MockCollector(Collector):
+ @property
+ def max_string_length(self) -> int:
+ return self._config.max_string_length
+
+ @property
+ def max_collection_size(self) -> int:
+ return self._config.max_collection_size
+
+ @property
+ def max_var_depth(self) -> int:
+ return self._config.max_var_depth
+
def __init__(self):
self._var_cache = {}
self._var_lookup = {}
@@ -72,7 +84,7 @@ def var_lookup(self):
def frame_config(self) -> FrameProcessorConfig:
return self._config
- def add_child_to_lookup(self, variable_id, child):
+ def append_child(self, variable_id, child):
self._var_lookup[variable_id].children.append(child)
def check_id(self, identity_hash_id):
diff --git a/tests/unit_tests/push/test_push_service.py b/tests/unit_tests/push/test_push_service.py
index ea1982e..1c7cfa3 100644
--- a/tests/unit_tests/push/test_push_service.py
+++ b/tests/unit_tests/push/test_push_service.py
@@ -62,6 +62,7 @@ def test_push_service_function(self):
self.sent_snap = None
+ # noinspection PyUnusedLocal
def mock_send(snap, **kwargs):
self.sent_snap = snap
@@ -98,6 +99,7 @@ def complete(self):
self.sent_snap = None
+ # noinspection PyUnusedLocal
def mock_send(snap, **kwargs):
self.sent_snap = snap
diff --git a/tests/unit_tests/test_target.py b/tests/unit_tests/test_target.py
new file mode 100644
index 0000000..6638c35
--- /dev/null
+++ b/tests/unit_tests/test_target.py
@@ -0,0 +1,27 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Provide target for tests.
+
+NOTE: the line numbers in this file are used by other tests when installing tracepoints. It is therefore important
+that any change to this file is made carefully, as changes to the line numbers can cause many test failures.
+"""
+
+
+def some_test_function(arg):
+ val = arg + "something"
+
+ return val
diff --git a/tests/unit_tests/test_utils.py b/tests/unit_tests/test_utils.py
index 6c115b9..cef7bac 100644
--- a/tests/unit_tests/test_utils.py
+++ b/tests/unit_tests/test_utils.py
@@ -85,6 +85,7 @@ def test_repeated_timer_error():
global count
count = 0
+ # noinspection PyUnusedLocal
def repeated(val):
raise Exception("test")
diff --git a/tests/unit_tests/tracepoint/test_tracepoint_config.py b/tests/unit_tests/tracepoint/test_tracepoint_config.py
index f223fcc..d1fadf4 100644
--- a/tests/unit_tests/tracepoint/test_tracepoint_config.py
+++ b/tests/unit_tests/tracepoint/test_tracepoint_config.py
@@ -15,7 +15,7 @@
import unittest
-from deep.api.tracepoint.tracepoint_config import TracepointWindow, TracePointConfig, FIRE_PERIOD, FIRE_COUNT
+from deep.api.tracepoint.tracepoint_config import TracepointWindow, TracePointConfig
class TestTracepointWindow(unittest.TestCase):
@@ -48,32 +48,13 @@ def test_in_window_end(self):
class TestTracePointConfig(unittest.TestCase):
def test_get_arg(self):
- config = TracePointConfig('tp_id', 'path', 123, {'some': 'value'}, [])
+ config = TracePointConfig('tp_id', 'path', 123, {'some': 'value'}, [], [])
self.assertEqual(config.get_arg('some', 'thing'), 'value')
self.assertEqual(config.get_arg('other', 'thing'), 'thing')
def test_get_arg_int(self):
- config = TracePointConfig('tp_id', 'path', 123, {'some': 'value', 'num': 321}, [])
+ config = TracePointConfig('tp_id', 'path', 123, {'some': 'value', 'num': 321}, [], [])
# noinspection PyTypeChecker
self.assertEqual(config.get_arg_int('some', 'thing'), 'thing')
self.assertEqual(config.get_arg_int('other', 123), 123)
self.assertEqual(config.get_arg_int('num', 123), 321)
-
- def test_fire_count(self):
- config = TracePointConfig('tp_id', 'path', 123, {'some': 'value', 'num': 321}, [])
- self.assertEqual(config.fire_count, 1)
-
- self.assertTrue(config.can_trigger(1000))
- config.record_triggered(1000)
-
- self.assertFalse(config.can_trigger(1001))
-
- def test_fire_period(self):
- config = TracePointConfig('tp_id', 'path', 123, {FIRE_PERIOD: 10_000, FIRE_COUNT: 10}, [])
-
- self.assertEqual(config.fire_count, 10)
-
- self.assertTrue(config.can_trigger(1000))
- config.record_triggered(1000)
-
- self.assertFalse(config.can_trigger(1001))
diff --git a/tests/unit_tests/tracepoint/test_trigger.py b/tests/unit_tests/tracepoint/test_trigger.py
new file mode 100644
index 0000000..5576bbc
--- /dev/null
+++ b/tests/unit_tests/tracepoint/test_trigger.py
@@ -0,0 +1,65 @@
+# Copyright (C) 2023 Intergral GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+from unittest import TestCase
+
+from parameterized import parameterized
+
+from deep.api.tracepoint.trigger import build_trigger, LineLocation, LocationAction, Trigger, Location
+
+
+class Test(TestCase):
+
+ @parameterized.expand([
+ # Default for line is a snapshot
+ ["some.file", 123, {}, [], [],
+ Trigger(LineLocation("some.file", 123, Location.Position.START), [
+ LocationAction("tp-id", None, {
+ 'watches': [],
+ 'frame_type': 'single_frame',
+ 'stack_type': 'stack',
+ 'fire_count': '1',
+ 'fire_period': '1000',
+ 'log_msg': None,
+ }, LocationAction.ActionType.Snapshot)
+ ])],
+ # create snapshot and log
+ ["some.file", 123, {'log_msg': 'some_log'}, [], [],
+ Trigger(LineLocation("some.file", 123, Location.Position.START), [
+ LocationAction("tp-id", None, {
+ 'watches': [],
+ 'frame_type': 'single_frame',
+ 'stack_type': 'stack',
+ 'fire_count': '1',
+ 'fire_period': '1000',
+ 'log_msg': 'some_log',
+ }, LocationAction.ActionType.Snapshot),
+ ])],
+ # should create all frame snapshot
+ ["some.file", 123, {'log_msg': 'some_log', 'frame_type': 'all_frame'}, [], [],
+ Trigger(LineLocation("some.file", 123, Location.Position.START), [
+ LocationAction("tp-id", None, {
+ 'watches': [],
+ 'frame_type': 'all_frame',
+ 'stack_type': 'stack',
+ 'fire_count': '1',
+ 'fire_period': '1000',
+ 'log_msg': 'some_log',
+ }, LocationAction.ActionType.Snapshot),
+ ])]
+ ])
+ def test_build_triggers(self, file, line, args, watches, metrics, expected):
+ triggers = build_trigger("tp-id", file, line, args, watches, metrics)
+ self.assertEqual(expected, triggers)
diff --git a/tests/utils.py b/tests/utils.py
index bd26948..4dfd917 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -15,6 +15,7 @@
"""Utils used for making testing easier."""
+# noinspection PyProtectedMember
from mockito.matchers import Matcher
from deep.api.resource import Resource
@@ -39,6 +40,8 @@ def mock_tracepoint(**kwargs) -> TracePointConfig:
kwargs['args'] = {}
if 'watches' not in kwargs:
kwargs['watches'] = []
+ if 'metrics' not in kwargs:
+ kwargs['metrics'] = []
return TracePointConfig(**kwargs)