New Upstream Release - sentry-python

Ready changes

Summary

Merged new upstream version: 1.29.2 (was: 1.9.10). As the diff below shows, this jump brings a substantial CI overhaul alongside the SDK changes: Python 3.11 is added to most test matrices, Python 2.7 runs move into python:2.7 containers on ubuntu-20.04, coverage upload switches from the codecov CLI to codecov/codecov-action@v3 with tox pinned to ">=3,<4", and new integration test workflows (arq, cloud_resource_context, gevent) are introduced.
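
For downstream consumers, a minimal sanity check that the upgrade landed (a sketch, not part of this merge; it assumes sentry-sdk is installed from this release and relies on the VERSION constant in sentry_sdk/consts.py, which the CI diff below also greps):

    # hypothetical post-upgrade check, not part of the upstream diff
    from sentry_sdk.consts import VERSION
    print(VERSION)  # expected: "1.29.2" after this merge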

Diff

diff --git a/.craft.yml b/.craft.yml
index 353b02f..43bbfdd 100644
--- a/.craft.yml
+++ b/.craft.yml
@@ -1,4 +1,4 @@
-minVersion: 0.28.1
+minVersion: 0.34.1
 targets:
   - name: pypi
     includeNames: /^sentry[_\-]sdk.*$/
@@ -23,5 +23,7 @@ targets:
           - python3.8
           - python3.9
     license: MIT
+  - name: sentry-pypi
+    internalPypiRepo: getsentry/pypi
 changelog: CHANGELOG.md
 changelogPolicy: auto
diff --git a/.flake8 b/.flake8
index 37f5883..fb02f4f 100644
--- a/.flake8
+++ b/.flake8
@@ -15,3 +15,7 @@ extend-ignore =
   # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
   N804,
 extend-exclude=checkouts,lol*
+exclude =
+  # gRCP generated files
+  grpc_test_service_pb2.py
+  grpc_test_service_pb2_grpc.py
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index f6e4792..78f1e03 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -27,6 +27,8 @@ body:
         1. What
         2. you
         3. did.
+
+        Extra points for also including the output of `pip freeze --all`.
     validations:
       required: true
   - type: textarea
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ff9ca8c..7987680 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,7 +26,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: 3.11
 
       - run: |
           pip install tox
@@ -53,7 +53,6 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-node@v1
       - uses: actions/setup-python@v4
         with:
           python-version: 3.9
@@ -69,18 +68,6 @@ jobs:
           pip install virtualenv
           # This will also trigger "make dist" that creates the Python packages
           make aws-lambda-layer
-
-          echo "Saving SDK_VERSION for later"
-          export SDK_VERSION=$(grep "VERSION = " sentry_sdk/consts.py | cut -f3 -d' ' | tr -d '"')
-          echo "SDK_VERSION=$SDK_VERSION"
-          echo "SDK_VERSION=$SDK_VERSION" >> $GITHUB_ENV
-      - name: Upload Python AWS Lambda Layer
-        uses: getsentry/action-build-aws-lambda-extension@v1
-        with:
-          artifact_name: ${{ github.sha }}
-          zip_file_name: sentry-python-serverless-${{ env.SDK_VERSION }}.zip
-          build_cache_paths: ${{ env.CACHED_BUILD_PATHS }}
-          build_cache_key: ${{ env.BUILD_CACHE_KEY }}
       - name: Upload Python Packages
         uses: actions/upload-artifact@v3
         with:
@@ -95,17 +82,16 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-node@v1
       - uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: 3.11
 
       - run: |
           pip install virtualenv
           make apidocs
           cd docs/_build && zip -r gh-pages ./
 
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3.1.1
         with:
           name: ${{ github.sha }}
           path: docs/_build/gh-pages.zip
diff --git a/.github/workflows/jira.yml b/.github/workflows/jira.yml
deleted file mode 100644
index 485915b..0000000
--- a/.github/workflows/jira.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Create JIRA issue
-
-on:
-  issues:
-    types: [labeled]
-
-jobs:
-  createIssue:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: getsentry/ga-jira-integration@main
-        with:
-          JIRA_API_HOST: ${{secrets.JIRA_BASEURL}}
-          JIRA_API_TOKEN: ${{secrets.JIRA_APITOKEN}}
-          JIRA_EMAIL: ${{secrets.JIRA_USEREMAIL}}
-          TRIGGER_LABEL: "Jira"
-          JIRA_PROJECT_ID: WEBBACKEND
-          JIRA_ISSUE_NAME: Story
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
deleted file mode 100644
index e195d70..0000000
--- a/.github/workflows/stale.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: 'close stale issues/PRs'
-on:
-  schedule:
-    - cron: '0 0 * * *'
-  workflow_dispatch:
-permissions:
-  contents: read
-
-jobs:
-  stale:
-    permissions:
-      issues: write  # for actions/stale to close stale issues
-      pull-requests: write  # for actions/stale to close stale PRs
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/stale@v5
-        with:
-          repo-token: ${{ github.token }}
-          days-before-stale: 21
-          days-before-close: 7
-          only-labels: ""
-          operations-per-run: 100
-          remove-stale-when-updated: true
-          debug-only: false
-          ascending: false
-
-          exempt-issue-labels: "Status: Backlog,Status: In Progress"
-          stale-issue-label: "Status: Stale"
-          stale-issue-message: |-
-            This issue has gone three weeks without activity. In another week, I will close it.
-
-            But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever!
-
-            ----
-
-            "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀
-          close-issue-label: ""
-          close-issue-message: ""
-
-          exempt-pr-labels: "Status: Backlog,Status: In Progress"
-          stale-pr-label: "Status: Stale"
-          stale-pr-message: |-
-            This pull request has gone three weeks without activity. In another week, I will close it.
-
-            But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever!
-
-            ----
-
-            "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀
-          close-pr-label:
-          close-pr-message: ""
diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml
index 2c8964d..08a3eff 100644
--- a/.github/workflows/test-common.yml
+++ b/.github/workflows/test-common.yml
@@ -1,4 +1,4 @@
-name: Test Common
+name: Test common
 
 on:
   push:
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -18,32 +24,20 @@ env:
 
 jobs:
   test:
-    name: Test Python ${{ matrix.python-version }}, ${{ matrix.os }}
+    name: common, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
+
     strategy:
+      fail-fast: false
       matrix:
-        os: [ubuntu-latest]
-        python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
-    services:
-      postgres:
-        image: postgres
-        env:
-          POSTGRES_PASSWORD: sentry
-        # Set health checks to wait until postgres has started
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        # Maps tcp port 5432 on service container to the host
-        ports:
-          - 5432:5432
-    env:
-      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
-      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
-      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
@@ -51,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
+
+      - name: Test common
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-      - name: Run Tests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: common, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test common
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "py${{ matrix.python-version }}$" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch --ignore=tests/integrations
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+            # Run tests
+            ./scripts/runtox.sh "py2.7-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All common tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
index 62f0a48..6194986 100644
--- a/.github/workflows/test-integration-aiohttp.yml
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: aiohttp, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test aiohttp
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All aiohttp tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-arq.yml b/.github/workflows/test-integration-arq.yml
new file mode 100644
index 0000000..3d32b67
--- /dev/null
+++ b/.github/workflows/test-integration-arq.yml
@@ -0,0 +1,83 @@
+name: Test arq
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: arq, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test arq
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All arq tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
index 069ebbf..46f9a42 100644
--- a/.github/workflows/test-integration-asgi.yml
+++ b/.github/workflows/test-integration-asgi.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: asgi, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test asgi
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All asgi tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml
index 5e40fed..c4cbd78 100644
--- a/.github/workflows/test-integration-aws_lambda.yml
+++ b/.github/workflows/test-integration-aws_lambda.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: aws_lambda, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test aws_lambda
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All aws_lambda tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml
index 55f8e01..96d204b 100644
--- a/.github/workflows/test-integration-beam.yml
+++ b/.github/workflows/test-integration-beam.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: beam, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test beam
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All beam tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml
index 9b8747c..7894203 100644
--- a/.github/workflows/test-integration-boto3.yml
+++ b/.github/workflows/test-integration-boto3.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: boto3, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.6","3.7","3.8"]
-        os: [ubuntu-latest]
+        python-version: ["3.6","3.7","3.8"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test boto3
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: boto3, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test boto3
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All boto3 tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
index 8346382..9169be6 100644
--- a/.github/workflows/test-integration-bottle.yml
+++ b/.github/workflows/test-integration-bottle.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: bottle, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test bottle
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: bottle, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test bottle
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All bottle tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml
index 17feb5a..2c17986 100644
--- a/.github/workflows/test-integration-celery.yml
+++ b/.github/workflows/test-integration-celery.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: celery, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test celery
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: celery, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test celery
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All celery tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml
index 36067fc..e46190e 100644
--- a/.github/workflows/test-integration-chalice.yml
+++ b/.github/workflows/test-integration-chalice.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: chalice, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.6","3.7","3.8"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test chalice
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All chalice tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-cloud_resource_context.yml b/.github/workflows/test-integration-cloud_resource_context.yml
new file mode 100644
index 0000000..c3f541b
--- /dev/null
+++ b/.github/workflows/test-integration-cloud_resource_context.yml
@@ -0,0 +1,83 @@
+name: Test cloud_resource_context
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: cloud_resource_context, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test cloud_resource_context
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All cloud_resource_context tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
index db65972..e94b138 100644
--- a/.github/workflows/test-integration-django.yml
+++ b/.github/workflows/test-integration-django.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: django, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
     services:
       postgres:
         image: postgres
@@ -51,6 +55,8 @@ jobs:
       SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
       SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
       SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
+
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
@@ -58,22 +64,90 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test django
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: django, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+    services:
+      postgres:
+        image: postgres
         env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test django
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All django tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
index af4c701..363b8e2 100644
--- a/.github/workflows/test-integration-falcon.yml
+++ b/.github/workflows/test-integration-falcon.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: falcon, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test falcon
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: falcon, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test falcon
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All falcon tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
index 6352d13..67bcab5 100644
--- a/.github/workflows/test-integration-fastapi.yml
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: fastapi, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test fastapi
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All fastapi tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
index 8e35381..358f350 100644
--- a/.github/workflows/test-integration-flask.yml
+++ b/.github/workflows/test-integration-flask.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: flask, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test flask
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: flask, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test flask
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All flask tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml
index 8aa4e12..0e8ff18 100644
--- a/.github/workflows/test-integration-gcp.yml
+++ b/.github/workflows/test-integration-gcp.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: gcp, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test gcp
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All gcp tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-gevent.yml b/.github/workflows/test-integration-gevent.yml
new file mode 100644
index 0000000..db89365
--- /dev/null
+++ b/.github/workflows/test-integration-gevent.yml
@@ -0,0 +1,115 @@
+name: Test gevent
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: gevent, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test gevent
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: gevent, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test gevent
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py2.7-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All gevent tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-grpc.yml b/.github/workflows/test-integration-grpc.yml
new file mode 100644
index 0000000..e0cb74c
--- /dev/null
+++ b/.github/workflows/test-integration-grpc.yml
@@ -0,0 +1,83 @@
+name: Test grpc
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: grpc, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test grpc
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All grpc tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
index f9e1b4e..804b190 100644
--- a/.github/workflows/test-integration-httpx.yml
+++ b/.github/workflows/test-integration-httpx.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: httpx, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test httpx
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All httpx tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-huey.yml b/.github/workflows/test-integration-huey.yml
new file mode 100644
index 0000000..fa87ef5
--- /dev/null
+++ b/.github/workflows/test-integration-huey.yml
@@ -0,0 +1,115 @@
+name: Test huey
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: huey, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test huey
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: huey, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test huey
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py2.7-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All huey tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-loguru.yml b/.github/workflows/test-integration-loguru.yml
new file mode 100644
index 0000000..7bab1ae
--- /dev/null
+++ b/.github/workflows/test-integration-loguru.yml
@@ -0,0 +1,83 @@
+name: Test loguru
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: loguru, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test loguru
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-loguru" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All loguru tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-opentelemetry.yml b/.github/workflows/test-integration-opentelemetry.yml
new file mode 100644
index 0000000..872d523
--- /dev/null
+++ b/.github/workflows/test-integration-opentelemetry.yml
@@ -0,0 +1,83 @@
+name: Test opentelemetry
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: opentelemetry, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test opentelemetry
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All opentelemetry tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
index ef39704..2b0cc3d 100644
--- a/.github/workflows/test-integration-pure_eval.yml
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: pure_eval, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test pure_eval
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All pure_eval tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml
new file mode 100644
index 0000000..780f9b2
--- /dev/null
+++ b/.github/workflows/test-integration-pymongo.yml
@@ -0,0 +1,115 @@
+name: Test pymongo
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: pymongo, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test pymongo
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: pymongo, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test pymongo
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py2.7-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All pymongo tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
index bbd017b..9a1aa94 100644
--- a/.github/workflows/test-integration-pyramid.yml
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: pyramid, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test pyramid
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: pyramid, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test pyramid
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All pyramid tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
index de7671d..ea2ffad 100644
--- a/.github/workflows/test-integration-quart.yml
+++ b/.github/workflows/test-integration-quart.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: quart, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test quart
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All quart tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
index 6035208..3a29033 100644
--- a/.github/workflows/test-integration-redis.yml
+++ b/.github/workflows/test-integration-redis.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: redis, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test redis
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: redis, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test redis
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All redis tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
index 5866637..fa52ac1 100644
--- a/.github/workflows/test-integration-rediscluster.yml
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: rediscluster, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test rediscluster
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: rediscluster, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test rediscluster
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All rediscluster tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
index 7e33b44..2d6bd79 100644
--- a/.github/workflows/test-integration-requests.yml
+++ b/.github/workflows/test-integration-requests.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: requests, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        python-version: ["3.8","3.9"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test requests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: requests, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test requests
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All requests tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
index e2a0eba..c9bb762 100644
--- a/.github/workflows/test-integration-rq.yml
+++ b/.github/workflows/test-integration-rq.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: rq, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test rq
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: rq, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test rq
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All rq tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
index aa99f54..6710ea6 100644
--- a/.github/workflows/test-integration-sanic.yml
+++ b/.github/workflows/test-integration-sanic.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: sanic, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test sanic
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All sanic tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
index ea36e0f..aeccd24 100644
--- a/.github/workflows/test-integration-sqlalchemy.yml
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: sqlalchemy, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["2.7","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,71 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test sqlalchemy
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+  test-py27:
+    name: sqlalchemy, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
         run: |
-          set -x # print commands that are executed
-          coverage erase
+          pip install coverage "tox>=3,<4"
+
+      - name: Test sqlalchemy
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+            # Run tests
+            ./scripts/runtox.sh "py2.7-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+  check_required_tests:
+    name: All sqlalchemy tests passed or skipped
+    needs: [test, test-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
index a35544e..341a5ff 100644
--- a/.github/workflows/test-integration-starlette.yml
+++ b/.github/workflows/test-integration-starlette.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: starlette, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test starlette
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All starlette tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-starlite.yml b/.github/workflows/test-integration-starlite.yml
new file mode 100644
index 0000000..3d1a2ef
--- /dev/null
+++ b/.github/workflows/test-integration-starlite.yml
@@ -0,0 +1,83 @@
+name: Test starlite
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: starlite, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test starlite
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All starlite tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
index 17c1f18..494862b 100644
--- a/.github/workflows/test-integration-tornado.yml
+++ b/.github/workflows/test-integration-tornado.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: tornado, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test tornado
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All tornado tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
index 12771ff..56641a5 100644
--- a/.github/workflows/test-integration-trytond.yml
+++ b/.github/workflows/test-integration-trytond.yml
@@ -26,13 +26,17 @@ jobs:
   test:
     name: trytond, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
+    timeout-minutes: 30
 
     strategy:
+      fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,22 +45,39 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install coverage "tox>=3,<4"
 
       - name: Test trytond
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-          coverage erase
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
 
-          ./scripts/runtox.sh "${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+  check_required_tests:
+    name: All trytond tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.tool-versions b/.tool-versions
new file mode 100644
index 0000000..d316e6d
--- /dev/null
+++ b/.tool-versions
@@ -0,0 +1 @@
+python 3.7.12
diff --git a/.vscode/settings.json b/.vscode/settings.json
index c167a13..ba2472c 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,6 @@
 {
     "python.pythonPath": ".venv/bin/python",
-    "python.formatting.provider": "black"
-}
\ No newline at end of file
+    "python.formatting.provider": "black",
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f661d0..fa0df93 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,921 @@
 # Changelog
 
+## 1.29.2
+
+### Various fixes & improvements
+
+- Revert GraphQL integration (#2287) by @sentrivana
+
+## 1.29.1
+
+### Various fixes & improvements
+
+- Fix GraphQL integration swallowing responses (#2286) by @sentrivana
+- Fix typo (#2283) by @sentrivana
+
+## 1.29.0
+
+### Various fixes & improvements
+
+- Capture GraphQL client errors (#2243) by @sentrivana
+  - The SDK will now create dedicated errors whenever an HTTP client makes a request to a `/graphql` endpoint and the response contains an error. You can opt out of this by providing `capture_graphql_errors=False` to the HTTP client integration.
+- Read MAX_VALUE_LENGTH from client options (#2121) (#2171) by @puittenbroek
+- Rename `request_bodies` to `max_request_body_size` (#2247) by @mgaligniana (see the migration sketch after this list)
+- Always sample checkin regardless of `sample_rate` (#2279) by @szokeasaurusrex
+- Add information to short-interval cron error message (#2246) by @lobsterkatie
+- Add DB connection attributes in spans (#2274) by @antonpirker
+- Add `db.system` to remaining Redis spans (#2271) by @AbhiPrasad
+- Clarified the procedure for running tests (#2276) by @szokeasaurusrex
+- Fix Chalice tests (#2278) by @sentrivana
+- Bump Black from 23.3.0 to 23.7.0 (#2256) by @dependabot
+- Remove py3.4 from tox.ini (#2248) by @sentrivana
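+
+For the `request_bodies` rename above, a minimal migration sketch (the accepted values, such as `"always"`, are unchanged):
+
+```python
+import sentry_sdk
+
+# Before (deprecated):
+# sentry_sdk.init(dsn="...", request_bodies="always")
+
+# After:
+sentry_sdk.init(dsn="...", max_request_body_size="always")
+```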
+
+## 1.28.1
+
+### Various fixes & improvements
+
+- Redis: Add support for redis.asyncio (#1933) by @Zhenay (see the sketch after this list)
+- Make sure each task that is started by Celery Beat has its own trace. (#2249) by @antonpirker
+- Add Sampling Decision to Trace Envelope Header (#2239) by @antonpirker
+- Do not add trace headers (`sentry-trace` and `baggage`) to HTTP requests to Sentry (#2240) by @antonpirker
+- Prevent adding `sentry-trace` header multiple times (#2235) by @antonpirker
+- Skip distributions with incomplete metadata (#2231) by @rominf
+- Remove stale.yml (#2245) by @hubertdeng123
+- Django: Fix 404 handler being labeled as "generic ASGI request" (#1277) by @BeryJu
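+
+A minimal sketch of the redis.asyncio support mentioned above, assuming redis-py >= 4.2 and a Redis server on localhost (the Redis integration is enabled automatically):
+
+```python
+import asyncio
+
+import redis.asyncio as redis
+
+import sentry_sdk
+
+sentry_sdk.init(dsn="...", traces_sample_rate=1.0)
+
+
+async def main():
+    client = redis.Redis(host="localhost", port=6379)
+    # Async commands produce spans/breadcrumbs just like the sync client.
+    await client.set("greeting", "hello")
+    print(await client.get("greeting"))
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```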
+
+## 1.28.0
+
+### Various fixes & improvements
+
+- Add support for cron jobs in ARQ integration (#2088) by @lewazo
+- Backpressure handling prototype (#2189) by @sl0thentr0py
+- Add "replay" context to event payload (#2234) by @antonpirker
+- Update test Django app to be compatible with Django 4.x (#1794) by @DilLip-Chowdary-Codes
+
+## 1.27.1
+
+### Various fixes & improvements
+
+- Add Starlette/FastAPI template tag for adding Sentry tracing information (#2225) by @antonpirker
+  - By adding `{{ sentry_trace_meta }}` to your Starlette/FastAPI Jinja2 templates, we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend (see the sketch after this list).
+- Fixed generation of baggage when a DSC is already in propagation context (#2232) by @antonpirker
+- Handle explicitly passing `None` for `trace_configs` in `aiohttp` (#2230) by @Harmon758
+- Support newest Starlette versions (#2227) by @antonpirker
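+
+A minimal sketch of the `{{ sentry_trace_meta }}` tag with FastAPI and Jinja2; the template directory, file name, and route are illustrative:
+
+```python
+import sentry_sdk
+from fastapi import FastAPI, Request
+from fastapi.templating import Jinja2Templates
+
+sentry_sdk.init(dsn="...", traces_sample_rate=1.0)
+
+app = FastAPI()
+templates = Jinja2Templates(directory="templates")
+
+# templates/index.html would contain, inside <head>:
+#     {{ sentry_trace_meta }}
+
+
+@app.get("/")
+async def index(request: Request):
+    return templates.TemplateResponse("index.html", {"request": request})
+```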
+
+## 1.27.0
+
+### Various fixes & improvements
+
+- Support for SQLAlchemy 2.0 (#2200) by @antonpirker
+- Add instrumentation of `aiohttp` client requests (#1761) by @md384
+- Add Django template tag for adding Sentry tracing information (#2222) by @antonpirker
+  - By adding `{{ sentry_trace_meta }}` to your Django templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend.
+
+- Update Flask HTML meta helper (#2203) by @antonpirker
+- Take trace ID always from propagation context (#2209) by @antonpirker
+- Fix trace context in event payload (#2205) by @antonpirker
+- Use new top level API in `trace_propagation_meta` (#2202) by @antonpirker
+- Do not overwrite existing baggage on outgoing requests (#2191, #2214) by @sentrivana
+- Set the transaction/span status from an OTel span (#2115) by @daniil-konovalenko
+- Fix propagation of OTel `NonRecordingSpan` (#2187) by @hartungstenio
+- Fix `TaskLockedException` handling in Huey integration (#2206) by @Zhenay
+- Add message format configuration arguments to Loguru integration (#2208) by @Gwill
+- Profiling: Add client reports for profiles (#2207) by @Zylphrex
+- CI: Fix CI (#2220) by @antonpirker
+- Dependencies: Bump `checkouts/data-schemas` from `7fdde87` to `1b85152` (#2218) by @dependabot
+- Dependencies: Bump `mypy` from 1.3.0 to 1.4.1 (#2194) by @dependabot
+- Docs: Change API doc theme (#2210) by @sentrivana
+- Docs: Allow (some) autocompletion for top-level API (#2213) by @sentrivana
+- Docs: Revert autocomplete hack (#2224) by @sentrivana
+
+## 1.26.0
+
+### Various fixes & improvements
+
+- Tracing without performance (#2136) by @antonpirker
+- Load tracing information from environment (#2176) by @antonpirker
+- Auto-enable HTTPX integration if HTTPX installed (#2177) by @sentrivana
+- Support for SOCKS proxies (#1050) by @Roguelazer (see the sketch after this list)
+- Wrap `parse_url` calls in `capture_internal_exceptions` (#2162) by @sentrivana
+- Run 2.7 tests in CI again (#2181) by @sentrivana
+- Crons: Do not support sub-minute cron intervals (#2172) by @antonpirker
+- Profile: Add function name to profiler frame cache (#2164) by @Zylphrex
+- Dependencies: bump checkouts/data-schemas from `0ed3357` to `7fdde87` (#2165) by @dependabot
+- Update changelog (#2163) by @sentrivana
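+
+A minimal sketch of the SOCKS proxy support mentioned above; the proxy URL is illustrative, and SOCKS schemes require the optional PySocks package:
+
+```python
+import sentry_sdk
+
+sentry_sdk.init(
+    dsn="...",
+    # SOCKS schemes are now accepted in the proxy options
+    # (install PySocks first, e.g. `pip install pysocks`).
+    https_proxy="socks5://localhost:1080",
+)
+```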
+
+## 1.25.1
+
+### Django update (ongoing)
+
+A collection of improvements to our Django integration.
+
+By: @mgaligniana (#1773)
+
+### Various fixes & improvements
+
+- Fix `parse_url` (#2161) by @sentrivana and @antonpirker
+
+  Our URL sanitization used in multiple integrations broke with the recent Python security update. If you started seeing `ValueError`s with `"'Filtered' does not appear to be an IPv4 or IPv6 address"`, this release fixes that. See [the original issue](https://github.com/getsentry/sentry-python/issues/2160) for more context.
+
+- Better version parsing in integrations (#2152) by @antonpirker
+
+  We now properly support all integration versions that conform to [PEP 440](https://peps.python.org/pep-0440/). This replaces our naïve version parsing that wouldn't accept versions such as `2.0.0rc1` or `2.0.5.post1`.
+
+- Align HTTP status code as span data field `http.response.status_code` (#2113) by @antonpirker
+- Do not encode cached value to determine size (#2143) by @sentrivana
+- Fix using `unittest.mock` whenever available (#1926) by @mgorny
+- Fix 2.7 `common` tests (#2145) by @sentrivana
+- Bump `actions/stale` from `6` to `8` (#1978) by @dependabot
+- Bump `black` from `22.12.0` to `23.3.0` (#1984) by @dependabot
+- Bump `mypy` from `1.2.0` to `1.3.0` (#2110) by @dependabot
+- Bump `sphinx` from `5.3.0` to `7.0.1` (#2112) by @dependabot
+
+## 1.25.0
+
+### Various fixes & improvements
+
+- Support urllib3>=2.0.0 (#2148) by @asottile-sentry
+
+  We're now supporting urllib3's new major version, 2.0.0. If you encounter issues (e.g. some of your dependencies not supporting the new urllib3 version yet), you might consider pinning the urllib3 version to `<2.0.0` manually in your project. Check out [the urllib3 migration guide](https://urllib3.readthedocs.io/en/latest/v2-migration-guide.html#migrating-as-an-application-developer) for details.
+
+- Auto-retry tests on failure (#2134) by @sentrivana
+- Correct `importlib.metadata` check in `test_modules` (#2149) by @asottile-sentry
+- Fix distribution name normalization (PEP-0503) (#2144) by @rominf
+- Fix `functions_to_trace` typing (#2141) by @rcmarron
+
+## 1.24.0
+
+### Various fixes & improvements
+
+- **New:** Celery Beat exclude tasks option (#2130) by @antonpirker
+
+  You can exclude Celery Beat tasks from being auto-instrumented. To do this, add a list of tasks you want to exclude as option `exclude_beat_tasks` when creating `CeleryIntegration`. The list can contain simple strings with the full task name, as specified in the Celery Beat schedule, or regular expressions to match multiple tasks.
+
+  For more information, see the [Crons](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation.
+
+  Usage:
+
+  ```python
+      exclude_beat_tasks = [
+          "some-task-a",
+          "payment-check-.*",
+      ]
+      sentry_sdk.init(
+          dsn='___PUBLIC_DSN___',
+          integrations=[
+              CeleryIntegration(
+                  monitor_beat_tasks=True,
+                  exclude_beat_tasks=exclude_beat_tasks,
+              ),
+          ],
+      )
+  ```
+
+  In this example the task `some-task-a` and all tasks with a name starting with `payment-check-` will be ignored.
+
+- **New:** Add support for **ExceptionGroups** (#2025) by @antonpirker
+
+  _Note:_ If running Self-Hosted Sentry, you should wait to adopt this SDK update until after updating to the 23.6.0 (est. June 2023) release of Sentry. Updating early will not break anything, but you will not get the full benefit of the Exception Groups improvements to issue grouping that were added to the Sentry backend.
+
+- Prefer `importlib.metadata` over `pkg_resources` if available (#2081) by @sentrivana
+- Work with a copy of request, vars in the event (#2125) by @sentrivana
+- Pinned version of dependency that broke the build (#2133) by @antonpirker
+
+## 1.23.1
+
+### Various fixes & improvements
+
+- Disable Django Cache spans by default. (#2120) by @antonpirker
+
+## 1.23.0
+
+### Various fixes & improvements
+
+- **New:** Add `loguru` integration (#1994) by @PerchunPak
+
+  Check [the documentation](https://docs.sentry.io/platforms/python/configuration/integrations/loguru/) for more information.
+
+  Usage:
+
+  ```python
+  from loguru import logger
+  import sentry_sdk
+  from sentry_sdk.integrations.loguru import LoguruIntegration
+
+  sentry_sdk.init(
+      dsn="___PUBLIC_DSN___",
+      integrations=[
+          LoguruIntegration(),
+      ],
+  )
+
+  logger.debug("I am ignored")
+  logger.info("I am a breadcrumb")
+  logger.error("I am an event", extra=dict(bar=43))
+  logger.exception("An exception happened")
+  ```
+
+  - An error event with the message `"I am an event"` will be created.
+  - `"I am a breadcrumb"` will be attached as a breadcrumb to that event.
+  - `bar` will end up in the `extra` attributes of that event.
+  - `"An exception happened"` will send the current exception from `sys.exc_info()` with the stack trace to Sentry. If there's no exception, the current stack will be attached.
+  - The debug message `"I am ignored"` will not be captured by Sentry. To capture it, set `level` to `DEBUG` or lower in `LoguruIntegration`.
+
+- Do not truncate request body if `request_bodies` is `"always"` (#2092) by @sentrivana
+- Fixed Celery headers for Beat auto-instrumentation (#2102) by @antonpirker
+- Add `db.operation` to Redis and MongoDB spans (#2089) by @antonpirker
+- Make sure we're importing `redis` the library (#2106) by @sentrivana
+- Add `include_source_context` option (#2020) by @farhat-nawaz and @sentrivana
+- Import `Markup` from `markupsafe` (#2047) by @rco-ableton
+- Fix `__qualname__` missing attribute in asyncio integration (#2105) by @sl0thentr0py
+- Remove relay extension from AWS Layer (#2068) by @sl0thentr0py
+- Add a note about `pip freeze` to the bug template (#2103) by @sentrivana
+
+## 1.22.2
+
+### Various fixes & improvements
+
+- Fix: Django caching spans when using keyword arguments (#2086) by @antonpirker
+- Fix: Duration in Celery Beat tasks monitoring (#2087) by @antonpirker
+- Fix: Docstrings of SPANDATA (#2084) by @antonpirker
+
+## 1.22.1
+
+### Various fixes & improvements
+
+- Fix: Handle a list of keys (not just a single key) in Django cache spans (#2082) by @antonpirker
+
+## 1.22.0
+
+### Various fixes & improvements
+
+- Add `cache.hit` and `cache.item_size` to Django (#2057) by @antonpirker
+
+  _Note:_ This will add spans for all requests to the caches configured in Django. This will probably add some overhead to your server and also add multiple spans to your performance waterfall diagrams. If you do not want this, you can disable this feature in the DjangoIntegration:
+
+  ```python
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[
+          DjangoIntegration(cache_spans=False),
+      ]
+  )
+  ```
+
+- Use `http.method` instead of `method` (#2054) by @AbhiPrasad
+- Handle non-int `exc.status_code` in Starlette (#2075) by @sentrivana
+- Handle SQLAlchemy `engine.name` being bytes (#2074) by @sentrivana
+- Fix `KeyError` in `capture_checkin` if SDK is not initialized (#2073) by @antonpirker
+- Use `functools.wraps` for `ThreadingIntegration` patches to fix attributes (#2080) by @EpicWink
+- Pin `urllib3` to <2.0.0 for now (#2069) by @sl0thentr0py
+
+## 1.21.1
+
+### Various fixes & improvements
+
+- Do not send monitor_config when unset (#2058) by @evanpurkhiser
+- Add `db.system` span data (#2040, #2042) by @antonpirker
+- Fix memory leak in profiling (#2049) by @Zylphrex
+- Fix crash loop when returning none in before_send (#2045) by @sentrivana
+
+## 1.21.0
+
+### Various fixes & improvements
+
+- Better handling of redis span/breadcrumb data (#2033) by @antonpirker
+
+  _Note:_ With this release we will limit the description of redis db spans and the data in breadcrumbs representing redis db operations to 1024 characters.
+
+  This can lead to truncated data. If you do not want this, there is a new parameter `max_data_size` in `RedisIntegration`. You can set this to `None` to disable trimming.
+
+  Example for **disabling** trimming of redis commands in spans or breadcrumbs:
+
+  ```python
+  sentry_sdk.init(
+    integrations=[
+      RedisIntegration(max_data_size=None),
+    ]
+  )
+  ```
+
+  Example for custom trim size of redis commands in spans or breadcrumbs:
+
+  ```python
+  sentry_sdk.init(
+    integrations=[
+      RedisIntegration(max_data_size=50),
+    ]
+  )
+  ```
+
+- Add `db.system` to redis and SQLAlchemy db spans (#2037, #2038, #2039) (#2037) by @AbhiPrasad
+- Upgraded linting tooling (#2026) by @antonpirker
+- Made code more resilient. (#2031) by @antonpirker
+
+## 1.20.0
+
+### Various fixes & improvements
+
+- Send all events to /envelope endpoint when tracing is enabled (#2009) by @antonpirker
+
+  _Note:_ If you’re self-hosting Sentry 9, you need to stay on the previous version of the SDK or update your self-hosted Sentry to at least 20.6.0.
+
+- Profiling: Remove profile context from SDK (#2013) by @Zylphrex
+- Profiling: Additional performance improvements to the profiler (#1991) by @Zylphrex
+- Fix: Celery Beat monitoring without restarting the Beat process (#2001) by @antonpirker
+- Fix: Using the Codecov uploader instead of deprecated python package (#2011) by @antonpirker
+- Fix: Support for Quart (#2003) by @antonpirker
+
+## 1.19.1
+
+### Various fixes & improvements
+
+- Make auto monitoring beat update support Celery 4 and 5 (#1989) by @antonpirker
+
+## 1.19.0
+
+### Various fixes & improvements
+
+- **New:** [Celery Beat](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html) auto monitoring (#1967) by @antonpirker
+
+  The CeleryIntegration can now also monitor your Celery Beat scheduled tasks automatically using the new [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/) feature of Sentry.
+
+  To learn more see our [Celery Beat Auto Discovery](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation.
+
+  Usage:
+
+  ```python
+  from celery import Celery, signals
+  from celery.schedules import crontab
+
+  import sentry_sdk
+  from sentry_sdk.integrations.celery import CeleryIntegration
+
+
+  app = Celery('tasks', broker='...')
+  app.conf.beat_schedule = {
+      'set-in-beat-schedule': {
+          'task': 'tasks.some_important_task',
+          'schedule': crontab(...),
+      },
+  }
+
+
+  @signals.celeryd_init.connect
+  def init_sentry(**kwargs):
+      sentry_sdk.init(
+          dsn='...',
+          integrations=[CeleryIntegration(monitor_beat_tasks=True)],  # 👈 here
+          environment="local.dev.grace",
+          release="v1.0",
+      )
+  ```
+
+  This will auto-detect all scheduled tasks in your `beat_schedule` and monitor them with Sentry [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/).
+
+- **New:** [gRPC](https://grpc.io/) integration (#1911) by @hossein-raeisi
+
+  The [gRPC](https://grpc.io/) integration instruments all incoming requests and all outgoing unary-unary and unary-stream gRPC requests made using grpcio channels.
+
+  To learn more see our [gRPC Integration](https://docs.sentry.io/platforms/python/configuration/integrations/grpc/) documentation.
+
+  On the server:
+
+  ```python
+  import grpc
+  from sentry_sdk.integrations.grpc.server import ServerInterceptor
+
+
+  server = grpc.server(
+      thread_pool=...,
+      interceptors=[ServerInterceptor()],
+  )
+  ```
+
+  On the client:
+
+  ```python
+  import grpc
+  from sentry_sdk.integrations.grpc.client import ClientInterceptor
+
+
+  with grpc.insecure_channel("example.com:12345") as channel:
+      channel = grpc.intercept_channel(channel, ClientInterceptor())
+  ```
+
+- **New:** socket integration (#1911) by @hossein-raeisi
+
+  Use this integration to create spans for DNS resolves (`socket.getaddrinfo()`) and connection creations (`socket.create_connection()`).
+
+  To learn more see our [Socket Integration](https://docs.sentry.io/platforms/python/configuration/integrations/socket/) documentation.
+
+  Usage:
+
+  ```python
+  import sentry_sdk
+  from sentry_sdk.integrations.socket import SocketIntegration
+  sentry_sdk.init(
+      dsn="___PUBLIC_DSN___",
+      integrations=[
+          SocketIntegration(),
+      ],
+  )
+  ```
+
+- Fix: Do not trim span descriptions. (#1983) by @antonpirker
+
+## 1.18.0
+
+### Various fixes & improvements
+
+- **New:** Implement `EventScrubber` (#1943) by @sl0thentr0py
+
+  To learn more see our [Scrubbing Sensitive Data](https://docs.sentry.io/platforms/python/data-management/sensitive-data/#event-scrubber) documentation.
+
+  Add a new `EventScrubber` class that scrubs certain potentially sensitive interfaces with a `DEFAULT_DENYLIST`. The default scrubber is automatically run if `send_default_pii = False`:
+
+  ```python
+  import sentry_sdk
+  from sentry_sdk.scrubber import EventScrubber
+  sentry_sdk.init(
+      # ...
+      send_default_pii=False,
+      event_scrubber=EventScrubber(),  # this is set by default
+  )
+  ```
+
+  You can also pass in a custom `denylist` to the `EventScrubber` class and filter additional fields that you want.
+
+  ```python
+  from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST
+  # custom denylist
+  denylist = DEFAULT_DENYLIST + ["my_sensitive_var"]
+  sentry_sdk.init(
+      # ...
+      send_default_pii=False,
+      event_scrubber=EventScrubber(denylist=denylist),
+  )
+  ```
+
+- **New:** Added new `functions_to_trace` option for central way of performance instrumentation (#1960) by @antonpirker
+
+  To learn more see our [Tracing Options](https://docs.sentry.io/platforms/python/configuration/options/#functions-to-trace) documentation.
+
+  An optional list of functions that should be set up for performance monitoring. For each function in the list, a span will be created when the function is executed.
+
+  ```python
+  functions_to_trace = [
+      {"qualified_name": "tests.test_basics._hello_world_counter"},
+      {"qualified_name": "time.sleep"},
+      {"qualified_name": "collections.Counter.most_common"},
+  ]
+
+  sentry_sdk.init(
+      # ...
+      traces_sample_rate=1.0,
+      functions_to_trace=functions_to_trace,
+  )
+  ```
+
+- Updated denylist to include other widely used cookies/headers (#1972) by @antonpirker
+- Forward all `sentry-` baggage items (#1970) by @cleptric
+- Update OSS licensing (#1973) by @antonpirker
+- Profiling: Handle non frame types in profiler (#1965) by @Zylphrex
+- Tests: Bad arq dependency in tests (#1966) by @Zylphrex
+- Better naming (#1962) by @antonpirker
+
+## 1.17.0
+
+### Various fixes & improvements
+
+- **New:** Monitor Celery Beat tasks with Sentry [Cron Monitoring](https://docs.sentry.io/product/crons/).
+
+  With this feature you can make sure that your Celery beat tasks run at the right time and see whether they were successful or not.
+
+  > **Warning**
+  > Cron Monitoring is currently in beta. Beta features are still in-progress and may have bugs. We recognize the irony.
+  > If you have any questions or feedback, please email us at crons-feedback@sentry.io, reach out via Discord (#cronjobs), or open an issue.
+
+  Usage:
+
+  ```python
+  # File: tasks.py
+
+  from celery import Celery, signals
+  from celery.schedules import crontab
+
+  import sentry_sdk
+  from sentry_sdk.crons import monitor
+  from sentry_sdk.integrations.celery import CeleryIntegration
+
+
+  # 1. Setup your Celery beat configuration
+
+  app = Celery('mytasks', broker='redis://localhost:6379/0')
+  app.conf.beat_schedule = {
+      'set-in-beat-schedule': {
+          'task': 'tasks.tell_the_world',
+          'schedule': crontab(hour='10', minute='15'),
+          'args': ("in beat_schedule set", ),
+      },
+  }
+
+
+  # 2. Initialize Sentry either in `celeryd_init` or `beat_init` signal.
+
+  #@signals.celeryd_init.connect
+  @signals.beat_init.connect
+  def init_sentry(**kwargs):
+      sentry_sdk.init(
+          dsn='...',
+          integrations=[CeleryIntegration()],
+          environment="local.dev.grace",
+          release="v1.0.7-a1",
+      )
+
+
+  # 3. Link your Celery task to a Sentry Cron Monitor
+
+  @app.task
+  @monitor(monitor_slug='3b861d62-ff82-4aa0-9cd6-b2b6403bd0cf')
+  def tell_the_world(msg):
+      print(msg)
+  ```
+
+- **New:** Add decorator for Sentry tracing (#1089) by @ynouri
+
+  This allows you to use a decorator to set up custom performance instrumentation.
+
+  To learn more see [Custom Instrumentation](https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/).
+
+  Usage: Just add the new decorator to your function, and a span will be created for it:
+
+  ```python
+  import sentry_sdk
+
+  @sentry_sdk.trace
+  def my_complex_function():
+    # do stuff
+    ...
+  ```
+
+- Make Django signals tracing optional (#1929) by @antonpirker
+
+  See the [Django Guide](https://docs.sentry.io/platforms/python/guides/django) to learn more.
+
+- Deprecated `with_locals` in favor of `include_local_variables` (#1924) by @antonpirker (see the migration sketch after this list)
+- Added top level API to get current span (#1954) by @antonpirker
+- Profiling: Add profiler options to init (#1947) by @Zylphrex
+- Profiling: Set active thread id for quart (#1830) by @Zylphrex
+- Fix: Update `get_json` function call for werkzeug 2.1.0+ (#1939) by @michielderoos
+- Fix: Returning the tasks result. (#1931) by @antonpirker
+- Fix: Rename MYPY to TYPE_CHECKING (#1934) by @untitaker
+- Fix: Fix type annotation for ignore_errors in sentry_sdk.init() (#1928) by @tiangolo
+- Tests: Start a real http server instead of mocking libs (#1938) by @antonpirker
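+
+For the `with_locals` deprecation above, a minimal migration sketch (both flags control whether local variables are attached to stack frames):
+
+```python
+import sentry_sdk
+
+# Before (deprecated):
+# sentry_sdk.init(dsn="...", with_locals=False)
+
+# After:
+sentry_sdk.init(dsn="...", include_local_variables=False)
+```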
+
+## 1.16.0
+
+### Various fixes & improvements
+
+- **New:** Add [arq](https://arq-docs.helpmanual.io/) Integration (#1872) by @Zhenay
+
+  This integration creates performance spans when arq jobs are enqueued and when they are run.
+  It also captures errors in jobs and links them to the performance spans.
+
+  Usage:
+
+  ```python
+  import asyncio
+
+  from httpx import AsyncClient
+  from arq import create_pool
+  from arq.connections import RedisSettings
+
+  import sentry_sdk
+  from sentry_sdk.integrations.arq import ArqIntegration
+  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[ArqIntegration()],
+  )
+
+  async def download_content(ctx, url):
+      session: AsyncClient = ctx['session']
+      response = await session.get(url)
+      print(f'{url}: {response.text:.80}...')
+      return len(response.text)
+
+  async def startup(ctx):
+      ctx['session'] = AsyncClient()
+
+  async def shutdown(ctx):
+      await ctx['session'].aclose()
+
+  async def main():
+      with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT):
+          redis = await create_pool(RedisSettings())
+          for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', "asdf"):
+              await redis.enqueue_job('download_content', url)
+
+  class WorkerSettings:
+      functions = [download_content]
+      on_startup = startup
+      on_shutdown = shutdown
+
+  if __name__ == '__main__':
+      asyncio.run(main())
+  ```
+
+- Update of [Falcon](https://falconframework.org/) Integration (#1733) by @bartolootrit
+- Adding [Cloud Resource Context](https://docs.sentry.io/platforms/python/configuration/integrations/cloudresourcecontext/) integration (#1882) by @antonpirker
+- Profiling: Use the transaction timestamps to anchor the profile (#1898) by @Zylphrex
+- Profiling: Add debug logs to profiling (#1883) by @Zylphrex
+- Profiling: Start profiler thread lazily (#1903) by @Zylphrex
+- Fixed checks for structured http data (#1905) by @antonpirker
+- Make `set_measurement` public api and remove experimental status (#1909) by @sl0thentr0py
+- Add `trace_propagation_targets` option (#1916) by @antonpirker (see the sketch after this list)
+- Add `enable_tracing` to default traces_sample_rate to 1.0 (#1900) by @sl0thentr0py
+- Remove deprecated `tracestate` (#1907) by @sl0thentr0py
+- Sanitize URLs in Span description and breadcrumbs (#1876) by @antonpirker
+- Mechanism should default to true unless set explicitly (#1889) by @sl0thentr0py
+- Better setting of in-app in stack frames (#1894) by @antonpirker
+- Add workflow to test gevent (#1870) by @Zylphrex
+- Updated outdated HTTPX test matrix (#1917) by @antonpirker
+- Switch to MIT license (#1908) by @cleptric
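+
+A minimal sketch of the `trace_propagation_targets` option mentioned above; the targets are illustrative, and outgoing requests matching one of the strings or regexes receive the `sentry-trace` and `baggage` headers:
+
+```python
+import sentry_sdk
+
+sentry_sdk.init(
+    dsn="...",
+    traces_sample_rate=1.0,
+    # Only attach tracing headers to requests matching these targets.
+    trace_propagation_targets=[
+        "api.example.com",
+        r"^https://internal\.example\.com/",
+    ],
+)
+```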
+
+## 1.15.0
+
+### Various fixes & improvements
+
+- New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay
+
+  This integration creates performance spans when Huey tasks are enqueued and when they are executed.
+
+  Usage:
+
+  Task definition in `demo.py`:
+
+  ```python
+  import time
+
+  from huey import SqliteHuey, crontab
+
+  import sentry_sdk
+  from sentry_sdk.integrations.huey import HueyIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[
+          HueyIntegration(),
+      ],
+      traces_sample_rate=1.0,
+  )
+
+  huey = SqliteHuey(filename='/tmp/demo.db')
+
+  @huey.task()
+  def add_numbers(a, b):
+      return a + b
+  ```
+
+  Running the tasks in `run.py`:
+
+  ```python
+  from demo import add_numbers
+
+  import sentry_sdk
+  from sentry_sdk.integrations.huey import HueyIntegration
+  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
+
+
+  def main():
+      sentry_sdk.init(
+          dsn="...",
+          integrations=[
+              HueyIntegration(),
+          ],
+          traces_sample_rate=1.0,
+      )
+
+      with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT):
+          r = add_numbers(1, 2)
+
+  if __name__ == "__main__":
+      main()
+  ```
+
+- Profiling: Do not send single sample profiles (#1879) by @Zylphrex
+- Profiling: Add additional test coverage for profiler (#1877) by @Zylphrex
+- Profiling: Always use builtin time.sleep (#1869) by @Zylphrex
+- Profiling: Default in_app decision to None (#1855) by @Zylphrex
+- Profiling: Remove use of threading.Event (#1864) by @Zylphrex
+- Profiling: Enable profiling on all transactions (#1797) by @Zylphrex
+- FastAPI: Fix check for Starlette in FastAPI integration (#1868) by @antonpirker
+- Flask: Do not overwrite default for username with email address in FlaskIntegration (#1873) by @homeworkprod
+- Tests: Add py3.11 to test-common (#1871) by @Zylphrex
+- Fix: Don't log whole event in before_send / event_processor drops (#1863) by @sl0thentr0py
+
+## 1.14.0
+
+### Various fixes & improvements
+
+- Add `before_send_transaction` (#1840) by @antonpirker
+
+  Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).
+
+  Usage:
+
+  ```python
+    import sentry_sdk
+
+    def strip_sensitive_data(event, hint):
+        # modify event here (or return `None` if you want to drop the event entirely)
+        return event
+
+    sentry_sdk.init(
+        # ...
+        before_send_transaction=strip_sensitive_data,
+    )
+  ```
+
+  See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-
+
+- Django: Always remove values of Django session related cookies. (#1842) by @antonpirker
+- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
+- Profiling: Better gevent support (#1822) by @Zylphrex
+- Profiling: Add profile context to transaction (#1860) by @Zylphrex
+- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
+- OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
+- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
+- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
+- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
+- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
+- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
+- Removed code coverage target (#1862) by @antonpirker
+
+## 1.13.0
+
+### Various fixes & improvements
+
+- Add Starlite integration (#1748) by @gazorby
+
+  Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
+
+  Usage:
+
+  ```python
+  from starlite import Starlite, get
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlite import StarliteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      traces_sample_rate=1.0,
+      integrations=[
+          StarliteIntegration(),
+      ],
+  )
+
+  @get("/")
+  def hello_world() -> dict[str, str]:
+      """Keeping the tradition alive with hello world."""
+      bla = 1/0  # causing an error
+      return {"hello": "world"}
+
+  app = Starlite(route_handlers=[hello_world])
+  ```
+
+- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
+- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
+- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
+- Use @wraps for Django Signal receivers (#1815) by @meanmail
+- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
+- Remove sanic v22 pin (#1819) by @sl0thentr0py
+- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
+- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
+- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
+- Auto publish to internal pypi on release (#1823) by @asottile-sentry
+- Added Python 3.11 to test suite (#1795) by @antonpirker
+- Update test/linting dependencies (#1801) by @antonpirker
+- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
+
+## 1.12.1
+
+### Various fixes & improvements
+
+- Link errors to OTel spans (#1787) by @antonpirker
+
+## 1.12.0
+
+### Basic OTel support
+
+This adds support to automatically integrate OpenTelemetry performance tracing with Sentry.
+
+See the documentation on how to set it up:
+https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/
+
+Give it a try and let us know if you have any feedback or problems with using it.
+
+By: @antonpirker (#1772, #1766, #1765)
+
+### Various fixes & improvements
+
+- Tox Cleanup (#1749) by @antonpirker
+- CI: Fix Github action checks (#1780) by @Zylphrex
+- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
+- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
+- Profiling: Resolve inherited method class names (#1756) by @Zylphrex
+
+## 1.11.1
+
+### Various fixes & improvements
+
+- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
+- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py (see the sketch below)
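+
+A minimal sketch of the top-level `proxy_headers` option; the proxy URL and header value are illustrative:
+
+```python
+import sentry_sdk
+
+sentry_sdk.init(
+    dsn="...",
+    http_proxy="http://proxy.example.com:8080",
+    # Extra headers to send with the CONNECT request to the proxy.
+    proxy_headers={
+        "Proxy-Authorization": "Basic dXNlcjpwYXNzd29yZA==",
+    },
+)
+```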
+
+## 1.11.0
+
+### Various fixes & improvements
+
+- Fix signals problem on sentry.io (#1732) by @antonpirker
+- Fix reading FastAPI request body twice. (#1724) by @antonpirker
+- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
+- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
+- feat(profiling): Extract more frame info (#1702) by @Zylphrex
+- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
+- Performance optimizations (#1725) by @antonpirker
+- feat(pymongo): add PyMongo integration (#1590) by @Agalin
+- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
+- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
+- chore: remove jira workflow (#1707) by @vladanpaunovic
+- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
+- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex
+
+## 1.10.1
+
+### Various fixes & improvements
+
+- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
+- The wrapped receive() did not return anything. (#1698) by @antonpirker
+
+## 1.10.0
+
+### Various fixes & improvements
+
+- Unified naming for span ops (#1661) by @antonpirker
+
+  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
+
+  **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates, or columns, this change could potentially break your Dashboards/Discover setup.
+  Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:
+
+  | Old operation (`op`)     | New Operation (`op`)   |
+  | ------------------------ | ---------------------- |
+  | `asgi.server`            | `http.server`          |
+  | `aws.request`            | `http.client`          |
+  | `aws.request.stream`     | `http.client.stream`   |
+  | `celery.submit`          | `queue.submit.celery`  |
+  | `celery.task`            | `queue.task.celery`    |
+  | `django.middleware`      | `middleware.django`    |
+  | `django.signals`         | `event.django`         |
+  | `django.template.render` | `template.render`      |
+  | `django.view`            | `view.render`          |
+  | `http`                   | `http.client`          |
+  | `redis`                  | `db.redis`             |
+  | `rq.task`                | `queue.task.rq`        |
+  | `serverless.function`    | `function.aws`         |
+  | `serverless.function`    | `function.gcp`         |
+  | `starlette.middleware`   | `middleware.starlette` |
+
+- Include framework in SDK name (#1662) by @antonpirker
+- Asyncio integration (#1671) by @antonpirker
+- Add exception handling to Asyncio Integration (#1695) by @antonpirker
+- Fix asyncio task factory (#1689) by @antonpirker
+- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
+- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
+- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
+- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
+- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
+- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
+- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
+- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
+- Remove unused node setup from ci. (#1681) by @antonpirker
+- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
+- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
+- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
+- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
+- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
+- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
+- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
+- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
+- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
+- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
+
 ## 1.9.10
 
 ### Various fixes & improvements
@@ -158,7 +1074,7 @@ We can do better and in the future we will do our best to not break your code ag
 
 - fix: avoid sending empty Baggage header (#1507) by @intgr
 - fix: properly freeze Baggage object (#1508) by @intgr
-- docs: fix simple typo, collecter -> collector (#1505) by @timgates42
+- docs: fix simple typo, collecter | collector (#1505) by @timgates42
 
 ## 1.7.2
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 48e9aac..c71be18 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -34,9 +34,9 @@ Make sure that you have Python 3 installed. Version 3.7 or higher is required to
 ```bash
 cd sentry-python
 
-python -m venv .env
+python -m venv .venv
 
-source .env/bin/activate
+source .venv/bin/activate
 ```
 
 ### Install `sentry-python` in editable mode
@@ -65,48 +65,20 @@ That's it. You should be ready to make changes, run tests, and make commits! If
 
 ## Running tests
 
-We have a `Makefile` to help people get started with hacking on the SDK
-without having to know or understand the Python ecosystem.
-Run `make` or `make help` to list commands.
-
-So the simplest way to run tests is:
-
+To run the tests, first set up your development environment according to the instructions above. Then, install the required packages for running tests with the following command:
 ```bash
-cd sentry-python
-
-make test
+pip install -r test-requirements.txt
 ```
 
-This will use [Tox](https://tox.wiki/en/latest/) to run our whole test suite
-under Python 2.7 and Python 3.7.
-
-Of course you can always run the underlying commands yourself, which is
-particularly useful when wanting to provide arguments to `pytest` to run
-specific tests:
-
+Once the requirements are installed, you can run all tests with the following command:
 ```bash
-cd sentry-python
-
-# create virtual environment
-python -m venv .env
-
-# activate virtual environment
-source .env/bin/activate
-
-# install sentry-python
-pip install -e .
-
-# install requirements
-pip install -r test-requirements.txt
-
-# run tests
 pytest tests/
 ```
 
-If you want to run the tests for a specific integration you should do so by doing this:
+If you would like to run the tests for a specific integration, use a command similar to the one below:
 
 ```bash
-pytest -rs tests/integrations/flask/
+pytest -rs tests/integrations/flask/  # Replace "flask" with the specific integration you wish to test
 ```
 
 **Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration.)
diff --git a/LICENSE b/LICENSE
index 61555f1..fa838f1 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,9 +1,21 @@
-Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors.
-All rights reserved.
+MIT License
 
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+Copyright (c) 2018 Functional Software, Inc. dba Sentry
 
-* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
 
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/Makefile b/Makefile
index bf13e11..2011b1b 100644
--- a/Makefile
+++ b/Makefile
@@ -20,6 +20,7 @@ help:
 
 dist: .venv
 	rm -rf dist dist-serverless build
+	$(VENV_PATH)/bin/pip install wheel
 	$(VENV_PATH)/bin/python setup.py sdist bdist_wheel
 .PHONY: dist
 
@@ -29,7 +30,7 @@ format: .venv
 .PHONY: format
 
 test: .venv
-	@$(VENV_PATH)/bin/tox -e py2.7,py3.7
+	@$(VENV_PATH)/bin/tox -e py3.9
 .PHONY: test
 
 test-all: .venv
@@ -50,7 +51,7 @@ lint: .venv
 apidocs: .venv
 	@$(VENV_PATH)/bin/pip install --editable .
 	@$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt
-	@$(VENV_PATH)/bin/sphinx-build -W -b html docs/ docs/_build
+	@$(VENV_PATH)/bin/sphinx-build -vv -W -b html docs/ docs/_build
 .PHONY: apidocs
 
 apidocs-hotfix: apidocs
diff --git a/README.md b/README.md
index 597ed85..7bd6e46 100644
--- a/README.md
+++ b/README.md
@@ -104,4 +104,4 @@ If you need help setting up or configuring the Python SDK (or anything else in t
 
 ## License
 
-Licensed under the BSD license, see [`LICENSE`](LICENSE)
+Licensed under the MIT license, see [`LICENSE`](LICENSE)
diff --git a/codecov.yml b/codecov.yml
index 1989f1c..93a5b68 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -1,9 +1,11 @@
+comment: false
 coverage:
   status:
     project:
-      default: false
-    patch:
-      default: false
-      python:
-        target: 90%
-comment: false
+      default:
+        target: auto  # auto compares coverage to the previous base commit
+        threshold: 10%  # this allows a 10% drop from the previous base commit coverage
+        informational: true
+ignore:
+  - "tests"
+  - "sentry_sdk/_types.py"
diff --git a/debian/changelog b/debian/changelog
index fc69f75..83aed5b 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+sentry-python (1.29.2-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Mon, 21 Aug 2023 18:18:40 -0000
+
 sentry-python (1.9.10-2) unstable; urgency=medium
 
   * Team upload.
diff --git a/debian/patches/debian-hacks/docs-Use-local-inventory-for-Python3.patch b/debian/patches/debian-hacks/docs-Use-local-inventory-for-Python3.patch
index dd36216..3f38afe 100644
--- a/debian/patches/debian-hacks/docs-Use-local-inventory-for-Python3.patch
+++ b/debian/patches/debian-hacks/docs-Use-local-inventory-for-Python3.patch
@@ -9,11 +9,11 @@ Forwarded: not-needed
  docs/conf.py | 2 +-
  1 file changed, 1 insertion(+), 1 deletion(-)
 
-diff --git a/docs/conf.py b/docs/conf.py
-index 4856f57..c0e62f0 100644
---- a/docs/conf.py
-+++ b/docs/conf.py
-@@ -189,4 +189,4 @@ epub_title = project
+Index: sentry-python.git/docs/conf.py
+===================================================================
+--- sentry-python.git.orig/docs/conf.py
++++ sentry-python.git/docs/conf.py
+@@ -192,4 +192,4 @@ epub_title = project
  # A list of files that should not be packed into the epub file.
  epub_exclude_files = ["search.html"]
  
diff --git a/docs-requirements.txt b/docs-requirements.txt
index 9b3fbfc..e1f6940 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -1,4 +1,4 @@
-sphinx==5.1.1
-sphinx-rtd-theme
+shibuya
+sphinx==7.0.1
 sphinx-autodoc-typehints[type_comments]>=1.8.0
 typing-extensions
diff --git a/docs/api.rst b/docs/api.rst
index 01bef3e..864e934 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -7,3 +7,9 @@ Main API
 .. automodule:: sentry_sdk
     :members:
     :inherited-members:
+
+.. autoclass:: sentry_sdk.tracing.Span
+   :members:
+
+.. autoclass:: sentry_sdk.tracing.Transaction
+   :members:
diff --git a/docs/conf.py b/docs/conf.py
index 5107e0f..58b5b31 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -2,15 +2,16 @@
 
 import os
 import sys
-
 import typing
+from datetime import datetime
 
 # prevent circular imports
 import sphinx.builders.html
 import sphinx.builders.latex
 import sphinx.builders.texinfo
 import sphinx.builders.text
-import sphinx.ext.autodoc
+import sphinx.ext.autodoc  # noqa: F401
+import urllib3.exceptions  # noqa: F401
 
 typing.TYPE_CHECKING = True
 
@@ -26,10 +27,10 @@ sys.path.insert(0, os.path.abspath(".."))
 # -- Project information -----------------------------------------------------
 
 project = "sentry-python"
-copyright = "2019, Sentry Team and Contributors"
+copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year)
 author = "Sentry Team and Contributors"
 
-release = "1.9.10"
+release = "1.29.2"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
@@ -86,13 +87,15 @@ pygments_style = None
 
 on_rtd = os.environ.get("READTHEDOCS", None) == "True"
 
-html_theme = "alabaster"
+html_theme = "shibuya"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
 #
-# html_theme_options = {}
+html_theme_options = {
+    "github_url": "https://github.com/getsentry/sentry-python",
+}
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
@@ -166,7 +169,7 @@ texinfo_documents = [
         "sentry-python Documentation",
         author,
         "sentry-python",
-        "One line description of project.",
+        "The official Sentry SDK for Python.",
         "Miscellaneous",
     )
 ]
diff --git a/linter-requirements.txt b/linter-requirements.txt
index e497c21..d5b8ef1 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,9 +1,11 @@
-mypy==0.971
-black==22.8.0
+mypy==1.4.1
+black==23.7.0
 flake8==5.0.4
 types-certifi
 types-redis
 types-setuptools
-flake8-bugbear==22.9.11
+pymongo # There is no separate types module.
+loguru # There is no separate types module.
+flake8-bugbear==22.12.6
 pep8-naming==0.13.2
 pre-commit # local linting
diff --git a/mypy.ini b/mypy.ini
index 2a15e45..fef90c8 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,5 +1,5 @@
 [mypy]
-python_version = 3.7
+python_version = 3.11
 allow_redefinition = True
 check_untyped_defs = True
 ; disallow_any_decorated = True
@@ -59,7 +59,15 @@ ignore_missing_imports = True
 [mypy-sentry_sdk._queue]
 ignore_missing_imports = True
 disallow_untyped_defs = False
+[mypy-sentry_sdk._lru_cache]
+disallow_untyped_defs = False
 [mypy-celery.app.trace]
 ignore_missing_imports = True
 [mypy-flask.signals]
 ignore_missing_imports = True
+[mypy-huey.*]
+ignore_missing_imports = True
+[mypy-arq.*]
+ignore_missing_imports = True
+[mypy-grpc.*]
+ignore_missing_imports = True
diff --git a/scripts/aws-delete-lamba-layer-versions.sh b/scripts/aws-delete-lamba-layer-versions.sh
index 5e1ea38..f467f93 100755
--- a/scripts/aws-delete-lamba-layer-versions.sh
+++ b/scripts/aws-delete-lamba-layer-versions.sh
@@ -8,7 +8,7 @@ set -euo pipefail
 # override default AWS region
 export AWS_REGION=eu-central-1
 
-LAYER_NAME=SentryPythonServerlessSDKLocalDev
+LAYER_NAME=SentryPythonServerlessSDK-local-dev
 VERSION="0"
 
 while [[ $VERSION != "1" ]]
diff --git a/scripts/aws-deploy-local-layer.sh b/scripts/aws-deploy-local-layer.sh
index 9e2d7c7..3f21384 100755
--- a/scripts/aws-deploy-local-layer.sh
+++ b/scripts/aws-deploy-local-layer.sh
@@ -9,55 +9,20 @@
 set -euo pipefail
 
 # Creating Lambda layer
-echo "Creating Lambda layer in ./dist-serverless ..."
+echo "Creating Lambda layer in ./dist ..."
 make aws-lambda-layer
-echo "Done creating Lambda layer in ./dist-serverless."
-
-# IMPORTANT:
-# Please make sure that this part does the same as the GitHub action that
-# is building the Lambda layer in production!
-# see: https://github.com/getsentry/action-build-aws-lambda-extension/blob/main/action.yml#L23-L40
-
-echo "Downloading relay..."
-mkdir -p dist-serverless/relay
-curl -0 --silent \
-    --output dist-serverless/relay/relay \
-    "$(curl -s https://release-registry.services.sentry.io/apps/relay/latest | jq -r .files.\"relay-Linux-x86_64\".url)"
-chmod +x dist-serverless/relay/relay
-echo "Done downloading relay."
-
-echo "Creating start script..."
-mkdir -p dist-serverless/extensions
-cat > dist-serverless/extensions/sentry-lambda-extension << EOT
-#!/bin/bash
-set -euo pipefail
-exec /opt/relay/relay run \
-    --mode=proxy \
-    --shutdown-timeout=2 \
-    --upstream-dsn="\$SENTRY_DSN" \
-    --aws-runtime-api="\$AWS_LAMBDA_RUNTIME_API"
-EOT
-chmod +x dist-serverless/extensions/sentry-lambda-extension
-echo "Done creating start script."
-
-# Zip Lambda layer and included Lambda extension
-echo "Zipping Lambda layer and included Lambda extension..."
-cd dist-serverless/
-zip -r ../sentry-python-serverless-x.x.x-dev.zip \
-    . \
-    --exclude \*__pycache__\* --exclude \*.yml
-cd ..
-echo "Done Zipping Lambda layer and included Lambda extension to ./sentry-python-serverless-x.x.x-dev.zip."
-
+echo "Done creating Lambda layer in ./dist"
 
 # Deploying zipped Lambda layer to AWS
-echo "Deploying zipped Lambda layer to AWS..."
+ZIP=$(ls dist | grep serverless | head -n 1)
+echo "Deploying zipped Lambda layer $ZIP to AWS..."
 
 aws lambda publish-layer-version \
     --layer-name "SentryPythonServerlessSDK-local-dev" \
     --region "eu-central-1" \
-    --zip-file "fileb://sentry-python-serverless-x.x.x-dev.zip" \
+    --zip-file "fileb://dist/$ZIP" \
     --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \
+    --compatible-runtimes python3.6 python3.7 python3.8 python3.9 \
     --no-cli-pager
 
 echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'."
diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py
index d694d15..829b7e3 100644
--- a/scripts/build_aws_lambda_layer.py
+++ b/scripts/build_aws_lambda_layer.py
@@ -17,6 +17,7 @@ class LayerBuilder:
         # type: (...) -> None
         self.base_dir = base_dir
         self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES)
+        self.out_zip_filename = f"sentry-python-serverless-{SDK_VERSION}.zip"
 
     def make_directories(self):
         # type: (...) -> None
@@ -57,16 +58,35 @@ class LayerBuilder:
             "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
         )
 
+    def zip(self):
+        # type: (...) -> None
+        subprocess.run(
+            [
+                "zip",
+                "-q",  # Quiet
+                "-x",  # Exclude files
+                "**/__pycache__/*",  # Files to be excluded
+                "-r",  # Recurse paths
+                self.out_zip_filename,  # Output filename
+                PYTHON_SITE_PACKAGES,  # Files to be zipped
+            ],
+            cwd=self.base_dir,
+            check=True,  # Raises CalledProcessError if exit status is non-zero
+        )
 
-def build_layer_dir():
+        shutil.copy(
+            os.path.join(self.base_dir, self.out_zip_filename),
+            os.path.abspath(DIST_PATH)
+        )
+
+def build_packaged_zip():
     with tempfile.TemporaryDirectory() as base_dir:
         layer_builder = LayerBuilder(base_dir)
         layer_builder.make_directories()
         layer_builder.install_python_packages()
         layer_builder.create_init_serverless_sdk_package()
-
-        shutil.copytree(base_dir, "dist-serverless")
+        layer_builder.zip()
 
 
 if __name__ == "__main__":
-    build_layer_dir()
+    build_packaged_zip()
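
The new `zip()` step shells out to the `zip` binary. For illustration only, a dependency-free sketch of the same packaging logic using the stdlib `zipfile` module might look like this (`base_dir`, `site_packages`, and `out_name` are hypothetical parameter names, not part of the diff):

```python
import os
import zipfile

def zip_layer(base_dir, site_packages, out_name):
    """Zip base_dir/site_packages into base_dir/out_name, skipping __pycache__."""
    out_path = os.path.join(base_dir, out_name)
    with zipfile.ZipFile(out_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for root, _dirs, files in os.walk(os.path.join(base_dir, site_packages)):
            if "__pycache__" in root.split(os.sep):
                continue  # mirrors the -x "**/__pycache__/*" exclude above
            for name in files:
                path = os.path.join(root, name)
                # archive paths relative to base_dir, like `zip -r` run with cwd=base_dir
                zf.write(path, os.path.relpath(path, base_dir))
```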
diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py
index 70e28c4..e2c9f53 100644
--- a/scripts/init_serverless_sdk.py
+++ b/scripts/init_serverless_sdk.py
@@ -10,25 +10,17 @@ import sys
 import re
 
 import sentry_sdk
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import Dsn
 from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
-def extension_relay_dsn(original_dsn):
-    dsn = Dsn(original_dsn)
-    dsn.host = "localhost"
-    dsn.port = 3000
-    dsn.scheme = "http"
-    return str(dsn)
-
-
 # Configure Sentry SDK
 sentry_sdk.init(
-    dsn=extension_relay_dsn(os.environ["SENTRY_DSN"]),
+    dsn=os.environ["SENTRY_DSN"],
     integrations=[AwsLambdaIntegration(timeout_warning=True)],
     traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]),
 )
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index a658da4..e099f44 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -1,4 +1,8 @@
 #!/bin/bash
+
+# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
+# Runs all environments with substring py3.7 and the given arguments for pytest
+
 set -ex
 
 if [ -n "$TOXPATH" ]; then
@@ -9,22 +13,15 @@ else
     TOXPATH=./.venv/bin/tox
 fi
 
-# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
-# Runs all environments with substring py3.7 and the given arguments for pytest
-
-if [ -n "$1" ]; then
-    searchstring="$1"
-elif [ -n "$CI_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = "pypy-2.7" ]; then
-        searchstring=pypy
-    fi
-elif [ -n "$AZURE_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = pypy2 ]; then
-        searchstring=pypy
-    fi
-fi
+searchstring="$1"
 
 export TOX_PARALLEL_NO_SPINNER=1
-exec $TOXPATH -p auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
+ENV="$($TOXPATH -l | grep "$searchstring" | tr $'\n' ',')"
+
+# Run the common 2.7 suite without the -p flag, otherwise we hit an encoding
+# issue in tox.
+if [ "$ENV" = py2.7-common, ] || [ "$ENV" = py2.7-gevent, ]; then
+    exec $TOXPATH -vv -e "$ENV" -- "${@:2}"
+else
+    exec $TOXPATH -vv -p auto -e "$ENV" -- "${@:2}"
+fi
diff --git a/scripts/split-tox-gh-actions/ci-yaml-services.txt b/scripts/split-tox-gh-actions/ci-yaml-services.txt
index f6a658e..01bb956 100644
--- a/scripts/split-tox-gh-actions/ci-yaml-services.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml-services.txt
@@ -15,4 +15,5 @@
     env:
       SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
       SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
-      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
\ No newline at end of file
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: {{ postgres_host }}
diff --git a/scripts/split-tox-gh-actions/ci-yaml-test-py27-snippet.txt b/scripts/split-tox-gh-actions/ci-yaml-test-py27-snippet.txt
new file mode 100644
index 0000000..8cf2dcb
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml-test-py27-snippet.txt
@@ -0,0 +1,29 @@
+  test-py27:
+    name: {{ framework }}, python 2.7, ubuntu-20.04
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    timeout-minutes: 30
+{{ services }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test {{ framework }}
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py2.7-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
diff --git a/scripts/split-tox-gh-actions/ci-yaml-test-snippet.txt b/scripts/split-tox-gh-actions/ci-yaml-test-snippet.txt
new file mode 100644
index 0000000..09ed89e
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml-test-snippet.txt
@@ -0,0 +1,37 @@
+  test:
+    name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+{{ strategy_matrix }}
+{{ services }}
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test {{ framework }}
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
index 2e14cb5..99d8154 100644
--- a/scripts/split-tox-gh-actions/ci-yaml.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -23,37 +23,19 @@ env:
     ${{ github.workspace }}/dist-serverless
 
 jobs:
-  test:
-    name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }}
-    runs-on: ${{ matrix.os }}
-    timeout-minutes: 45
-    continue-on-error: true
-{{ strategy_matrix }}
-{{ services }}
+{{ test }}
 
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
-        run: |
-          pip install codecov tox
+{{ test_py27 }}
 
-      - name: Test {{ framework }}
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        shell: bash
+  check_required_tests:
+    name: All {{ framework }} tests passed or skipped
+{{ check_needs }}
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
         run: |
-          set -x # print commands that are executed
-          coverage erase
-
-          ./scripts/runtox.sh "${{ matrix.python-version }}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+{{ check_py27 }}
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
index 6e0018d..c216534 100755
--- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -27,14 +27,35 @@ TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
 TEMPLATE_DIR = Path(__file__).resolve().parent
 TEMPLATE_FILE = TEMPLATE_DIR / "ci-yaml.txt"
 TEMPLATE_FILE_SERVICES = TEMPLATE_DIR / "ci-yaml-services.txt"
+TEMPLATE_SNIPPET_TEST = TEMPLATE_DIR / "ci-yaml-test-snippet.txt"
+TEMPLATE_SNIPPET_TEST_PY27 = TEMPLATE_DIR / "ci-yaml-test-py27-snippet.txt"
 
 FRAMEWORKS_NEEDING_POSTGRES = ["django"]
 
 MATRIX_DEFINITION = """
     strategy:
+      fail-fast: false
       matrix:
         python-version: [{{ python-version }}]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+"""
+
+CHECK_NEEDS = """\
+    needs: test
+"""
+CHECK_NEEDS_PY27 = """\
+    needs: [test, test-py27]
+"""
+
+CHECK_PY27 = """\
+      - name: Check for 2.7 failures
+        if: contains(needs.test-py27.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
 """
 
 
@@ -44,29 +65,73 @@ def write_yaml_file(
     python_versions,
 ):
     """Write the YAML configuration file for one framework to disk."""
-    # render template for print
+    py_versions = [py.replace("py", "") for py in python_versions]
+    py27_supported = "2.7" in py_versions
+
+    test_loc = template.index("{{ test }}\n")
+    f = open(TEMPLATE_SNIPPET_TEST, "r")
+    test_snippet = f.readlines()
+    template = template[:test_loc] + test_snippet + template[test_loc + 1 :]
+    f.close()
+
+    test_py27_loc = template.index("{{ test_py27 }}\n")
+    if py27_supported:
+        f = open(TEMPLATE_SNIPPET_TEST_PY27, "r")
+        test_py27_snippet = f.readlines()
+        template = (
+            template[:test_py27_loc] + test_py27_snippet + template[test_py27_loc + 1 :]
+        )
+        f.close()
+
+        py_versions.remove("2.7")
+    else:
+        template.pop(test_py27_loc)
+
     out = ""
+    py27_test_part = False
     for template_line in template:
-        if template_line == "{{ strategy_matrix }}\n":
-            py_versions = [f'"{py.replace("py", "")}"' for py in python_versions]
-
+        if template_line.strip() == "{{ strategy_matrix }}":
             m = MATRIX_DEFINITION
             m = m.replace("{{ framework }}", current_framework).replace(
-                "{{ python-version }}", ",".join(py_versions)
+                "{{ python-version }}", ",".join([f'"{v}"' for v in py_versions])
             )
             out += m
 
-        elif template_line == "{{ services }}\n":
+        elif template_line.strip() == "{{ services }}":
             if current_framework in FRAMEWORKS_NEEDING_POSTGRES:
                 f = open(TEMPLATE_FILE_SERVICES, "r")
-                out += "".join(f.readlines())
+                lines = [
+                    line.replace(
+                        "{{ postgres_host }}",
+                        "postgres" if py27_test_part else "localhost",
+                    )
+                    for line in f.readlines()
+                ]
+                out += "".join(lines)
                 f.close()
 
+        elif template_line.strip() == "{{ check_needs }}":
+            if py27_supported:
+                out += CHECK_NEEDS_PY27
+            else:
+                out += CHECK_NEEDS
+
+        elif template_line.strip() == "{{ check_py27 }}":
+            if py27_supported:
+                out += CHECK_PY27
+
         else:
+            if template_line.strip() == "test-py27:":
+                py27_test_part = True
+
             out += template_line.replace("{{ framework }}", current_framework)
 
     # write rendered template
-    outfile_name = OUT_DIR / f"test-integration-{current_framework}.yml"
+    if current_framework == "common":
+        outfile_name = OUT_DIR / f"test-{current_framework}.yml"
+    else:
+        outfile_name = OUT_DIR / f"test-integration-{current_framework}.yml"
+
     print(f"Writing {outfile_name}")
     f = open(outfile_name, "w")
     f.writelines(out)
@@ -77,7 +142,7 @@ def get_yaml_files_hash():
     """Calculate a hash of all the yaml configuration files"""
 
     hasher = hashlib.md5()
-    path_pattern = (OUT_DIR / f"test-integration-*.yml").as_posix()
+    path_pattern = (OUT_DIR / "test-integration-*.yml").as_posix()
     for file in glob(path_pattern):
         with open(file, "rb") as f:
             buf = f.read()
@@ -103,7 +168,7 @@ def main(fail_on_changes):
 
     python_versions = defaultdict(list)
 
-    print("Parse tox.ini nevlist")
+    print("Parse tox.ini envlist")
 
     for line in lines:
         # normalize lines
@@ -127,7 +192,7 @@ def main(fail_on_changes):
                 if python_version not in python_versions[framework]:
                     python_versions[framework].append(python_version)
 
-        except ValueError as err:
+        except ValueError:
             print(f"ERROR reading line {line}")
 
     for framework in python_versions:
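
The snippet splicing above amounts to a small line-based templating scheme; a minimal sketch of the idea (with made-up inputs, not the script's actual file I/O) is:

```python
def render(template_lines, snippets):
    # Replace any line that is exactly a "{{ key }}" placeholder with the
    # corresponding list of snippet lines; pass everything else through.
    out = []
    for line in template_lines:
        key = line.strip()
        if key.startswith("{{") and key.endswith("}}"):
            out.extend(snippets.get(key.strip("{} "), []))
        else:
            out.append(line)
    return out

rendered = render(
    ["jobs:\n", "{{ test }}\n"],
    {"test": ["  test:\n", "    runs-on: ubuntu-20.04\n"]},
)
print("".join(rendered))
```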
diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py
index ab5123e..f4baf78 100644
--- a/sentry_sdk/__init__.py
+++ b/sentry_sdk/__init__.py
@@ -7,6 +7,9 @@ from sentry_sdk.api import *  # noqa
 
 from sentry_sdk.consts import VERSION  # noqa
 
+from sentry_sdk.crons import monitor  # noqa
+from sentry_sdk.tracing import trace  # noqa
+
 __all__ = [  # noqa
     "Hub",
     "Scope",
@@ -31,6 +34,11 @@ __all__ = [  # noqa
     "set_extra",
     "set_user",
     "set_level",
+    "set_measurement",
+    "get_current_span",
+    "get_traceparent",
+    "get_baggage",
+    "continue_trace",
 ]
 
 # Initialize the debug support after everything is loaded
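
Both new exports are usable directly as decorators; a minimal sketch (the DSN, monitor slug, and function bodies are placeholders):

```python
import sentry_sdk

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

@sentry_sdk.monitor(monitor_slug="nightly-cleanup")  # reports cron check-ins
def nightly_cleanup():
    ...

@sentry_sdk.trace  # runs the function inside a child span of the current transaction
def expensive_step():
    ...
```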
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 2061774..0e56608 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -1,8 +1,8 @@
 import sys
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
     from typing import Tuple
     from typing import Any
@@ -15,6 +15,8 @@ if MYPY:
 PY2 = sys.version_info[0] == 2
 PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
 PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
+PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
+PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
 
 if PY2:
     import urlparse
@@ -25,6 +27,7 @@ if PY2:
     number_types = (int, long, float)  # noqa
     int_types = (int, long)  # noqa
     iteritems = lambda x: x.iteritems()  # noqa: B301
+    binary_sequence_types = (bytearray, memoryview)
 
     def implements_str(cls):
         # type: (T) -> T
@@ -43,6 +46,7 @@ else:
     number_types = (int, float)  # type: Tuple[type, type]
     int_types = (int,)
     iteritems = lambda x: x.items()
+    binary_sequence_types = (bytes, bytearray, memoryview)
 
     def implements_str(x):
         # type: (T) -> T
@@ -78,7 +82,10 @@ def check_thread_support():
     if "threads" in opt:
         return
 
-    if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
+    # put here because of circular import
+    from sentry_sdk.consts import FALSE_VALUES
+
+    if str(opt.get("enable-threads", "0")).lower() in FALSE_VALUES:
         from warnings import warn
 
         warn(
diff --git a/sentry_sdk/_functools.py b/sentry_sdk/_functools.py
index 8dcf79c..6bcc85f 100644
--- a/sentry_sdk/_functools.py
+++ b/sentry_sdk/_functools.py
@@ -1,13 +1,68 @@
 """
 A backport of Python 3 functools to Python 2/3. The only important change
 we rely upon is that `update_wrapper` handles AttributeError gracefully.
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
 """
 
 from functools import partial
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
 
diff --git a/sentry_sdk/_lru_cache.py b/sentry_sdk/_lru_cache.py
new file mode 100644
index 0000000..91cf55d
--- /dev/null
+++ b/sentry_sdk/_lru_cache.py
@@ -0,0 +1,156 @@
+"""
+A fork of Python 3.6's stdlib lru_cache (found in Python's 'cpython/Lib/functools.py')
+adapted into a data structure for single-threaded use.
+
+https://github.com/python/cpython/blob/v3.6.12/Lib/functools.py
+
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+"""
+
+SENTINEL = object()
+
+
+# aliases to the entries in a node
+PREV = 0
+NEXT = 1
+KEY = 2
+VALUE = 3
+
+
+class LRUCache(object):
+    def __init__(self, max_size):
+        assert max_size > 0
+
+        self.max_size = max_size
+        self.full = False
+
+        self.cache = {}
+
+        # root of the circularly linked list to keep track of
+        # the least recently used key
+        self.root = []  # type: ignore
+        # the node looks like [PREV, NEXT, KEY, VALUE]
+        self.root[:] = [self.root, self.root, None, None]
+
+        self.hits = self.misses = 0
+
+    def set(self, key, value):
+        link = self.cache.get(key, SENTINEL)
+
+        if link is not SENTINEL:
+            # have to move the node to the front of the linked list
+            link_prev, link_next, _key, _value = link
+
+            # first remove the node from the linked list
+            link_prev[NEXT] = link_next
+            link_next[PREV] = link_prev
+
+            # insert the node between the root and the last
+            last = self.root[PREV]
+            last[NEXT] = self.root[PREV] = link
+            link[PREV] = last
+            link[NEXT] = self.root
+
+            # update the value
+            link[VALUE] = value
+
+        elif self.full:
+            # reuse the root node, so update its key/value
+            old_root = self.root
+            old_root[KEY] = key
+            old_root[VALUE] = value
+
+            self.root = old_root[NEXT]
+            old_key = self.root[KEY]
+
+            self.root[KEY] = self.root[VALUE] = None
+
+            del self.cache[old_key]
+
+            self.cache[key] = old_root
+
+        else:
+            # insert new node after last
+            last = self.root[PREV]
+            link = [last, self.root, key, value]
+            last[NEXT] = self.root[PREV] = self.cache[key] = link
+            self.full = len(self.cache) >= self.max_size
+
+    def get(self, key, default=None):
+        link = self.cache.get(key, SENTINEL)
+
+        if link is SENTINEL:
+            self.misses += 1
+            return default
+
+        # have to move the node to the front of the linked list
+        link_prev, link_next, _key, _value = link
+
+        # first remove the node from the linked list
+        link_prev[NEXT] = link_next
+        link_next[PREV] = link_prev
+
+        # insert the node between the root and the last
+        last = self.root[PREV]
+        last[NEXT] = self.root[PREV] = link
+        link[PREV] = last
+        link[NEXT] = self.root
+
+        self.hits += 1
+
+        return link[VALUE]
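
A quick usage sketch of the vendored cache above (names taken from the file; the eviction shown is exactly what the linked-list logic implements):

```python
from sentry_sdk._lru_cache import LRUCache

cache = LRUCache(max_size=2)
cache.set("a", 1)
cache.set("b", 2)
cache.get("a")                   # hit; "a" becomes most recently used
cache.set("c", 3)                # cache full, so the LRU entry "b" is evicted
assert cache.get("b") is None    # miss: "b" is gone
assert cache.get("a") == 1 and cache.get("c") == 3
print(cache.hits, cache.misses)  # 3 1
```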
diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py
index fc845f7..129b6e5 100644
--- a/sentry_sdk/_queue.py
+++ b/sentry_sdk/_queue.py
@@ -1,14 +1,74 @@
 """
-A fork of Python 3.6's stdlib queue with Lock swapped out for RLock to avoid a
-deadlock while garbage collecting.
+A fork of Python 3.6's stdlib queue (found in Python's 'cpython/Lib/queue.py')
+with Lock swapped out for RLock to avoid a deadlock while garbage collecting.
 
-See
+https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py
+
+
+See also
 https://codewithoutrules.com/2017/08/16/concurrency-python/
 https://bugs.python.org/issue14976
 https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
 
 We also vendor the code to evade eventlet's broken monkeypatching, see
 https://github.com/getsentry/sentry-python/pull/484
+
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
 """
 
 import threading
@@ -16,9 +76,9 @@ import threading
 from collections import deque
 from time import time
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 __all__ = ["EmptyError", "FullError", "Queue"]
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 3c985f2..cbead04 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -1,10 +1,14 @@
 try:
-    from typing import TYPE_CHECKING as MYPY
+    from typing import TYPE_CHECKING as TYPE_CHECKING
 except ImportError:
-    MYPY = False
+    TYPE_CHECKING = False
 
 
-if MYPY:
+# Re-exported for compat, since code out there in the wild might use this variable.
+MYPY = TYPE_CHECKING
+
+
+if TYPE_CHECKING:
     from types import TracebackType
     from typing import Any
     from typing import Callable
@@ -30,6 +34,7 @@ if MYPY:
     EventProcessor = Callable[[Event, Hint], Optional[Event]]
     ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
     BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
 
     TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
 
@@ -80,3 +85,5 @@ if MYPY:
 
     FractionUnit = Literal["ratio", "percent"]
     MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]
+
+    ProfilerMode = Literal["sleep", "thread", "gevent", "unknown"]
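
The `MYPY` alias keeps old import sites working while new code migrates to the standard name; the compat surface is simply:

```python
# Both spellings refer to the same flag; MYPY is retained only for backwards compat.
from sentry_sdk._types import MYPY, TYPE_CHECKING

assert MYPY is TYPE_CHECKING  # False at runtime, True under a static type checker

if TYPE_CHECKING:
    from typing import Optional  # resolved only during static analysis
```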
diff --git a/sentry_sdk/_werkzeug.py b/sentry_sdk/_werkzeug.py
new file mode 100644
index 0000000..197c5c1
--- /dev/null
+++ b/sentry_sdk/_werkzeug.py
@@ -0,0 +1,100 @@
+"""
+Copyright (c) 2007 by the Pallets team.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright notice,
+  this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
+BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+"""
+
+from sentry_sdk._compat import iteritems
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Dict
+    from typing import Iterator
+    from typing import Tuple
+
+
+#
+# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
+# https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361
+#
+# We need this function because Django does not give us a "pure" http header
+# dict. So we might as well use it for all WSGI integrations.
+#
+def _get_headers(environ):
+    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+    """
+    Returns only proper HTTP headers.
+    """
+    for key, value in iteritems(environ):
+        key = str(key)
+        if key.startswith("HTTP_") and key not in (
+            "HTTP_CONTENT_TYPE",
+            "HTTP_CONTENT_LENGTH",
+        ):
+            yield key[5:].replace("_", "-").title(), value
+        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+            yield key.replace("_", "-").title(), value
+
+
+#
+# `get_host` comes from `werkzeug.wsgi.get_host`
+# https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145
+#
+def get_host(environ, use_x_forwarded_for=False):
+    # type: (Dict[str, str], bool) -> str
+    """
+    Return the host for the given WSGI environment.
+    """
+    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
+        rv = environ["HTTP_X_FORWARDED_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("HTTP_HOST"):
+        rv = environ["HTTP_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("SERVER_NAME"):
+        rv = environ["SERVER_NAME"]
+        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
+            ("https", "443"),
+            ("http", "80"),
+        ):
+            rv += ":" + environ["SERVER_PORT"]
+    else:
+        # In spite of the WSGI spec, SERVER_NAME might not be present.
+        rv = "unknown"
+
+    return rv
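
A quick check of the vendored `get_host` above against a minimal WSGI environ (the keys are standard WSGI/CGI variables):

```python
from sentry_sdk._werkzeug import get_host

environ = {
    "wsgi.url_scheme": "https",
    "HTTP_HOST": "example.com:443",
    "SERVER_NAME": "backend-1",
    "SERVER_PORT": "8443",
}
print(get_host(environ))  # "example.com" -- the default :443 port is stripped

environ.pop("HTTP_HOST")
print(get_host(environ))  # "backend-1:8443" -- falls back to SERVER_NAME + port
```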
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index cec914a..f0c6a87 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -1,11 +1,11 @@
 import inspect
 
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.hub import Hub
 from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import NoOpSpan, Transaction
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -15,8 +15,15 @@ if MYPY:
     from typing import ContextManager
     from typing import Union
 
-    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo
-    from sentry_sdk.tracing import Span, Transaction
+    from sentry_sdk._types import (
+        Event,
+        Hint,
+        Breadcrumb,
+        BreadcrumbHint,
+        ExcInfo,
+        MeasurementUnit,
+    )
+    from sentry_sdk.tracing import Span
 
     T = TypeVar("T")
     F = TypeVar("F", bound=Callable[..., Any])
@@ -44,6 +51,11 @@ __all__ = [
     "set_extra",
     "set_user",
     "set_level",
+    "set_measurement",
+    "get_current_span",
+    "get_traceparent",
+    "get_baggage",
+    "continue_trace",
 ]
 
 
@@ -210,5 +222,48 @@ def start_transaction(
     transaction=None,  # type: Optional[Transaction]
     **kwargs  # type: Any
 ):
-    # type: (...) -> Transaction
+    # type: (...) -> Union[Transaction, NoOpSpan]
     return Hub.current.start_transaction(transaction, **kwargs)
+
+
+def set_measurement(name, value, unit=""):
+    # type: (str, float, MeasurementUnit) -> None
+    transaction = Hub.current.scope.transaction
+    if transaction is not None:
+        transaction.set_measurement(name, value, unit)
+
+
+def get_current_span(hub=None):
+    # type: (Optional[Hub]) -> Optional[Span]
+    """
+    Returns the currently active span if there is one running, otherwise `None`
+    """
+    if hub is None:
+        hub = Hub.current
+
+    current_span = hub.scope.span
+    return current_span
+
+
+def get_traceparent():
+    # type: () -> Optional[str]
+    """
+    Returns the traceparent either from the active span or from the scope.
+    """
+    return Hub.current.get_traceparent()
+
+
+def get_baggage():
+    # type: () -> Optional[str]
+    """
+    Returns Baggage either from the active span or from the scope.
+    """
+    return Hub.current.get_baggage()
+
+
+def continue_trace(environ_or_headers, op=None, name=None, source=None):
+    # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
+    """
+    Sets the propagation context from environment or headers and returns a transaction.
+    """
+    return Hub.current.continue_trace(environ_or_headers, op, name, source)
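
Taken together, the new helpers cover trace propagation end to end; a hedged sketch of how they fit (placeholder DSN, and the header dict stands in for a real HTTP request):

```python
import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    traces_sample_rate=1.0,
)

# Outgoing side: attach the current trace to a request.
with sentry_sdk.start_transaction(op="task", name="demo"):
    sentry_sdk.set_measurement("items_processed", 42)
    outgoing_headers = {
        "sentry-trace": sentry_sdk.get_traceparent(),
        "baggage": sentry_sdk.get_baggage(),
    }

# Incoming side: resume the trace from the received headers.
transaction = sentry_sdk.continue_trace(
    outgoing_headers, op="http.server", name="handler"
)
with sentry_sdk.start_transaction(transaction):
    span = sentry_sdk.get_current_span()  # the transaction itself here
```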
diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py
index b7b6b0b..c15afd4 100644
--- a/sentry_sdk/attachments.py
+++ b/sentry_sdk/attachments.py
@@ -1,10 +1,10 @@
 import os
 import mimetypes
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.envelope import Item, PayloadRef
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional, Union, Callable
 
 
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 06923c5..02006e9 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -1,3 +1,4 @@
+from importlib import import_module
 import os
 import uuid
 import random
@@ -10,28 +11,38 @@ from sentry_sdk.utils import (
     current_stacktrace,
     disable_capture_event,
     format_timestamp,
+    get_sdk_name,
     get_type_name,
     get_default_release,
     handle_in_app,
     logger,
 )
 from sentry_sdk.serializer import serialize
+from sentry_sdk.tracing import trace, has_tracing_enabled
 from sentry_sdk.transport import make_transport
-from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.consts import (
+    DEFAULT_MAX_VALUE_LENGTH,
+    DEFAULT_OPTIONS,
+    INSTRUMENTER,
+    VERSION,
+    ClientConstructor,
+)
 from sentry_sdk.integrations import setup_integrations
 from sentry_sdk.utils import ContextVar
 from sentry_sdk.sessions import SessionFlusher
 from sentry_sdk.envelope import Envelope
-from sentry_sdk.profiler import setup_profiler
-from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate
+from sentry_sdk.profiler import has_profiling_enabled, setup_profiler
+from sentry_sdk.scrubber import EventScrubber
+from sentry_sdk.monitor import Monitor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
     from typing import Optional
+    from typing import Sequence
 
     from sentry_sdk.scope import Scope
     from sentry_sdk._types import Event, Hint
@@ -41,6 +52,13 @@ if MYPY:
 _client_init_debug = ContextVar("client_init_debug")
 
 
+SDK_INFO = {
+    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
+    "version": VERSION,
+    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
+
+
 def _get_options(*args, **kwargs):
     # type: (*Optional[str], **Any) -> Dict[str, Any]
     if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
@@ -59,7 +77,28 @@ def _get_options(*args, **kwargs):
 
     for key, value in iteritems(options):
         if key not in rv:
+            # Option "with_locals" was renamed to "include_local_variables"
+            if key == "with_locals":
+                msg = (
+                    "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. "
+                    "Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+                )
+                logger.warning(msg)
+                rv["include_local_variables"] = value
+                continue
+
+            # Option "request_bodies" was renamed to "max_request_body_size"
+            if key == "request_bodies":
+                msg = (
+                    "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. "
+                    "Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
+                )
+                logger.warning(msg)
+                rv["max_request_body_size"] = value
+                continue
+
             raise TypeError("Unknown option %r" % (key,))
+
         rv[key] = value
 
     if rv["dsn"] is None:
@@ -74,9 +113,34 @@ def _get_options(*args, **kwargs):
     if rv["server_name"] is None and hasattr(socket, "gethostname"):
         rv["server_name"] = socket.gethostname()
 
+    if rv["instrumenter"] is None:
+        rv["instrumenter"] = INSTRUMENTER.SENTRY
+
+    if rv["project_root"] is None:
+        try:
+            project_root = os.getcwd()
+        except Exception:
+            project_root = None
+
+        rv["project_root"] = project_root
+
+    if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None:
+        rv["traces_sample_rate"] = 1.0
+
+    if rv["event_scrubber"] is None:
+        rv["event_scrubber"] = EventScrubber()
+
     return rv
 
 
+try:
+    # Python 3.6+
+    module_not_found_error = ModuleNotFoundError
+except Exception:
+    # Older Python versions
+    module_not_found_error = ImportError  # type: ignore
+
+
 class _Client(object):
     """The client is internally responsible for capturing the events and
     forwarding them to sentry through the configured transport.  It takes
@@ -87,6 +151,7 @@ class _Client(object):
     def __init__(self, *args, **kwargs):
         # type: (*Any, **Any) -> None
         self.options = get_options(*args, **kwargs)  # type: Dict[str, Any]
+
         self._init_impl()
 
     def __getstate__(self):
@@ -98,6 +163,52 @@ class _Client(object):
         self.options = state["options"]
         self._init_impl()
 
+    def _setup_instrumentation(self, functions_to_trace):
+        # type: (Sequence[Dict[str, str]]) -> None
+        """
+        Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator.
+        """
+        for function in functions_to_trace:
+            class_name = None
+            function_qualname = function["qualified_name"]
+            module_name, function_name = function_qualname.rsplit(".", 1)
+
+            try:
+                # Try to import module and function
+                # ex: "mymodule.submodule.funcname"
+
+                module_obj = import_module(module_name)
+                function_obj = getattr(module_obj, function_name)
+                setattr(module_obj, function_name, trace(function_obj))
+                logger.debug("Enabled tracing for %s", function_qualname)
+
+            except module_not_found_error:
+                try:
+                    # Try to import a class
+                    # ex: "mymodule.submodule.MyClassName.member_function"
+
+                    module_name, class_name = module_name.rsplit(".", 1)
+                    module_obj = import_module(module_name)
+                    class_obj = getattr(module_obj, class_name)
+                    function_obj = getattr(class_obj, function_name)
+                    setattr(class_obj, function_name, trace(function_obj))
+                    setattr(module_obj, class_name, class_obj)
+                    logger.debug("Enabled tracing for %s", function_qualname)
+
+                except Exception as e:
+                    logger.warning(
+                        "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
+                        function_qualname,
+                        e,
+                    )
+
+            except Exception as e:
+                logger.warning(
+                    "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
+                    function_qualname,
+                    e,
+                )
+
     def _init_impl(self):
         # type: () -> None
         old_debug = _client_init_debug.get(False)
@@ -111,13 +222,20 @@ class _Client(object):
             _client_init_debug.set(self.options["debug"])
             self.transport = make_transport(self.options)
 
+            self.monitor = None
+            if self.transport:
+                if self.options["_experiments"].get(
+                    "enable_backpressure_handling", False
+                ):
+                    self.monitor = Monitor(self.transport)
+
             self.session_flusher = SessionFlusher(capture_func=_capture_envelope)
 
-            request_bodies = ("always", "never", "small", "medium")
-            if self.options["request_bodies"] not in request_bodies:
+            max_request_body_size = ("always", "never", "small", "medium")
+            if self.options["max_request_body_size"] not in max_request_body_size:
                 raise ValueError(
-                    "Invalid value for request_bodies. Must be one of {}".format(
-                        request_bodies
+                    "Invalid value for max_request_body_size. Must be one of {}".format(
+                        max_request_body_size
                     )
                 )
 
@@ -128,16 +246,22 @@ class _Client(object):
                     "auto_enabling_integrations"
                 ],
             )
+
+            sdk_name = get_sdk_name(list(self.integrations.keys()))
+            SDK_INFO["name"] = sdk_name
+            logger.debug("Setting SDK name to '%s'", sdk_name)
+
         finally:
             _client_init_debug.set(old_debug)
 
-        profiles_sample_rate = self.options["_experiments"].get("profiles_sample_rate")
-        if profiles_sample_rate is not None and profiles_sample_rate > 0:
+        if has_profiling_enabled(self.options):
             try:
                 setup_profiler(self.options)
             except ValueError as e:
                 logger.debug(str(e))
 
+        self._setup_instrumentation(self.options.get("functions_to_trace", []))
+
     @property
     def dsn(self):
         # type: () -> Optional[str]
@@ -157,7 +281,7 @@ class _Client(object):
 
         if scope is not None:
             is_transaction = event.get("type") == "transaction"
-            event_ = scope.apply_to_event(event, hint)
+            event_ = scope.apply_to_event(event, hint, self.options)
 
             # one of the event/error processors returned None
             if event_ is None:
@@ -181,7 +305,12 @@ class _Client(object):
                     "values": [
                         {
                             "stacktrace": current_stacktrace(
-                                self.options["with_locals"]
+                                include_local_variables=self.options.get(
+                                    "include_local_variables", True
+                                ),
+                                max_value_length=self.options.get(
+                                    "max_value_length", DEFAULT_MAX_VALUE_LENGTH
+                                ),
                             ),
                             "crashed": False,
                             "current": True,
@@ -201,32 +330,60 @@ class _Client(object):
             event["platform"] = "python"
 
         event = handle_in_app(
-            event, self.options["in_app_exclude"], self.options["in_app_include"]
+            event,
+            self.options["in_app_exclude"],
+            self.options["in_app_include"],
+            self.options["project_root"],
         )
 
+        if event is not None:
+            event_scrubber = self.options["event_scrubber"]
+            if event_scrubber and not self.options["send_default_pii"]:
+                event_scrubber.scrub_event(event)
+
         # Postprocess the event here so that annotated types do
         # generally not surface in before_send
         if event is not None:
             event = serialize(
                 event,
-                smart_transaction_trimming=self.options["_experiments"].get(
-                    "smart_transaction_trimming"
-                ),
+                max_request_body_size=self.options.get("max_request_body_size"),
+                max_value_length=self.options.get("max_value_length"),
             )
 
         before_send = self.options["before_send"]
-        if before_send is not None and event.get("type") != "transaction":
+        if (
+            before_send is not None
+            and event is not None
+            and event.get("type") != "transaction"
+        ):
             new_event = None
             with capture_internal_exceptions():
                 new_event = before_send(event, hint or {})
             if new_event is None:
-                logger.info("before send dropped event (%s)", event)
+                logger.info("before send dropped event")
                 if self.transport:
                     self.transport.record_lost_event(
                         "before_send", data_category="error"
                     )
             event = new_event  # type: ignore
 
+        before_send_transaction = self.options["before_send_transaction"]
+        if (
+            before_send_transaction is not None
+            and event is not None
+            and event.get("type") == "transaction"
+        ):
+            new_event = None
+            with capture_internal_exceptions():
+                new_event = before_send_transaction(event, hint or {})
+            if new_event is None:
+                logger.info("before send transaction dropped event")
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "before_send", data_category="transaction"
+                    )
+            event = new_event  # type: ignore
+
         return event
 
     def _is_ignored_error(self, event, hint):
@@ -315,7 +472,7 @@ class _Client(object):
 
         if session.user_agent is None:
             headers = (event.get("request") or {}).get("headers")
-            for (k, v) in iteritems(headers or {}):
+            for k, v in iteritems(headers or {}):
                 if k.lower() == "user-agent":
                     user_agent = v
                     break
@@ -340,6 +497,9 @@ class _Client(object):
 
         :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
 
+        :param scope: An optional scope to use for determining whether this event
+            should be captured.
+
         :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations, setting `debug=True` on `init()` may help.
         """
         if disable_capture_event.get(False):
@@ -357,6 +517,8 @@ class _Client(object):
         if not self._should_capture(event, hint, scope):
             return None
 
+        profile = event.pop("profile", None)
+
         event_opt = self._prepare_event(event, hint, scope)
         if event_opt is None:
             return None
@@ -368,65 +530,55 @@ class _Client(object):
             self._update_session_from_event(session, event)
 
         is_transaction = event_opt.get("type") == "transaction"
+        is_checkin = event_opt.get("type") == "check_in"
 
-        if not is_transaction and not self._should_sample_error(event):
+        if (
+            not is_transaction
+            and not is_checkin
+            and not self._should_sample_error(event)
+        ):
             return None
 
+        tracing_enabled = has_tracing_enabled(self.options)
         attachments = hint.get("attachments")
 
-        # this is outside of the `if` immediately below because even if we don't
-        # use the value, we want to make sure we remove it before the event is
-        # sent
-        raw_tracestate = (
-            event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "")
-        )
+        trace_context = event_opt.get("contexts", {}).get("trace") or {}
+        dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {})
 
-        dynamic_sampling_context = (
-            event_opt.get("contexts", {})
-            .get("trace", {})
-            .pop("dynamic_sampling_context", {})
+        # If tracing is enabled, all events should go to the /envelope endpoint.
+        # If tracing is disabled, only transactions, events with attachments, and check-ins should go to the /envelope endpoint.
+        should_use_envelope_endpoint = (
+            tracing_enabled or is_transaction or is_checkin or bool(attachments)
         )
-
-        # Transactions or events with attachments should go to the /envelope/
-        # endpoint.
-        if is_transaction or attachments:
-
+        if should_use_envelope_endpoint:
             headers = {
                 "event_id": event_opt["event_id"],
                 "sent_at": format_timestamp(datetime.utcnow()),
             }
 
-            if has_tracestate_enabled():
-                tracestate_data = raw_tracestate and reinflate_tracestate(
-                    raw_tracestate.replace("sentry=", "")
-                )
-
-                if tracestate_data:
-                    headers["trace"] = tracestate_data
-            elif dynamic_sampling_context:
+            if dynamic_sampling_context:
                 headers["trace"] = dynamic_sampling_context
 
             envelope = Envelope(headers=headers)
 
             if is_transaction:
-                if "profile" in event_opt:
-                    event_opt["profile"]["environment"] = event_opt.get("environment")
-                    event_opt["profile"]["release"] = event_opt.get("release", "")
-                    event_opt["profile"]["timestamp"] = event_opt.get("timestamp", "")
-                    event_opt["profile"]["transactions"][0]["id"] = event_opt[
-                        "event_id"
-                    ]
-                    envelope.add_profile(event_opt.pop("profile"))
+                if profile is not None:
+                    envelope.add_profile(profile.to_json(event_opt, self.options))
                 envelope.add_transaction(event_opt)
+            elif is_checkin:
+                envelope.add_checkin(event_opt)
             else:
                 envelope.add_event(event_opt)
 
             for attachment in attachments or ():
                 envelope.add_item(attachment.to_envelope_item())
+
             self.transport.capture_envelope(envelope)
+
         else:
-            # All other events go to the /store/ endpoint.
+            # All other events go to the legacy /store/ endpoint (will be removed in the future).
             self.transport.capture_event(event_opt)
+
         return event_id
 
     def capture_session(
@@ -451,6 +603,8 @@ class _Client(object):
         if self.transport is not None:
             self.flush(timeout=timeout, callback=callback)
             self.session_flusher.kill()
+            if self.monitor:
+                self.monitor.kill()
             self.transport.kill()
             self.transport = None
 
@@ -482,9 +636,9 @@ class _Client(object):
         self.close()
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     # Make mypy, PyCharm and other static analyzers think `get_options` is a
     # type to have nicer autocompletion for params.
     #
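
For orientation, the new `before_send_transaction` hook shown in the hunks above behaves like `before_send` but fires only for transaction events, and returning `None` drops the transaction and records a lost-event client report. A minimal sketch of wiring it up (the DSN and the `/healthz` transaction name are placeholders):

```python
import sentry_sdk

def drop_healthchecks(event, hint):
    # Returning None discards the transaction; the client records a
    # "before_send" lost-event report with data_category="transaction".
    if event.get("transaction") == "/healthz":
        return None
    return event

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    before_send_transaction=drop_healthchecks,
)
```
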
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index ceba6b5..23cca00 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -1,6 +1,9 @@
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+# Defined up top to prevent a circular import caused by integration imports
+DEFAULT_MAX_VALUE_LENGTH = 1024
+
+if TYPE_CHECKING:
     import sentry_sdk
 
     from typing import Optional
@@ -19,7 +22,9 @@ if MYPY:
         BreadcrumbProcessor,
         Event,
         EventProcessor,
+        ProfilerMode,
         TracesSampler,
+        TransactionProcessor,
     )
 
     # Experiments are feature flags to enable and disable certain unstable SDK
@@ -31,17 +36,156 @@ if MYPY:
         {
             "max_spans": Optional[int],
             "record_sql_params": Optional[bool],
-            "smart_transaction_trimming": Optional[bool],
-            "propagate_tracestate": Optional[bool],
-            "custom_measurements": Optional[bool],
+            # TODO: Remove these 2 profiling related experiments
             "profiles_sample_rate": Optional[float],
-            "profiler_mode": Optional[str],
+            "profiler_mode": Optional[ProfilerMode],
+            "enable_backpressure_handling": Optional[bool],
         },
         total=False,
     )
 
 DEFAULT_QUEUE_SIZE = 100
 DEFAULT_MAX_BREADCRUMBS = 100
+MATCH_ALL = r".*"
+
+FALSE_VALUES = [
+    "false",
+    "no",
+    "off",
+    "n",
+    "0",
+]
+
+
+class INSTRUMENTER:
+    SENTRY = "sentry"
+    OTEL = "otel"
+
+
+class SPANDATA:
+    """
+    Additional information describing the type of the span.
+    See: https://develop.sentry.dev/sdk/performance/span-data-conventions/
+    """
+
+    DB_NAME = "db.name"
+    """
+    The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails).
+    Example: myDatabase
+    """
+
+    DB_OPERATION = "db.operation"
+    """
+    The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: findAndModify, HMSET, SELECT
+    """
+
+    DB_SYSTEM = "db.system"
+    """
+    An identifier for the database management system (DBMS) product being used.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: postgresql
+    """
+
+    CACHE_HIT = "cache.hit"
+    """
+    A boolean indicating whether the requested data was found in the cache.
+    Example: true
+    """
+
+    CACHE_ITEM_SIZE = "cache.item_size"
+    """
+    The size of the requested data in bytes.
+    Example: 58
+    """
+
+    HTTP_QUERY = "http.query"
+    """
+    The query string present in the URL.
+    Example: ?foo=bar&bar=baz
+    """
+
+    HTTP_FRAGMENT = "http.fragment"
+    """
+    The fragment present in the URL.
+    Example: #foo=bar
+    """
+
+    HTTP_METHOD = "http.method"
+    """
+    The HTTP method used.
+    Example: GET
+    """
+
+    HTTP_STATUS_CODE = "http.response.status_code"
+    """
+    The HTTP status code as an integer.
+    Example: 418
+    """
+
+    SERVER_ADDRESS = "server.address"
+    """
+    Name of the database host.
+    Example: example.com
+    """
+
+    SERVER_PORT = "server.port"
+    """
+    Logical server port number.
+    Example: 80; 8080; 443
+    """
+
+    SERVER_SOCKET_ADDRESS = "server.socket.address"
+    """
+    Physical server IP address or Unix socket address.
+    Example: 10.5.3.2
+    """
+
+    SERVER_SOCKET_PORT = "server.socket.port"
+    """
+    Physical server port.
+    Recommended: if different from server.port.
+    Example: 16456
+    """
+
+
+class OP:
+    CACHE_GET_ITEM = "cache.get_item"
+    DB = "db"
+    DB_REDIS = "db.redis"
+    EVENT_DJANGO = "event.django"
+    FUNCTION = "function"
+    FUNCTION_AWS = "function.aws"
+    FUNCTION_GCP = "function.gcp"
+    GRPC_CLIENT = "grpc.client"
+    GRPC_SERVER = "grpc.server"
+    HTTP_CLIENT = "http.client"
+    HTTP_CLIENT_STREAM = "http.client.stream"
+    HTTP_SERVER = "http.server"
+    MIDDLEWARE_DJANGO = "middleware.django"
+    MIDDLEWARE_STARLETTE = "middleware.starlette"
+    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
+    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
+    MIDDLEWARE_STARLITE = "middleware.starlite"
+    MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
+    MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
+    QUEUE_SUBMIT_ARQ = "queue.submit.arq"
+    QUEUE_TASK_ARQ = "queue.task.arq"
+    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
+    QUEUE_TASK_CELERY = "queue.task.celery"
+    QUEUE_TASK_RQ = "queue.task.rq"
+    QUEUE_SUBMIT_HUEY = "queue.submit.huey"
+    QUEUE_TASK_HUEY = "queue.task.huey"
+    SUBPROCESS = "subprocess"
+    SUBPROCESS_WAIT = "subprocess.wait"
+    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
+    TEMPLATE_RENDER = "template.render"
+    VIEW_RENDER = "view.render"
+    VIEW_RESPONSE_RENDER = "view.response.render"
+    WEBSOCKET_SERVER = "websocket.server"
+    SOCKET_CONNECTION = "socket.connection"
+    SOCKET_DNS = "socket.dns"
 
 
 # This type exists to trick mypy and PyCharm into thinking `init` and `Client`
@@ -50,7 +194,6 @@ class ClientConstructor(object):
     def __init__(
         self,
         dsn=None,  # type: Optional[str]
-        with_locals=True,  # type: bool
         max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
         release=None,  # type: Optional[str]
         environment=None,  # type: Optional[str]
@@ -67,8 +210,8 @@ class ClientConstructor(object):
         send_default_pii=False,  # type: bool
         http_proxy=None,  # type: Optional[str]
         https_proxy=None,  # type: Optional[str]
-        ignore_errors=[],  # type: List[Union[type, str]]  # noqa: B006
-        request_bodies="medium",  # type: str
+        ignore_errors=[],  # type: Sequence[Union[type, str]]  # noqa: B006
+        max_request_body_size="medium",  # type: str
         before_send=None,  # type: Optional[EventProcessor]
         before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
         debug=False,  # type: bool
@@ -77,10 +220,26 @@ class ClientConstructor(object):
         propagate_traces=True,  # type: bool
         traces_sample_rate=None,  # type: Optional[float]
         traces_sampler=None,  # type: Optional[TracesSampler]
+        profiles_sample_rate=None,  # type: Optional[float]
+        profiles_sampler=None,  # type: Optional[TracesSampler]
+        profiler_mode=None,  # type: Optional[ProfilerMode]
         auto_enabling_integrations=True,  # type: bool
         auto_session_tracking=True,  # type: bool
         send_client_reports=True,  # type: bool
         _experiments={},  # type: Experiments  # noqa: B006
+        proxy_headers=None,  # type: Optional[Dict[str, str]]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: Optional[str]
+        before_send_transaction=None,  # type: Optional[TransactionProcessor]
+        project_root=None,  # type: Optional[str]
+        enable_tracing=None,  # type: Optional[bool]
+        include_local_variables=True,  # type: Optional[bool]
+        include_source_context=True,  # type: Optional[bool]
+        trace_propagation_targets=[  # noqa: B006
+            MATCH_ALL
+        ],  # type: Optional[Sequence[str]]
+        functions_to_trace=[],  # type: Sequence[Dict[str, str]]  # noqa: B006
+        event_scrubber=None,  # type: Optional[sentry_sdk.scrubber.EventScrubber]
+        max_value_length=DEFAULT_MAX_VALUE_LENGTH,  # type: int
     ):
         # type: (...) -> None
         pass
@@ -104,9 +263,4 @@ DEFAULT_OPTIONS = _get_default_options()
 del _get_default_options
 
 
-VERSION = "1.9.10"
-SDK_INFO = {
-    "name": "sentry.python",
-    "version": VERSION,
-    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
-}
+VERSION = "1.29.2"
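
The constructor changes above rename `with_locals` to `include_local_variables` and `request_bodies` to `max_request_body_size`, and introduce `event_scrubber`, `max_value_length`, and `trace_propagation_targets` (defaulting to `MATCH_ALL`). A hedged sketch of the new option surface (the DSN and the target pattern are placeholders):

```python
import sentry_sdk
from sentry_sdk.scrubber import EventScrubber

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    include_local_variables=True,    # replaces the removed with_locals option
    max_request_body_size="medium",  # renamed from request_bodies
    max_value_length=1024,           # DEFAULT_MAX_VALUE_LENGTH
    event_scrubber=EventScrubber(),  # applied when send_default_pii is False
    trace_propagation_targets=[r"https://api\.example\.com"],  # default is MATCH_ALL
)
```
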
diff --git a/sentry_sdk/crons/__init__.py b/sentry_sdk/crons/__init__.py
new file mode 100644
index 0000000..5d1fe35
--- /dev/null
+++ b/sentry_sdk/crons/__init__.py
@@ -0,0 +1,3 @@
+from sentry_sdk.crons.api import capture_checkin  # noqa
+from sentry_sdk.crons.consts import MonitorStatus  # noqa
+from sentry_sdk.crons.decorator import monitor  # noqa
diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py
new file mode 100644
index 0000000..cd240a7
--- /dev/null
+++ b/sentry_sdk/crons/api.py
@@ -0,0 +1,57 @@
+import uuid
+
+from sentry_sdk import Hub
+from sentry_sdk._types import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional
+
+
+def _create_check_in_event(
+    monitor_slug=None,
+    check_in_id=None,
+    status=None,
+    duration_s=None,
+    monitor_config=None,
+):
+    # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> Dict[str, Any]
+    options = Hub.current.client.options if Hub.current.client else {}
+    check_in_id = check_in_id or uuid.uuid4().hex  # type: str
+
+    check_in = {
+        "type": "check_in",
+        "monitor_slug": monitor_slug,
+        "check_in_id": check_in_id,
+        "status": status,
+        "duration": duration_s,
+        "environment": options.get("environment", None),
+        "release": options.get("release", None),
+    }
+
+    if monitor_config:
+        check_in["monitor_config"] = monitor_config
+
+    return check_in
+
+
+def capture_checkin(
+    monitor_slug=None,
+    check_in_id=None,
+    status=None,
+    duration=None,
+    monitor_config=None,
+):
+    # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> str
+    check_in_event = _create_check_in_event(
+        monitor_slug=monitor_slug,
+        check_in_id=check_in_id,
+        status=status,
+        duration_s=duration,
+        monitor_config=monitor_config,
+    )
+
+    hub = Hub.current
+    hub.capture_event(check_in_event)
+
+    return check_in_event["check_in_id"]
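
`capture_checkin` can also be called directly (after `sentry_sdk.init()`) rather than through the decorator added below; it fills `environment` and `release` from the client options and returns the check-in ID so a closing check-in can reference it. A sketch with a hypothetical monitor slug:

```python
from sentry_sdk.crons import capture_checkin, MonitorStatus

check_in_id = capture_checkin(
    monitor_slug="nightly-cleanup",  # hypothetical monitor slug
    status=MonitorStatus.IN_PROGRESS,
)
# ... run the job ...
capture_checkin(
    monitor_slug="nightly-cleanup",
    check_in_id=check_in_id,
    status=MonitorStatus.OK,
    duration=42.0,  # seconds; placeholder value
)
```
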
diff --git a/sentry_sdk/crons/consts.py b/sentry_sdk/crons/consts.py
new file mode 100644
index 0000000..be686b4
--- /dev/null
+++ b/sentry_sdk/crons/consts.py
@@ -0,0 +1,4 @@
+class MonitorStatus:
+    IN_PROGRESS = "in_progress"
+    OK = "ok"
+    ERROR = "error"
diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py
new file mode 100644
index 0000000..41ff6d2
--- /dev/null
+++ b/sentry_sdk/crons/decorator.py
@@ -0,0 +1,74 @@
+from functools import wraps
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.crons import capture_checkin
+from sentry_sdk.crons.consts import MonitorStatus
+from sentry_sdk.utils import now
+
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Optional
+
+
+def monitor(monitor_slug=None):
+    # type: (Optional[str]) -> Callable[..., Any]
+    """
+    Decorator to capture checkin events for a monitor.
+
+    Usage:
+    ```
+    import sentry_sdk
+
+    app = Celery()
+
+    @app.task
+    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
+    def test(arg):
+        print(arg)
+    ```
+
+    This does not have to be used with Celery, but if you do use it with Celery,
+    put the `@sentry_sdk.monitor` decorator below Celery's `@app.task` decorator.
+    """
+
+    def decorate(func):
+        # type: (Callable[..., Any]) -> Callable[..., Any]
+        if not monitor_slug:
+            return func
+
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            start_timestamp = now()
+            check_in_id = capture_checkin(
+                monitor_slug=monitor_slug, status=MonitorStatus.IN_PROGRESS
+            )
+
+            try:
+                result = func(*args, **kwargs)
+            except Exception:
+                duration_s = now() - start_timestamp
+                capture_checkin(
+                    monitor_slug=monitor_slug,
+                    check_in_id=check_in_id,
+                    status=MonitorStatus.ERROR,
+                    duration=duration_s,
+                )
+                exc_info = sys.exc_info()
+                reraise(*exc_info)
+
+            duration_s = now() - start_timestamp
+            capture_checkin(
+                monitor_slug=monitor_slug,
+                check_in_id=check_in_id,
+                status=MonitorStatus.OK,
+                duration=duration_s,
+            )
+
+            return result
+
+        return wrapper
+
+    return decorate
diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py
index 24eb87b..fed5ed4 100644
--- a/sentry_sdk/envelope.py
+++ b/sentry_sdk/envelope.py
@@ -3,11 +3,11 @@ import json
 import mimetypes
 
 from sentry_sdk._compat import text_type, PY2
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.session import Session
 from sentry_sdk.utils import json_dumps, capture_internal_exceptions
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Union
@@ -68,6 +68,12 @@ class Envelope(object):
         # type: (...) -> None
         self.add_item(Item(payload=PayloadRef(json=profile), type="profile"))
 
+    def add_checkin(
+        self, checkin  # type: Any
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in"))
+
     def add_session(
         self, session  # type: Union[Session, Any]
     ):
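
The new `add_checkin` method wraps a check-in payload in a `check_in` envelope item, which the client routes to the /envelope endpoint as shown earlier. A rough sketch of the payload shape, inferred from `_create_check_in_event` above (the slug is hypothetical):

```python
import uuid
from sentry_sdk.envelope import Envelope

envelope = Envelope(headers={"event_id": uuid.uuid4().hex})
envelope.add_checkin(
    {
        "type": "check_in",
        "monitor_slug": "nightly-cleanup",  # hypothetical monitor slug
        "check_in_id": uuid.uuid4().hex,
        "status": "ok",
        "duration": 42.0,
    }
)
```
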
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index 3d4a28d..ac77fb4 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -5,10 +5,23 @@ from datetime import datetime
 from contextlib import contextmanager
 
 from sentry_sdk._compat import with_metaclass
+from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.scope import Scope
 from sentry_sdk.client import Client
-from sentry_sdk.tracing import Span, Transaction
+from sentry_sdk.profiler import Profile
+from sentry_sdk.tracing import (
+    NoOpSpan,
+    Span,
+    Transaction,
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
 from sentry_sdk.session import Session
+from sentry_sdk.tracing_utils import (
+    has_tracing_enabled,
+    normalize_incoming_data,
+)
+
 from sentry_sdk.utils import (
     exc_info_from_error,
     event_from_exception,
@@ -16,9 +29,9 @@ from sentry_sdk.utils import (
     ContextVar,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Union
     from typing import Any
     from typing import Optional
@@ -123,9 +136,9 @@ def _init(*args, **kwargs):
     return rv
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     # Make mypy, PyCharm and other static analyzers think `init` is a type to
     # have nicer autocompletion for params.
     #
@@ -221,7 +234,7 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
 
     # Mypy doesn't pick up on the metaclass.
 
-    if MYPY:
+    if TYPE_CHECKING:
         current = None  # type: Hub
         main = None  # type: Hub
 
@@ -320,15 +333,16 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         top = self._stack[-1]
         self._stack[-1] = (new, top[1])
 
-    def capture_event(
-        self,
-        event,  # type: Event
-        hint=None,  # type: Optional[Hint]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
-        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`."""
+    def capture_event(self, event, hint=None, scope=None, **scope_args):
+        # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures an event.
+
+        Alias of :py:meth:`sentry_sdk.Client.capture_event`.
+
+        :param scope_args: For supported `**scope_args` see
+            :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+        """
         client, top_scope = self._stack[-1]
         scope = _update_scope(top_scope, scope, scope_args)
         if client is not None:
@@ -339,16 +353,19 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
             return rv
         return None
 
-    def capture_message(
-        self,
-        message,  # type: str
-        level=None,  # type: Optional[str]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
-        """Captures a message.  The message is just a string.  If no level
-        is provided the default level is `info`.
+    def capture_message(self, message, level=None, scope=None, **scope_args):
+        # type: (str, Optional[str], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures a message.
+
+        :param message: The string to send as the message.
+
+        :param level: If no level is provided, the default level is `info`.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to use.
+
+        :param scope_args: For supported `**scope_args` see
+            :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
 
         :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
         """
@@ -360,17 +377,15 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
             {"message": message, "level": level}, scope=scope, **scope_args
         )
 
-    def capture_exception(
-        self,
-        error=None,  # type: Optional[Union[BaseException, ExcInfo]]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
+    def capture_exception(self, error=None, scope=None, **scope_args):
+        # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
         """Captures an exception.
 
         :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.
 
+        :param scope_args: For supported `**scope_args` see
+            :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+
         :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
         """
         client = self.client
@@ -401,13 +416,8 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         """
         logger.error("Internal error in sentry_sdk", exc_info=exc_info)
 
-    def add_breadcrumb(
-        self,
-        crumb=None,  # type: Optional[Breadcrumb]
-        hint=None,  # type: Optional[BreadcrumbHint]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+    def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
+        # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
         """
         Adds a breadcrumb.
 
@@ -447,23 +457,28 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         while len(scope._breadcrumbs) > max_breadcrumbs:
             scope._breadcrumbs.popleft()
 
-    def start_span(
-        self,
-        span=None,  # type: Optional[Span]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> Span
+    def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (Optional[Span], str, Any) -> Span
         """
-        Create and start timing a new span whose parent is the currently active
-        span or transaction, if any. The return value is a span instance,
+        Start a span whose parent is the currently active span or transaction, if any.
+
+        The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
         typically used as a context manager to start and stop timing in a `with`
         block.
 
         Only spans contained in a transaction are sent to Sentry. Most
         integrations start a transaction at the appropriate time, for example
-        for every incoming HTTP request. Use `start_transaction` to start a new
-        transaction when one is not already in progress.
+        for every incoming HTTP request. Use
+        :py:meth:`sentry_sdk.start_transaction` to start a new transaction when
+        one is not already in progress.
+
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
         """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         # TODO: consider removing this in a future release.
         # This is for backwards compatibility with releases before
         # start_transaction existed, to allow for a smoother transition.
@@ -489,14 +504,19 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         if span is not None:
             return span.start_child(**kwargs)
 
+        # If there is already a trace_id in the propagation context, use it.
+        if "trace_id" not in kwargs:
+            traceparent = self.get_traceparent()
+            trace_id = traceparent.split("-")[0] if traceparent else None
+            if trace_id is not None:
+                kwargs["trace_id"] = trace_id
+
         return Span(**kwargs)
 
     def start_transaction(
-        self,
-        transaction=None,  # type: Optional[Transaction]
-        **kwargs  # type: Any
+        self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs
     ):
-        # type: (...) -> Transaction
+        # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan]
         """
         Start and return a transaction.
 
@@ -518,7 +538,14 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
 
         When the transaction is finished, it will be sent to Sentry with all its
         finished child spans.
+
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
         """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         custom_sampling_context = kwargs.pop("custom_sampling_context", {})
 
         # if we haven't been given a transaction, make one
@@ -535,6 +562,9 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         sampling_context.update(custom_sampling_context)
         transaction._set_initial_sampling_decision(sampling_context=sampling_context)
 
+        profile = Profile(transaction, hub=self)
+        profile._set_initial_sampling_decision(sampling_context=sampling_context)
+
         # we don't bother to keep spans if we already know we're not going to
         # send the transaction
         if transaction.sampled:
@@ -545,6 +575,22 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
 
         return transaction
 
+    def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
+        # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
+        """
+        Sets the propagation context from the given environment or headers and returns a transaction.
+        """
+        with self.configure_scope() as scope:
+            scope.generate_propagation_context(environ_or_headers)
+
+        transaction = Transaction.continue_from_headers(
+            normalize_incoming_data(environ_or_headers),
+            op=op,
+            name=name,
+            source=source,
+        )
+        return transaction
+
     @overload
     def push_scope(
         self, callback=None  # type: Optional[None]
@@ -560,7 +606,9 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         pass
 
     def push_scope(  # noqa
-        self, callback=None  # type: Optional[Callable[[Scope], None]]
+        self,
+        callback=None,  # type: Optional[Callable[[Scope], None]]
+        continue_trace=True,  # type: bool
     ):
         # type: (...) -> Optional[ContextManager[Scope]]
         """
@@ -578,7 +626,13 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
             return None
 
         client, scope = self._stack[-1]
-        new_layer = (client, copy.copy(scope))
+
+        new_scope = copy.copy(scope)
+
+        if continue_trace:
+            new_scope.generate_propagation_context()
+
+        new_layer = (client, new_scope)
         self._stack.append(new_layer)
 
         return _ScopeManager(self)
@@ -609,7 +663,9 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         pass
 
     def configure_scope(  # noqa
-        self, callback=None  # type: Optional[Callable[[Scope], None]]
+        self,
+        callback=None,  # type: Optional[Callable[[Scope], None]]
+        continue_trace=True,  # type: bool
     ):
         # type: (...) -> Optional[ContextManager[Scope]]
 
@@ -622,6 +678,10 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         """
 
         client, scope = self._stack[-1]
+
+        if continue_trace:
+            scope.generate_propagation_context()
+
         if callback is not None:
             if client is not None:
                 callback(scope)
@@ -697,6 +757,36 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         if client is not None:
             return client.flush(timeout=timeout, callback=callback)
 
+    def get_traceparent(self):
+        # type: () -> Optional[str]
+        """
+        Returns the traceparent either from the active span or from the scope.
+        """
+        if self.client is not None:
+            if has_tracing_enabled(self.client.options) and self.scope.span is not None:
+                return self.scope.span.to_traceparent()
+
+        return self.scope.get_traceparent()
+
+    def get_baggage(self):
+        # type: () -> Optional[str]
+        """
+        Returns Baggage either from the active span or from the scope.
+        """
+        if (
+            self.client is not None
+            and has_tracing_enabled(self.client.options)
+            and self.scope.span is not None
+        ):
+            baggage = self.scope.span.to_baggage()
+        else:
+            baggage = self.scope.get_baggage()
+
+        if baggage is not None:
+            return baggage.serialize()
+
+        return None
+
     def iter_trace_propagation_headers(self, span=None):
         # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
         """
@@ -704,29 +794,46 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
         from the span representing the request, if available, or the current
         span on the scope if not.
         """
-        span = span or self.scope.span
-        if not span:
-            return
-
         client = self._stack[-1][0]
-
         propagate_traces = client and client.options["propagate_traces"]
         if not propagate_traces:
             return
 
-        for header in span.iter_headers():
-            yield header
+        span = span or self.scope.span
+
+        if client and has_tracing_enabled(client.options) and span is not None:
+            for header in span.iter_headers():
+                yield header
+        else:
+            for header in self.scope.iter_headers():
+                yield header
 
     def trace_propagation_meta(self, span=None):
         # type: (Optional[Span]) -> str
         """
-        Return meta tags which should be injected into the HTML template
-        to allow propagation of trace data.
+        Return meta tags which should be injected into HTML templates
+        to allow propagation of trace information.
         """
+        if span is not None:
+            logger.warning(
+                "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
+            )
+
         meta = ""
 
-        for name, content in self.iter_trace_propagation_headers(span):
-            meta += '<meta name="%s" content="%s">' % (name, content)
+        sentry_trace = self.get_traceparent()
+        if sentry_trace is not None:
+            meta += '<meta name="%s" content="%s">' % (
+                SENTRY_TRACE_HEADER_NAME,
+                sentry_trace,
+            )
+
+        baggage = self.get_baggage()
+        if baggage is not None:
+            meta += '<meta name="%s" content="%s">' % (
+                BAGGAGE_HEADER_NAME,
+                baggage,
+            )
 
         return meta
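
Taken together, `continue_trace` seeds the scope's propagation context from incoming headers, and `trace_propagation_meta` now renders `sentry-trace` and `baggage` meta tags even without an active span. A sketch for a hand-rolled request handler (the handler and its `headers` dict are hypothetical):

```python
import sentry_sdk
from sentry_sdk import Hub
from sentry_sdk.api import continue_trace

def handle_request(headers):
    # `headers` is a dict of incoming HTTP headers (hypothetical framework hook).
    transaction = continue_trace(headers, op="http.server", name="handle_request")
    with sentry_sdk.start_transaction(transaction):
        # Render <meta> tags so the browser can join the same trace.
        return "<head>%s</head>" % Hub.current.trace_propagation_meta()
```
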
 
diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
index 8d32741..9870471 100644
--- a/sentry_sdk/integrations/__init__.py
+++ b/sentry_sdk/integrations/__init__.py
@@ -6,9 +6,9 @@ from threading import Lock
 from sentry_sdk._compat import iteritems
 from sentry_sdk.utils import logger
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Dict
     from typing import Iterator
@@ -67,6 +67,7 @@ _AUTO_ENABLING_INTEGRATIONS = (
     "sentry_sdk.integrations.redis.RedisIntegration",
     "sentry_sdk.integrations.pyramid.PyramidIntegration",
     "sentry_sdk.integrations.boto3.Boto3Integration",
+    "sentry_sdk.integrations.httpx.HttpxIntegration",
 )
 
 
diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
index 1b7b222..585abe2 100644
--- a/sentry_sdk/integrations/_wsgi_common.py
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -1,12 +1,13 @@
 import json
+from copy import deepcopy
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import AnnotatedValue
 from sentry_sdk._compat import text_type, iteritems
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     import sentry_sdk
 
     from typing import Any
@@ -36,7 +37,7 @@ def request_body_within_bounds(client, content_length):
     if client is None:
         return False
 
-    bodies = client.options["request_bodies"]
+    bodies = client.options["max_request_body_size"]
     return not (
         bodies == "never"
         or (bodies == "small" and content_length > 10**3)
@@ -77,7 +78,7 @@ class RequestExtractor(object):
         if data is not None:
             request_info["data"] = data
 
-        event["request"] = request_info
+        event["request"] = deepcopy(request_info)
 
     def content_length(self):
         # type: () -> int
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index c9a637e..d2d431a 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -1,21 +1,33 @@
 import sys
 import weakref
 
+from sentry_sdk.api import continue_trace
 from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import (
     _filter_headers,
     request_body_within_bounds,
 )
-from sentry_sdk.tracing import SOURCE_FOR_STYLE, Transaction, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SOURCE_FOR_STYLE,
+    TRANSACTION_SOURCE_ROUTE,
+)
+from sentry_sdk.tracing_utils import should_propagate_trace
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    logger,
+    parse_url,
+    parse_version,
     transaction_from_function,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
     AnnotatedValue,
 )
 
@@ -23,15 +35,18 @@ try:
     import asyncio
 
     from aiohttp import __version__ as AIOHTTP_VERSION
+    from aiohttp import ClientSession, TraceConfig
     from aiohttp.web import Application, HTTPException, UrlDispatcher
 except ImportError:
     raise DidNotEnable("AIOHTTP not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from aiohttp.web_request import Request
     from aiohttp.abc import AbstractMatchInfo
+    from aiohttp import TraceRequestStartParams, TraceRequestEndParams
+    from types import SimpleNamespace
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -62,10 +77,10 @@ class AioHttpIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2]))
-        except (TypeError, ValueError):
-            raise DidNotEnable("AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION))
+        version = parse_version(AIOHTTP_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable AIOHTTP version: {}".format(AIOHTTP_VERSION))
 
         if version < (3, 4):
             raise DidNotEnable("AIOHTTP 3.4 or newer required.")
@@ -91,38 +106,40 @@ class AioHttpIntegration(Integration):
             weak_request = weakref.ref(request)
 
             with Hub(hub) as hub:
-                # Scope data will not leak between requests because aiohttp
-                # create a task to wrap each request.
-                with hub.configure_scope() as scope:
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
-
-                transaction = Transaction.continue_from_headers(
-                    request.headers,
-                    op="http.server",
-                    # If this transaction name makes it to the UI, AIOHTTP's
-                    # URL resolver did not find a route or died trying.
-                    name="generic AIOHTTP request",
-                    source=TRANSACTION_SOURCE_ROUTE,
-                )
-                with hub.start_transaction(
-                    transaction, custom_sampling_context={"aiohttp_request": request}
-                ):
-                    try:
-                        response = await old_handle(self, request)
-                    except HTTPException as e:
-                        transaction.set_http_status(e.status_code)
-                        raise
-                    except (asyncio.CancelledError, ConnectionResetError):
-                        transaction.set_status("cancelled")
-                        raise
-                    except Exception:
-                        # This will probably map to a 500 but seems like we
-                        # have no way to tell. Do not set span status.
-                        reraise(*_capture_exception(hub))
-
-                    transaction.set_http_status(response.status)
-                    return response
+                with auto_session_tracking(hub, session_mode="request"):
+                    # Scope data will not leak between requests because aiohttp
+                    # creates a task to wrap each request.
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_request_processor(weak_request))
+
+                    transaction = continue_trace(
+                        request.headers,
+                        op=OP.HTTP_SERVER,
+                        # If this transaction name makes it to the UI, AIOHTTP's
+                        # URL resolver did not find a route or died trying.
+                        name="generic AIOHTTP request",
+                        source=TRANSACTION_SOURCE_ROUTE,
+                    )
+                    with hub.start_transaction(
+                        transaction,
+                        custom_sampling_context={"aiohttp_request": request},
+                    ):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException as e:
+                            transaction.set_http_status(e.status_code)
+                            raise
+                        except (asyncio.CancelledError, ConnectionResetError):
+                            transaction.set_status("cancelled")
+                            raise
+                        except Exception:
+                            # This will probably map to a 500 but seems like we
+                            # have no way to tell. Do not set span status.
+                            reraise(*_capture_exception(hub))
+
+                        transaction.set_http_status(response.status)
+                        return response
 
         Application._handle = sentry_app_handle
 
@@ -158,6 +175,82 @@ class AioHttpIntegration(Integration):
 
         UrlDispatcher.resolve = sentry_urldispatcher_resolve
 
+        old_client_session_init = ClientSession.__init__
+
+        def init(*args, **kwargs):
+            # type: (Any, Any) -> ClientSession
+            hub = Hub.current
+            if hub.get_integration(AioHttpIntegration) is None:
+                return old_client_session_init(*args, **kwargs)
+
+            client_trace_configs = list(kwargs.get("trace_configs") or ())
+            trace_config = create_trace_config()
+            client_trace_configs.append(trace_config)
+
+            kwargs["trace_configs"] = client_trace_configs
+            return old_client_session_init(*args, **kwargs)
+
+        ClientSession.__init__ = init
+
+
+def create_trace_config():
+    # type: () -> TraceConfig
+    async def on_request_start(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
+        hub = Hub.current
+        if hub.get_integration(AioHttpIntegration) is None:
+            return
+
+        method = params.method.upper()
+
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(params.url), sanitize=False)
+
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+        )
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        span.set_data("url", parsed_url.url)
+        span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+        span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+        if should_propagate_trace(hub, str(params.url)):
+            for key, value in hub.iter_trace_propagation_headers(span):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                        key=key, value=value, url=params.url
+                    )
+                )
+                if key == BAGGAGE_HEADER_NAME and params.headers.get(
+                    BAGGAGE_HEADER_NAME
+                ):
+                    # do not overwrite any existing baggage, just append to it
+                    params.headers[key] += "," + value
+                else:
+                    params.headers[key] = value
+
+        trace_config_ctx.span = span
+
+    async def on_request_end(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
+        if trace_config_ctx.span is None:
+            return
+
+        span = trace_config_ctx.span
+        span.set_http_status(int(params.response.status))
+        span.set_data("reason", params.response.reason)
+        span.finish()
+
+    trace_config = TraceConfig()
+
+    trace_config.on_request_start.append(on_request_start)
+    trace_config.on_request_end.append(on_request_end)
+
+    return trace_config
+
 
 def _make_request_processor(weak_request):
     # type: (Callable[[], Request]) -> EventProcessor
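
With the `ClientSession` patch above, outgoing aiohttp requests get `http.client` spans and, where `should_propagate_trace` allows, tracing headers. A minimal sketch (the DSN and URL are placeholders):

```python
import asyncio
import aiohttp
import sentry_sdk
from sentry_sdk.integrations.aiohttp import AioHttpIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[AioHttpIntegration()],
)

async def main():
    # Each request made through ClientSession gets an http.client span;
    # sentry-trace/baggage headers are attached when propagation is allowed.
    async with aiohttp.ClientSession() as session:
        async with session.get("https://example.com") as response:
            print(response.status)

asyncio.run(main())
```
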
diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py
index f005521..fea0861 100644
--- a/sentry_sdk/integrations/argv.py
+++ b/sentry_sdk/integrations/argv.py
@@ -6,9 +6,9 @@ from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py
new file mode 100644
index 0000000..e19933a
--- /dev/null
+++ b/sentry_sdk/integrations/arq.py
@@ -0,0 +1,226 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+    parse_version,
+)
+
+try:
+    import arq.worker
+    from arq.version import VERSION as ARQ_VERSION
+    from arq.connections import ArqRedis
+    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
+except ImportError:
+    raise DidNotEnable("Arq is not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional, Union
+
+    from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint
+
+    from arq.cron import CronJob
+    from arq.jobs import Job
+    from arq.typing import WorkerCoroutine
+    from arq.worker import Function
+
+ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
+
+
+class ArqIntegration(Integration):
+    identifier = "arq"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        try:
+            if isinstance(ARQ_VERSION, str):
+                version = parse_version(ARQ_VERSION)
+            else:
+                version = ARQ_VERSION.version[:2]
+
+        except (TypeError, ValueError):
+            version = None
+
+        if version is None:
+            raise DidNotEnable("Unparsable arq version: {}".format(ARQ_VERSION))
+
+        if version < (0, 23):
+            raise DidNotEnable("arq 0.23 or newer required.")
+
+        patch_enqueue_job()
+        patch_run_job()
+        patch_create_worker()
+
+        ignore_logger("arq.worker")
+
+
+def patch_enqueue_job():
+    # type: () -> None
+    old_enqueue_job = ArqRedis.enqueue_job
+
+    async def _sentry_enqueue_job(self, function, *args, **kwargs):
+        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
+        hub = Hub.current
+
+        if hub.get_integration(ArqIntegration) is None:
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+        with hub.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function):
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+    ArqRedis.enqueue_job = _sentry_enqueue_job
+
+
+def patch_run_job():
+    # type: () -> None
+    old_run_job = Worker.run_job
+
+    async def _sentry_run_job(self, job_id, score):
+        # type: (Worker, str, int) -> None
+        hub = Hub(Hub.current)
+
+        if hub.get_integration(ArqIntegration) is None:
+            return await old_run_job(self, job_id, score)
+
+        with hub.push_scope() as scope:
+            scope._name = "arq"
+            scope.clear_breadcrumbs()
+
+            transaction = Transaction(
+                name="unknown arq task",
+                status="ok",
+                op=OP.QUEUE_TASK_ARQ,
+                source=TRANSACTION_SOURCE_TASK,
+            )
+
+            with hub.start_transaction(transaction):
+                return await old_run_job(self, job_id, score)
+
+    Worker.run_job = _sentry_run_job
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    hub = Hub.current
+
+    if hub.scope.transaction is not None:
+        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
+            hub.scope.transaction.set_status("aborted")
+            return
+
+        hub.scope.transaction.set_status("internal_error")
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": ArqIntegration.identifier, "handled": False},
+    )
+    hub.capture_event(event, hint=hint)
+
+
+def _make_event_processor(ctx, *args, **kwargs):
+    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        hub = Hub.current
+
+        with capture_internal_exceptions():
+            if hub.scope.transaction is not None:
+                hub.scope.transaction.name = ctx["job_name"]
+                event["transaction"] = ctx["job_name"]
+
+            tags = event.setdefault("tags", {})
+            tags["arq_task_id"] = ctx["job_id"]
+            tags["arq_task_retry"] = ctx["job_try"] > 1
+            extra = event.setdefault("extra", {})
+            extra["arq-job"] = {
+                "task": ctx["job_name"],
+                "args": args
+                if _should_send_default_pii()
+                else SENSITIVE_DATA_SUBSTITUTE,
+                "kwargs": kwargs
+                if _should_send_default_pii()
+                else SENSITIVE_DATA_SUBSTITUTE,
+                "retry": ctx["job_try"],
+            }
+
+        return event
+
+    return event_processor
+
+
+def _wrap_coroutine(name, coroutine):
+    # type: (str, WorkerCoroutine) -> WorkerCoroutine
+    async def _sentry_coroutine(ctx, *args, **kwargs):
+        # type: (Dict[Any, Any], *Any, **Any) -> Any
+        hub = Hub.current
+        if hub.get_integration(ArqIntegration) is None:
+            return await coroutine(*args, **kwargs)
+
+        hub.scope.add_event_processor(
+            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
+        )
+
+        try:
+            result = await coroutine(ctx, *args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_coroutine
+
+
+def patch_create_worker():
+    # type: () -> None
+    old_create_worker = arq.worker.create_worker
+
+    def _sentry_create_worker(*args, **kwargs):
+        # type: (*Any, **Any) -> Worker
+        hub = Hub.current
+
+        if hub.get_integration(ArqIntegration) is None:
+            return old_create_worker(*args, **kwargs)
+
+        settings_cls = args[0]
+
+        functions = settings_cls.functions
+        cron_jobs = settings_cls.cron_jobs
+
+        settings_cls.functions = [_get_arq_function(func) for func in functions]
+        settings_cls.cron_jobs = [_get_arq_cron_job(cron_job) for cron_job in cron_jobs]
+
+        return old_create_worker(*args, **kwargs)
+
+    arq.worker.create_worker = _sentry_create_worker
+
+
+def _get_arq_function(func):
+    # type: (Union[str, Function, WorkerCoroutine]) -> Function
+    arq_func = arq.worker.func(func)
+    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)
+
+    return arq_func
+
+
+def _get_arq_cron_job(cron_job):
+    # type: (CronJob) -> CronJob
+    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)
+
+    return cron_job
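
The integration patches `ArqRedis.enqueue_job`, `Worker.run_job`, and `arq.worker.create_worker`, so both enqueueing and execution are traced once it is enabled. A sketch using arq's usual `WorkerSettings` convention (the DSN and task body are placeholders):

```python
import sentry_sdk
from sentry_sdk.integrations.arq import ArqIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[ArqIntegration()],
)

async def download(ctx, url):
    # Executed inside a queue.task.arq transaction named after the function.
    ...

class WorkerSettings:
    # Wrapped by patch_create_worker() when the worker starts.
    functions = [download]
```
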
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 67e6eac..dc63be9 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -7,9 +7,12 @@ Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
 import asyncio
 import inspect
 import urllib
+from copy import deepcopy
 
 from sentry_sdk._functools import partial
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 from sentry_sdk.integrations.modules import _get_installed_modules
@@ -28,7 +31,7 @@ from sentry_sdk.utils import (
 )
 from sentry_sdk.tracing import Transaction
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Dict
     from typing import Any
     from typing import Optional
@@ -107,7 +110,7 @@ class SentryAsgiMiddleware:
             )
 
         asgi_middleware_while_using_starlette_or_fastapi = (
-            "starlette" in _get_installed_modules() and mechanism_type == "asgi"
+            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
         )
         if asgi_middleware_while_using_starlette_or_fastapi:
             logger.warning(
@@ -161,12 +164,12 @@ class SentryAsgiMiddleware:
                     ty = scope["type"]
 
                     if ty in ("http", "websocket"):
-                        transaction = Transaction.continue_from_headers(
+                        transaction = continue_trace(
                             self._get_headers(scope),
                             op="{}.server".format(ty),
                         )
                     else:
-                        transaction = Transaction(op="asgi.server")
+                        transaction = Transaction(op=OP.HTTP_SERVER)
 
                     transaction.name = _DEFAULT_TRANSACTION_NAME
                     transaction.source = TRANSACTION_SOURCE_ROUTE
@@ -210,7 +213,7 @@ class SentryAsgiMiddleware:
 
         self._set_transaction_name_and_source(event, self.transaction_style, asgi_scope)
 
-        event["request"] = request_info
+        event["request"] = deepcopy(request_info)
 
         return event
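
Usage of the middleware itself is unchanged by these edits; it now continues incoming traces via `continue_trace` and deep-copies the request info before attaching it to the event. A minimal sketch of wrapping a raw ASGI app (the DSN and app body are placeholders):

```python
import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
)

async def app(scope, receive, send):
    ...  # your ASGI application

app = SentryAsgiMiddleware(app)
```
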
 
diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
new file mode 100644
index 0000000..03e320a
--- /dev/null
+++ b/sentry_sdk/integrations/asyncio.py
@@ -0,0 +1,105 @@
+from __future__ import absolute_import
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import event_from_exception
+
+try:
+    import asyncio
+    from asyncio.tasks import Task
+except ImportError:
+    raise DidNotEnable("asyncio not available")
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from sentry_sdk._types import ExcInfo
+
+
+def get_name(coro):
+    # type: (Any) -> str
+    return (
+        getattr(coro, "__qualname__", None)
+        or getattr(coro, "__name__", None)
+        or "coroutine without __name__"
+    )
+
+
+def patch_asyncio():
+    # type: () -> None
+    orig_task_factory = None
+    try:
+        loop = asyncio.get_running_loop()
+        orig_task_factory = loop.get_task_factory()
+
+        def _sentry_task_factory(loop, coro):
+            # type: (Any, Any) -> Any
+
+            async def _coro_creating_hub_and_span():
+                # type: () -> Any
+                hub = Hub(Hub.current)
+                result = None
+
+                with hub:
+                    with hub.start_span(op=OP.FUNCTION, description=get_name(coro)):
+                        try:
+                            result = await coro
+                        except Exception:
+                            reraise(*_capture_exception(hub))
+
+                return result
+
+            # Try to use the user-set task factory (if there is one)
+            if orig_task_factory:
+                return orig_task_factory(loop, _coro_creating_hub_and_span())
+
+            # The default task factory in `asyncio` does not have its own function;
+            # it is just a couple of lines in `asyncio.base_events.create_task()`.
+            # Those lines are copied here.
+
+            # WARNING:
+            # If the default behavior of the task creation in asyncio changes,
+            # this will break!
+            task = Task(_coro_creating_hub_and_span(), loop=loop)
+            if task._source_traceback:  # type: ignore
+                del task._source_traceback[-1]  # type: ignore
+
+            return task
+
+        loop.set_task_factory(_sentry_task_factory)
+    except RuntimeError:
+        # When there is no running loop, we have nothing to patch.
+        pass
+
+
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+
+    integration = hub.get_integration(AsyncioIntegration)
+    if integration is not None:
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "asyncio", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class AsyncioIntegration(Integration):
+    identifier = "asyncio"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_asyncio()
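A minimal sketch of exercising the new integration; the DSN and the failing task are placeholders. Note that `sentry_sdk.init()` has to run inside the event loop, because `patch_asyncio()` can only replace the task factory of an already-running loop:

import asyncio

import sentry_sdk
from sentry_sdk.integrations.asyncio import AsyncioIntegration


async def my_task():
    # Any exception raised here is captured with mechanism "asyncio".
    1 / 0


async def main():
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[AsyncioIntegration()],
    )
    task = asyncio.create_task(my_task())  # routed through _sentry_task_factory
    try:
        await task
    except ZeroDivisionError:
        pass


asyncio.run(main())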
diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py
index 18fe657..af70dd9 100644
--- a/sentry_sdk/integrations/atexit.py
+++ b/sentry_sdk/integrations/atexit.py
@@ -8,10 +8,9 @@ from sentry_sdk.hub import Hub
 from sentry_sdk.utils import logger
 from sentry_sdk.integrations import Integration
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk._types import TYPE_CHECKING
 
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
@@ -27,7 +26,7 @@ def default_callback(pending, timeout):
         # type: (str) -> None
         sys.stderr.write(msg + "\n")
 
-    echo("Sentry is attempting to send %i pending error messages" % pending)
+    echo("Sentry is attempting to send %i pending events" % pending)
     echo("Waiting up to %s seconds" % timeout)
     echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
     sys.stderr.flush()
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 3652477..9436892 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -1,10 +1,12 @@
+import sys
+from copy import deepcopy
 from datetime import datetime, timedelta
 from os import environ
-import sys
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
-from sentry_sdk._compat import reraise
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk.utils import (
     AnnotatedValue,
     capture_internal_exceptions,
@@ -14,10 +16,10 @@ from sentry_sdk.utils import (
 )
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -138,9 +140,10 @@ def _wrap_handler(handler):
             # AWS Service may set an explicit `{headers: None}`, so we can't rely on `.get()`'s default.
             if headers is None:
                 headers = {}
-            transaction = Transaction.continue_from_headers(
+
+            transaction = continue_trace(
                 headers,
-                op="serverless.function",
+                op=OP.FUNCTION_AWS,
                 name=aws_context.function_name,
                 source=TRANSACTION_SOURCE_COMPONENT,
             )
@@ -379,7 +382,7 @@ def _make_request_event_processor(aws_event, aws_context, configured_timeout):
                 # event. Meaning every body is unstructured to us.
                 request["data"] = AnnotatedValue.removed_because_raw_data()
 
-        sentry_event["request"] = request
+        sentry_event["request"] = deepcopy(request)
 
         return sentry_event
 
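The `Transaction.continue_from_headers` call is migrated to the new top-level `continue_trace` helper. A sketch of the equivalent manual flow; the header value and handler name are placeholders:

import sentry_sdk
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP

# Headers as received from the caller, e.g. the Lambda invocation event.
incoming_headers = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
}

# Returns a Transaction that continues the incoming trace instead of
# starting a fresh one.
transaction = continue_trace(
    incoming_headers,
    op=OP.FUNCTION_AWS,
    name="my-lambda-handler",
)
with sentry_sdk.start_transaction(transaction):
    pass  # handler work goes here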
diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py
index 30faa38..ea45087 100644
--- a/sentry_sdk/integrations/beam.py
+++ b/sentry_sdk/integrations/beam.py
@@ -9,9 +9,9 @@ from sentry_sdk._compat import reraise
 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Iterator
     from typing import TypeVar
diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py
index e65f5a7..a21772f 100644
--- a/sentry_sdk/integrations/boto3.py
+++ b/sentry_sdk/integrations/boto3.py
@@ -1,13 +1,15 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.tracing import Span
 
 from sentry_sdk._functools import partial
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import capture_internal_exceptions, parse_url, parse_version
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -28,14 +30,17 @@ class Boto3Integration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, BOTOCORE_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+
+        version = parse_version(BOTOCORE_VERSION)
+
+        if version is None:
             raise DidNotEnable(
                 "Unparsable botocore version: {}".format(BOTOCORE_VERSION)
             )
+
         if version < (1, 12):
             raise DidNotEnable("Botocore 1.12 or newer is required.")
+
         orig_init = BaseClient.__init__
 
         def sentry_patched_init(self, *args, **kwargs):
@@ -62,12 +67,19 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs):
     description = "aws.%s.%s" % (service_id, operation_name)
     span = hub.start_span(
         hub=hub,
-        op="aws.request",
+        op=OP.HTTP_CLIENT,
         description=description,
     )
+
+    with capture_internal_exceptions():
+        parsed_url = parse_url(request.url, sanitize=False)
+        span.set_data("aws.request.url", parsed_url.url)
+        span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+        span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
     span.set_tag("aws.service_id", service_id)
     span.set_tag("aws.operation_name", operation_name)
-    span.set_data("aws.request.url", request.url)
+    span.set_data(SPANDATA.HTTP_METHOD, request.method)
 
     # We do this so that subsequent http calls/retries are
     # attached to this span.
@@ -92,7 +104,7 @@ def _sentry_after_call(context, parsed, **kwargs):
         return
 
     streaming_span = span.start_child(
-        op="aws.request.stream",
+        op=OP.HTTP_CLIENT_STREAM,
         description=span.description,
     )
 
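The boto3 span now records the URL parts separately. A sketch of what `parse_url` yields, inferred from how its attributes are consumed above; the URL is a placeholder:

from sentry_sdk.utils import parse_url

parsed = parse_url(
    "https://s3.amazonaws.com/my-bucket/key?versionId=abc#meta",
    sanitize=False,  # keep query values verbatim, as the boto3 span does
)
print(parsed.url)       # base URL, without query string and fragment
print(parsed.query)     # "versionId=abc"
print(parsed.fragment)  # "meta"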
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py
index 271fc15..cc6360d 100644
--- a/sentry_sdk/integrations/bottle.py
+++ b/sentry_sdk/integrations/bottle.py
@@ -5,15 +5,16 @@ from sentry_sdk.tracing import SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
     transaction_from_function,
 )
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from sentry_sdk.integrations.wsgi import _ScopedResponse
     from typing import Any
     from typing import Dict
@@ -57,10 +58,10 @@ class BottleIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, BOTTLE_VERSION.replace("-dev", "").split(".")))
-        except (TypeError, ValueError):
-            raise DidNotEnable("Unparsable Bottle version: {}".format(version))
+        version = parse_version(BOTTLE_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable Bottle version: {}".format(BOTTLE_VERSION))
 
         if version < (0, 12):
             raise DidNotEnable("Bottle 0.12 or newer required.")
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
index 2a095ec..1a5a7c5 100644
--- a/sentry_sdk/integrations/celery.py
+++ b/sentry_sdk/integrations/celery.py
@@ -1,25 +1,34 @@
 from __future__ import absolute_import
 
 import sys
+import time
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk._compat import reraise
+from sentry_sdk._functools import wraps
+from sentry_sdk.crons import capture_checkin, MonitorStatus
 from sentry_sdk.hub import Hub
-from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    logger,
+    match_regex_list,
 )
-from sentry_sdk.tracing import Transaction
-from sentry_sdk._compat import reraise
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk._types import MYPY
-from sentry_sdk._functools import wraps
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
-    from typing import TypeVar
     from typing import Callable
+    from typing import Dict
+    from typing import List
     from typing import Optional
+    from typing import Tuple
+    from typing import TypeVar
+    from typing import Union
 
     from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
 
@@ -27,14 +36,22 @@ if MYPY:
 
 
 try:
-    from celery import VERSION as CELERY_VERSION
+    from celery import VERSION as CELERY_VERSION  # type: ignore
+    from celery import Task, Celery
+    from celery.app.trace import task_has_custom
+    from celery.beat import Scheduler  # type: ignore
     from celery.exceptions import (  # type: ignore
-        SoftTimeLimitExceeded,
-        Retry,
         Ignore,
         Reject,
+        Retry,
+        SoftTimeLimitExceeded,
+    )
+    from celery.schedules import crontab, schedule  # type: ignore
+    from celery.signals import (  # type: ignore
+        task_failure,
+        task_success,
+        task_retry,
     )
-    from celery.app.trace import task_has_custom
 except ImportError:
     raise DidNotEnable("Celery not installed")
 
@@ -45,9 +62,20 @@ CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
 class CeleryIntegration(Integration):
     identifier = "celery"
 
-    def __init__(self, propagate_traces=True):
-        # type: (bool) -> None
+    def __init__(
+        self,
+        propagate_traces=True,
+        monitor_beat_tasks=False,
+        exclude_beat_tasks=None,
+    ):
+        # type: (bool, bool, Optional[List[str]]) -> None
         self.propagate_traces = propagate_traces
+        self.monitor_beat_tasks = monitor_beat_tasks
+        self.exclude_beat_tasks = exclude_beat_tasks
+
+        if monitor_beat_tasks:
+            _patch_beat_apply_entry()
+            _setup_celery_beat_signals()
 
     @staticmethod
     def setup_once():
@@ -95,6 +123,16 @@ class CeleryIntegration(Integration):
         ignore_logger("celery.redirected")
 
 
+def _now_seconds_since_epoch():
+    # type: () -> float
+    # We cannot use `time.perf_counter()` when dealing with the duration
+    # of a Celery task, because the start of a Celery task and
+    # the end are recorded in different processes.
+    # Start happens in the Celery Beat process,
+    # the end in a Celery Worker process.
+    return time.time()
+
+
 def _wrap_apply_async(f):
     # type: (F) -> F
     @wraps(f)
@@ -103,15 +141,37 @@ def _wrap_apply_async(f):
         hub = Hub.current
         integration = hub.get_integration(CeleryIntegration)
         if integration is not None and integration.propagate_traces:
-            with hub.start_span(op="celery.submit", description=args[0].name) as span:
+            with hub.start_span(
+                op=OP.QUEUE_SUBMIT_CELERY, description=args[0].name
+            ) as span:
                 with capture_internal_exceptions():
                     headers = dict(hub.iter_trace_propagation_headers(span))
+                    if integration.monitor_beat_tasks:
+                        headers.update(
+                            {
+                                "sentry-monitor-start-timestamp-s": "%.9f"
+                                % _now_seconds_since_epoch(),
+                            }
+                        )
 
                     if headers:
                         # Note: kwargs can contain headers=None, so no setdefault!
                         # Unsure which backend though.
                         kwarg_headers = kwargs.get("headers") or {}
+
+                        existing_baggage = kwarg_headers.get(BAGGAGE_HEADER_NAME)
+                        sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
+
+                        combined_baggage = sentry_baggage or existing_baggage
+                        if sentry_baggage and existing_baggage:
+                            combined_baggage = "{},{}".format(
+                                existing_baggage,
+                                sentry_baggage,
+                            )
+
                         kwarg_headers.update(headers)
+                        if combined_baggage:
+                            kwarg_headers[BAGGAGE_HEADER_NAME] = combined_baggage
 
                         # https://github.com/celery/celery/issues/4875
                         #
@@ -119,6 +179,17 @@ def _wrap_apply_async(f):
                         # tracing tools (dd-trace-py) also employ this exact
                         # workaround and we don't want to break them.
                         kwarg_headers.setdefault("headers", {}).update(headers)
+                        if combined_baggage:
+                            kwarg_headers["headers"][
+                                BAGGAGE_HEADER_NAME
+                            ] = combined_baggage
+
+                        # Add the Sentry options potentially added in `sentry_apply_entry`
+                        # to the headers (done when auto-instrumenting Celery Beat tasks)
+                        for key, value in kwarg_headers.items():
+                            if key.startswith("sentry-"):
+                                kwarg_headers["headers"][key] = value
+
                         kwargs["headers"] = kwarg_headers
 
                 return f(*args, **kwargs)
@@ -154,9 +225,9 @@ def _wrap_tracer(task, f):
             # Celery task objects are not a thing to be trusted. Even
             # something such as attribute access can fail.
             with capture_internal_exceptions():
-                transaction = Transaction.continue_from_headers(
+                transaction = continue_trace(
                     args[3].get("headers") or {},
-                    op="celery.task",
+                    op=OP.QUEUE_TASK_CELERY,
                     name="unknown celery task",
                     source=TRANSACTION_SOURCE_TASK,
                 )
@@ -291,3 +362,205 @@ def _patch_worker_exit():
                     hub.flush()
 
     Worker.workloop = sentry_workloop
+
+
+def _get_headers(task):
+    # type: (Task) -> Dict[str, Any]
+    headers = task.request.get("headers") or {}
+
+    # flatten nested headers
+    if "headers" in headers:
+        headers.update(headers["headers"])
+        del headers["headers"]
+
+    headers.update(task.request.get("properties") or {})
+
+    return headers
+
+
+def _get_humanized_interval(seconds):
+    # type: (float) -> Tuple[int, str]
+    TIME_UNITS = (  # noqa: N806
+        ("day", 60 * 60 * 24.0),
+        ("hour", 60 * 60.0),
+        ("minute", 60.0),
+    )
+
+    seconds = float(seconds)
+    for unit, divider in TIME_UNITS:
+        if seconds >= divider:
+            interval = int(seconds / divider)
+            return (interval, unit)
+
+    return (int(seconds), "second")
+
+
+def _get_monitor_config(celery_schedule, app, monitor_name):
+    # type: (Any, Celery, str) -> Dict[str, Any]
+    monitor_config = {}  # type: Dict[str, Any]
+    schedule_type = None  # type: Optional[str]
+    schedule_value = None  # type: Optional[Union[str, int]]
+    schedule_unit = None  # type: Optional[str]
+
+    if isinstance(celery_schedule, crontab):
+        schedule_type = "crontab"
+        schedule_value = (
+            "{0._orig_minute} "
+            "{0._orig_hour} "
+            "{0._orig_day_of_month} "
+            "{0._orig_month_of_year} "
+            "{0._orig_day_of_week}".format(celery_schedule)
+        )
+    elif isinstance(celery_schedule, schedule):
+        schedule_type = "interval"
+        (schedule_value, schedule_unit) = _get_humanized_interval(
+            celery_schedule.seconds
+        )
+
+        if schedule_unit == "second":
+            logger.warning(
+                "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+                monitor_name,
+                schedule_value,
+            )
+            return {}
+
+    else:
+        logger.warning(
+            "Celery schedule type '%s' not supported by Sentry Crons.",
+            type(celery_schedule),
+        )
+        return {}
+
+    monitor_config["schedule"] = {}
+    monitor_config["schedule"]["type"] = schedule_type
+    monitor_config["schedule"]["value"] = schedule_value
+
+    if schedule_unit is not None:
+        monitor_config["schedule"]["unit"] = schedule_unit
+
+    monitor_config["timezone"] = app.conf.timezone or "UTC"
+
+    return monitor_config
+
+
+def _patch_beat_apply_entry():
+    # type: () -> None
+    original_apply_entry = Scheduler.apply_entry
+
+    def sentry_apply_entry(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        scheduler, schedule_entry = args
+        app = scheduler.app
+
+        celery_schedule = schedule_entry.schedule
+        monitor_name = schedule_entry.name
+
+        hub = Hub.current
+        integration = hub.get_integration(CeleryIntegration)
+        if integration is None:
+            return original_apply_entry(*args, **kwargs)
+
+        if match_regex_list(monitor_name, integration.exclude_beat_tasks):
+            return original_apply_entry(*args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            # When tasks are started from Celery Beat, make sure each task has its own trace.
+            scope.set_new_propagation_context()
+
+            monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+            is_supported_schedule = bool(monitor_config)
+            if is_supported_schedule:
+                headers = schedule_entry.options.pop("headers", {})
+                headers.update(
+                    {
+                        "sentry-monitor-slug": monitor_name,
+                        "sentry-monitor-config": monitor_config,
+                    }
+                )
+
+                check_in_id = capture_checkin(
+                    monitor_slug=monitor_name,
+                    monitor_config=monitor_config,
+                    status=MonitorStatus.IN_PROGRESS,
+                )
+                headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+                # Set the Sentry configuration in the options of the ScheduleEntry.
+                # Those will be picked up in `apply_async` and added to the headers.
+                schedule_entry.options["headers"] = headers
+
+            return original_apply_entry(*args, **kwargs)
+
+    Scheduler.apply_entry = sentry_apply_entry
+
+
+def _setup_celery_beat_signals():
+    # type: () -> None
+    task_success.connect(crons_task_success)
+    task_failure.connect(crons_task_failure)
+    task_retry.connect(crons_task_retry)
+
+
+def crons_task_success(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_success %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.OK,
+    )
+
+
+def crons_task_failure(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_failure %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.ERROR,
+    )
+
+
+def crons_task_retry(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_retry %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.ERROR,
+    )
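The Celery integration gains opt-in Crons support. A minimal sketch of enabling it; the DSN and task-name patterns are placeholders, and `exclude_beat_tasks` entries are regular expressions matched via `match_regex_list`:

import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    integrations=[
        CeleryIntegration(
            monitor_beat_tasks=True,  # check-ins for every Beat-scheduled task
            exclude_beat_tasks=["flush-.*", "tmp-cleanup"],
        )
    ],
)

Note that `_get_monitor_config` rejects intervals shorter than one minute: `_get_humanized_interval(3600.0)` yields `(1, "hour")`, while anything below 60 seconds falls through to `(n, "second")` and the monitor is skipped with a warning.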
diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py
index 80069b2..25d8b4a 100644
--- a/sentry_sdk/integrations/chalice.py
+++ b/sentry_sdk/integrations/chalice.py
@@ -8,15 +8,16 @@ from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
 )
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk._functools import wraps
 
 import chalice  # type: ignore
 from chalice import Chalice, ChaliceViewError
 from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import TypeVar
@@ -102,10 +103,12 @@ class ChaliceIntegration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, CHALICE_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+
+        version = parse_version(CHALICE_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))
+
         if version < (1, 20):
             old_get_view_function_response = Chalice._get_view_function_response
         else:
diff --git a/sentry_sdk/integrations/cloud_resource_context.py b/sentry_sdk/integrations/cloud_resource_context.py
new file mode 100644
index 0000000..695bf17
--- /dev/null
+++ b/sentry_sdk/integrations/cloud_resource_context.py
@@ -0,0 +1,258 @@
+import json
+import urllib3
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.api import set_context
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Dict
+
+
+CONTEXT_TYPE = "cloud_resource"
+
+AWS_METADATA_HOST = "169.254.169.254"
+AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST)
+AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(
+    AWS_METADATA_HOST
+)
+
+GCP_METADATA_HOST = "metadata.google.internal"
+GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(
+    GCP_METADATA_HOST
+)
+
+
+class CLOUD_PROVIDER:  # noqa: N801
+    """
+    Name of the cloud provider.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    ALIBABA = "alibaba_cloud"
+    AWS = "aws"
+    AZURE = "azure"
+    GCP = "gcp"
+    IBM = "ibm_cloud"
+    TENCENT = "tencent_cloud"
+
+
+class CLOUD_PLATFORM:  # noqa: N801
+    """
+    The cloud platform.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    AWS_EC2 = "aws_ec2"
+    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
+
+
+class CloudResourceContextIntegration(Integration):
+    """
+    Adds cloud resource context to the Sentry scope
+    """
+
+    identifier = "cloudresourcecontext"
+
+    cloud_provider = ""
+
+    aws_token = ""
+    http = urllib3.PoolManager()
+
+    gcp_metadata = None
+
+    def __init__(self, cloud_provider=""):
+        # type: (str) -> None
+        CloudResourceContextIntegration.cloud_provider = cloud_provider
+
+    @classmethod
+    def _is_aws(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "PUT",
+                AWS_TOKEN_URL,
+                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.aws_token = r.data.decode()
+            return True
+
+        except Exception:
+            return False
+
+    @classmethod
+    def _get_aws_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.AWS,
+            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+        }
+
+        try:
+            r = cls.http.request(
+                "GET",
+                AWS_METADATA_URL,
+                headers={"X-aws-ec2-metadata-token": cls.aws_token},
+            )
+
+            if r.status != 200:
+                return ctx
+
+            data = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = data["accountId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = data["availabilityZone"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.region"] = data["region"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = data["instanceId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.type"] = data["instanceType"]
+            except Exception:
+                pass
+
+        except Exception:
+            pass
+
+        return ctx
+
+    @classmethod
+    def _is_gcp(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "GET",
+                GCP_METADATA_URL,
+                headers={"Metadata-Flavor": "Google"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+            return True
+
+        except Exception:
+            return False
+
+    @classmethod
+    def _get_gcp_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.GCP,
+            "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+        }
+
+        try:
+            if cls.gcp_metadata is None:
+                r = cls.http.request(
+                    "GET",
+                    GCP_METADATA_URL,
+                    headers={"Metadata-Flavor": "Google"},
+                )
+
+                if r.status != 200:
+                    return ctx
+
+                cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][
+                    "zone"
+                ].split("/")[-1]
+            except Exception:
+                pass
+
+            try:
+                # only populated in Google Cloud Run
+                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[
+                    -1
+                ]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = cls.gcp_metadata["instance"]["id"]
+            except Exception:
+                pass
+
+        except Exception:
+            pass
+
+        return ctx
+
+    @classmethod
+    def _get_cloud_provider(cls):
+        # type: () -> str
+        if cls._is_aws():
+            return CLOUD_PROVIDER.AWS
+
+        if cls._is_gcp():
+            return CLOUD_PROVIDER.GCP
+
+        return ""
+
+    @classmethod
+    def _get_cloud_resource_context(cls):
+        # type: () -> Dict[str, str]
+        cloud_provider = (
+            cls.cloud_provider
+            if cls.cloud_provider != ""
+            else CloudResourceContextIntegration._get_cloud_provider()
+        )
+        if cloud_provider in context_getters.keys():
+            return context_getters[cloud_provider]()
+
+        return {}
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        cloud_provider = CloudResourceContextIntegration.cloud_provider
+        unsupported_cloud_provider = (
+            cloud_provider != "" and cloud_provider not in context_getters.keys()
+        )
+
+        if unsupported_cloud_provider:
+            logger.warning(
+                "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...",
+                CloudResourceContextIntegration.cloud_provider,
+                list(context_getters.keys()),
+            )
+
+        context = CloudResourceContextIntegration._get_cloud_resource_context()
+        if context != {}:
+            set_context(CONTEXT_TYPE, context)
+
+
+# Map of the currently supported cloud providers
+# to the functions that extract their context
+context_getters = {
+    CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context,
+    CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context,
+}
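A minimal sketch of enabling the new integration; the DSN is a placeholder. Passing `cloud_provider` skips the metadata-endpoint probes, while leaving it empty falls back to the `_is_aws()`/`_is_gcp()` autodetection:

import sentry_sdk
from sentry_sdk.integrations.cloud_resource_context import (
    CloudResourceContextIntegration,
)

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    integrations=[
        # "aws" or "gcp"; omit the argument to autodetect
        CloudResourceContextIntegration(cloud_provider="aws"),
    ],
)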
diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py
index b023df2..04208f6 100644
--- a/sentry_sdk/integrations/dedupe.py
+++ b/sentry_sdk/integrations/dedupe.py
@@ -3,9 +3,9 @@ from sentry_sdk.utils import ContextVar
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index 23b446f..0e67ad1 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -4,16 +4,21 @@ from __future__ import absolute_import
 import sys
 import threading
 import weakref
+from importlib import import_module
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import string_types
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.serializer import add_global_repr_processor
 from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
 from sentry_sdk.tracing_utils import record_sql_queries
 from sentry_sdk.utils import (
+    AnnotatedValue,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
     logger,
     capture_internal_exceptions,
     event_from_exception,
@@ -27,16 +32,22 @@ from sentry_sdk.integrations._wsgi_common import RequestExtractor
 
 try:
     from django import VERSION as DJANGO_VERSION
+    from django.conf import settings as django_settings
     from django.core import signals
+    from django.conf import settings
 
     try:
         from django.urls import resolve
     except ImportError:
         from django.core.urlresolvers import resolve
+
+    try:
+        from django.urls import Resolver404
+    except ImportError:
+        from django.core.urlresolvers import Resolver404
 except ImportError:
     raise DidNotEnable("Django not installed")
 
-
 from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
 from sentry_sdk.integrations.django.templates import (
     get_template_frame_from_exception,
@@ -46,8 +57,13 @@ from sentry_sdk.integrations.django.middleware import patch_django_middlewares
 from sentry_sdk.integrations.django.signals_handlers import patch_signals
 from sentry_sdk.integrations.django.views import patch_views
 
+if DJANGO_VERSION[:2] > (1, 8):
+    from sentry_sdk.integrations.django.caching import patch_caching
+else:
+    patch_caching = None  # type: ignore
+
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -60,6 +76,7 @@ if MYPY:
     from django.http.request import QueryDict
     from django.utils.datastructures import MultiValueDict
 
+    from sentry_sdk.tracing import Span
     from sentry_sdk.scope import Scope
     from sentry_sdk.integrations.wsgi import _ScopedResponse
     from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
@@ -86,9 +103,17 @@ class DjangoIntegration(Integration):
 
     transaction_style = ""
     middleware_spans = None
-
-    def __init__(self, transaction_style="url", middleware_spans=True):
-        # type: (str, bool) -> None
+    signals_spans = None
+    cache_spans = None
+
+    def __init__(
+        self,
+        transaction_style="url",
+        middleware_spans=True,
+        signals_spans=True,
+        cache_spans=False,
+    ):
+        # type: (str, bool, bool, bool) -> None
         if transaction_style not in TRANSACTION_STYLE_VALUES:
             raise ValueError(
                 "Invalid value for transaction_style: %s (must be in %s)"
@@ -96,6 +121,8 @@ class DjangoIntegration(Integration):
             )
         self.transaction_style = transaction_style
         self.middleware_spans = middleware_spans
+        self.signals_spans = signals_spans
+        self.cache_spans = cache_spans
 
     @staticmethod
     def setup_once():
@@ -215,6 +242,9 @@ class DjangoIntegration(Integration):
         patch_templates()
         patch_signals()
 
+        if patch_caching is not None:
+            patch_caching()
+
 
 _DRF_PATCHED = False
 _DRF_PATCH_LOCK = threading.Lock()
@@ -348,6 +378,18 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
             transaction_name,
             source=source,
         )
+    except Resolver404:
+        urlconf = import_module(settings.ROOT_URLCONF)
+        # This exception only gets thrown when transaction_style is `function_name`,
+        # so we don't check here which style is configured
+        if hasattr(urlconf, "handler404"):
+            handler = urlconf.handler404
+            if isinstance(handler, string_types):
+                scope.transaction = handler
+            else:
+                scope.transaction = transaction_from_function(
+                    getattr(handler, "view_class", handler)
+                )
     except Exception:
         pass
 
@@ -453,7 +495,6 @@ def _got_request_exception(request=None, **kwargs):
     hub = Hub.current
     integration = hub.get_integration(DjangoIntegration)
     if integration is not None:
-
         if request is not None and integration.transaction_style == "url":
             with hub.configure_scope() as scope:
                 _attempt_resolve_again(request, scope, integration.transaction_style)
@@ -475,8 +516,20 @@ class DjangoRequestExtractor(RequestExtractor):
         return self.request.META
 
     def cookies(self):
-        # type: () -> Dict[str, str]
-        return self.request.COOKIES
+        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+        privacy_cookies = [
+            django_settings.CSRF_COOKIE_NAME,
+            django_settings.SESSION_COOKIE_NAME,
+        ]
+
+        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        for key, val in self.request.COOKIES.items():
+            if key in privacy_cookies:
+                clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
+            else:
+                clean_cookies[key] = val
+
+        return clean_cookies
 
     def raw_data(self):
         # type: () -> bytes
@@ -558,7 +611,8 @@ def install_sql_hook():
 
         with record_sql_queries(
             hub, self.cursor, sql, params, paramstyle="format", executemany=False
-        ):
+        ) as span:
+            _set_db_data(span, self.db.vendor, self.db.get_connection_params())
             return real_execute(self, sql, params)
 
     def executemany(self, sql, param_list):
@@ -569,7 +623,8 @@ def install_sql_hook():
 
         with record_sql_queries(
             hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
-        ):
+        ) as span:
+            _set_db_data(span, self.db.vendor, self.db.get_connection_params())
             return real_executemany(self, sql, param_list)
 
     def connect(self):
@@ -581,10 +636,32 @@ def install_sql_hook():
         with capture_internal_exceptions():
             hub.add_breadcrumb(message="connect", category="query")
 
-        with hub.start_span(op="db", description="connect"):
+        with hub.start_span(op=OP.DB, description="connect") as span:
+            _set_db_data(span, self.vendor, self.get_connection_params())
             return real_connect(self)
 
     CursorWrapper.execute = execute
     CursorWrapper.executemany = executemany
     BaseDatabaseWrapper.connect = connect
     ignore_logger("django.db.backends")
+
+
+def _set_db_data(span, vendor, connection_params):
+    # type: (Span, str, Dict[str, str]) -> None
+    span.set_data(SPANDATA.DB_SYSTEM, vendor)
+
+    db_name = connection_params.get("dbname") or connection_params.get("database")
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = connection_params.get("host")
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = connection_params.get("port")
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, server_port)
+
+    server_socket_address = connection_params.get("unix_socket")
+    if server_socket_address is not None:
+        span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address)
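For reference, the two new toggles next to the existing options; a sketch with a placeholder DSN. `cache_spans` stays off by default and is only wired up on Django newer than 1.8:

import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    integrations=[
        DjangoIntegration(
            transaction_style="url",
            middleware_spans=True,
            signals_spans=True,  # new: spans for signal receivers (default on)
            cache_spans=False,   # new: cache get/get_many spans (default off)
        )
    ],
)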
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index 79916e9..41ebe18 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -9,11 +9,12 @@ Since this file contains `async def` it is conditionally imported in
 import asyncio
 
 from sentry_sdk import Hub, _functools
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Union
     from typing import Callable
@@ -60,7 +61,6 @@ def patch_channels_asgi_handler_impl(cls):
     from sentry_sdk.integrations.django import DjangoIntegration
 
     if channels.__version__ < "3.0.0":
-
         old_app = cls.__call__
 
         async def sentry_patched_asgi_handler(self, receive, send):
@@ -88,10 +88,14 @@ def wrap_async_view(hub, callback):
     async def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
 
-        with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
-        ):
-            return await callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.update_active_thread_id()
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return await callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
 
@@ -104,7 +108,7 @@ def _asgi_middleware_mixin_factory(_check_middleware_span):
     """
 
     class SentryASGIMixin:
-        if MYPY:
+        if TYPE_CHECKING:
             _inner = None
 
         def __init__(self, get_response):
diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py
new file mode 100644
index 0000000..921f8e4
--- /dev/null
+++ b/sentry_sdk/integrations/django/caching.py
@@ -0,0 +1,117 @@
+import functools
+from typing import TYPE_CHECKING
+
+from django import VERSION as DJANGO_VERSION
+from django.core.cache import CacheHandler
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk._compat import text_type
+
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+
+
+METHODS_TO_INSTRUMENT = [
+    "get",
+    "get_many",
+]
+
+
+def _get_span_description(method_name, args, kwargs):
+    # type: (str, Any, Any) -> str
+    description = "{} ".format(method_name)
+
+    if args is not None and len(args) >= 1:
+        description += text_type(args[0])
+    elif kwargs is not None and "key" in kwargs:
+        description += text_type(kwargs["key"])
+
+    return description
+
+
+def _patch_cache_method(cache, method_name):
+    # type: (CacheHandler, str) -> None
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    def _instrument_call(cache, method_name, original_method, args, kwargs):
+        # type: (CacheHandler, str, Callable[..., Any], Any, Any) -> Any
+        hub = Hub.current
+        integration = hub.get_integration(DjangoIntegration)
+        if integration is None or not integration.cache_spans:
+            return original_method(*args, **kwargs)
+
+        description = _get_span_description(method_name, args, kwargs)
+
+        with hub.start_span(op=OP.CACHE_GET_ITEM, description=description) as span:
+            value = original_method(*args, **kwargs)
+
+            if value:
+                span.set_data(SPANDATA.CACHE_HIT, True)
+
+                size = len(text_type(value))
+                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
+
+            else:
+                span.set_data(SPANDATA.CACHE_HIT, False)
+
+            return value
+
+    original_method = getattr(cache, method_name)
+
+    @functools.wraps(original_method)
+    def sentry_method(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        return _instrument_call(cache, method_name, original_method, args, kwargs)
+
+    setattr(cache, method_name, sentry_method)
+
+
+def _patch_cache(cache):
+    # type: (CacheHandler) -> None
+    if not hasattr(cache, "_sentry_patched"):
+        for method_name in METHODS_TO_INSTRUMENT:
+            _patch_cache_method(cache, method_name)
+        cache._sentry_patched = True
+
+
+def patch_caching():
+    # type: () -> None
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    if not hasattr(CacheHandler, "_sentry_patched"):
+        if DJANGO_VERSION < (3, 2):
+            original_get_item = CacheHandler.__getitem__
+
+            @functools.wraps(original_get_item)
+            def sentry_get_item(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_get_item(self, alias)
+
+                integration = Hub.current.get_integration(DjangoIntegration)
+                if integration and integration.cache_spans:
+                    _patch_cache(cache)
+
+                return cache
+
+            CacheHandler.__getitem__ = sentry_get_item
+            CacheHandler._sentry_patched = True
+
+        else:
+            original_create_connection = CacheHandler.create_connection
+
+            @functools.wraps(original_create_connection)
+            def sentry_create_connection(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_create_connection(self, alias)
+
+                integration = Hub.current.get_integration(DjangoIntegration)
+                if integration and integration.cache_spans:
+                    _patch_cache(cache)
+
+                return cache
+
+            CacheHandler.create_connection = sentry_create_connection
+            CacheHandler._sentry_patched = True
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
index c9001cd..aa8023d 100644
--- a/sentry_sdk/integrations/django/middleware.py
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -6,14 +6,15 @@ from django import VERSION as DJANGO_VERSION
 
 from sentry_sdk import Hub
 from sentry_sdk._functools import wraps
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import (
     ContextVar,
     transaction_from_function,
     capture_internal_exceptions,
 )
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Optional
@@ -88,7 +89,7 @@ def _wrap_middleware(middleware, middleware_name):
             description = "{}.{}".format(description, function_basename)
 
         middleware_span = hub.start_span(
-            op="django.middleware", description=description
+            op=OP.MIDDLEWARE_DJANGO, description=description
         )
         middleware_span.set_tag("django.function_name", function_name)
         middleware_span.set_tag("django.middleware_name", middleware_name)
@@ -125,7 +126,6 @@ def _wrap_middleware(middleware, middleware_name):
     class SentryWrappingMiddleware(
         _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
     ):
-
         async_capable = getattr(middleware, "async_capable", False)
 
         def __init__(self, get_response=None, *args, **kwargs):
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 4d81772..87b6b22 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -4,10 +4,12 @@ from __future__ import absolute_import
 from django.dispatch import Signal
 
 from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import List
@@ -18,13 +20,18 @@ def _get_receiver_name(receiver):
     name = ""
 
     if hasattr(receiver, "__qualname__"):
-        name += receiver.__qualname__
+        name = receiver.__qualname__
     elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
-        name += receiver.__name__
+        name = receiver.__name__
+    elif hasattr(
+        receiver, "func"
+    ):  # certain functions (like partials) don't have a name
+        if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"):
+            name = "partial(<function " + receiver.func.__name__ + ">)"
 
     if (
         name == ""
-    ):  # certain functions (like partials) dont have a name so return the string representation
+    ):  # In case nothing was found, return the string representation (this is the slowest case)
         return str(receiver)
 
     if hasattr(receiver, "__module__"):  # prepend with module, if there is one
@@ -36,6 +43,7 @@ def _get_receiver_name(receiver):
 def patch_signals():
     # type: () -> None
     """Patch django signal receivers to create a span"""
+    from sentry_sdk.integrations.django import DjangoIntegration
 
     old_live_receivers = Signal._live_receivers
 
@@ -46,11 +54,12 @@ def patch_signals():
 
         def sentry_receiver_wrapper(receiver):
             # type: (Callable[..., Any]) -> Callable[..., Any]
+            @wraps(receiver)
             def wrapper(*args, **kwargs):
                 # type: (Any, Any) -> Any
                 signal_name = _get_receiver_name(receiver)
                 with hub.start_span(
-                    op="django.signals",
+                    op=OP.EVENT_DJANGO,
                     description=signal_name,
                 ) as span:
                     span.set_data("signal", signal_name)
@@ -58,8 +67,10 @@ def patch_signals():
 
             return wrapper
 
-        for idx, receiver in enumerate(receivers):
-            receivers[idx] = sentry_receiver_wrapper(receiver)
+        integration = hub.get_integration(DjangoIntegration)
+        if integration and integration.signals_spans:
+            for idx, receiver in enumerate(receivers):
+                receivers[idx] = sentry_receiver_wrapper(receiver)
 
         return receivers
 
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
index 2ff9d1b..e6c83b5 100644
--- a/sentry_sdk/integrations/django/templates.py
+++ b/sentry_sdk/integrations/django/templates.py
@@ -1,10 +1,12 @@
 from django.template import TemplateSyntaxError
+from django.utils.safestring import mark_safe
 from django import VERSION as DJANGO_VERSION
 
 from sentry_sdk import _functools, Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -66,7 +68,7 @@ def patch_templates():
             return real_rendered_content.fget(self)
 
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(self.template_name),
         ) as span:
             span.set_data("context", self.context_data)
@@ -87,8 +89,13 @@ def patch_templates():
         if hub.get_integration(DjangoIntegration) is None:
             return real_render(request, template_name, context, *args, **kwargs)
 
+        # Inject trace meta tags into template context
+        context = context or {}
+        if "sentry_trace_meta" not in context:
+            context["sentry_trace_meta"] = mark_safe(hub.trace_propagation_meta())
+
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(template_name),
         ) as span:
             span.set_data("context", context)
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
index 8b6fc95..1532c6f 100644
--- a/sentry_sdk/integrations/django/transactions.py
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -7,9 +7,9 @@ from __future__ import absolute_import
 
 import re
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from django.urls.resolvers import URLResolver
     from typing import Dict
     from typing import List
@@ -37,7 +37,7 @@ def get_regex(resolver_or_pattern):
 
 class RavenResolver(object):
     _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
-    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
+    _named_group_matcher = re.compile(r"\(\?P<(\w+)>.*\)")
     _non_named_group_matcher = re.compile(r"\([^\)]+\)")
     # [foo|bar|baz]
     _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
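The loosened `_named_group_matcher` now also covers named groups whose inner pattern itself contains parentheses, which the old `[^\)]+` body could not match. A sketch of the effect, using a `{name}` substitution similar to what `RavenResolver` performs:

import re

_named_group_matcher = re.compile(r"\(\?P<(\w+)>.*\)")

pattern = r"^articles/(?P<slug>(?:[a-z]+-)+[a-z]+)/$"
simplified = _named_group_matcher.sub(lambda m: "{%s}" % m.group(1), pattern)
print(simplified)  # ^articles/{slug}/$

The `.*` is greedy, so two named groups on one line would collapse into a single placeholder; that is the tradeoff for accepting nested parentheses.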
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index 51f1abc..c1034d0 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -1,8 +1,9 @@
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk import _functools
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -22,9 +23,19 @@ def patch_views():
     # type: () -> None
 
     from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
     from sentry_sdk.integrations.django import DjangoIntegration
 
     old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        hub = Hub.current
+        with hub.start_span(
+            op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
+        ):
+            return old_render(self)
 
     @_functools.wraps(old_make_view_atomic)
     def sentry_patched_make_view_atomic(self, *args, **kwargs):
@@ -38,7 +49,6 @@ def patch_views():
         integration = hub.get_integration(DjangoIntegration)
 
         if integration is not None and integration.middleware_spans:
-
             if (
                 iscoroutinefunction is not None
                 and wrap_async_view is not None
@@ -53,6 +63,7 @@ def patch_views():
 
         return sentry_wrapped_callback
 
+    SimpleTemplateResponse.render = sentry_patched_render
     BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
 
 
@@ -61,9 +72,15 @@ def _wrap_sync_view(hub, callback):
     @_functools.wraps(callback)
     def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
-        with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
-        ):
-            return callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            # set the active thread id to the handler thread for sync views
+            # this isn't necessary for async views since those run on the main thread
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.update_active_thread_id()
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py
index 1f16ff0..514e082 100644
--- a/sentry_sdk/integrations/excepthook.py
+++ b/sentry_sdk/integrations/excepthook.py
@@ -4,9 +4,9 @@ from sentry_sdk.hub import Hub
 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
 from sentry_sdk.integrations import Integration
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Any
     from typing import Type
diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py
index 4fbf729..e8636b6 100644
--- a/sentry_sdk/integrations/executing.py
+++ b/sentry_sdk/integrations/executing.py
@@ -1,12 +1,12 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import walk_exception_chain, iter_stacks
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
index b38e4bd..1bb7942 100644
--- a/sentry_sdk/integrations/falcon.py
+++ b/sentry_sdk/integrations/falcon.py
@@ -8,25 +8,41 @@ from sentry_sdk.tracing import SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
 
     from sentry_sdk._types import EventProcessor
 
+# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`
+# and `falcon.API` to `falcon.App`
+
 try:
     import falcon  # type: ignore
-    import falcon.api_helpers  # type: ignore
 
     from falcon import __version__ as FALCON_VERSION
 except ImportError:
     raise DidNotEnable("Falcon not installed")
 
+try:
+    import falcon.app_helpers  # type: ignore
+
+    falcon_helpers = falcon.app_helpers
+    falcon_app_class = falcon.App
+    FALCON3 = True
+except ImportError:
+    import falcon.api_helpers  # type: ignore
+
+    falcon_helpers = falcon.api_helpers
+    falcon_app_class = falcon.API
+    FALCON3 = False
+
 
 class FalconRequestExtractor(RequestExtractor):
     def env(self):
@@ -58,16 +74,27 @@ class FalconRequestExtractor(RequestExtractor):
         else:
             return None
 
-    def json(self):
-        # type: () -> Optional[Dict[str, Any]]
-        try:
-            return self.request.media
-        except falcon.errors.HTTPBadRequest:
-            # NOTE(jmagnusson): We return `falcon.Request._media` here because
-            # falcon 1.4 doesn't do proper type checking in
-            # `falcon.Request.media`. This has been fixed in 2.0.
-            # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
-            return self.request._media
+    if FALCON3:
+
+        def json(self):
+            # type: () -> Optional[Dict[str, Any]]
+            try:
+                return self.request.media
+            except falcon.errors.HTTPBadRequest:
+                return None
+
+    else:
+
+        def json(self):
+            # type: () -> Optional[Dict[str, Any]]
+            try:
+                return self.request.media
+            except falcon.errors.HTTPBadRequest:
+                # NOTE(jmagnusson): We return `falcon.Request._media` here because
+                # falcon 1.4 doesn't do proper type checking in
+                # `falcon.Request.media`. This has been fixed in 2.0.
+                # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
+                return self.request._media
 
 
 class SentryFalconMiddleware(object):
@@ -105,9 +132,10 @@ class FalconIntegration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, FALCON_VERSION.split(".")))
-        except (ValueError, TypeError):
+
+        version = parse_version(FALCON_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION))
 
         if version < (1, 4):
@@ -120,7 +148,7 @@ class FalconIntegration(Integration):
 
 def _patch_wsgi_app():
     # type: () -> None
-    original_wsgi_app = falcon.API.__call__
+    original_wsgi_app = falcon_app_class.__call__
 
     def sentry_patched_wsgi_app(self, env, start_response):
         # type: (falcon.API, Any, Any) -> Any
@@ -135,12 +163,12 @@ def _patch_wsgi_app():
 
         return sentry_wrapped(env, start_response)
 
-    falcon.API.__call__ = sentry_patched_wsgi_app
+    falcon_app_class.__call__ = sentry_patched_wsgi_app
 
 
 def _patch_handle_exception():
     # type: () -> None
-    original_handle_exception = falcon.API._handle_exception
+    original_handle_exception = falcon_app_class._handle_exception
 
     def sentry_patched_handle_exception(self, *args):
         # type: (falcon.API, *Any) -> Any
@@ -170,12 +198,12 @@ def _patch_handle_exception():
 
         return was_handled
 
-    falcon.API._handle_exception = sentry_patched_handle_exception
+    falcon_app_class._handle_exception = sentry_patched_handle_exception
 
 
 def _patch_prepare_middleware():
     # type: () -> None
-    original_prepare_middleware = falcon.api_helpers.prepare_middleware
+    original_prepare_middleware = falcon_helpers.prepare_middleware
 
     def sentry_patched_prepare_middleware(
         middleware=None, independent_middleware=False
@@ -187,7 +215,7 @@ def _patch_prepare_middleware():
             middleware = [SentryFalconMiddleware()] + (middleware or [])
         return original_prepare_middleware(middleware, independent_middleware)
 
-    falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
+    falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
 
 
 def _exception_leads_to_http_5xx(ex):
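
With the import shim above, the integration patches `falcon.App` on Falcon 3.x and `falcon.API` on older releases without further branching. Enabling it looks the same either way; a sketch with a placeholder DSN:

    import falcon
    import sentry_sdk
    from sentry_sdk.integrations.falcon import FalconIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[FalconIntegration()],
        traces_sample_rate=1.0,
    )

    # falcon.App on Falcon 3.x, falcon.API on 1.4+; the shim patches whichever
    # class the installed version provides.
    app = falcon.App() if hasattr(falcon, "App") else falcon.API()
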
diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
index 1c21196..17e0576 100644
--- a/sentry_sdk/integrations/fastapi.py
+++ b/sentry_sdk/integrations/fastapi.py
@@ -1,17 +1,23 @@
-from sentry_sdk._types import MYPY
+import asyncio
+from copy import deepcopy
+
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable
-from sentry_sdk.integrations.starlette import (
-    StarletteIntegration,
-    StarletteRequestExtractor,
-)
 from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.utils import transaction_from_function
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any, Callable, Dict
+    from sentry_sdk.scope import Scope
 
-    from sentry_sdk._types import Event
+try:
+    from sentry_sdk.integrations.starlette import (
+        StarletteIntegration,
+        StarletteRequestExtractor,
+    )
+except DidNotEnable:
+    raise DidNotEnable("Starlette is not installed")
 
 try:
     import fastapi  # type: ignore
@@ -31,8 +37,8 @@ class FastApiIntegration(StarletteIntegration):
         patch_get_request_handler()
 
 
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, Any) -> None
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Any) -> None
     name = ""
 
     if transaction_style == "endpoint":
@@ -48,12 +54,12 @@ def _set_transaction_name_and_source(event, transaction_style, request):
                 name = path
 
     if not name:
-        event["transaction"] = _DEFAULT_TRANSACTION_NAME
-        event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE}
-        return
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
 
-    event["transaction"] = name
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+    scope.set_transaction_name(name, source=source)
 
 
 def patch_get_request_handler():
@@ -62,6 +68,24 @@ def patch_get_request_handler():
 
     def _sentry_get_request_handler(*args, **kwargs):
         # type: (*Any, **Any) -> Any
+        dependant = kwargs.get("dependant")
+        if (
+            dependant
+            and dependant.call is not None
+            and not asyncio.iscoroutinefunction(dependant.call)
+        ):
+            old_call = dependant.call
+
+            def _sentry_call(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.update_active_thread_id()
+                    return old_call(*args, **kwargs)
+
+            dependant.call = _sentry_call
+
         old_app = old_get_request_handler(*args, **kwargs)
 
         async def _sentry_app(*args, **kwargs):
@@ -73,6 +97,11 @@ def patch_get_request_handler():
 
             with hub.configure_scope() as sentry_scope:
                 request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_scope, integration.transaction_style, request
+                )
+
                 extractor = StarletteRequestExtractor(request)
                 info = await extractor.extract_request_info()
 
@@ -88,11 +117,7 @@ def patch_get_request_handler():
                                 request_info["cookies"] = info["cookies"]
                             if "data" in info:
                                 request_info["data"] = info["data"]
-                        event["request"] = request_info
-
-                        _set_transaction_name_and_source(
-                            event, integration.transaction_style, req
-                        )
+                        event["request"] = deepcopy(request_info)
 
                         return event
 
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index 52cce0b..61f2e31 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -1,6 +1,6 @@
 from __future__ import absolute_import
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
@@ -10,9 +10,10 @@ from sentry_sdk.tracing import SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
 )
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any, Callable, Dict, Union
 
     from sentry_sdk._types import EventProcessor
@@ -26,7 +27,7 @@ except ImportError:
     flask_login = None
 
 try:
-    from flask import Flask, Markup, Request  # type: ignore
+    from flask import Flask, Request  # type: ignore
     from flask import __version__ as FLASK_VERSION
     from flask import request as flask_request
     from flask.signals import (
@@ -34,6 +35,7 @@ try:
         got_request_exception,
         request_started,
     )
+    from markupsafe import Markup
 except ImportError:
     raise DidNotEnable("Flask is not installed")
 
@@ -63,16 +65,13 @@ class FlaskIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        # This version parsing is absolutely naive but the alternative is to
-        # import pkg_resources which slows down the SDK a lot.
-        try:
-            version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
-            # It's probably a release candidate, we assume it's fine.
-            pass
-        else:
-            if version < (0, 10):
-                raise DidNotEnable("Flask 0.10 or newer is required.")
+        version = parse_version(FLASK_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable Flask version: {}".format(FLASK_VERSION))
+
+        if version < (0, 10):
+            raise DidNotEnable("Flask 0.10 or newer is required.")
 
         before_render_template.connect(_add_sentry_trace)
         request_started.connect(_request_started)
@@ -94,19 +93,13 @@ class FlaskIntegration(Integration):
 
 def _add_sentry_trace(sender, template, context, **extra):
     # type: (Flask, Any, Dict[str, Any], **Any) -> None
-
     if "sentry_trace" in context:
         return
 
-    sentry_span = Hub.current.scope.span
-    context["sentry_trace"] = (
-        Markup(
-            '<meta name="sentry-trace" content="%s" />'
-            % (sentry_span.to_traceparent(),)
-        )
-        if sentry_span
-        else ""
-    )
+    hub = Hub.current
+    trace_meta = Markup(hub.trace_propagation_meta())
+    context["sentry_trace"] = trace_meta  # for backwards compatibility
+    context["sentry_trace_meta"] = trace_meta
 
 
 def _set_transaction_name_and_source(scope, transaction_style, request):
@@ -170,7 +163,7 @@ class FlaskRequestExtractor(RequestExtractor):
 
     def json(self):
         # type: () -> Any
-        return self.request.get_json()
+        return self.request.get_json(silent=True)
 
     def size_of_file(self, file):
         # type: (FileStorage) -> int
@@ -258,6 +251,5 @@ def _add_user_to_event(event):
 
         try:
             user_info.setdefault("username", user.username)
-            user_info.setdefault("username", user.email)
         except Exception:
             pass
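
`_add_sentry_trace` now injects the complete trace-propagation meta tags, exposed to templates as `sentry_trace_meta` (with `sentry_trace` kept as an alias for backwards compatibility). A sketch of consuming it from a Flask view; the template is illustrative:

    from flask import Flask, render_template_string

    app = Flask(__name__)

    PAGE = """
    <head>
      <!-- expands to the sentry-trace and baggage <meta> tags -->
      {{ sentry_trace_meta }}
    </head>
    """

    @app.route("/")
    def index():
        return render_template_string(PAGE)
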
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
index 6025d38..33f86e2 100644
--- a/sentry_sdk/integrations/gcp.py
+++ b/sentry_sdk/integrations/gcp.py
@@ -1,9 +1,12 @@
+import sys
+from copy import deepcopy
 from datetime import datetime, timedelta
 from os import environ
-import sys
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk._compat import reraise
 from sentry_sdk.utils import (
     AnnotatedValue,
@@ -15,13 +18,13 @@ from sentry_sdk.utils import (
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
 # Constants
 TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
 MILLIS_TO_SECONDS = 1000.0
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -80,9 +83,10 @@ def _wrap_func(func):
             headers = {}
             if hasattr(gcp_event, "headers"):
                 headers = gcp_event.headers
-            transaction = Transaction.continue_from_headers(
+
+            transaction = continue_trace(
                 headers,
-                op="serverless.function",
+                op=OP.FUNCTION_GCP,
                 name=environ.get("FUNCTION_NAME", ""),
                 source=TRANSACTION_SOURCE_COMPONENT,
             )
@@ -192,7 +196,7 @@ def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
                 # event. Meaning every body is unstructured to us.
                 request["data"] = AnnotatedValue.removed_because_raw_data()
 
-        event["request"] = request
+        event["request"] = deepcopy(request)
 
         return event
 
diff --git a/sentry_sdk/integrations/gnu_backtrace.py b/sentry_sdk/integrations/gnu_backtrace.py
index e0ec110..ad9c437 100644
--- a/sentry_sdk/integrations/gnu_backtrace.py
+++ b/sentry_sdk/integrations/gnu_backtrace.py
@@ -5,9 +5,9 @@ from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import capture_internal_exceptions
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
 
diff --git a/sentry_sdk/integrations/grpc/__init__.py b/sentry_sdk/integrations/grpc/__init__.py
new file mode 100644
index 0000000..59bfd50
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/__init__.py
@@ -0,0 +1,2 @@
+from .server import ServerInterceptor  # noqa: F401
+from .client import ClientInterceptor  # noqa: F401
diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py
new file mode 100644
index 0000000..1eb3621
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/client.py
@@ -0,0 +1,82 @@
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+
+if MYPY:
+    from typing import Any, Callable, Iterator, Iterable, Union
+
+try:
+    import grpc
+    from grpc import ClientCallDetails, Call
+    from grpc._interceptor import _UnaryOutcome
+    from grpc.aio._interceptor import UnaryStreamCall
+    from google.protobuf.message import Message  # type: ignore
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ClientInterceptor(
+    grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor  # type: ignore
+):
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary unary call to %s" % method
+        ) as span:
+            span.set_data("type", "unary unary")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = continuation(client_call_details, request)
+            span.set_data("code", response.code().name)
+
+            return response
+
+    def intercept_unary_stream(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call]
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary stream call to %s" % method
+        ) as span:
+            span.set_data("type", "unary stream")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = continuation(
+                client_call_details, request
+            )  # type: UnaryStreamCall
+            span.set_data("code", response.code().name)
+
+            return response
+
+    @staticmethod
+    def _update_client_call_details_metadata_from_hub(client_call_details, hub):
+        # type: (ClientCallDetails, Hub) -> ClientCallDetails
+        metadata = (
+            list(client_call_details.metadata) if client_call_details.metadata else []
+        )
+        for key, value in hub.iter_trace_propagation_headers():
+            metadata.append((key, value))
+
+        client_call_details = grpc._interceptor._ClientCallDetails(
+            method=client_call_details.method,
+            timeout=client_call_details.timeout,
+            metadata=metadata,
+            credentials=client_call_details.credentials,
+            wait_for_ready=client_call_details.wait_for_ready,
+            compression=client_call_details.compression,
+        )
+
+        return client_call_details
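
The interceptor plugs into gRPC's standard client-side mechanism. One plausible way to attach it, using the stock `grpc.intercept_channel` helper (the address is an assumption):

    import grpc
    from sentry_sdk.integrations.grpc import ClientInterceptor

    channel = grpc.insecure_channel("localhost:50051")  # assumed local server
    channel = grpc.intercept_channel(channel, ClientInterceptor())

    # Build stubs from the intercepted channel as usual; every unary-unary and
    # unary-stream call now runs in a Sentry span with trace headers attached.
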
diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py
new file mode 100644
index 0000000..cdeea4a
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/server.py
@@ -0,0 +1,64 @@
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM
+
+if MYPY:
+    from typing import Callable, Optional
+    from google.protobuf.message import Message  # type: ignore
+
+try:
+    import grpc
+    from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ServerInterceptor(grpc.ServerInterceptor):  # type: ignore
+    def __init__(self, find_name=None):
+        # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
+        self._find_method_name = find_name or ServerInterceptor._find_name
+
+        super(ServerInterceptor, self).__init__()
+
+    def intercept_service(self, continuation, handler_call_details):
+        # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler
+        handler = continuation(handler_call_details)
+        if not handler or not handler.unary_unary:
+            return handler
+
+        def behavior(request, context):
+            # type: (Message, ServicerContext) -> Message
+            hub = Hub(Hub.current)
+
+            name = self._find_method_name(context)
+
+            if name:
+                metadata = dict(context.invocation_metadata())
+
+                transaction = Transaction.continue_from_headers(
+                    metadata,
+                    op=OP.GRPC_SERVER,
+                    name=name,
+                    source=TRANSACTION_SOURCE_CUSTOM,
+                )
+
+                with hub.start_transaction(transaction=transaction):
+                    try:
+                        return handler.unary_unary(request, context)
+                    except BaseException as e:
+                        raise e
+            else:
+                return handler.unary_unary(request, context)
+
+        return grpc.unary_unary_rpc_method_handler(
+            behavior,
+            request_deserializer=handler.request_deserializer,
+            response_serializer=handler.response_serializer,
+        )
+
+    @staticmethod
+    def _find_name(context):
+        # type: (ServicerContext) -> str
+        return context._rpc_event.call_details.method.decode()
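
On the server side the interceptor is handed to `grpc.server`; each unary-unary RPC then runs inside its own Sentry transaction continued from the incoming metadata. A sketch (servicer registration elided):

    from concurrent import futures

    import grpc
    from sentry_sdk.integrations.grpc import ServerInterceptor

    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=10),
        interceptors=[ServerInterceptor()],
    )
    # add_<Service>Servicer_to_server(...), server.add_insecure_port(...),
    # server.start() as in any grpcio application.
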
diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py
index 3d4bbf8..04db504 100644
--- a/sentry_sdk/integrations/httpx.py
+++ b/sentry_sdk/integrations/httpx.py
@@ -1,10 +1,18 @@
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.utils import logger
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
+from sentry_sdk.tracing_utils import should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    logger,
+    parse_url,
+)
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -40,23 +48,44 @@ def _install_httpx_client():
         if hub.get_integration(HttpxIntegration) is None:
             return real_send(self, request, **kwargs)
 
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
         ) as span:
-            span.set_data("method", request.method)
-            span.set_data("url", str(request.url))
-            for key, value in hub.iter_trace_propagation_headers():
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                        key=key, value=value, url=request.url
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(hub, str(request.url)):
+                for key, value in hub.iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
                     )
-                )
-                request.headers[key] = value
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
+                        BAGGAGE_HEADER_NAME
+                    ):
+                        # do not overwrite any existing baggage, just append to it
+                        request.headers[key] += "," + value
+                    else:
+                        request.headers[key] = value
+
             rv = real_send(self, request, **kwargs)
 
-            span.set_data("status_code", rv.status_code)
             span.set_http_status(rv.status_code)
             span.set_data("reason", rv.reason_phrase)
+
             return rv
 
     Client.send = send
@@ -72,23 +101,44 @@ def _install_httpx_async_client():
         if hub.get_integration(HttpxIntegration) is None:
             return await real_send(self, request, **kwargs)
 
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
         ) as span:
-            span.set_data("method", request.method)
-            span.set_data("url", str(request.url))
-            for key, value in hub.iter_trace_propagation_headers():
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                        key=key, value=value, url=request.url
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(hub, str(request.url)):
+                for key, value in hub.iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
                     )
-                )
-                request.headers[key] = value
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
+                        BAGGAGE_HEADER_NAME
+                    ):
+                        # do not overwrite any existing baggage, just append to it
+                        request.headers[key] += "," + value
+                    else:
+                        request.headers[key] = value
+
             rv = await real_send(self, request, **kwargs)
 
-            span.set_data("status_code", rv.status_code)
             span.set_http_status(rv.status_code)
             span.set_data("reason", rv.reason_phrase)
+
             return rv
 
     AsyncClient.send = send
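
The baggage handling in both clients appends to an existing `baggage` header instead of clobbering it, so third-party baggage set by the application survives. The merge rule in isolation (header values are illustrative):

    headers = {"baggage": "other-vendor=1"}            # set by the application

    key, value = "baggage", "sentry-trace_id=abc123"   # illustrative pair
    if key == "baggage" and headers.get("baggage"):
        headers[key] += "," + value                    # append, never overwrite
    else:
        headers[key] = value

    print(headers["baggage"])  # other-vendor=1,sentry-trace_id=abc123
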
diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py
new file mode 100644
index 0000000..52b0e54
--- /dev/null
+++ b/sentry_sdk/integrations/huey.py
@@ -0,0 +1,158 @@
+from __future__ import absolute_import
+
+import sys
+from datetime import datetime
+
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+)
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Optional, Union, TypeVar
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+    from sentry_sdk.utils import ExcInfo
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+try:
+    from huey.api import Huey, Result, ResultGroup, Task
+    from huey.exceptions import CancelExecution, RetryTask, TaskLockedException
+except ImportError:
+    raise DidNotEnable("Huey is not installed")
+
+
+HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException)
+
+
+class HueyIntegration(Integration):
+    identifier = "huey"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_enqueue()
+        patch_execute()
+
+
+def patch_enqueue():
+    # type: () -> None
+    old_enqueue = Huey.enqueue
+
+    def _sentry_enqueue(self, task):
+        # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]]
+        hub = Hub.current
+
+        if hub.get_integration(HueyIntegration) is None:
+            return old_enqueue(self, task)
+
+        with hub.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name):
+            return old_enqueue(self, task)
+
+    Huey.enqueue = _sentry_enqueue
+
+
+def _make_event_processor(task):
+    # type: (Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        with capture_internal_exceptions():
+            tags = event.setdefault("tags", {})
+            tags["huey_task_id"] = task.id
+            tags["huey_task_retry"] = task.default_retries > task.retries
+            extra = event.setdefault("extra", {})
+            extra["huey-job"] = {
+                "task": task.name,
+                "args": task.args
+                if _should_send_default_pii()
+                else SENSITIVE_DATA_SUBSTITUTE,
+                "kwargs": task.kwargs
+                if _should_send_default_pii()
+                else SENSITIVE_DATA_SUBSTITUTE,
+                "retry": (task.default_retries or 0) - task.retries,
+            }
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    hub = Hub.current
+
+    if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS:
+        hub.scope.transaction.set_status("aborted")
+        return
+
+    hub.scope.transaction.set_status("internal_error")
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": HueyIntegration.identifier, "handled": False},
+    )
+    hub.capture_event(event, hint=hint)
+
+
+def _wrap_task_execute(func):
+    # type: (F) -> F
+    def _sentry_execute(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        hub = Hub.current
+        if hub.get_integration(HueyIntegration) is None:
+            return func(*args, **kwargs)
+
+        try:
+            result = func(*args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_execute  # type: ignore
+
+
+def patch_execute():
+    # type: () -> None
+    old_execute = Huey._execute
+
+    def _sentry_execute(self, task, timestamp=None):
+        # type: (Huey, Task, Optional[datetime]) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(HueyIntegration) is None:
+            return old_execute(self, task, timestamp)
+
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                scope._name = "huey"
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(_make_event_processor(task))
+
+            transaction = Transaction(
+                name=task.name,
+                status="ok",
+                op=OP.QUEUE_TASK_HUEY,
+                source=TRANSACTION_SOURCE_TASK,
+            )
+
+            if not getattr(task, "_sentry_is_patched", False):
+                task.execute = _wrap_task_execute(task.execute)
+                task._sentry_is_patched = True
+
+            with hub.start_transaction(transaction):
+                return old_execute(self, task, timestamp)
+
+    Huey._execute = _sentry_execute
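
Enqueueing a task produces a `QUEUE_SUBMIT_HUEY` span and executing it produces a transaction, with the control-flow exceptions (`CancelExecution`, `RetryTask`, `TaskLockedException`) marked `aborted` rather than reported as errors. A usage sketch; the DSN and the SQLite path are placeholders:

    import sentry_sdk
    from huey import SqliteHuey
    from sentry_sdk.integrations.huey import HueyIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[HueyIntegration()],
        traces_sample_rate=1.0,
    )

    huey = SqliteHuey(filename="/tmp/demo.db")  # placeholder backend

    @huey.task()
    def add(a, b):
        return a + b
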
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index 86cea09..f13f8c8 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -14,9 +14,9 @@ from sentry_sdk.utils import (
 from sentry_sdk.integrations import Integration
 from sentry_sdk._compat import iteritems
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from logging import LogRecord
     from typing import Any
     from typing import Dict
@@ -107,75 +107,61 @@ class LoggingIntegration(Integration):
         logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
 
 
-def _can_record(record):
-    # type: (LogRecord) -> bool
-    """Prevents ignored loggers from recording"""
-    for logger in _IGNORED_LOGGERS:
-        if fnmatch(record.name, logger):
-            return False
-    return True
-
-
-def _breadcrumb_from_record(record):
-    # type: (LogRecord) -> Dict[str, Any]
-    return {
-        "type": "log",
-        "level": _logging_to_event_level(record),
-        "category": record.name,
-        "message": record.message,
-        "timestamp": datetime.datetime.utcfromtimestamp(record.created),
-        "data": _extra_from_record(record),
-    }
-
-
-def _logging_to_event_level(record):
-    # type: (LogRecord) -> str
-    return LOGGING_TO_EVENT_LEVEL.get(
-        record.levelno, record.levelname.lower() if record.levelname else ""
-    )
-
-
-COMMON_RECORD_ATTRS = frozenset(
-    (
-        "args",
-        "created",
-        "exc_info",
-        "exc_text",
-        "filename",
-        "funcName",
-        "levelname",
-        "levelno",
-        "linenno",
-        "lineno",
-        "message",
-        "module",
-        "msecs",
-        "msg",
-        "name",
-        "pathname",
-        "process",
-        "processName",
-        "relativeCreated",
-        "stack",
-        "tags",
-        "thread",
-        "threadName",
-        "stack_info",
+class _BaseHandler(logging.Handler, object):
+    COMMON_RECORD_ATTRS = frozenset(
+        (
+            "args",
+            "created",
+            "exc_info",
+            "exc_text",
+            "filename",
+            "funcName",
+            "levelname",
+            "levelno",
+            "linenno",
+            "lineno",
+            "message",
+            "module",
+            "msecs",
+            "msg",
+            "name",
+            "pathname",
+            "process",
+            "processName",
+            "relativeCreated",
+            "stack",
+            "tags",
+            "thread",
+            "threadName",
+            "stack_info",
+        )
     )
-)
 
+    def _can_record(self, record):
+        # type: (LogRecord) -> bool
+        """Prevents ignored loggers from recording"""
+        for logger in _IGNORED_LOGGERS:
+            if fnmatch(record.name, logger):
+                return False
+        return True
+
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        return LOGGING_TO_EVENT_LEVEL.get(
+            record.levelno, record.levelname.lower() if record.levelname else ""
+        )
 
-def _extra_from_record(record):
-    # type: (LogRecord) -> Dict[str, None]
-    return {
-        k: v
-        for k, v in iteritems(vars(record))
-        if k not in COMMON_RECORD_ATTRS
-        and (not isinstance(k, str) or not k.startswith("_"))
-    }
+    def _extra_from_record(self, record):
+        # type: (LogRecord) -> Dict[str, None]
+        return {
+            k: v
+            for k, v in iteritems(vars(record))
+            if k not in self.COMMON_RECORD_ATTRS
+            and (not isinstance(k, str) or not k.startswith("_"))
+        }
 
 
-class EventHandler(logging.Handler, object):
+class EventHandler(_BaseHandler):
     """
     A logging handler that emits Sentry events for each log record
 
@@ -190,7 +176,7 @@ class EventHandler(logging.Handler, object):
 
     def _emit(self, record):
         # type: (LogRecord) -> None
-        if not _can_record(record):
+        if not self._can_record(record):
             return
 
         hub = Hub.current
@@ -219,7 +205,10 @@ class EventHandler(logging.Handler, object):
                     "values": [
                         {
                             "stacktrace": current_stacktrace(
-                                client_options["with_locals"]
+                                include_local_variables=client_options[
+                                    "include_local_variables"
+                                ],
+                                max_value_length=client_options["max_value_length"],
                             ),
                             "crashed": False,
                             "current": True,
@@ -232,7 +221,7 @@ class EventHandler(logging.Handler, object):
 
         hint["log_record"] = record
 
-        event["level"] = _logging_to_event_level(record)
+        event["level"] = self._logging_to_event_level(record)
         event["logger"] = record.name
 
         # Log records from `warnings` module as separate issues
@@ -255,7 +244,7 @@ class EventHandler(logging.Handler, object):
                 "params": record.args,
             }
 
-        event["extra"] = _extra_from_record(record)
+        event["extra"] = self._extra_from_record(record)
 
         hub.capture_event(event, hint=hint)
 
@@ -264,7 +253,7 @@ class EventHandler(logging.Handler, object):
 SentryHandler = EventHandler
 
 
-class BreadcrumbHandler(logging.Handler, object):
+class BreadcrumbHandler(_BaseHandler):
     """
     A logging handler that records breadcrumbs for each log record.
 
@@ -279,9 +268,20 @@ class BreadcrumbHandler(logging.Handler, object):
 
     def _emit(self, record):
         # type: (LogRecord) -> None
-        if not _can_record(record):
+        if not self._can_record(record):
             return
 
         Hub.current.add_breadcrumb(
-            _breadcrumb_from_record(record), hint={"log_record": record}
+            self._breadcrumb_from_record(record), hint={"log_record": record}
         )
+
+    def _breadcrumb_from_record(self, record):
+        # type: (LogRecord) -> Dict[str, Any]
+        return {
+            "type": "log",
+            "level": self._logging_to_event_level(record),
+            "category": record.name,
+            "message": record.message,
+            "timestamp": datetime.datetime.utcfromtimestamp(record.created),
+            "data": self._extra_from_record(record),
+        }
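
Moving the record helpers onto a shared `_BaseHandler` lets subclasses override pieces such as the level mapping (the Loguru handlers below do exactly that). The handlers also still work standalone; a sketch with a placeholder DSN, disabling the automatic handlers so records are not captured twice:

    import logging

    import sentry_sdk
    from sentry_sdk.integrations.logging import (
        BreadcrumbHandler,
        EventHandler,
        LoggingIntegration,
    )

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[LoggingIntegration(level=None, event_level=None)],
    )

    logger = logging.getLogger(__name__)
    logger.addHandler(BreadcrumbHandler(level=logging.INFO))  # INFO+ -> breadcrumbs
    logger.addHandler(EventHandler(level=logging.ERROR))      # ERROR+ -> events

    logger.error("something broke")
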
diff --git a/sentry_sdk/integrations/loguru.py b/sentry_sdk/integrations/loguru.py
new file mode 100644
index 0000000..b1ee2a6
--- /dev/null
+++ b/sentry_sdk/integrations/loguru.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import enum
+
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import (
+    BreadcrumbHandler,
+    EventHandler,
+    _BaseHandler,
+)
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+    from typing import Optional, Tuple
+
+try:
+    import loguru
+    from loguru import logger
+    from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT
+except ImportError:
+    raise DidNotEnable("LOGURU is not installed")
+
+
+class LoggingLevels(enum.IntEnum):
+    TRACE = 5
+    DEBUG = 10
+    INFO = 20
+    SUCCESS = 25
+    WARNING = 30
+    ERROR = 40
+    CRITICAL = 50
+
+
+DEFAULT_LEVEL = LoggingLevels.INFO.value
+DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value
+# We need to save the handlers to be able to remove them later
+# in tests (they call `LoguruIntegration.__init__` multiple times,
+# and we can't use `setup_once` because it's called before
+# we get the configuration).
+_ADDED_HANDLERS = (None, None)  # type: Tuple[Optional[int], Optional[int]]
+
+
+class LoguruIntegration(Integration):
+    identifier = "loguru"
+
+    def __init__(
+        self,
+        level=DEFAULT_LEVEL,
+        event_level=DEFAULT_EVENT_LEVEL,
+        breadcrumb_format=DEFAULT_FORMAT,
+        event_format=DEFAULT_FORMAT,
+    ):
+        # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None
+        global _ADDED_HANDLERS
+        breadcrumb_handler, event_handler = _ADDED_HANDLERS
+
+        if breadcrumb_handler is not None:
+            logger.remove(breadcrumb_handler)
+            breadcrumb_handler = None
+        if event_handler is not None:
+            logger.remove(event_handler)
+            event_handler = None
+
+        if level is not None:
+            breadcrumb_handler = logger.add(
+                LoguruBreadcrumbHandler(level=level),
+                level=level,
+                format=breadcrumb_format,
+            )
+
+        if event_level is not None:
+            event_handler = logger.add(
+                LoguruEventHandler(level=event_level),
+                level=event_level,
+                format=event_format,
+            )
+
+        _ADDED_HANDLERS = (breadcrumb_handler, event_handler)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        pass  # we do everything in __init__
+
+
+class _LoguruBaseHandler(_BaseHandler):
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        try:
+            return LoggingLevels(record.levelno).name.lower()
+        except ValueError:
+            return record.levelname.lower() if record.levelname else ""
+
+
+class LoguruEventHandler(_LoguruBaseHandler, EventHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names."""
+
+
+class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names."""
diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py
index 3d78cb8..3f9f356 100644
--- a/sentry_sdk/integrations/modules.py
+++ b/sentry_sdk/integrations/modules.py
@@ -4,9 +4,9 @@ from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Tuple
@@ -18,15 +18,36 @@ if MYPY:
 _installed_modules = None
 
 
+def _normalize_module_name(name):
+    # type: (str) -> str
+    return name.lower()
+
+
 def _generate_installed_modules():
     # type: () -> Iterator[Tuple[str, str]]
     try:
-        import pkg_resources
-    except ImportError:
-        return
+        from importlib import metadata
+
+        for dist in metadata.distributions():
+            name = dist.metadata["Name"]
+            # `metadata` values may be `None`, see:
+            # https://github.com/python/cpython/issues/91216
+            # and
+            # https://github.com/python/importlib_metadata/issues/371
+            if name is not None:
+                version = metadata.version(name)
+                if version is not None:
+                    yield _normalize_module_name(name), version
 
-    for info in pkg_resources.working_set:
-        yield info.key, info.version
+    except ImportError:
+        # < py3.8
+        try:
+            import pkg_resources
+        except ImportError:
+            return
+
+        for info in pkg_resources.working_set:
+            yield _normalize_module_name(info.key), info.version
 
 
 def _get_installed_modules():
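
Package discovery now prefers the stdlib `importlib.metadata` (Python 3.8+) and only falls back to `pkg_resources` on older interpreters, with names lowercased for consistent lookups. The stdlib path in isolation:

    from importlib import metadata

    installed = {
        dist.metadata["Name"].lower(): dist.version
        for dist in metadata.distributions()
        if dist.metadata["Name"] is not None  # metadata values may be None
    }
    print(installed.get("sentry-sdk"))
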
diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000..e002020
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/__init__.py
@@ -0,0 +1,7 @@
+from sentry_sdk.integrations.opentelemetry.span_processor import (  # noqa: F401
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import (  # noqa: F401
+    SentryPropagator,
+)
diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py
new file mode 100644
index 0000000..79663dd
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/consts.py
@@ -0,0 +1,6 @@
+from opentelemetry.context import (  # type: ignore
+    create_key,
+)
+
+SENTRY_TRACE_KEY = create_key("sentry-trace")
+SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py
new file mode 100644
index 0000000..e1bcc3b
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/propagator.py
@@ -0,0 +1,115 @@
+from opentelemetry import trace  # type: ignore
+from opentelemetry.context import (  # type: ignore
+    Context,
+    get_current,
+    set_value,
+)
+from opentelemetry.propagators.textmap import (  # type: ignore
+    CarrierT,
+    Getter,
+    Setter,
+    TextMapPropagator,
+    default_getter,
+    default_setter,
+)
+from opentelemetry.trace import (  # type: ignore
+    NonRecordingSpan,
+    SpanContext,
+    TraceFlags,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
+from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+    from typing import Set
+
+
+class SentryPropagator(TextMapPropagator):  # type: ignore
+    """
+    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+    """
+
+    def extract(self, carrier, context=None, getter=default_getter):
+        # type: (CarrierT, Optional[Context], Getter) -> Context
+        if context is None:
+            context = get_current()
+
+        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
+        if not sentry_trace:
+            return context
+
+        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
+        if not sentrytrace:
+            return context
+
+        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
+
+        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
+
+        span_context = SpanContext(
+            trace_id=int(trace_id, 16),  # type: ignore
+            span_id=int(span_id, 16),  # type: ignore
+            # we simulate a sampled trace on the otel side and leave the sampling to sentry
+            trace_flags=TraceFlags(TraceFlags.SAMPLED),
+            is_remote=True,
+        )
+
+        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
+
+        if baggage_header:
+            baggage = Baggage.from_incoming_header(baggage_header[0])
+        else:
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and frozen and won't be populated as head SDK.
+            baggage = Baggage(sentry_items={})
+
+        baggage.freeze()
+        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
+
+        span = NonRecordingSpan(span_context)
+        modified_context = trace.set_span_in_context(span, context)
+        return modified_context
+
+    def inject(self, carrier, context=None, setter=default_setter):
+        # type: (CarrierT, Optional[Context], Setter) -> None
+        if context is None:
+            context = get_current()
+
+        current_span = trace.get_current_span(context)
+        current_span_context = current_span.get_span_context()
+
+        if not current_span_context.is_valid:
+            return
+
+        span_id = trace.format_span_id(current_span_context.span_id)
+
+        span_map = SentrySpanProcessor().otel_span_map
+        sentry_span = span_map.get(span_id, None)
+        if not sentry_span:
+            return
+
+        setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent())
+
+        if sentry_span.containing_transaction:
+            baggage = sentry_span.containing_transaction.get_baggage()
+            if baggage:
+                setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize())
+
+    @property
+    def fields(self):
+        # type: () -> Set[str]
+        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
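
The propagator slots into OTel's normal text-map machinery, so registering it globally makes instrumented clients and servers read and write the `sentry-trace` and `baggage` headers:

    from opentelemetry.propagate import set_global_textmap

    from sentry_sdk.integrations.opentelemetry import SentryPropagator

    set_global_textmap(SentryPropagator())
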
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
new file mode 100644
index 0000000..bb53da1
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -0,0 +1,308 @@
+from datetime import datetime
+
+from opentelemetry.context import get_value  # type: ignore
+from opentelemetry.sdk.trace import SpanProcessor  # type: ignore
+from opentelemetry.semconv.trace import SpanAttributes  # type: ignore
+from opentelemetry.trace import (  # type: ignore
+    format_span_id,
+    format_trace_id,
+    get_current_span,
+    SpanContext,
+    Span as OTelSpan,
+    SpanKind,
+)
+from opentelemetry.trace.span import (  # type: ignore
+    INVALID_SPAN_ID,
+    INVALID_TRACE_ID,
+)
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import Transaction, Span as SentrySpan
+from sentry_sdk.utils import Dsn
+from sentry_sdk._types import TYPE_CHECKING
+
+from urllib3.util import parse_url as urlparse
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional, Union
+
+    from sentry_sdk._types import Event, Hint
+
+OPEN_TELEMETRY_CONTEXT = "otel"
+
+
+def link_trace_context_to_error_event(event, otel_span_map):
+    # type: (Event, Dict[str, Union[Transaction, SentrySpan]]) -> Event
+    hub = Hub.current
+    if not hub:
+        return event
+
+    if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+        return event
+
+    if hasattr(event, "type") and event["type"] == "transaction":
+        return event
+
+    otel_span = get_current_span()
+    if not otel_span:
+        return event
+
+    ctx = otel_span.get_span_context()
+    trace_id = format_trace_id(ctx.trace_id)
+    span_id = format_span_id(ctx.span_id)
+
+    if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:
+        return event
+
+    sentry_span = otel_span_map.get(span_id, None)
+    if not sentry_span:
+        return event
+
+    contexts = event.setdefault("contexts", {})
+    contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+    return event
+
+
+class SentrySpanProcessor(SpanProcessor):  # type: ignore
+    """
+    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
+    """
+
+    # The mapping from otel span ids to sentry spans
+    otel_span_map = {}  # type: Dict[str, Union[Transaction, SentrySpan]]
+
+    def __new__(cls):
+        # type: () -> SentrySpanProcessor
+        if not hasattr(cls, "instance"):
+            cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        # type: () -> None
+        @add_global_event_processor
+        def global_event_processor(event, hint):
+            # type: (Event, Hint) -> Event
+            return link_trace_context_to_error_event(event, self.otel_span_map)
+
+    def on_start(self, otel_span, parent_context=None):
+        # type: (OTelSpan, Optional[SpanContext]) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if not hub.client or (hub.client and not hub.client.dsn):
+            return
+
+        try:
+            _ = Dsn(hub.client.dsn or "")
+        except Exception:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.get_span_context().is_valid:
+            return
+
+        if self._is_sentry_span(hub, otel_span):
+            return
+
+        trace_data = self._get_trace_data(otel_span, parent_context)
+
+        parent_span_id = trace_data["parent_span_id"]
+        sentry_parent_span = (
+            self.otel_span_map.get(parent_span_id, None) if parent_span_id else None
+        )
+
+        sentry_span = None
+        if sentry_parent_span:
+            sentry_span = sentry_parent_span.start_child(
+                span_id=trace_data["span_id"],
+                description=otel_span.name,
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+        else:
+            sentry_span = hub.start_transaction(
+                name=otel_span.name,
+                span_id=trace_data["span_id"],
+                parent_span_id=parent_span_id,
+                trace_id=trace_data["trace_id"],
+                baggage=trace_data["baggage"],
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+
+        self.otel_span_map[trace_data["span_id"]] = sentry_span
+
+    def on_end(self, otel_span):
+        # type: (OTelSpan) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        span_context = otel_span.get_span_context()
+        if not span_context.is_valid:
+            return
+
+        span_id = format_span_id(span_context.span_id)
+        sentry_span = self.otel_span_map.pop(span_id, None)
+        if not sentry_span:
+            return
+
+        sentry_span.op = otel_span.name
+
+        self._update_span_with_otel_status(sentry_span, otel_span)
+
+        if isinstance(sentry_span, Transaction):
+            sentry_span.name = otel_span.name
+            sentry_span.set_context(
+                OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
+            )
+
+        else:
+            self._update_span_with_otel_data(sentry_span, otel_span)
+
+        sentry_span.finish(
+            end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)
+        )
+
+    def _is_sentry_span(self, hub, otel_span):
+        # type: (Hub, OTelSpan) -> bool
+        """
+        Break infinite loop:
+        HTTP requests to Sentry are caught by OTel and sent again to Sentry.
+        """
+        otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+        dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
+
+        if otel_span_url and dsn_url in otel_span_url:
+            return True
+
+        return False
+
+    def _get_otel_context(self, otel_span):
+        # type: (OTelSpan) -> Dict[str, Any]
+        """
+        Returns the OTel context for Sentry.
+        See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
+        """
+        ctx = {}
+
+        if otel_span.attributes:
+            ctx["attributes"] = dict(otel_span.attributes)
+
+        if otel_span.resource.attributes:
+            ctx["resource"] = dict(otel_span.resource.attributes)
+
+        return ctx
+
+    def _get_trace_data(self, otel_span, parent_context):
+        # type: (OTelSpan, SpanContext) -> Dict[str, Any]
+        """
+        Extracts tracing information from one OTel span and its parent OTel context.
+        """
+        trace_data = {}
+        span_context = otel_span.get_span_context()
+
+        span_id = format_span_id(span_context.span_id)
+        trace_data["span_id"] = span_id
+
+        trace_id = format_trace_id(span_context.trace_id)
+        trace_data["trace_id"] = trace_id
+
+        parent_span_id = (
+            format_span_id(otel_span.parent.span_id) if otel_span.parent else None
+        )
+        trace_data["parent_span_id"] = parent_span_id
+
+        sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
+        trace_data["parent_sampled"] = (
+            sentry_trace_data["parent_sampled"] if sentry_trace_data else None
+        )
+
+        baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
+        trace_data["baggage"] = baggage
+
+        return trace_data
+
+    def _update_span_with_otel_status(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Set the Sentry span status from the OTel span
+        """
+        if otel_span.status.is_unset:
+            return
+
+        if otel_span.status.is_ok:
+            sentry_span.set_status("ok")
+            return
+
+        sentry_span.set_status("internal_error")
+
+    def _update_span_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Convert OTel span data and update the Sentry span with it.
+        This should eventually happen on the server when ingesting the spans.
+        """
+        for key, val in otel_span.attributes.items():
+            sentry_span.set_data(key, val)
+
+        sentry_span.set_data("otel.kind", otel_span.kind)
+
+        op = otel_span.name
+        description = otel_span.name
+
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)
+        db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)
+
+        if http_method:
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            description = http_method
+
+            peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
+            if peer_name:
+                description += " {}".format(peer_name)
+
+            target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
+            if target:
+                description += " {}".format(target)
+
+            if not peer_name and not target:
+                url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+                if url:
+                    parsed_url = urlparse(url)
+                    url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
+                    description += " {}".format(url)
+
+            status_code = otel_span.attributes.get(
+                SpanAttributes.HTTP_STATUS_CODE, None
+            )
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+        elif db_query:
+            op = "db"
+            statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
+            if statement:
+                description = statement
+
+        sentry_span.op = op
+        sentry_span.description = description
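
The SentrySpanProcessor above maps each OTel span onto a Sentry transaction or child span, keyed by span id in otel_span_map. A minimal wiring sketch (the DSN is a placeholder; instrumenter="otel" is the init option that on_end() checks before doing any work):

    import sentry_sdk
    from sentry_sdk.integrations.opentelemetry import SentryPropagator, SentrySpanProcessor

    from opentelemetry import trace
    from opentelemetry.propagate import set_global_textmap
    from opentelemetry.sdk.trace import TracerProvider

    # Hand performance instrumentation over to OTel instead of the Sentry SDK.
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        instrumenter="otel",
    )

    provider = TracerProvider()
    provider.add_span_processor(SentrySpanProcessor())  # the processor defined above
    trace.set_tracer_provider(provider)
    set_global_textmap(SentryPropagator())  # propagates sentry-trace / baggage headers

    tracer = trace.get_tracer(__name__)
    with tracer.start_as_current_span("demo-operation"):
        pass  # arrives in Sentry as a transaction via on_start()/on_end()
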
diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py
index 9d3fe66..5a2419c 100644
--- a/sentry_sdk/integrations/pure_eval.py
+++ b/sentry_sdk/integrations/pure_eval.py
@@ -3,12 +3,12 @@ from __future__ import absolute_import
 import ast
 
 from sentry_sdk import Hub, serializer
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import walk_exception_chain, iter_stacks
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional, Dict, Any, Tuple, List
     from types import FrameType
 
@@ -116,7 +116,7 @@ def pure_eval_frame(frame):
             return (n.lineno, n.col_offset)
 
         nodes_before_stmt = [
-            node for node in nodes if start(node) < stmt.last_token.end
+            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
         ]
         if nodes_before_stmt:
             # The position of the last node before or in the statement
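
The MYPY -> TYPE_CHECKING rename in this hunk recurs across nearly every integration in this release; both names guard imports that only static type checkers evaluate:

    from sentry_sdk._types import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen by mypy and friends only; never imported at runtime.
        from typing import Any, Optional
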
diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py
new file mode 100644
index 0000000..59001bb
--- /dev/null
+++ b/sentry_sdk/integrations/pymongo.py
@@ -0,0 +1,206 @@
+from __future__ import absolute_import
+import copy
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    from pymongo import monitoring
+except ImportError:
+    raise DidNotEnable("Pymongo not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Union
+
+    from pymongo.monitoring import (
+        CommandFailedEvent,
+        CommandStartedEvent,
+        CommandSucceededEvent,
+    )
+
+
+SAFE_COMMAND_ATTRIBUTES = [
+    "insert",
+    "ordered",
+    "find",
+    "limit",
+    "singleBatch",
+    "aggregate",
+    "createIndexes",
+    "indexes",
+    "delete",
+    "findAndModify",
+    "renameCollection",
+    "to",
+    "drop",
+]
+
+
+def _strip_pii(command):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    for key in command:
+        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
+        if is_safe_field:
+            # Skip if safe key
+            continue
+
+        update_db_command = key == "update" and "findAndModify" not in command
+        if update_db_command:
+            # Also skip the "update" db command because it is safe.
+            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
+            continue
+
+        # Special stripping for documents
+        is_document = key == "documents"
+        if is_document:
+            for doc in command[key]:
+                for doc_key in doc:
+                    doc[doc_key] = "%s"
+            continue
+
+        # Special stripping for dict style fields
+        is_dict_field = key in ["filter", "query", "update"]
+        if is_dict_field:
+            for item_key in command[key]:
+                command[key][item_key] = "%s"
+            continue
+
+        # For pipeline fields strip the `$match` dict
+        is_pipeline_field = key == "pipeline"
+        if is_pipeline_field:
+            for pipeline in command[key]:
+                for match_key in pipeline["$match"] if "$match" in pipeline else []:
+                    pipeline["$match"][match_key] = "%s"
+            continue
+
+        # Default stripping
+        command[key] = "%s"
+
+    return command
+
+
+def _get_db_data(event):
+    # type: (Any) -> Dict[str, Any]
+    data = {}
+
+    data[SPANDATA.DB_SYSTEM] = "mongodb"
+
+    db_name = event.database_name
+    if db_name is not None:
+        data[SPANDATA.DB_NAME] = db_name
+
+    server_address = event.connection_id[0]
+    if server_address is not None:
+        data[SPANDATA.SERVER_ADDRESS] = server_address
+
+    server_port = event.connection_id[1]
+    if server_port is not None:
+        data[SPANDATA.SERVER_PORT] = server_port
+
+    return data
+
+
+class CommandTracer(monitoring.CommandListener):
+    def __init__(self):
+        # type: () -> None
+        self._ongoing_operations = {}  # type: Dict[int, Span]
+
+    def _operation_key(self, event):
+        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
+        return event.request_id
+
+    def started(self, event):
+        # type: (CommandStartedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+        with capture_internal_exceptions():
+            command = dict(copy.deepcopy(event.command))
+
+            command.pop("$db", None)
+            command.pop("$clusterTime", None)
+            command.pop("$signature", None)
+
+            op = "db.query"
+
+            tags = {
+                "db.name": event.database_name,
+                SPANDATA.DB_SYSTEM: "mongodb",
+                SPANDATA.DB_OPERATION: event.command_name,
+            }
+
+            try:
+                tags["net.peer.name"] = event.connection_id[0]
+                tags["net.peer.port"] = str(event.connection_id[1])
+            except TypeError:
+                pass
+
+            data = {"operation_ids": {}}  # type: Dict[str, Any]
+            data["operation_ids"]["operation"] = event.operation_id
+            data["operation_ids"]["request"] = event.request_id
+
+            data.update(_get_db_data(event))
+
+            try:
+                lsid = command.pop("lsid")["id"]
+                data["operation_ids"]["session"] = str(lsid)
+            except KeyError:
+                pass
+
+            if not _should_send_default_pii():
+                command = _strip_pii(command)
+
+            query = "{} {}".format(event.command_name, command)
+            span = hub.start_span(op=op, description=query)
+
+            for tag, value in tags.items():
+                span.set_tag(tag, value)
+
+            for key, value in data.items():
+                span.set_data(key, value)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message=query, category="query", type=op, data=tags)
+
+            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
+
+    def failed(self, event):
+        # type: (CommandFailedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("internal_error")
+            span.__exit__(None, None, None)
+        except KeyError:
+            return
+
+    def succeeded(self, event):
+        # type: (CommandSucceededEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("ok")
+            span.__exit__(None, None, None)
+        except KeyError:
+            pass
+
+
+class PyMongoIntegration(Integration):
+    identifier = "pymongo"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        monitoring.register(CommandTracer())
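
To pick up the CommandTracer, the new integration is passed to init (a sketch; DSN and connection string are placeholders). Unless send_default_pii=True, document values are redacted through _strip_pii above:

    import sentry_sdk
    from sentry_sdk.integrations.pymongo import PyMongoIntegration
    from pymongo import MongoClient

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[PyMongoIntegration()],  # registers CommandTracer via setup_once()
    )

    client = MongoClient("mongodb://localhost:27017")  # placeholder
    with sentry_sdk.start_transaction(name="mongo-demo"):
        # Emits a db.query span; filter values show up as "%s" unless
        # send_default_pii=True is passed to sentry_sdk.init().
        client.test_db.users.find_one({"name": "Alice"})
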
diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py
index 1e234fc..6bfed03 100644
--- a/sentry_sdk/integrations/pyramid.py
+++ b/sentry_sdk/integrations/pyramid.py
@@ -23,9 +23,9 @@ try:
 except ImportError:
     raise DidNotEnable("Pyramid not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from pyramid.response import Response
     from typing import Any
     from sentry_sdk.integrations.wsgi import _ScopedResponse
diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py
index e1d4228..ea874ed 100644
--- a/sentry_sdk/integrations/quart.py
+++ b/sentry_sdk/integrations/quart.py
@@ -1,5 +1,8 @@
 from __future__ import absolute_import
 
+import inspect
+import threading
+
 from sentry_sdk.hub import _should_send_default_pii, Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
@@ -11,9 +14,10 @@ from sentry_sdk.utils import (
     event_from_exception,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Union
@@ -34,6 +38,7 @@ try:
         request,
         websocket,
     )
+    from quart.scaffold import Scaffold  # type: ignore
     from quart.signals import (  # type: ignore
         got_background_exception,
         got_request_exception,
@@ -41,6 +46,7 @@ try:
         request_started,
         websocket_started,
     )
+    from quart.utils import is_coroutine_function  # type: ignore
 except ImportError:
     raise DidNotEnable("Quart is not installed")
 
@@ -71,18 +77,62 @@ class QuartIntegration(Integration):
         got_request_exception.connect(_capture_exception)
         got_websocket_exception.connect(_capture_exception)
 
-        old_app = Quart.__call__
+        patch_asgi_app()
+        patch_scaffold_route()
+
+
+def patch_asgi_app():
+    # type: () -> None
+    old_app = Quart.__call__
+
+    async def sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Any, Any, Any, Any) -> Any
+        if Hub.current.get_integration(QuartIntegration) is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Quart.__call__ = sentry_patched_asgi_app
+
+
+def patch_scaffold_route():
+    # type: () -> None
+    old_route = Scaffold.route
+
+    def _sentry_route(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        old_decorator = old_route(*args, **kwargs)
+
+        def decorator(old_func):
+            # type: (Any) -> Any
+
+            if inspect.isfunction(old_func) and not is_coroutine_function(old_func):
+
+                @wraps(old_func)
+                def _sentry_func(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    integration = hub.get_integration(QuartIntegration)
+                    if integration is None:
+                        return old_func(*args, **kwargs)
+
+                    with hub.configure_scope() as sentry_scope:
+                        if sentry_scope.profile is not None:
+                            sentry_scope.profile.active_thread_id = (
+                                threading.current_thread().ident
+                            )
+
+                        return old_func(*args, **kwargs)
+
+                return old_decorator(_sentry_func)
 
-        async def sentry_patched_asgi_app(self, scope, receive, send):
-            # type: (Any, Any, Any, Any) -> Any
-            if Hub.current.get_integration(QuartIntegration) is None:
-                return await old_app(self, scope, receive, send)
+            return old_decorator(old_func)
 
-            middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
-            middleware.__call__ = middleware._run_asgi3
-            return await middleware(scope, receive, send)
+        return decorator
 
-        Quart.__call__ = sentry_patched_asgi_app
+    Scaffold.route = _sentry_route
 
 
 def _set_transaction_name_and_source(scope, transaction_style, request):
@@ -101,7 +151,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
         pass
 
 
-def _request_websocket_started(app, **kwargs):
+async def _request_websocket_started(app, **kwargs):
     # type: (Quart, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(QuartIntegration)
@@ -155,7 +205,7 @@ def _make_request_event_processor(app, request, integration):
     return inner
 
 
-def _capture_exception(sender, exception, **kwargs):
+async def _capture_exception(sender, exception, **kwargs):
     # type: (Quart, Union[ValueError, BaseException], **Any) -> None
     hub = Hub.current
     if hub.get_integration(QuartIntegration) is None:
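
With the patched Scaffold.route in place, enabling the integration is unchanged; the new wrapper additionally records the active thread id for sync views when profiling is on. A sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.integrations.quart import QuartIntegration
    from quart import Quart

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[QuartIntegration()],
    )

    app = Quart(__name__)

    @app.route("/")
    def index():  # a sync view: wrapped by _sentry_func above
        return "hello"
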
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
deleted file mode 100644
index c27eefa..0000000
--- a/sentry_sdk/integrations/redis.py
+++ /dev/null
@@ -1,169 +0,0 @@
-from __future__ import absolute_import
-
-from sentry_sdk import Hub
-from sentry_sdk.utils import capture_internal_exceptions, logger
-from sentry_sdk.integrations import Integration, DidNotEnable
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
-    from typing import Any, Sequence
-
-_SINGLE_KEY_COMMANDS = frozenset(
-    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
-)
-_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
-
-#: Trim argument lists to this many values
-_MAX_NUM_ARGS = 10
-
-
-def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
-    # type: (Any, bool, Any) -> None
-    old_execute = pipeline_cls.execute
-
-    def sentry_patched_execute(self, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        hub = Hub.current
-
-        if hub.get_integration(RedisIntegration) is None:
-            return old_execute(self, *args, **kwargs)
-
-        with hub.start_span(op="redis", description="redis.pipeline.execute") as span:
-            with capture_internal_exceptions():
-                span.set_tag("redis.is_cluster", is_cluster)
-                transaction = self.transaction if not is_cluster else False
-                span.set_tag("redis.transaction", transaction)
-
-                commands = []
-                for i, arg in enumerate(self.command_stack):
-                    if i > _MAX_NUM_ARGS:
-                        break
-                    command_args = []
-                    for j, command_arg in enumerate(get_command_args_fn(arg)):
-                        if j > 0:
-                            command_arg = repr(command_arg)
-                        command_args.append(command_arg)
-                    commands.append(" ".join(command_args))
-
-                span.set_data(
-                    "redis.commands",
-                    {"count": len(self.command_stack), "first_ten": commands},
-                )
-
-            return old_execute(self, *args, **kwargs)
-
-    pipeline_cls.execute = sentry_patched_execute
-
-
-def _get_redis_command_args(command):
-    # type: (Any) -> Sequence[Any]
-    return command[0]
-
-
-def _parse_rediscluster_command(command):
-    # type: (Any) -> Sequence[Any]
-    return command.args
-
-
-def _patch_rediscluster():
-    # type: () -> None
-    try:
-        import rediscluster  # type: ignore
-    except ImportError:
-        return
-
-    patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
-
-    # up to v1.3.6, __version__ attribute is a tuple
-    # from v2.0.0, __version__ is a string and VERSION a tuple
-    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
-
-    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
-    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
-    if (0, 2, 0) < version < (2, 0, 0):
-        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
-        patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
-    else:
-        pipeline_cls = rediscluster.pipeline.ClusterPipeline
-
-    patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
-
-
-class RedisIntegration(Integration):
-    identifier = "redis"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        try:
-            import redis
-        except ImportError:
-            raise DidNotEnable("Redis client not installed")
-
-        patch_redis_client(redis.StrictRedis, is_cluster=False)
-        patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)
-        try:
-            strict_pipeline = redis.client.StrictPipeline  # type: ignore
-        except AttributeError:
-            pass
-        else:
-            patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
-
-        try:
-            import rb.clients  # type: ignore
-        except ImportError:
-            pass
-        else:
-            patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
-            patch_redis_client(rb.clients.MappingClient, is_cluster=False)
-            patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
-
-        try:
-            _patch_rediscluster()
-        except Exception:
-            logger.exception("Error occurred while patching `rediscluster` library")
-
-
-def patch_redis_client(cls, is_cluster):
-    # type: (Any, bool) -> None
-    """
-    This function can be used to instrument custom redis client classes or
-    subclasses.
-    """
-    old_execute_command = cls.execute_command
-
-    def sentry_patched_execute_command(self, name, *args, **kwargs):
-        # type: (Any, str, *Any, **Any) -> Any
-        hub = Hub.current
-
-        if hub.get_integration(RedisIntegration) is None:
-            return old_execute_command(self, name, *args, **kwargs)
-
-        description = name
-
-        with capture_internal_exceptions():
-            description_parts = [name]
-            for i, arg in enumerate(args):
-                if i > _MAX_NUM_ARGS:
-                    break
-
-                description_parts.append(repr(arg))
-
-            description = " ".join(description_parts)
-
-        with hub.start_span(op="redis", description=description) as span:
-            span.set_tag("redis.is_cluster", is_cluster)
-            if name:
-                span.set_tag("redis.command", name)
-
-            if name and args:
-                name_low = name.lower()
-                if (name_low in _SINGLE_KEY_COMMANDS) or (
-                    name_low in _MULTI_KEY_COMMANDS and len(args) == 1
-                ):
-                    span.set_tag("redis.key", args[0])
-
-            return old_execute_command(self, name, *args, **kwargs)
-
-    cls.execute_command = sentry_patched_execute_command
diff --git a/sentry_sdk/integrations/redis/__init__.py b/sentry_sdk/integrations/redis/__init__.py
new file mode 100644
index 0000000..45409a2
--- /dev/null
+++ b/sentry_sdk/integrations/redis/__init__.py
@@ -0,0 +1,266 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    logger,
+)
+from sentry_sdk.integrations import Integration, DidNotEnable
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Sequence
+    from sentry_sdk.tracing import Span
+
+_SINGLE_KEY_COMMANDS = frozenset(
+    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
+)
+_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
+
+_COMMANDS_INCLUDING_SENSITIVE_DATA = [
+    "auth",
+]
+
+_MAX_NUM_ARGS = 10  # Trim argument lists to this many values
+_MAX_NUM_COMMANDS = 10  # Trim command lists to this many values
+
+_DEFAULT_MAX_DATA_SIZE = 1024
+
+
+def _get_safe_command(name, args):
+    # type: (str, Sequence[Any]) -> str
+    command_parts = [name]
+
+    for i, arg in enumerate(args):
+        if i > _MAX_NUM_ARGS:
+            break
+
+        name_low = name.lower()
+
+        if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
+            command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+            continue
+
+        arg_is_the_key = i == 0
+        if arg_is_the_key:
+            command_parts.append(repr(arg))
+
+        else:
+            if _should_send_default_pii():
+                command_parts.append(repr(arg))
+            else:
+                command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+
+    command = " ".join(command_parts)
+    return command
+
+
+def _set_pipeline_data(
+    span, is_cluster, get_command_args_fn, is_transaction, command_stack
+):
+    # type: (Span, bool, Any, bool, Sequence[Any]) -> None
+    span.set_tag("redis.is_cluster", is_cluster)
+    transaction = is_transaction if not is_cluster else False
+    span.set_tag("redis.transaction", transaction)
+
+    commands = []
+    for i, arg in enumerate(command_stack):
+        if i >= _MAX_NUM_COMMANDS:
+            break
+
+        command = get_command_args_fn(arg)
+        commands.append(_get_safe_command(command[0], command[1:]))
+
+    span.set_data(
+        "redis.commands",
+        {
+            "count": len(command_stack),
+            "first_ten": commands,
+        },
+    )
+
+
+def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
+    # type: (Any, bool, Any) -> None
+    old_execute = pipeline_cls.execute
+
+    def sentry_patched_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return old_execute(self, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
+            with capture_internal_exceptions():
+                _set_pipeline_data(
+                    span,
+                    is_cluster,
+                    get_command_args_fn,
+                    self.transaction,
+                    self.command_stack,
+                )
+                span.set_data(SPANDATA.DB_SYSTEM, "redis")
+
+            return old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = sentry_patched_execute
+
+
+def _get_redis_command_args(command):
+    # type: (Any) -> Sequence[Any]
+    return command[0]
+
+
+def _parse_rediscluster_command(command):
+    # type: (Any) -> Sequence[Any]
+    return command.args
+
+
+def _patch_redis(StrictRedis, client):  # noqa: N803
+    # type: (Any, Any) -> None
+    patch_redis_client(StrictRedis, is_cluster=False)
+    patch_redis_pipeline(client.Pipeline, False, _get_redis_command_args)
+    try:
+        strict_pipeline = client.StrictPipeline
+    except AttributeError:
+        pass
+    else:
+        patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
+
+    try:
+        import redis.asyncio
+    except ImportError:
+        pass
+    else:
+        from sentry_sdk.integrations.redis.asyncio import (
+            patch_redis_async_client,
+            patch_redis_async_pipeline,
+        )
+
+        patch_redis_async_client(redis.asyncio.client.StrictRedis)
+        patch_redis_async_pipeline(redis.asyncio.client.Pipeline)
+
+
+def _patch_rb():
+    # type: () -> None
+    try:
+        import rb.clients  # type: ignore
+    except ImportError:
+        pass
+    else:
+        patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
+        patch_redis_client(rb.clients.MappingClient, is_cluster=False)
+        patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
+
+
+def _patch_rediscluster():
+    # type: () -> None
+    try:
+        import rediscluster  # type: ignore
+    except ImportError:
+        return
+
+    patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
+
+    # up to v1.3.6, __version__ attribute is a tuple
+    # from v2.0.0, __version__ is a string and VERSION a tuple
+    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
+
+    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
+    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
+    if (0, 2, 0) < version < (2, 0, 0):
+        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
+        patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
+    else:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+
+    patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
+
+
+class RedisIntegration(Integration):
+    identifier = "redis"
+
+    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE):
+        # type: (int) -> None
+        self.max_data_size = max_data_size
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            from redis import StrictRedis, client
+        except ImportError:
+            raise DidNotEnable("Redis client not installed")
+
+        _patch_redis(StrictRedis, client)
+        _patch_rb()
+
+        try:
+            _patch_rediscluster()
+        except Exception:
+            logger.exception("Error occurred while patching `rediscluster` library")
+
+
+def _get_span_description(name, *args):
+    # type: (str, *Any) -> str
+    description = name
+
+    with capture_internal_exceptions():
+        description = _get_safe_command(name, args)
+
+    return description
+
+
+def _set_client_data(span, is_cluster, name, *args):
+    # type: (Span, bool, str, *Any) -> None
+    span.set_data(SPANDATA.DB_SYSTEM, "redis")
+    span.set_tag("redis.is_cluster", is_cluster)
+    if name:
+        span.set_tag("redis.command", name)
+        span.set_tag(SPANDATA.DB_OPERATION, name)
+
+    if name and args:
+        name_low = name.lower()
+        if (name_low in _SINGLE_KEY_COMMANDS) or (
+            name_low in _MULTI_KEY_COMMANDS and len(args) == 1
+        ):
+            span.set_tag("redis.key", args[0])
+
+
+def patch_redis_client(cls, is_cluster):
+    # type: (Any, bool) -> None
+    """
+    This function can be used to instrument custom redis client classes or
+    subclasses.
+    """
+    old_execute_command = cls.execute_command
+
+    def sentry_patched_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        hub = Hub.current
+        integration = hub.get_integration(RedisIntegration)
+
+        if integration is None:
+            return old_execute_command(self, name, *args, **kwargs)
+
+        description = _get_span_description(name, *args)
+
+        data_should_be_truncated = (
+            integration.max_data_size and len(description) > integration.max_data_size
+        )
+        if data_should_be_truncated:
+            description = description[: integration.max_data_size - len("...")] + "..."
+
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+            _set_client_data(span, is_cluster, name, *args)
+
+            return old_execute_command(self, name, *args, **kwargs)
+
+    cls.execute_command = sentry_patched_execute_command
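
The new max_data_size option caps the span description built by _get_span_description; passing the integration explicitly overrides the 1024-character default. A sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.integrations.redis import RedisIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        # Descriptions longer than 256 characters are truncated to 253 chars + "..."
        integrations=[RedisIntegration(max_data_size=256)],
    )
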
diff --git a/sentry_sdk/integrations/redis/asyncio.py b/sentry_sdk/integrations/redis/asyncio.py
new file mode 100644
index 0000000..d0e4e16
--- /dev/null
+++ b/sentry_sdk/integrations/redis/asyncio.py
@@ -0,0 +1,67 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.integrations.redis import (
+    RedisIntegration,
+    _get_redis_command_args,
+    _get_span_description,
+    _set_client_data,
+    _set_pipeline_data,
+)
+
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+
+
+def patch_redis_async_pipeline(pipeline_cls):
+    # type: (Any) -> None
+    old_execute = pipeline_cls.execute
+
+    async def _sentry_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return await old_execute(self, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
+            with capture_internal_exceptions():
+                _set_pipeline_data(
+                    span,
+                    False,
+                    _get_redis_command_args,
+                    self.is_transaction,
+                    self.command_stack,
+                )
+
+            return await old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = _sentry_execute
+
+
+def patch_redis_async_client(cls):
+    # type: (Any) -> None
+    old_execute_command = cls.execute_command
+
+    async def _sentry_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return await old_execute_command(self, name, *args, **kwargs)
+
+        description = _get_span_description(name, *args)
+
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+            _set_client_data(span, False, name, *args)
+
+            return await old_execute_command(self, name, *args, **kwargs)
+
+    cls.execute_command = _sentry_execute_command
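
These asyncio variants are installed automatically by _patch_redis above whenever redis.asyncio is importable, so async clients need no extra setup. A sketch (placeholder DSN; assumes redis-py >= 4.2 and a local server):

    import asyncio
    import sentry_sdk
    import redis.asyncio as redis

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
    )

    async def main():
        r = redis.Redis()
        with sentry_sdk.start_transaction(name="cache-warmup"):
            await r.set("greeting", "hello")  # db.redis span via _sentry_execute_command
            async with r.pipeline() as pipe:
                pipe.get("greeting")
                await pipe.execute()  # db.redis span "redis.pipeline.execute"

    asyncio.run(main())
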
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index 095ab35..5596fe6 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -1,12 +1,19 @@
 from __future__ import absolute_import
 
 import weakref
+from sentry_sdk.consts import OP
 
+from sentry_sdk.api import continue_trace
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    format_timestamp,
+    parse_version,
+)
 
 try:
     from rq.queue import Queue
@@ -16,9 +23,9 @@ try:
 except ImportError:
     raise DidNotEnable("RQ not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any, Callable, Dict
 
     from sentry_sdk._types import EventProcessor
@@ -34,9 +41,9 @@ class RqIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, RQ_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+        version = parse_version(RQ_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION))
 
         if version < (0, 6):
@@ -59,9 +66,9 @@ class RqIntegration(Integration):
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(_make_event_processor(weakref.ref(job)))
 
-                transaction = Transaction.continue_from_headers(
+                transaction = continue_trace(
                     job.meta.get("_sentry_trace_headers") or {},
-                    op="rq.task",
+                    op=OP.QUEUE_TASK_RQ,
                     name="unknown RQ task",
                     source=TRANSACTION_SOURCE_TASK,
                 )
@@ -101,9 +108,10 @@ class RqIntegration(Integration):
             # type: (Queue, Any, **Any) -> Any
             hub = Hub.current
             if hub.get_integration(RqIntegration) is not None:
-                job.meta["_sentry_trace_headers"] = dict(
-                    hub.iter_trace_propagation_headers()
-                )
+                if hub.scope.span is not None:
+                    job.meta["_sentry_trace_headers"] = dict(
+                        hub.iter_trace_propagation_headers()
+                    )
 
             return old_enqueue_job(self, job, **kwargs)
 
@@ -128,6 +136,11 @@ def _make_event_processor(weak_job):
                     "description": job.description,
                 }
 
+                if job.enqueued_at:
+                    extra["rq-job"]["enqueued_at"] = format_timestamp(job.enqueued_at)
+                if job.started_at:
+                    extra["rq-job"]["started_at"] = format_timestamp(job.started_at)
+
         if "exc_info" in hint:
             with capture_internal_exceptions():
                 if issubclass(hint["exc_info"][0], JobTimeoutException):
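
parse_version (also used in the sanic, sqlalchemy, and starlette hunks below) replaces the per-integration tuple(map(int, ...)) parsing and returns None instead of raising on odd version strings. Illustrative behavior:

    from sentry_sdk.utils import parse_version

    parse_version("1.9.10")   # -> (1, 9, 10)
    parse_version("0.6b3")    # -> a numeric tuple; exact suffix handling is an assumption here
    parse_version("garbage")  # -> None, which the callers above turn into DidNotEnable
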
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py
index 8892f93..f9474d6 100644
--- a/sentry_sdk/integrations/sanic.py
+++ b/sentry_sdk/integrations/sanic.py
@@ -10,14 +10,15 @@ from sentry_sdk.utils import (
     event_from_exception,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    parse_version,
 )
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
 from sentry_sdk.integrations.logging import ignore_logger
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Optional
@@ -51,15 +52,15 @@ except AttributeError:
 
 class SanicIntegration(Integration):
     identifier = "sanic"
-    version = (0, 0)  # type: Tuple[int, ...]
+    version = None
 
     @staticmethod
     def setup_once():
         # type: () -> None
 
-        try:
-            SanicIntegration.version = tuple(map(int, SANIC_VERSION.split(".")))
-        except (TypeError, ValueError):
+        SanicIntegration.version = parse_version(SANIC_VERSION)
+
+        if SanicIntegration.version is None:
             raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION))
 
         if SanicIntegration.version < (0, 8):
@@ -225,7 +226,7 @@ def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
         finally:
             # As mentioned in previous comment in _startup, this can be removed
             # after https://github.com/sanic-org/sanic/issues/2297 is resolved
-            if SanicIntegration.version == (21, 9):
+            if SanicIntegration.version and SanicIntegration.version == (21, 9):
                 await _hub_exit(request)
 
     return sentry_wrapped_error_handler
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
index c22fbfd..5340345 100644
--- a/sentry_sdk/integrations/serverless.py
+++ b/sentry_sdk/integrations/serverless.py
@@ -6,9 +6,9 @@ from sentry_sdk._compat import reraise
 from sentry_sdk._functools import wraps
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import TypeVar
diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py
new file mode 100644
index 0000000..7a4e358
--- /dev/null
+++ b/sentry_sdk/integrations/socket.py
@@ -0,0 +1,91 @@
+from __future__ import absolute_import
+
+import socket
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration
+
+if MYPY:
+    from socket import AddressFamily, SocketKind
+    from typing import Tuple, Optional, Union, List
+
+__all__ = ["SocketIntegration"]
+
+
+class SocketIntegration(Integration):
+    identifier = "socket"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        Patches two of the most commonly used socket functions: create_connection and getaddrinfo (DNS resolution).
+        """
+        _patch_create_connection()
+        _patch_getaddrinfo()
+
+
+def _get_span_description(host, port):
+    # type: (Union[bytes, str, None], Union[str, int, None]) -> str
+
+    try:
+        host = host.decode()  # type: ignore
+    except (UnicodeDecodeError, AttributeError):
+        pass
+
+    description = "%s:%s" % (host, port)  # type: ignore
+
+    return description
+
+
+def _patch_create_connection():
+    # type: () -> None
+    real_create_connection = socket.create_connection
+
+    def create_connection(
+        address,
+        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,  # type: ignore
+        source_address=None,
+    ):
+        # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket
+        hub = Hub.current
+        if hub.get_integration(SocketIntegration) is None:
+            return real_create_connection(
+                address=address, timeout=timeout, source_address=source_address
+            )
+
+        with hub.start_span(
+            op=OP.SOCKET_CONNECTION,
+            description=_get_span_description(address[0], address[1]),
+        ) as span:
+            span.set_data("address", address)
+            span.set_data("timeout", timeout)
+            span.set_data("source_address", source_address)
+
+            return real_create_connection(
+                address=address, timeout=timeout, source_address=source_address
+            )
+
+    socket.create_connection = create_connection  # type: ignore
+
+
+def _patch_getaddrinfo():
+    # type: () -> None
+    real_getaddrinfo = socket.getaddrinfo
+
+    def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
+        # type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]
+        hub = Hub.current
+        if hub.get_integration(SocketIntegration) is None:
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+        with hub.start_span(
+            op=OP.SOCKET_DNS, description=_get_span_description(host, port)
+        ) as span:
+            span.set_data("host", host)
+            span.set_data("port", port)
+
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+    socket.getaddrinfo = getaddrinfo  # type: ignore
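
SocketIntegration is opt-in rather than auto-enabled, so it has to be passed to init explicitly. A sketch (placeholder DSN):

    import socket
    import sentry_sdk
    from sentry_sdk.integrations.socket import SocketIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[SocketIntegration()],
    )

    with sentry_sdk.start_transaction(name="net-demo"):
        socket.getaddrinfo("example.com", 443)          # socket.dns span "example.com:443"
        socket.create_connection(("example.com", 443))  # socket.connection span
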
diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py
index ea43c37..b3085fc 100644
--- a/sentry_sdk/integrations/spark/spark_driver.py
+++ b/sentry_sdk/integrations/spark/spark_driver.py
@@ -3,9 +3,9 @@ from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.utils import capture_internal_exceptions
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py
index 2c27647..cd4eb0f 100644
--- a/sentry_sdk/integrations/spark/spark_worker.py
+++ b/sentry_sdk/integrations/spark/spark_worker.py
@@ -13,9 +13,9 @@ from sentry_sdk.utils import (
     event_hint_with_exc_info,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
index deb97c0..bd65141 100644
--- a/sentry_sdk/integrations/sqlalchemy.py
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -1,10 +1,14 @@
 from __future__ import absolute_import
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import text_type
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.tracing_utils import record_sql_queries
 
+from sentry_sdk.utils import parse_version
+
 try:
     from sqlalchemy.engine import Engine  # type: ignore
     from sqlalchemy.event import listen  # type: ignore
@@ -12,7 +16,7 @@ try:
 except ImportError:
     raise DidNotEnable("SQLAlchemy not installed.")
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import ContextManager
     from typing import Optional
@@ -27,9 +31,9 @@ class SqlalchemyIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
-        except (TypeError, ValueError):
+        version = parse_version(SQLALCHEMY_VERSION)
+
+        if version is None:
             raise DidNotEnable(
                 "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
             )
@@ -63,6 +67,7 @@ def _before_cursor_execute(
     span = ctx_mgr.__enter__()
 
     if span is not None:
+        _set_db_data(span, conn)
         context._sentry_sql_span = span
 
 
@@ -98,3 +103,45 @@ def _handle_error(context, *args):
     if ctx_mgr is not None:
         execution_context._sentry_sql_span_manager = None
         ctx_mgr.__exit__(None, None, None)
+
+
+# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
+def _get_db_system(name):
+    # type: (str) -> Optional[str]
+    name = text_type(name)
+
+    if "sqlite" in name:
+        return "sqlite"
+
+    if "postgres" in name:
+        return "postgresql"
+
+    if "mariadb" in name:
+        return "mariadb"
+
+    if "mysql" in name:
+        return "mysql"
+
+    if "oracle" in name:
+        return "oracle"
+
+    return None
+
+
+def _set_db_data(span, conn):
+    # type: (Span, Any) -> None
+    db_system = _get_db_system(conn.engine.name)
+    if db_system is not None:
+        span.set_data(SPANDATA.DB_SYSTEM, db_system)
+
+    db_name = conn.engine.url.database
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = conn.engine.url.host
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = conn.engine.url.port
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, server_port)
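
_set_db_data attaches these fields to every query span opened in _before_cursor_execute. A sketch of the effect (placeholder DSN and database URL; SqlalchemyIntegration is auto-enabled when sqlalchemy is installed):

    import sentry_sdk
    from sqlalchemy import create_engine, text

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
    )

    engine = create_engine("postgresql://user:secret@db.example.com:5432/reports")
    with sentry_sdk.start_transaction(name="nightly-report"):
        with engine.connect() as conn:
            # The resulting span carries db.system="postgresql", db.name="reports",
            # server.address="db.example.com", server.port=5432 via _set_db_data.
            conn.execute(text("SELECT 1"))
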
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 2899361..b44e8f1 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -2,9 +2,11 @@ from __future__ import absolute_import
 
 import asyncio
 import functools
+from copy import deepcopy
 
 from sentry_sdk._compat import iteritems
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import (
@@ -17,16 +19,18 @@ from sentry_sdk.utils import (
     AnnotatedValue,
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
     transaction_from_function,
 )
 
-if MYPY:
-    from typing import Any, Awaitable, Callable, Dict, Optional, Union
+if TYPE_CHECKING:
+    from typing import Any, Awaitable, Callable, Dict, Optional
 
-    from sentry_sdk._types import Event
+    from sentry_sdk.scope import Scope as SentryScope
 
 try:
     import starlette  # type: ignore
+    from starlette import __version__ as STARLETTE_VERSION
     from starlette.applications import Starlette  # type: ignore
     from starlette.datastructures import UploadFile  # type: ignore
     from starlette.middleware import Middleware  # type: ignore
@@ -35,7 +39,7 @@ try:
     )
     from starlette.requests import Request  # type: ignore
     from starlette.routing import Match  # type: ignore
-    from starlette.types import ASGIApp, Receive, Scope, Send  # type: ignore
+    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
 except ImportError:
     raise DidNotEnable("Starlette is not installed")
 
@@ -75,30 +79,71 @@ class StarletteIntegration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
+        version = parse_version(STARLETTE_VERSION)
+
+        if version is None:
+            raise DidNotEnable(
+                "Unparsable Starlette version: {}".format(STARLETTE_VERSION)
+            )
+
         patch_middlewares()
         patch_asgi_app()
         patch_request_response()
 
+        if version >= (0, 24):
+            patch_templates()
+
 
 def _enable_span_for_middleware(middleware_class):
     # type: (Any) -> type
     old_call = middleware_class.__call__
 
-    async def _create_span_call(*args, **kwargs):
-        # type: (Any, Any) -> None
+    async def _create_span_call(app, scope, receive, send, **kwargs):
+        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
         hub = Hub.current
         integration = hub.get_integration(StarletteIntegration)
         if integration is not None:
-            middleware_name = args[0].__class__.__name__
+            middleware_name = app.__class__.__name__
+
             with hub.start_span(
-                op="starlette.middleware", description=middleware_name
+                op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
             ) as middleware_span:
                 middleware_span.set_tag("starlette.middleware_name", middleware_name)
 
-                await old_call(*args, **kwargs)
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await send(*args, **kwargs)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(app, scope, new_receive, new_send, **kwargs)
 
         else:
-            await old_call(*args, **kwargs)
+            return await old_call(app, scope, receive, send, **kwargs)
 
     not_yet_patched = old_call.__name__ not in [
         "_create_span_call",
@@ -135,62 +180,68 @@ def patch_exception_middleware(middleware_class):
     """
     old_middleware_init = middleware_class.__init__
 
-    def _sentry_middleware_init(self, *args, **kwargs):
-        # type: (Any, Any, Any) -> None
-        old_middleware_init(self, *args, **kwargs)
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
 
-        # Patch existing exception handlers
-        old_handlers = self._exception_handlers.copy()
+    if not_yet_patched:
 
-        async def _sentry_patched_exception_handler(self, *args, **kwargs):
+        def _sentry_middleware_init(self, *args, **kwargs):
             # type: (Any, Any, Any) -> None
-            exp = args[0]
+            old_middleware_init(self, *args, **kwargs)
 
-            is_http_server_error = (
-                hasattr(exp, "status_code") and exp.status_code >= 500
-            )
-            if is_http_server_error:
-                _capture_exception(exp, handled=True)
-
-            # Find a matching handler
-            old_handler = None
-            for cls in type(exp).__mro__:
-                if cls in old_handlers:
-                    old_handler = old_handlers[cls]
-                    break
+            # Patch existing exception handlers
+            old_handlers = self._exception_handlers.copy()
 
-            if old_handler is None:
-                return
+            async def _sentry_patched_exception_handler(self, *args, **kwargs):
+                # type: (Any, Any, Any) -> None
+                exp = args[0]
 
-            if _is_async_callable(old_handler):
-                return await old_handler(self, *args, **kwargs)
-            else:
-                return old_handler(self, *args, **kwargs)
+                is_http_server_error = (
+                    hasattr(exp, "status_code")
+                    and isinstance(exp.status_code, int)
+                    and exp.status_code >= 500
+                )
+                if is_http_server_error:
+                    _capture_exception(exp, handled=True)
 
-        for key in self._exception_handlers.keys():
-            self._exception_handlers[key] = _sentry_patched_exception_handler
+                # Find a matching handler
+                old_handler = None
+                for cls in type(exp).__mro__:
+                    if cls in old_handlers:
+                        old_handler = old_handlers[cls]
+                        break
 
-    middleware_class.__init__ = _sentry_middleware_init
+                if old_handler is None:
+                    return
 
-    old_call = middleware_class.__call__
+                if _is_async_callable(old_handler):
+                    return await old_handler(self, *args, **kwargs)
+                else:
+                    return old_handler(self, *args, **kwargs)
+
+            for key in self._exception_handlers.keys():
+                self._exception_handlers[key] = _sentry_patched_exception_handler
+
+        middleware_class.__init__ = _sentry_middleware_init
+
+        old_call = middleware_class.__call__
 
-    async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
-        # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-        # Also add the user (that was eventually set by be Authentication middle
-        # that was called before this middleware). This is done because the authentication
-        # middleware sets the user in the scope and then (in the same function)
-        # calls this exception middelware. In case there is no exception (or no handler
-        # for the type of exception occuring) then the exception bubbles up and setting the
-        # user information into the sentry scope is done in auth middleware and the
-        # ASGI middleware will then send everything to Sentry and this is fine.
-        # But if there is an exception happening that the exception middleware here
-        # has a handler for, it will send the exception directly to Sentry, so we need
-        # the user information right now.
-        # This is why we do it here.
-        _add_user_to_sentry_scope(scope)
-        await old_call(self, scope, receive, send)
-
-    middleware_class.__call__ = _sentry_exceptionmiddleware_call
+        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            # Also add the user (that was eventually set by the Authentication middleware
+            # that was called before this middleware). This is done because the authentication
+            # middleware sets the user in the scope and then (in the same function)
+            # calls this exception middleware. In case there is no exception (or no handler
+            # for the type of exception occurring) the exception bubbles up, the user
+            # information is set on the Sentry scope by the auth middleware, and the
+            # ASGI middleware then sends everything to Sentry, which is fine.
+            # But if an exception happens that the exception middleware here
+            # has a handler for, it will send the exception directly to Sentry, so we need
+            # the user information right now.
+            # This is why we do it here.
+            _add_user_to_sentry_scope(scope)
+            await old_call(self, scope, receive, send)
+
+        middleware_class.__call__ = _sentry_exceptionmiddleware_call
 
 
 def _add_user_to_sentry_scope(scope):
@@ -235,12 +286,16 @@ def patch_authentication_middleware(middleware_class):
     """
     old_call = middleware_class.__call__
 
-    async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
-        # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-        await old_call(self, scope, receive, send)
-        _add_user_to_sentry_scope(scope)
+    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
+
+    if not_yet_patched:
+
+        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            await old_call(self, scope, receive, send)
+            _add_user_to_sentry_scope(scope)
 
-    middleware_class.__call__ = _sentry_authenticationmiddleware_call
+        middleware_class.__call__ = _sentry_authenticationmiddleware_call
 
 
 def patch_middlewares():
@@ -280,7 +335,7 @@ def patch_asgi_app():
     old_app = Starlette.__call__
 
     async def _sentry_patched_asgi_app(self, scope, receive, send):
-        # type: (Starlette, Scope, Receive, Send) -> None
+        # type: (Starlette, StarletteScope, Receive, Send) -> None
         if Hub.current.get_integration(StarletteIntegration) is None:
             return await old_app(self, scope, receive, send)
 
@@ -327,6 +382,11 @@ def patch_request_response():
 
                 with hub.configure_scope() as sentry_scope:
                     request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
                     extractor = StarletteRequestExtractor(request)
                     info = await extractor.extract_request_info()
 
@@ -335,18 +395,14 @@ def patch_request_response():
                         def event_processor(event, hint):
                             # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
 
-                            # Extract information from request
+                            # Add info from request to event
                             request_info = event.get("request", {})
                             if info:
-                                if "cookies" in info and _should_send_default_pii():
+                                if "cookies" in info:
                                     request_info["cookies"] = info["cookies"]
                                 if "data" in info:
                                     request_info["data"] = info["data"]
-                            event["request"] = request_info
-
-                            _set_transaction_name_and_source(
-                                event, integration.transaction_style, req
-                            )
+                            event["request"] = deepcopy(request_info)
 
                             return event
 
@@ -370,7 +426,15 @@ def patch_request_response():
                     return old_func(*args, **kwargs)
 
                 with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.update_active_thread_id()
+
                     request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
                     extractor = StarletteRequestExtractor(request)
                     cookies = extractor.extract_cookies_from_request()
 
@@ -384,11 +448,7 @@ def patch_request_response():
                             if cookies:
                                 request_info["cookies"] = cookies
 
-                            event["request"] = request_info
-
-                            _set_transaction_name_and_source(
-                                event, integration.transaction_style, req
-                            )
+                            event["request"] = deepcopy(request_info)
 
                             return event
 
@@ -408,6 +468,47 @@ def patch_request_response():
     starlette.routing.request_response = _sentry_request_response
 
 
+def patch_templates():
+    # type: () -> None
+
+    # If markupsafe is not installed, then Jinja2 is not installed
+    # (markupsafe is a dependency of Jinja2)
+    # In this case we do not need to patch the Jinja2Templates class
+    try:
+        from markupsafe import Markup
+    except ImportError:
+        return  # Nothing to do
+
+    from starlette.templating import Jinja2Templates  # type: ignore
+
+    old_jinja2templates_init = Jinja2Templates.__init__
+
+    not_yet_patched = "_sentry_jinja2templates_init" not in str(
+        old_jinja2templates_init
+    )
+
+    if not_yet_patched:
+
+        def _sentry_jinja2templates_init(self, *args, **kwargs):
+            # type: (Jinja2Templates, *Any, **Any) -> None
+            def add_sentry_trace_meta(request):
+                # type: (Request) -> Dict[str, Any]
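+                # trace_propagation_meta() renders the sentry-trace/baggage
+                # <meta> tags; Markup marks the HTML as safe for Jinja2.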
+                hub = Hub.current
+                trace_meta = Markup(hub.trace_propagation_meta())
+                return {
+                    "sentry_trace_meta": trace_meta,
+                }
+
+            kwargs.setdefault("context_processors", [])
+
+            if add_sentry_trace_meta not in kwargs["context_processors"]:
+                kwargs["context_processors"].append(add_sentry_trace_meta)
+
+            return old_jinja2templates_init(self, *args, **kwargs)
+
+        Jinja2Templates.__init__ = _sentry_jinja2templates_init
+
+
 class StarletteRequestExtractor:
     """
     Extracts useful information from the Starlette request
@@ -441,30 +542,46 @@ class StarletteRequestExtractor:
         request_info = {}  # type: Dict[str, Any]
 
         with capture_internal_exceptions():
+            # Add cookies
             if _should_send_default_pii():
                 request_info["cookies"] = self.cookies()
 
+            # If there is no body, just return the cookies
             content_length = await self.content_length()
-
-            if content_length:
-                data = None  # type: Union[Dict[str, Any], AnnotatedValue, None]
-
-                if not request_body_within_bounds(client, content_length):
-                    data = AnnotatedValue.removed_because_over_size_limit()
-
-                else:
-                    parsed_body = await self.parsed_body()
-                    if parsed_body is not None:
-                        data = parsed_body
-                    elif await self.raw_data():
-                        data = AnnotatedValue.removed_because_raw_data()
-                    else:
-                        data = None
-
-                if data is not None:
-                    request_info["data"] = data
-
-        return request_info
+            if not content_length:
+                return request_info
+
+            # Add annotation if body is too big
+            if content_length and not request_body_within_bounds(
+                client, content_length
+            ):
+                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
+                return request_info
+
+            # Add JSON body, if it is a JSON request
+            json = await self.json()
+            if json:
+                request_info["data"] = json
+                return request_info
+
+            # Add form as key/value pairs, if request has form data
+            form = await self.form()
+            if form:
+                form_data = {}
+                for key, val in iteritems(form):
+                    is_file = isinstance(val, UploadFile)
+                    form_data[key] = (
+                        val
+                        if not is_file
+                        else AnnotatedValue.removed_because_raw_data()
+                    )
+
+                request_info["data"] = form_data
+                return request_info
+
+            # Raw data: do not add the body, just an annotation
+            request_info["data"] = AnnotatedValue.removed_because_raw_data()
+            return request_info
 
     async def content_length(self):
         # type: (StarletteRequestExtractor) -> Optional[int]
@@ -477,19 +594,17 @@ class StarletteRequestExtractor:
         # type: (StarletteRequestExtractor) -> Dict[str, Any]
         return self.request.cookies
 
-    async def raw_data(self):
-        # type: (StarletteRequestExtractor) -> Any
-        return await self.request.body()
-
     async def form(self):
         # type: (StarletteRequestExtractor) -> Any
-        """
-        curl -X POST http://localhost:8000/upload/somethign -H "Content-Type: application/x-www-form-urlencoded" -d "username=kevin&password=welcome123"
-        curl -X POST http://localhost:8000/upload/somethign  -F username=Julian -F password=hello123
-        """
         if multipart is None:
             return None
 
+        # Parse the body first to get it cached, as Starlette does not cache form() as it
+        # does with body() and json() https://github.com/encode/starlette/discussions/1933
+        # Calling `.form()` without calling `.body()` first would
+        # potentially break the user's project.
+        await self.request.body()
+
         return await self.request.form()
 
     def is_json(self):
@@ -498,36 +613,14 @@ class StarletteRequestExtractor:
 
     async def json(self):
         # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
-        """
-        curl -X POST localhost:8000/upload/something -H 'Content-Type: application/json' -d '{"login":"my_login","password":"my_password"}'
-        """
         if not self.is_json():
             return None
 
         return await self.request.json()
 
-    async def parsed_body(self):
-        # type: (StarletteRequestExtractor) -> Any
-        """
-        curl -X POST http://localhost:8000/upload/somethign  -F username=Julian -F password=hello123 -F photo=@photo.jpg
-        """
-        form = await self.form()
-        if form:
-            data = {}
-            for key, val in iteritems(form):
-                if isinstance(val, UploadFile):
-                    data[key] = AnnotatedValue.removed_because_raw_data()
-                else:
-                    data[key] = val
-
-            return data
 
-        json_data = await self.json()
-        return json_data
-
-
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, Any) -> None
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (SentryScope, str, Any) -> None
     name = ""
 
     if transaction_style == "endpoint":
@@ -549,9 +642,9 @@ def _set_transaction_name_and_source(event, transaction_style, request):
                     break
 
     if not name:
-        event["transaction"] = _DEFAULT_TRANSACTION_NAME
-        event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE}
-        return
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
 
-    event["transaction"] = name
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+    scope.set_transaction_name(name, source=source)
diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py
new file mode 100644
index 0000000..62ebc8b
--- /dev/null
+++ b/sentry_sdk/integrations/starlite.py
@@ -0,0 +1,270 @@
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel  # type: ignore
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import event_from_exception, transaction_from_function
+
+try:
+    from starlite import Request, Starlite, State  # type: ignore
+    from starlite.handlers.base import BaseRouteHandler  # type: ignore
+    from starlite.middleware import DefineMiddleware  # type: ignore
+    from starlite.plugins.base import get_plugin_for_value  # type: ignore
+    from starlite.routes.http import HTTPRoute  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
+
+    if TYPE_CHECKING:
+        from typing import Any, Dict, List, Optional, Union
+        from starlite.types import (  # type: ignore
+            ASGIApp,
+            HTTPReceiveMessage,
+            HTTPScope,
+            Message,
+            Middleware,
+            Receive,
+            Scope,
+            Send,
+            WebSocketReceiveMessage,
+        )
+        from starlite import MiddlewareProtocol
+        from sentry_sdk._types import Event
+except ImportError:
+    raise DidNotEnable("Starlite is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
+
+
+class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app: "ASGIApp"):
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+        )
+
+
+class StarliteIntegration(Integration):
+    identifier = "starlite"
+
+    @staticmethod
+    def setup_once() -> None:
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+
+def patch_app_init() -> None:
+    """
+    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
+    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
+    """
+    old__init__ = Starlite.__init__
+
+    def injection_wrapper(self: "Starlite", *args: "Any", **kwargs: "Any") -> None:
+        after_exception = kwargs.pop("after_exception", [])
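+        # Prepend Sentry's exception_handler while preserving any user-supplied hooks.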
+        kwargs.update(
+            after_exception=[
+                exception_handler,
+                *(
+                    after_exception
+                    if isinstance(after_exception, list)
+                    else [after_exception]
+                ),
+            ]
+        )
+
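+        # Force the ASGI3 code path; Starlite apps are ASGI3 applications.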
+        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3
+        middleware = kwargs.pop("middleware", None) or []
+        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Starlite.__init__ = injection_wrapper
+
+
+def patch_middlewares() -> None:
+    old__resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    def resolve_middleware_wrapper(self: "Any") -> "List[Middleware]":
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old__resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware: "Middleware") -> "Middleware":
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryStarliteASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call: "ASGIApp" = middleware.middleware.__call__
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(
+        self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration = hub.get_integration(StarliteIntegration)
+        if integration is not None:
+            middleware_name = self.__class__.__name__
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLITE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(
+                    *args: "Any", **kwargs: "Any"
+                ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
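+                # Avoid double-wrapping `receive` when middlewares are nested.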
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(message: "Message") -> None:
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await send(message)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(self, scope, new_receive, new_send)
+        else:
+            return await old_call(self, scope, receive, send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle() -> None:
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(
+        self: "HTTPRoute", scope: "HTTPScope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration: StarliteIntegration = hub.get_integration(StarliteIntegration)
+        if integration is None:
+            return await old_handle(self, scope, receive, send)
+
+        with hub.configure_scope() as sentry_scope:
+            request: "Request[Any, Any]" = scope["app"].request_class(
+                scope=scope, receive=receive, send=send
+            )
+            extracted_request_data = ConnectionDataExtractor(
+                parse_body=True, parse_query=True
+            )(request)
+            body = extracted_request_data.pop("body")
+
+            request_data = await body
+
+            def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event":
+                route_handler = scope.get("route_handler")
+
+                request_info = event.get("request", {})
+                request_info["content_length"] = len(scope.get("_body", b""))
+                if _should_send_default_pii():
+                    request_info["cookies"] = extracted_request_data["cookies"]
+                if request_data is not None:
+                    request_info["data"] = request_data
+
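+                # Prefer an explicit route handler name; otherwise derive one from the handler function.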
+                func = None
+                if route_handler.name is not None:
+                    tx_name = route_handler.name
+                elif isinstance(route_handler.fn, Ref):
+                    func = route_handler.fn.value
+                else:
+                    func = route_handler.fn
+                if func is not None:
+                    tx_name = transaction_from_function(func)
+
+                tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+                if not tx_name:
+                    tx_name = _DEFAULT_TRANSACTION_NAME
+                    tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
+
+                event.update(
+                    request=request_info, transaction=tx_name, transaction_info=tx_info
+                )
+                return event
+
+            sentry_scope._name = StarliteIntegration.identifier
+            sentry_scope.add_event_processor(event_processor)
+
+            return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
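+    # Try increasingly specific representations: plain dict, pydantic model,
+    # object with asdict(), and finally a Starlite serialization plugin.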
+    scope_user = scope.get("user", {})
+    if not scope_user:
+        return None
+    if isinstance(scope_user, dict):
+        return scope_user
+    if isinstance(scope_user, BaseModel):
+        return scope_user.dict()
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    plugin = get_plugin_for_value(scope_user)
+    if plugin and not is_async_callable(plugin.to_dict):
+        return plugin.to_dict(scope_user)
+
+    return None
+
+
+def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None:
+    hub = Hub.current
+    if hub.get_integration(StarliteIntegration) is None:
+        return
+
+    user_info: "Optional[Dict[str, Any]]" = None
+    if _should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        with hub.configure_scope() as sentry_scope:
+            sentry_scope.set_user(user_info)
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarliteIntegration.identifier, "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 9495d40..be02779 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -2,16 +2,23 @@ import os
 import subprocess
 import sys
 import platform
+from sentry_sdk.consts import OP, SPANDATA
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
-from sentry_sdk.tracing_utils import EnvironHeaders
-from sentry_sdk.utils import capture_internal_exceptions, logger, safe_repr
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    logger,
+    safe_repr,
+    parse_url,
+)
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -70,7 +77,7 @@ def _install_httplib():
         default_port = self.default_port
 
         real_url = url
-        if not real_url.startswith(("http://", "https://")):
+        if real_url is None or not real_url.startswith(("http://", "https://")):
             real_url = "%s://%s%s%s" % (
                 default_port == 443 and "https" or "http",
                 host,
@@ -78,20 +85,32 @@ def _install_httplib():
                 url,
             )
 
-        span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(real_url, sanitize=False)
+
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+        )
 
-        span.set_data("method", method)
-        span.set_data("url", real_url)
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        if parsed_url is not None:
+            span.set_data("url", parsed_url.url)
+            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
 
         rv = real_putrequest(self, method, url, *args, **kwargs)
 
-        for key, value in hub.iter_trace_propagation_headers(span):
-            logger.debug(
-                "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
-                    key=key, value=value, real_url=real_url
+        if should_propagate_trace(hub, real_url):
+            for key, value in hub.iter_trace_propagation_headers(span):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
+                        key=key, value=value, real_url=real_url
+                    )
                 )
-            )
-            self.putheader(key, value)
+                self.putheader(key, value)
 
         self._sentrysdk_span = span
 
@@ -106,7 +125,6 @@ def _install_httplib():
 
         rv = real_getresponse(self, *args, **kwargs)
 
-        span.set_data("status_code", rv.status)
         span.set_http_status(int(rv.status))
         span.set_data("reason", rv.reason)
         span.finish()
@@ -183,8 +201,7 @@ def _install_subprocess():
 
         env = None
 
-        with hub.start_span(op="subprocess", description=description) as span:
-
+        with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
             for k, v in hub.iter_trace_propagation_headers(span):
                 if env is None:
                     env = _init_argument(
@@ -211,7 +228,7 @@ def _install_subprocess():
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_wait(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.wait") as span:
+        with hub.start_span(op=OP.SUBPROCESS_WAIT) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_wait(self, *a, **kw)
 
@@ -226,7 +243,7 @@ def _install_subprocess():
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_communicate(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.communicate") as span:
+        with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_communicate(self, *a, **kw)
 
diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
index f29e5e8..499cf85 100644
--- a/sentry_sdk/integrations/threading.py
+++ b/sentry_sdk/integrations/threading.py
@@ -1,15 +1,16 @@
 from __future__ import absolute_import
 
 import sys
+from functools import wraps
 from threading import Thread, current_thread
 
 from sentry_sdk import Hub
 from sentry_sdk._compat import reraise
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration
 from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -32,6 +33,7 @@ class ThreadingIntegration(Integration):
         # type: () -> None
         old_start = Thread.start
 
+        @wraps(old_start)
         def sentry_start(self, *a, **kw):
             # type: (Thread, *Any, **Any) -> Any
             hub = Hub.current
@@ -58,6 +60,7 @@ class ThreadingIntegration(Integration):
 
 def _wrap_run(parent_hub, old_run_func):
     # type: (Optional[Hub], F) -> F
+    @wraps(old_run_func)
     def run(*a, **kw):
         # type: (*Any, **Any) -> Any
         hub = parent_hub or Hub.current
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index b4a639b..8af93c4 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -2,11 +2,12 @@ import weakref
 import contextlib
 from inspect import iscoroutinefunction
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.tracing import (
     TRANSACTION_SOURCE_COMPONENT,
     TRANSACTION_SOURCE_ROUTE,
-    Transaction,
 )
 from sentry_sdk.utils import (
     HAS_REAL_CONTEXTVARS,
@@ -31,9 +32,9 @@ try:
 except ImportError:
     raise DidNotEnable("Tornado not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Dict
@@ -77,7 +78,7 @@ class TornadoIntegration(Integration):
         else:
 
             @coroutine  # type: ignore
-            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
+            def sentry_execute_request_handler(self, *args, **kwargs):
                 # type: (RequestHandler, *Any, **Any) -> Any
                 with _handle_request_impl(self):
                     result = yield from old_execute(self, *args, **kwargs)
@@ -107,14 +108,16 @@ def _handle_request_impl(self):
     weak_handler = weakref.ref(self)
 
     with Hub(hub) as hub:
+        headers = self.request.headers
+
         with hub.configure_scope() as scope:
             scope.clear_breadcrumbs()
             processor = _make_event_processor(weak_handler)
             scope.add_event_processor(processor)
 
-        transaction = Transaction.continue_from_headers(
-            self.request.headers,
-            op="http.server",
+        transaction = continue_trace(
+            headers,
+            op=OP.HTTP_SERVER,
             # Like with all other integrations, this is our
             # fallback transaction in case there is no route.
             # sentry_urldispatcher_resolve is responsible for
diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py
index 062a756..6f1aff2 100644
--- a/sentry_sdk/integrations/trytond.py
+++ b/sentry_sdk/integrations/trytond.py
@@ -2,12 +2,12 @@ import sentry_sdk.hub
 import sentry_sdk.utils
 import sentry_sdk.integrations
 import sentry_sdk.integrations.wsgi
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
 from trytond.exceptions import TrytonException  # type: ignore
 from trytond.wsgi import app  # type: ignore
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -22,7 +22,6 @@ class TrytondWSGIIntegration(sentry_sdk.integrations.Integration):
 
     @staticmethod
     def setup_once():  # type: () -> None
-
         app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)
 
         def error_handler(e):  # type: (Exception) -> None
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index 31ffe22..0d53766 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -1,21 +1,22 @@
 import sys
 
+from sentry_sdk._compat import PY2, reraise
 from sentry_sdk._functools import partial
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk._werkzeug import get_host, _get_headers
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import (
     ContextVar,
     capture_internal_exceptions,
     event_from_exception,
 )
-from sentry_sdk._compat import PY2, reraise, iteritems
 from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import _filter_headers
-from sentry_sdk.profiler import start_profiling
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Dict
     from typing import Iterator
@@ -33,7 +34,7 @@ if MYPY:
     WsgiExcInfo = TypeVar("WsgiExcInfo")
 
     class StartResponse(Protocol):
-        def __call__(self, status, response_headers, exc_info=None):
+        def __call__(self, status, response_headers, exc_info=None):  # type: ignore
             # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
             pass
 
@@ -54,35 +55,6 @@ else:
         return s.encode("latin1").decode(charset, errors)
 
 
-def get_host(environ, use_x_forwarded_for=False):
-    # type: (Dict[str, str], bool) -> str
-    """Return the host for the given WSGI environment. Yanked from Werkzeug."""
-    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
-        rv = environ["HTTP_X_FORWARDED_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("HTTP_HOST"):
-        rv = environ["HTTP_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("SERVER_NAME"):
-        rv = environ["SERVER_NAME"]
-        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
-            ("https", "443"),
-            ("http", "80"),
-        ):
-            rv += ":" + environ["SERVER_PORT"]
-    else:
-        # In spite of the WSGI spec, SERVER_NAME might not be present.
-        rv = "unknown"
-
-    return rv
-
-
 def get_request_url(environ, use_x_forwarded_for=False):
     # type: (Dict[str, str], bool) -> str
     """Return the absolute URL without query string for the given WSGI
@@ -122,16 +94,16 @@ class SentryWsgiMiddleware(object):
                                 )
                             )
 
-                    transaction = Transaction.continue_from_environ(
+                    transaction = continue_trace(
                         environ,
-                        op="http.server",
+                        op=OP.HTTP_SERVER,
                         name="generic WSGI request",
                         source=TRANSACTION_SOURCE_ROUTE,
                     )
 
                     with hub.start_transaction(
                         transaction, custom_sampling_context={"wsgi_environ": environ}
-                    ), start_profiling(transaction, hub):
+                    ):
                         try:
                             rv = self.app(
                                 environ,
@@ -147,7 +119,7 @@ class SentryWsgiMiddleware(object):
         return _ScopedResponse(hub, rv)
 
 
-def _sentry_start_response(
+def _sentry_start_response(  # type: ignore
     old_start_response,  # type: StartResponse
     transaction,  # type: Transaction
     status,  # type: str
@@ -185,27 +157,6 @@ def _get_environ(environ):
             yield key, environ[key]
 
 
-# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
-#
-# We need this function because Django does not give us a "pure" http header
-# dict. So we might as well use it for all WSGI integrations.
-def _get_headers(environ):
-    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
-    """
-    Returns only proper HTTP headers.
-
-    """
-    for key, value in iteritems(environ):
-        key = str(key)
-        if key.startswith("HTTP_") and key not in (
-            "HTTP_CONTENT_TYPE",
-            "HTTP_CONTENT_LENGTH",
-        ):
-            yield key[5:].replace("_", "-").title(), value
-        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
-            yield key.replace("_", "-").title(), value
-
-
 def get_client_ip(environ):
     # type: (Dict[str, str]) -> Optional[Any]
     """
diff --git a/sentry_sdk/monitor.py b/sentry_sdk/monitor.py
new file mode 100644
index 0000000..c66bebb
--- /dev/null
+++ b/sentry_sdk/monitor.py
@@ -0,0 +1,105 @@
+import os
+import time
+from threading import Thread, Lock
+
+import sentry_sdk
+from sentry_sdk.utils import logger
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+
+class Monitor(object):
+    """
+    Performs health checks in a separate thread once every interval seconds
+    and updates the internal state. Other parts of the SDK only read this state
+    and act accordingly.
+    """
+
+    name = "sentry.monitor"
+
+    def __init__(self, transport, interval=10):
+        # type: (sentry_sdk.transport.Transport, float) -> None
+        self.transport = transport  # type: sentry_sdk.transport.Transport
+        self.interval = interval  # type: float
+
+        self._healthy = True
+        self._downsample_factor = 1  # type: int
+
+        self._thread = None  # type: Optional[Thread]
+        self._thread_lock = Lock()
+        self._thread_for_pid = None  # type: Optional[int]
+        self._running = True
+
+    def _ensure_running(self):
+        # type: () -> None
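+        # Cheap unlocked check first; the pid comparison also restarts the thread after a fork.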
+        if self._thread_for_pid == os.getpid() and self._thread is not None:
+            return None
+
+        with self._thread_lock:
+            if self._thread_for_pid == os.getpid() and self._thread is not None:
+                return None
+
+            def _thread():
+                # type: (...) -> None
+                while self._running:
+                    time.sleep(self.interval)
+                    if self._running:
+                        self.run()
+
+            thread = Thread(name=self.name, target=_thread)
+            thread.daemon = True
+            thread.start()
+            self._thread = thread
+            self._thread_for_pid = os.getpid()
+
+        return None
+
+    def run(self):
+        # type: () -> None
+        self.check_health()
+        self.set_downsample_factor()
+
+    def set_downsample_factor(self):
+        # type: () -> None
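+        # Healthy: return to full sampling. Unhealthy: double the factor, halving the effective rate each check.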
+        if self._healthy:
+            if self._downsample_factor > 1:
+                logger.debug(
+                    "[Monitor] health check positive, reverting to normal sampling"
+                )
+            self._downsample_factor = 1
+        else:
+            self._downsample_factor *= 2
+            logger.debug(
+                "[Monitor] health check negative, downsampling with a factor of %d",
+                self._downsample_factor,
+            )
+
+    def check_health(self):
+        # type: () -> None
+        """
+        Perform the actual health checks,
+        currently only checks if the transport is rate-limited.
+        TODO: augment in the future with more checks.
+        """
+        self._healthy = self.transport.is_healthy()
+
+    def is_healthy(self):
+        # type: () -> bool
+        self._ensure_running()
+        return self._healthy
+
+    @property
+    def downsample_factor(self):
+        # type: () -> int
+        self._ensure_running()
+        return self._downsample_factor
+
+    def kill(self):
+        # type: () -> None
+        self._running = False
+
+    def __del__(self):
+        # type: () -> None
+        self.kill()
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 45ef706..edc4fc7 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -1,133 +1,263 @@
 """
-This file is originally based on code from https://github.com/nylas/nylas-perftools, which is published under the following license:
+This file is originally based on code from https://github.com/nylas/nylas-perftools,
+which is published under the following license:
 
 The MIT License (MIT)
 
 Copyright (c) 2014 Nylas
 
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
 """
 
 import atexit
+import os
 import platform
 import random
-import signal
+import sys
 import threading
 import time
-import sys
 import uuid
-
 from collections import deque
-from contextlib import contextmanager
 
 import sentry_sdk
-from sentry_sdk._compat import PY33
-
-from sentry_sdk._types import MYPY
-from sentry_sdk.utils import nanosecond_time
-
-if MYPY:
+from sentry_sdk._compat import PY33, PY311
+from sentry_sdk._lru_cache import LRUCache
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import (
+    capture_internal_exception,
+    filename_for_module,
+    is_valid_sample_rate,
+    logger,
+    nanosecond_time,
+    set_in_app_in_frames,
+)
+
+if TYPE_CHECKING:
+    from types import FrameType
     from typing import Any
+    from typing import Callable
     from typing import Deque
     from typing import Dict
-    from typing import Generator
     from typing import List
     from typing import Optional
+    from typing import Set
     from typing import Sequence
     from typing import Tuple
+    from typing_extensions import TypedDict
+
     import sentry_sdk.tracing
+    from sentry_sdk._types import SamplingContext, ProfilerMode
+
+    ThreadId = str
+
+    ProcessedSample = TypedDict(
+        "ProcessedSample",
+        {
+            "elapsed_since_start_ns": str,
+            "thread_id": ThreadId,
+            "stack_id": int,
+        },
+    )
+
+    ProcessedStack = List[int]
+
+    ProcessedFrame = TypedDict(
+        "ProcessedFrame",
+        {
+            "abs_path": str,
+            "filename": Optional[str],
+            "function": str,
+            "lineno": int,
+            "module": Optional[str],
+        },
+    )
+
+    ProcessedThreadMetadata = TypedDict(
+        "ProcessedThreadMetadata",
+        {"name": str},
+    )
+
+    ProcessedProfile = TypedDict(
+        "ProcessedProfile",
+        {
+            "frames": List[ProcessedFrame],
+            "stacks": List[ProcessedStack],
+            "samples": List[ProcessedSample],
+            "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata],
+        },
+    )
+
+    ProfileContext = TypedDict(
+        "ProfileContext",
+        {"profile_id": str},
+    )
+
+    FrameId = Tuple[
+        str,  # abs_path
+        int,  # lineno
+        str,  # function
+    ]
+    FrameIds = Tuple[FrameId, ...]
+
+    # The exact value of this id is not very meaningful. The purpose
+    # of this id is to give us a compact and unique identifier for a
+    # raw stack that can be used as a key to a dictionary so that it
+    # can be used during the sampled format generation.
+    StackId = Tuple[int, int]
+
+    ExtractedStack = Tuple[StackId, FrameIds, List[ProcessedFrame]]
+    ExtractedSample = Sequence[Tuple[ThreadId, ExtractedStack]]
+
+
+try:
+    from gevent import get_hub as get_gevent_hub  # type: ignore
+    from gevent.monkey import get_original, is_module_patched  # type: ignore
+    from gevent.threadpool import ThreadPool  # type: ignore
+
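+    # Use the original, un-monkeypatched time.sleep for the sampling thread.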
+    thread_sleep = get_original("time", "sleep")
+except ImportError:
+
+    def get_gevent_hub():
+        # type: () -> Any
+        return None
+
+    thread_sleep = time.sleep
+
+    def is_module_patched(*args, **kwargs):
+        # type: (*Any, **Any) -> bool
+        # unable to import from gevent means no modules have been patched
+        return False
+
+    ThreadPool = None
 
-    Frame = Any
-    FrameData = Tuple[str, str, int]
 
+def is_gevent():
+    # type: () -> bool
+    return is_module_patched("threading") or is_module_patched("_thread")
 
-_sample_buffer = None  # type: Optional[_SampleBuffer]
-_scheduler = None  # type: Optional[_Scheduler]
+
+_scheduler = None  # type: Optional[Scheduler]
+
+# The default sampling frequency to use. This is set at 101 in order to
+# mitigate the effects of lockstep sampling.
+DEFAULT_SAMPLING_FREQUENCY = 101
+
+
+# The minimum number of unique samples that must exist in a profile to be
+# considered valid.
+PROFILE_MINIMUM_SAMPLES = 2
+
+
+def has_profiling_enabled(options):
+    # type: (Dict[str, Any]) -> bool
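+    # Profiling is enabled by any of the three knobs: a profiles_sampler
+    # callable, a positive profiles_sample_rate, or the legacy
+    # _experiments["profiles_sample_rate"] setting.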
+    profiles_sampler = options["profiles_sampler"]
+    if profiles_sampler is not None:
+        return True
+
+    profiles_sample_rate = options["profiles_sample_rate"]
+    if profiles_sample_rate is not None and profiles_sample_rate > 0:
+        return True
+
+    profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
+    if profiles_sample_rate is not None and profiles_sample_rate > 0:
+        return True
+
+    return False
 
 
 def setup_profiler(options):
-    # type: (Dict[str, Any]) -> None
+    # type: (Dict[str, Any]) -> bool
+    global _scheduler
 
-    """
-    `buffer_secs` determines the max time a sample will be buffered for
-    `frequency` determines the number of samples to take per second (Hz)
-    """
-    buffer_secs = 60
-    frequency = 101
+    if _scheduler is not None:
+        logger.debug("[Profiling] Profiler is already setup")
+        return False
 
     if not PY33:
-        from sentry_sdk.utils import logger
+        logger.warn("[Profiling] Profiler requires Python >= 3.3")
+        return False
 
-        logger.warn("profiling is only supported on Python >= 3.3")
-        return
+    frequency = DEFAULT_SAMPLING_FREQUENCY
 
-    global _sample_buffer
-    global _scheduler
+    if is_gevent():
+        # If gevent has patched the threading modules then we cannot rely on
+        # them to spawn a native thread for sampling.
+        # Instead we default to the GeventScheduler which is capable of
+        # spawning native threads within gevent.
+        default_profiler_mode = GeventScheduler.mode
+    else:
+        default_profiler_mode = ThreadScheduler.mode
+
+    if options.get("profiler_mode") is not None:
+        profiler_mode = options["profiler_mode"]
+    else:
+        profiler_mode = (
+            options.get("_experiments", {}).get("profiler_mode")
+            or default_profiler_mode
+        )
 
-    assert _sample_buffer is None and _scheduler is None
-
-    # To buffer samples for `buffer_secs` at `frequency` Hz, we need
-    # a capcity of `buffer_secs * frequency`.
-    _sample_buffer = _SampleBuffer(capacity=buffer_secs * frequency)
-
-    profiler_mode = options["_experiments"].get("profiler_mode", _SigprofScheduler.mode)
-    if profiler_mode == _SigprofScheduler.mode:
-        _scheduler = _SigprofScheduler(frequency=frequency)
-    elif profiler_mode == _SigalrmScheduler.mode:
-        _scheduler = _SigalrmScheduler(frequency=frequency)
-    elif profiler_mode == _SleepScheduler.mode:
-        _scheduler = _SleepScheduler(frequency=frequency)
-    elif profiler_mode == _EventScheduler.mode:
-        _scheduler = _EventScheduler(frequency=frequency)
+    if (
+        profiler_mode == ThreadScheduler.mode
+        # for legacy reasons, we'll keep supporting sleep mode for this scheduler
+        or profiler_mode == "sleep"
+    ):
+        _scheduler = ThreadScheduler(frequency=frequency)
+    elif profiler_mode == GeventScheduler.mode:
+        _scheduler = GeventScheduler(frequency=frequency)
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+
+    logger.debug(
+        "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode)
+    )
     _scheduler.setup()
 
     atexit.register(teardown_profiler)
 
+    return True
+
 
 def teardown_profiler():
     # type: () -> None
 
-    global _sample_buffer
     global _scheduler
 
     if _scheduler is not None:
         _scheduler.teardown()
 
-    _sample_buffer = None
     _scheduler = None
 
 
-def _sample_stack(*args, **kwargs):
-    # type: (*Any, **Any) -> None
-    """
-    Take a sample of the stack on all the threads in the process.
-    This should be called at a regular interval to collect samples.
-    """
-
-    assert _sample_buffer is not None
-    _sample_buffer.write(
-        (
-            nanosecond_time(),
-            [
-                (tid, _extract_stack(frame))
-                for tid, frame in sys._current_frames().items()
-            ],
-        )
-    )
-
-
 # We want to impose a stack depth limit so that samples aren't too large.
 MAX_STACK_DEPTH = 128
 
 
-def _extract_stack(frame):
-    # type: (Frame) -> Sequence[FrameData]
+CWD = os.getcwd()
+
+
+def extract_stack(
+    raw_frame,  # type: Optional[FrameType]
+    cache,  # type: LRUCache
+    cwd=CWD,  # type: str
+    max_stack_depth=MAX_STACK_DEPTH,  # type: int
+):
+    # type: (...) -> ExtractedStack
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -137,61 +267,432 @@ def _extract_stack(frame):
     only the first `MAX_STACK_DEPTH` frames will be returned.
     """
 
-    stack = deque(maxlen=MAX_STACK_DEPTH)  # type: Deque[FrameData]
-
-    while frame is not None:
-        stack.append(
-            (
-                # co_name only contains the frame name.
-                # If the frame was a class method,
-                # the class name will NOT be included.
-                frame.f_code.co_name,
-                frame.f_code.co_filename,
-                frame.f_code.co_firstlineno,
-            )
-        )
-        frame = frame.f_back
+    raw_frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
+
+    while raw_frame is not None:
+        f_back = raw_frame.f_back
+        raw_frames.append(raw_frame)
+        raw_frame = f_back
+
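+    # Extracting a frame is relatively expensive, so extracted frames are cached by frame id.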
+    frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames)
+    frames = []
+    for i, fid in enumerate(frame_ids):
+        frame = cache.get(fid)
+        if frame is None:
+            frame = extract_frame(fid, raw_frames[i], cwd)
+            cache.set(fid, frame)
+        frames.append(frame)
+
+    # Instead of mapping the stack into frame ids and hashing
+    # that as a tuple, we can directly hash the stack.
+    # This saves us from having to generate yet another list.
+    # Additionally, using the stack as the key directly is
+    # costly because the stack can be large, so we pre-hash
+    # the stack and use the hash as the key, since it will be
+    # needed a few times; this improves performance.
+    #
+    # To reduce the likelihood of hash collisions, we include
+    # the stack depth. This means that only stacks of the same
+    # depth can suffer from hash collisions.
+    stack_id = len(raw_frames), hash(frame_ids)
+
+    return stack_id, frame_ids, frames
+
+
+def frame_id(raw_frame):
+    # type: (FrameType) -> FrameId
+    return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame))
+
+
+def extract_frame(fid, raw_frame, cwd):
+    # type: (FrameId, FrameType, str) -> ProcessedFrame
+    abs_path = raw_frame.f_code.co_filename
+
+    try:
+        module = raw_frame.f_globals["__name__"]
+    except Exception:
+        module = None
+
+    # namedtuples can be many times slower when initializing
+    # and accessing attributes, so we opt to use a plain dict here instead
+    return {
+        # This originally was `os.path.abspath(abs_path)` but that had
+        # a large performance overhead.
+        #
+        # According to docs, this is equivalent to
+        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
+        # The `os.getcwd()` call is slow here, so we precompute it.
+        #
+        # Additionally, since we are using normalized path already,
+        # we skip calling `os.path.normpath` entirely.
+        "abs_path": os.path.join(cwd, abs_path),
+        "module": module,
+        "filename": filename_for_module(module, abs_path) or None,
+        "function": fid[2],
+        "lineno": raw_frame.f_lineno,
+    }
+
+
+if PY311:
+
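+    # Python 3.11+ exposes co_qualname, which already includes the class name.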
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+        return frame.f_code.co_qualname
+
+else:
+
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+
+        f_code = frame.f_code
+        co_varnames = f_code.co_varnames
+
+        # co_name only contains the frame name.  If the frame was a method,
+        # the class name will NOT be included.
+        name = f_code.co_name
+
+        # if it was a method, we can get the class name by inspecting
+        # the f_locals for the `self` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `self` if it's an instance method
+                co_varnames
+                and co_varnames[0] == "self"
+                and "self" in frame.f_locals
+            ):
+                for cls in frame.f_locals["self"].__class__.__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # if it was a class method, (decorated with `@classmethod`)
+        # we can get the class name by inspecting the f_locals for the `cls` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `cls` if it's a class method
+                co_varnames
+                and co_varnames[0] == "cls"
+                and "cls" in frame.f_locals
+            ):
+                for cls in frame.f_locals["cls"].__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # nothing we can do if it is a staticmethod (decorated with @staticmethod)
+
+        # we've done all we can, time to give up and return what we have
+        return name
+
+
+MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
+
+
+def get_current_thread_id(thread=None):
+    # type: (Optional[threading.Thread]) -> Optional[int]
+    """
+    Try to get the id of the current thread, with various fallbacks.
+    """
+
+    # if a thread is specified, that takes priority
+    if thread is not None:
+        try:
+            thread_id = thread.ident
+            if thread_id is not None:
+                return thread_id
+        except AttributeError:
+            pass
+
+    # if the app is using gevent, we should look at the gevent hub first
+    # as the id there differs from what the threading module reports
+    if is_gevent():
+        gevent_hub = get_gevent_hub()
+        if gevent_hub is not None:
+            try:
+                # this is undocumented, so wrap it in try except to be safe
+                return gevent_hub.thread_ident
+            except AttributeError:
+                pass
+
+    # use the current thread's id if possible
+    try:
+        current_thread_id = threading.current_thread().ident
+        if current_thread_id is not None:
+            return current_thread_id
+    except AttributeError:
+        pass
+
+    # if we can't get the current thread id, fall back to the main thread id
+    try:
+        main_thread_id = threading.main_thread().ident
+        if main_thread_id is not None:
+            return main_thread_id
+    except AttributeError:
+        pass
 
-    return stack
+    # we've tried everything, time to give up
+    return None
 
 
 class Profile(object):
-    def __init__(self, transaction, hub=None):
-        # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> None
-        self.transaction = transaction
+    def __init__(
+        self,
+        transaction,  # type: sentry_sdk.tracing.Transaction
+        hub=None,  # type: Optional[sentry_sdk.Hub]
+        scheduler=None,  # type: Optional[Scheduler]
+    ):
+        # type: (...) -> None
+        self.scheduler = _scheduler if scheduler is None else scheduler
         self.hub = hub
-        self._start_ns = None  # type: Optional[int]
-        self._stop_ns = None  # type: Optional[int]
 
-    def __enter__(self):
+        self.event_id = uuid.uuid4().hex  # type: str
+
+        # Here, we assume that the sampling decision on the transaction has been finalized.
+        #
+        # We cannot keep a reference to the transaction around here because it'll create
+        # a reference cycle. So we opt to pull out just the necessary attributes.
+        self.sampled = transaction.sampled  # type: Optional[bool]
+
+        # Various framework integrations are capable of overwriting the active thread id.
+        # If it is set to `None` at the end of the profile, we fall back to the default.
+        self._default_active_thread_id = get_current_thread_id() or 0  # type: int
+        self.active_thread_id = None  # type: Optional[int]
+
+        try:
+            self.start_ns = transaction._start_timestamp_monotonic_ns  # type: int
+        except AttributeError:
+            self.start_ns = 0
+
+        self.stop_ns = 0  # type: int
+        self.active = False  # type: bool
+
+        self.indexed_frames = {}  # type: Dict[FrameId, int]
+        self.indexed_stacks = {}  # type: Dict[StackId, int]
+        self.frames = []  # type: List[ProcessedFrame]
+        self.stacks = []  # type: List[ProcessedStack]
+        self.samples = []  # type: List[ProcessedSample]
+
+        self.unique_samples = 0
+
+        transaction._profile = self
+
+    def update_active_thread_id(self):
+        # type: () -> None
+        self.active_thread_id = get_current_thread_id()
+        logger.debug(
+            "[Profiling] updating active thread id to {tid}".format(
+                tid=self.active_thread_id
+            )
+        )
+
+    def _set_initial_sampling_decision(self, sampling_context):
+        # type: (SamplingContext) -> None
+        """
+        Sets the profile's sampling decision according to the following
+        precedence rules:
+
+        1. If the transaction to be profiled is not sampled, that decision
+        will be used, regardless of anything else.
+
+        2. Use `profiles_sample_rate` to decide.
+        """
+
+        # The corresponding transaction was not sampled,
+        # so don't generate a profile for it.
+        if not self.sampled:
+            logger.debug(
+                "[Profiling] Discarding profile because transaction is discarded."
+            )
+            self.sampled = False
+            return
+
+        # The profiler hasn't been properly initialized.
+        if self.scheduler is None:
+            logger.debug(
+                "[Profiling] Discarding profile because profiler was not started."
+            )
+            self.sampled = False
+            return
+
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+
+        # The client is None, so we can't get the sample rate.
+        if client is None:
+            self.sampled = False
+            return
+
+        options = client.options
+
+        if callable(options.get("profiles_sampler")):
+            sample_rate = options["profiles_sampler"](sampling_context)
+        elif options["profiles_sample_rate"] is not None:
+            sample_rate = options["profiles_sample_rate"]
+        else:
+            sample_rate = options["_experiments"].get("profiles_sample_rate")
+
+        # The profiles_sample_rate option was not set, so profiling
+        # was never enabled.
+        if sample_rate is None:
+            logger.debug(
+                "[Profiling] Discarding profile because profiling was not enabled."
+            )
+            self.sampled = False
+            return
+
+        if not is_valid_sample_rate(sample_rate, source="Profiling"):
+            logger.warning(
+                "[Profiling] Discarding profile because of invalid sample rate."
+            )
+            self.sampled = False
+            return
+
+        # Now we roll the dice. random.random is inclusive of 0, but not of 1,
+        # so strict < is safe here. In case sample_rate is a boolean, cast it
+        # to a float (True becomes 1.0 and False becomes 0.0)
+        self.sampled = random.random() < float(sample_rate)
+
+        if self.sampled:
+            logger.debug("[Profiling] Initializing profile")
+        else:
+            logger.debug(
+                "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format(
+                    sample_rate=float(sample_rate)
+                )
+            )
+
+    def start(self):
         # type: () -> None
-        assert _scheduler is not None
-        self._start_ns = nanosecond_time()
-        _scheduler.start_profiling()
+        if not self.sampled or self.active:
+            return
+
+        assert self.scheduler, "No scheduler specified"
+        logger.debug("[Profiling] Starting profile")
+        self.active = True
+        if not self.start_ns:
+            self.start_ns = nanosecond_time()
+        self.scheduler.start_profiling(self)
+
+    def stop(self):
+        # type: () -> None
+        if not self.sampled or not self.active:
+            return
+
+        assert self.scheduler, "No scheduler specified"
+        logger.debug("[Profiling] Stopping profile")
+        self.active = False
+        self.scheduler.stop_profiling(self)
+        self.stop_ns = nanosecond_time()
+
+    def __enter__(self):
+        # type: () -> Profile
+        hub = self.hub or sentry_sdk.Hub.current
+
+        _, scope = hub._stack[-1]
+        old_profile = scope.profile
+        scope.profile = self
+
+        self._context_manager_state = (hub, scope, old_profile)
+
+        self.start()
+
+        return self
 
     def __exit__(self, ty, value, tb):
         # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        assert _scheduler is not None
-        _scheduler.stop_profiling()
-        self._stop_ns = nanosecond_time()
+        self.stop()
+
+        _, scope, old_profile = self._context_manager_state
+        del self._context_manager_state
+
+        scope.profile = old_profile
+
+    def write(self, ts, sample):
+        # type: (int, ExtractedSample) -> None
+        if not self.active:
+            return
+
+        if ts < self.start_ns:
+            return
+
+        offset = ts - self.start_ns
+        if offset > MAX_PROFILE_DURATION_NS:
+            self.stop()
+            return
+
+        self.unique_samples += 1
+
+        elapsed_since_start_ns = str(offset)
+
+        for tid, (stack_id, frame_ids, frames) in sample:
+            try:
+                # Check if the stack is indexed first; this lets us skip
+                # indexing frames if it's not necessary
+                if stack_id not in self.indexed_stacks:
+                    for i, frame_id in enumerate(frame_ids):
+                        if frame_id not in self.indexed_frames:
+                            self.indexed_frames[frame_id] = len(self.indexed_frames)
+                            self.frames.append(frames[i])
+
+                    self.indexed_stacks[stack_id] = len(self.indexed_stacks)
+                    self.stacks.append(
+                        [self.indexed_frames[frame_id] for frame_id in frame_ids]
+                    )
+
+                self.samples.append(
+                    {
+                        "elapsed_since_start_ns": elapsed_since_start_ns,
+                        "thread_id": tid,
+                        "stack_id": self.indexed_stacks[stack_id],
+                    }
+                )
+            except AttributeError:
+                # For some reason, the frame we get doesn't have certain attributes.
+                # When this happens, we abandon the current sample as it's bad.
+                capture_internal_exception(sys.exc_info())
+
+    def process(self):
+        # type: () -> ProcessedProfile
+
+        # This collects the thread metadata at the end of a profile. Doing it
+        # this way means that any threads that terminate before the profile ends
+        # will not have any metadata associated with them.
+        thread_metadata = {
+            str(thread.ident): {
+                "name": str(thread.name),
+            }
+            for thread in threading.enumerate()
+        }  # type: Dict[str, ProcessedThreadMetadata]
 
-        # Now that we've collected all the data, attach it to the
-        # transaction so that it can be sent in the same envelope
-        self.transaction._profile = self.to_json()
+        return {
+            "frames": self.frames,
+            "stacks": self.stacks,
+            "samples": self.samples,
+            "thread_metadata": thread_metadata,
+        }
 
-    def to_json(self):
-        # type: () -> Dict[str, Any]
-        assert _sample_buffer is not None
-        assert self._start_ns is not None
-        assert self._stop_ns is not None
+    def to_json(self, event_opt, options):
+        # type: (Any, Dict[str, Any]) -> Dict[str, Any]
+        profile = self.process()
+
+        set_in_app_in_frames(
+            profile["frames"],
+            options["in_app_exclude"],
+            options["in_app_include"],
+            options["project_root"],
+        )
 
         return {
-            "environment": None,  # Gets added in client.py
-            "event_id": uuid.uuid4().hex,
+            "environment": event_opt.get("environment"),
+            "event_id": self.event_id,
             "platform": "python",
-            "profile": _sample_buffer.slice_profile(self._start_ns, self._stop_ns),
-            "release": None,  # Gets added in client.py
-            "timestamp": None,  # Gets added in client.py
+            "profile": profile,
+            "release": event_opt.get("release", ""),
+            "timestamp": event_opt["start_timestamp"],
             "version": "1",
             "device": {
                 "architecture": platform.machine(),
@@ -206,115 +707,74 @@ class Profile(object):
             },
             "transactions": [
                 {
-                    "id": None,  # Gets added in client.py
-                    "name": self.transaction.name,
+                    "id": event_opt["event_id"],
+                    "name": event_opt["transaction"],
                     # we start the transaction before the profile and this is
                     # the transaction start time relative to the profile, so we
                     # hardcode it to 0 until we can start the profile before
                     "relative_start_ns": "0",
                     # use the duration of the profile instead of the transaction
                     # because we end the transaction after the profile
-                    "relative_end_ns": str(self._stop_ns - self._start_ns),
-                    "trace_id": self.transaction.trace_id,
-                    "active_thread_id": str(self.transaction._active_thread_id),
+                    "relative_end_ns": str(self.stop_ns - self.start_ns),
+                    "trace_id": event_opt["contexts"]["trace"]["trace_id"],
+                    "active_thread_id": str(
+                        self._default_active_thread_id
+                        if self.active_thread_id is None
+                        else self.active_thread_id
+                    ),
                 }
             ],
         }
 
+    def valid(self):
+        # type: () -> bool
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        if client is None:
+            return False
 
-class _SampleBuffer(object):
-    """
-    A simple implementation of a ring buffer to buffer the samples taken.
-
-    At some point, the ring buffer will start overwriting old samples.
-    This is a trade off we've chosen to ensure the memory usage does not
-    grow indefinitely. But by having a sufficiently large buffer, this is
-    largely not a problem.
-    """
-
-    def __init__(self, capacity):
-        # type: (int) -> None
+        if not has_profiling_enabled(client.options):
+            return False
 
-        self.buffer = [None] * capacity
-        self.capacity = capacity
-        self.idx = 0
+        if self.sampled is None or not self.sampled:
+            if client.transport:
+                client.transport.record_lost_event(
+                    "sample_rate", data_category="profile"
+                )
+            return False
 
-    def write(self, sample):
-        # type: (Any) -> None
-        """
-        Writing to the buffer is not thread safe. There is the possibility
-        that parallel writes will overwrite one another.
+        if self.unique_samples < PROFILE_MINIMUM_SAMPLES:
+            if client.transport:
+                client.transport.record_lost_event(
+                    "insufficient_data", data_category="profile"
+                )
+            logger.debug("[Profiling] Discarding profile because insufficient samples.")
+            return False
 
-        This should only be a problem if the signal handler itself is
-        interrupted by the next signal.
-        (i.e. SIGPROF is sent again before the handler finishes).
+        return True
 
-        For this reason, and to keep it performant, we've chosen not to add
-        any synchronization mechanisms here like locks.
-        """
-        idx = self.idx
-        self.buffer[idx] = sample
-        self.idx = (idx + 1) % self.capacity
-
-    def slice_profile(self, start_ns, stop_ns):
-        # type: (int, int) -> Dict[str, Any]
-        samples = []  # type: List[Any]
-        stacks = dict()  # type: Dict[Any, int]
-        stacks_list = list()  # type: List[Any]
-        frames = dict()  # type: Dict[FrameData, int]
-        frames_list = list()  # type: List[Any]
-
-        # TODO: This is doing an naive iteration over the
-        # buffer and extracting the appropriate samples.
-        #
-        # Is it safe to assume that the samples are always in
-        # chronological order and binary search the buffer?
-        for raw_sample in self.buffer:
-            if raw_sample is None:
-                continue
-
-            ts = raw_sample[0]
-            if start_ns > ts or ts > stop_ns:
-                continue
-
-            for tid, stack in raw_sample[1]:
-                sample = {
-                    "elapsed_since_start_ns": str(ts - start_ns),
-                    "thread_id": str(tid),
-                }
-                current_stack = []
 
-                for frame in stack:
-                    if frame not in frames:
-                        frames[frame] = len(frames)
-                        frames_list.append(
-                            {
-                                "name": frame[0],
-                                "file": frame[1],
-                                "line": frame[2],
-                            }
-                        )
-                    current_stack.append(frames[frame])
+class Scheduler(object):
+    mode = "unknown"  # type: ProfilerMode
 
-                current_stack = tuple(current_stack)
-                if current_stack not in stacks:
-                    stacks[current_stack] = len(stacks)
-                    stacks_list.append(current_stack)
-
-                sample["stack_id"] = stacks[current_stack]
-                samples.append(sample)
+    def __init__(self, frequency):
+        # type: (int) -> None
+        self.interval = 1.0 / frequency
 
-        return {"stacks": stacks_list, "frames": frames_list, "samples": samples}
+        self.sampler = self.make_sampler()
 
+        # cap the number of new profiles at any time so it does not grow infinitely
+        self.new_profiles = deque(maxlen=128)  # type: Deque[Profile]
+        self.active_profiles = set()  # type: Set[Profile]
 
-class _Scheduler(object):
-    mode = "unknown"
+    def __enter__(self):
+        # type: () -> Scheduler
+        self.setup()
+        return self
 
-    def __init__(self, frequency):
-        # type: (int) -> None
-        self._lock = threading.Lock()
-        self._count = 0
-        self._interval = 1.0 / frequency
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.teardown()
 
     def setup(self):
         # type: () -> None
@@ -324,31 +784,109 @@ class _Scheduler(object):
         # type: () -> None
         raise NotImplementedError
 
-    def start_profiling(self):
-        # type: () -> bool
-        with self._lock:
-            self._count += 1
-            return self._count == 1
+    def ensure_running(self):
+        # type: () -> None
+        raise NotImplementedError
 
-    def stop_profiling(self):
-        # type: () -> bool
-        with self._lock:
-            self._count -= 1
-            return self._count == 0
+    def start_profiling(self, profile):
+        # type: (Profile) -> None
+        self.ensure_running()
+        self.new_profiles.append(profile)
 
+    def stop_profiling(self, profile):
+        # type: (Profile) -> None
+        pass
 
-class _ThreadScheduler(_Scheduler):
+    def make_sampler(self):
+        # type: () -> Callable[..., None]
+        cwd = os.getcwd()
+
+        cache = LRUCache(max_size=256)
+
+        def _sample_stack(*args, **kwargs):
+            # type: (*Any, **Any) -> None
+            """
+            Take a sample of the stack on all the threads in the process.
+            This should be called at a regular interval to collect samples.
+            """
+            # no profiles taking place, so we can stop early
+            if not self.new_profiles and not self.active_profiles:
+                # make sure to clear the cache if we're not profiling so we don't
+                # keep a reference to the last stack of frames around
+                return
+
+            # This is the number of profiles we want to pop off.
+            # It's possible another thread adds a new profile to
+            # the list and we spend longer than we want inside
+            # the loop below.
+            #
+            # Also make sure to set this value before extracting
+            # frames so we do not write to any new profiles that
+            # were started after this point.
+            new_profiles = len(self.new_profiles)
+
+            now = nanosecond_time()
+
+            try:
+                sample = [
+                    (str(tid), extract_stack(frame, cache, cwd))
+                    for tid, frame in sys._current_frames().items()
+                ]
+            except AttributeError:
+                # For some reason, the frame we get doesn't have certain attributes.
+                # When this happens, we abandon the current sample as it's bad.
+                capture_internal_exception(sys.exc_info())
+                return
+
+            # Move the new profiles into the active_profiles set.
+            #
+            # We cannot directly add to the active_profiles set
+            # in `start_profiling` because it is called from other
+            # threads, which can cause a RuntimeError when the
+            # set size changes during iteration without a lock.
+            #
+            # We also want to avoid using a lock here so threads
+            # that are starting profiles are not blocked until they
+            # can acquire the lock.
+            for _ in range(new_profiles):
+                self.active_profiles.add(self.new_profiles.popleft())
+
+            inactive_profiles = []
+
+            for profile in self.active_profiles:
+                if profile.active:
+                    profile.write(now, sample)
+                else:
+                    # If a profile is marked inactive, we buffer it
+                    # to `inactive_profiles` so it can be removed.
+                    # We cannot remove it here as it would result
+                    # in a RuntimeError.
+                    inactive_profiles.append(profile)
+
+            for profile in inactive_profiles:
+                self.active_profiles.remove(profile)
+
+        return _sample_stack
+
+
+class ThreadScheduler(Scheduler):
     """
-    This abstract scheduler is based on running a daemon thread that will call
+    This scheduler is based on running a daemon thread that will call
     the sampler at a regular interval.
     """
 
-    mode = "thread"
+    mode = "thread"  # type: ProfilerMode
+    name = "sentry.profiler.ThreadScheduler"
 
     def __init__(self, frequency):
         # type: (int) -> None
-        super(_ThreadScheduler, self).__init__(frequency)
-        self.event = threading.Event()
+        super(ThreadScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.running = False
+        self.thread = None  # type: Optional[threading.Thread]
+        self.pid = None  # type: Optional[int]
+        self.lock = threading.Lock()
 
     def setup(self):
         # type: () -> None
@@ -356,246 +894,135 @@ class _ThreadScheduler(_Scheduler):
 
     def teardown(self):
         # type: () -> None
-        pass
-
-    def start_profiling(self):
-        # type: () -> bool
-        if super(_ThreadScheduler, self).start_profiling():
-            # make sure to clear the event as we reuse the same event
-            # over the lifetime of the scheduler
-            self.event.clear()
-
-            # make sure the thread is a daemon here otherwise this
-            # can keep the application running after other threads
-            # have exited
-            thread = threading.Thread(target=self.run, daemon=True)
-            thread.start()
-            return True
-        return False
+        if self.running:
+            self.running = False
+            if self.thread is not None:
+                self.thread.join()
 
-    def stop_profiling(self):
-        # type: () -> bool
-        if super(_ThreadScheduler, self).stop_profiling():
-            # make sure the set the event here so that the thread
-            # can check to see if it should keep running
-            self.event.set()
-            return True
-        return False
-
-    def run(self):
+    def ensure_running(self):
         # type: () -> None
-        raise NotImplementedError
+        pid = os.getpid()
 
+        # already running in the current process
+        if self.running and self.pid == pid:
+            return
 
-class _SleepScheduler(_ThreadScheduler):
-    """
-    This scheduler uses time.sleep to wait the required interval before calling
-    the sampling function.
-    """
+        with self.lock:
+            # another thread may have tried to acquire the lock
+            # at the same time and may have already started another
+            # thread, so make sure to check again before proceeding
+            if self.running and self.pid == pid:
+                return
 
-    mode = "sleep"
+            self.pid = pid
+            self.running = True
+
+            # make sure the thread is a daemon here otherwise this
+            # can keep the application running after other threads
+            # have exited
+            self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
+            self.thread.start()
 
     def run(self):
         # type: () -> None
         last = time.perf_counter()
 
-        while True:
+        while self.running:
+            self.sampler()
+
             # some time may have elapsed since the last time
             # we sampled, so we need to account for that and
             # not sleep for too long
-            now = time.perf_counter()
-            elapsed = max(now - last, 0)
-
-            if elapsed < self._interval:
-                time.sleep(self._interval - elapsed)
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                thread_sleep(self.interval - elapsed)
 
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
             last = time.perf_counter()
 
-            if self.event.is_set():
-                break
-
-            _sample_stack()
-
 
-class _EventScheduler(_ThreadScheduler):
+class GeventScheduler(Scheduler):
     """
-    This scheduler uses threading.Event to wait the required interval before
-    calling the sampling function.
+    This scheduler is based on the thread scheduler but adapted to work with
+    gevent. When using gevent, it may monkey patch the threading modules
+    (`threading` and `_thread`). This results in the use of greenlets instead
+    of native threads.
+
+    This is an issue because the sampler CANNOT run in a greenlet:
+    1. Other greenlets doing sync work will prevent the sampler from running
+    2. The greenlet runs in the same thread as other greenlets so when taking
+       a sample, other greenlets will have been evicted from the thread. This
+       results in a sample containing only the sampler's code.
     """
 
-    mode = "event"
+    mode = "gevent"  # type: ProfilerMode
+    name = "sentry.profiler.GeventScheduler"
 
-    def run(self):
-        # type: () -> None
-        while True:
-            self.event.wait(timeout=self._interval)
-
-            if self.event.is_set():
-                break
-
-            _sample_stack()
-
-
-class _SignalScheduler(_Scheduler):
-    """
-    This abstract scheduler is based on UNIX signals. It sets up a
-    signal handler for the specified signal, and the matching itimer in order
-    for the signal handler to fire at a regular interval.
+    def __init__(self, frequency):
+        # type: (int) -> None
 
-    See https://www.gnu.org/software/libc/manual/html_node/Alarm-Signals.html
-    """
+        if ThreadPool is None:
+            raise ValueError("Profiler mode: {} is not available".format(self.mode))
 
-    mode = "signal"
+        super(GeventScheduler, self).__init__(frequency=frequency)
 
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        raise NotImplementedError
+        # used to signal to the thread that it should stop
+        self.running = False
+        self.thread = None  # type: Optional[ThreadPool]
+        self.pid = None  # type: Optional[int]
 
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        raise NotImplementedError
+        # This intentionally uses the gevent patched threading.Lock.
+        # The lock will be required when first trying to start profiles
+        # as we need to spawn the profiler thread from the greenlets.
+        self.lock = threading.Lock()
 
     def setup(self):
         # type: () -> None
-        """
-        This method sets up the application so that it can be profiled.
-        It MUST be called from the main thread. This is a limitation of
-        python's signal library where it only allows the main thread to
-        set a signal handler.
-        """
-
-        # This setups a process wide signal handler that will be called
-        # at an interval to record samples.
-        try:
-            signal.signal(self.signal_num, _sample_stack)
-        except ValueError:
-            raise ValueError(
-                "Signal based profiling can only be enabled from the main thread."
-            )
-
-        # Ensures that system calls interrupted by signals are restarted
-        # automatically. Otherwise, we may see some strage behaviours
-        # such as IOErrors caused by the system call being interrupted.
-        signal.siginterrupt(self.signal_num, False)
+        pass
 
     def teardown(self):
         # type: () -> None
+        if self.running:
+            self.running = False
+            if self.thread is not None:
+                self.thread.join()
 
-        # setting the timer with 0 will stop will clear the timer
-        signal.setitimer(self.signal_timer, 0)
-
-        # put back the default signal handler
-        signal.signal(self.signal_num, signal.SIG_DFL)
-
-    def start_profiling(self):
-        # type: () -> bool
-        if super(_SignalScheduler, self).start_profiling():
-            signal.setitimer(self.signal_timer, self._interval, self._interval)
-            return True
-        return False
-
-    def stop_profiling(self):
-        # type: () -> bool
-        if super(_SignalScheduler, self).stop_profiling():
-            signal.setitimer(self.signal_timer, 0)
-            return True
-        return False
-
-
-class _SigprofScheduler(_SignalScheduler):
-    """
-    This scheduler uses SIGPROF to regularly call a signal handler where the
-    samples will be taken.
-
-    This is not based on wall time, and you may see some variances
-    in the frequency at which this handler is called.
-
-    This has some limitations:
-    - Only the main thread counts towards the time elapsed. This means that if
-      the main thread is blocking on a sleep() or select() system call, then
-      this clock will not count down. Some examples of this in practice are
-        - When using uwsgi with multiple threads in a worker, the non main
-          threads will only be profiled if the main thread is actively running
-          at the same time.
-        - When using gunicorn with threads, the main thread does not handle the
-          requests directly, so the clock counts down slower than expected since
-          its mostly idling while waiting for requests.
-    """
-
-    mode = "sigprof"
-
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        return signal.SIGPROF
-
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        return signal.ITIMER_PROF
-
-
-class _SigalrmScheduler(_SignalScheduler):
-    """
-    This scheduler uses SIGALRM to regularly call a signal handler where the
-    samples will be taken.
-
-    This is based on real time, so it *should* be called close to the expected
-    frequency.
-    """
-
-    mode = "sigalrm"
-
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        return signal.SIGALRM
-
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        return signal.ITIMER_REAL
-
-
-def _should_profile(transaction, hub):
-    # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> bool
-
-    # The corresponding transaction was not sampled,
-    # so don't generate a profile for it.
-    if not transaction.sampled:
-        return False
+    def ensure_running(self):
+        # type: () -> None
+        pid = os.getpid()
 
-    # The profiler hasn't been properly initialized.
-    if _sample_buffer is None or _scheduler is None:
-        return False
+        # already running in the current process
+        if self.running and self.pid == pid:
+            return
 
-    hub = hub or sentry_sdk.Hub.current
-    client = hub.client
+        with self.lock:
+            # another thread may have tried to acquire the lock
+            # at the same time and may have already started another
+            # thread, so make sure to check again before proceeding
+            if self.running and self.pid == pid:
+                return
 
-    # The client is None, so we can't get the sample rate.
-    if client is None:
-        return False
+            self.pid = pid
+            self.running = True
 
-    options = client.options
-    profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
+            self.thread = ThreadPool(1)
+            self.thread.spawn(self.run)
 
-    # The profiles_sample_rate option was not set, so profiling
-    # was never enabled.
-    if profiles_sample_rate is None:
-        return False
-
-    return random.random() < float(profiles_sample_rate)
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
 
+        while self.running:
+            self.sampler()
 
-@contextmanager
-def start_profiling(transaction, hub=None):
-    # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                thread_sleep(self.interval - elapsed)
 
-    # if profiling was not enabled, this should be a noop
-    if _should_profile(transaction, hub):
-        with Profile(transaction, hub=hub):
-            yield
-    else:
-        yield
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
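
The profiler rewrite above replaces the old global ring buffer with per-profile sample storage, and `_set_initial_sampling_decision` now reads the sampling configuration from the client options. A minimal sketch of how an application might opt in (the DSN is a placeholder and the rates are illustrative, not recommendations):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,    # profiles are only taken for sampled transactions
        profiles_sample_rate=0.5,  # then roll the dice on half of those
    )

Per the precedence rules in `_set_initial_sampling_decision`, a callable `profiles_sampler` wins over `profiles_sample_rate`, which in turn wins over the legacy `_experiments` setting.
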
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index e0a2dc7..b83cd5f 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -1,20 +1,37 @@
 from copy import copy
 from collections import deque
 from itertools import chain
+import os
+import uuid
 
+from sentry_sdk.attachments import Attachment
 from sentry_sdk._functools import wraps
-from sentry_sdk._types import MYPY
+from sentry_sdk.tracing_utils import (
+    Baggage,
+    extract_sentrytrace_data,
+    has_tracing_enabled,
+    normalize_incoming_data,
+)
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+    Transaction,
+)
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import logger, capture_internal_exceptions
-from sentry_sdk.tracing import Transaction
-from sentry_sdk.attachments import Attachment
 
-if MYPY:
+from sentry_sdk.consts import FALSE_VALUES
+
+
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
+    from typing import Iterator
     from typing import Optional
     from typing import Deque
     from typing import List
     from typing import Callable
+    from typing import Tuple
     from typing import TypeVar
 
     from sentry_sdk._types import (
@@ -27,6 +44,7 @@ if MYPY:
         Type,
     )
 
+    from sentry_sdk.profiler import Profile
     from sentry_sdk.tracing import Span
     from sentry_sdk.session import Session
 
@@ -94,6 +112,8 @@ class Scope(object):
         "_session",
         "_attachments",
         "_force_auto_session_tracking",
+        "_profile",
+        "_propagation_context",
     )
 
     def __init__(self):
@@ -102,8 +122,187 @@ class Scope(object):
         self._error_processors = []  # type: List[ErrorProcessor]
 
         self._name = None  # type: Optional[str]
+        self._propagation_context = None  # type: Optional[Dict[str, Any]]
+
         self.clear()
 
+        incoming_trace_information = self._load_trace_data_from_env()
+        self.generate_propagation_context(incoming_data=incoming_trace_information)
+
+    def _load_trace_data_from_env(self):
+        # type: () -> Optional[Dict[str, str]]
+        """
+        Load Sentry trace id and baggage from environment variables.
+        Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false".
+        """
+        incoming_trace_information = None
+
+        sentry_use_environment = (
+            os.environ.get("SENTRY_USE_ENVIRONMENT") or ""
+        ).lower()
+        use_environment = sentry_use_environment not in FALSE_VALUES
+        if use_environment:
+            incoming_trace_information = {}
+
+            if os.environ.get("SENTRY_TRACE"):
+                incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = (
+                    os.environ.get("SENTRY_TRACE") or ""
+                )
+
+            if os.environ.get("SENTRY_BAGGAGE"):
+                incoming_trace_information[BAGGAGE_HEADER_NAME] = (
+                    os.environ.get("SENTRY_BAGGAGE") or ""
+                )
+
+        return incoming_trace_information or None
+
+    def _extract_propagation_context(self, data):
+        # type: (Dict[str, Any]) -> Optional[Dict[str, Any]]
+        context = {}  # type: Dict[str, Any]
+        normalized_data = normalize_incoming_data(data)
+
+        baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
+        if baggage_header:
+            context["dynamic_sampling_context"] = Baggage.from_incoming_header(
+                baggage_header
+            ).dynamic_sampling_context()
+
+        sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
+        if sentry_trace_header:
+            sentrytrace_data = extract_sentrytrace_data(sentry_trace_header)
+            if sentrytrace_data is not None:
+                context.update(sentrytrace_data)
+
+        only_baggage_no_sentry_trace = (
+            "dynamic_sampling_context" in context and "trace_id" not in context
+        )
+        if only_baggage_no_sentry_trace:
+            context.update(self._create_new_propagation_context())
+
+        if context:
+            if not context.get("span_id"):
+                context["span_id"] = uuid.uuid4().hex[16:]
+
+            return context
+
+        return None
+
+    def _create_new_propagation_context(self):
+        # type: () -> Dict[str, Any]
+        return {
+            "trace_id": uuid.uuid4().hex,
+            "span_id": uuid.uuid4().hex[16:],
+            "parent_span_id": None,
+            "dynamic_sampling_context": None,
+        }
+
+    def set_new_propagation_context(self):
+        # type: () -> None
+        """
+        Creates a new propagation context and sets it as `_propagation_context`, overwriting any existing one.
+        """
+        self._propagation_context = self._create_new_propagation_context()
+        logger.debug(
+            "[Tracing] Create new propagation context: %s",
+            self._propagation_context,
+        )
+
+    def generate_propagation_context(self, incoming_data=None):
+        # type: (Optional[Dict[str, str]]) -> None
+        """
+        Makes sure `_propagation_context` is set.
+        If there is `incoming_data`, overwrite the existing `_propagation_context`.
+        If there is no `incoming_data`, create a new `_propagation_context`, but do NOT overwrite an existing one.
+        """
+        if incoming_data:
+            context = self._extract_propagation_context(incoming_data)
+
+            if context is not None:
+                self._propagation_context = context
+                logger.debug(
+                    "[Tracing] Extracted propagation context from incoming data: %s",
+                    self._propagation_context,
+                )
+
+        if self._propagation_context is None:
+            self.set_new_propagation_context()
+
+    def get_dynamic_sampling_context(self):
+        # type: () -> Optional[Dict[str, str]]
+        """
+        Returns the Dynamic Sampling Context from the Propagation Context,
+        refreshing it from the baggage when one is available.
+        """
+        if self._propagation_context is None:
+            return None
+
+        baggage = self.get_baggage()
+        if baggage is not None:
+            self._propagation_context[
+                "dynamic_sampling_context"
+            ] = baggage.dynamic_sampling_context()
+
+        return self._propagation_context["dynamic_sampling_context"]
+
+    def get_traceparent(self):
+        # type: () -> Optional[str]
+        """
+        Returns the Sentry "sentry-trace" header (aka the traceparent) from the Propagation Context.
+        """
+        if self._propagation_context is None:
+            return None
+
+        traceparent = "%s-%s" % (
+            self._propagation_context["trace_id"],
+            self._propagation_context["span_id"],
+        )
+        return traceparent
+
+    def get_baggage(self):
+        # type: () -> Optional[Baggage]
+        if self._propagation_context is None:
+            return None
+
+        dynamic_sampling_context = self._propagation_context.get(
+            "dynamic_sampling_context"
+        )
+        if dynamic_sampling_context is None:
+            return Baggage.from_options(self)
+        else:
+            return Baggage(dynamic_sampling_context)
+
+    def get_trace_context(self):
+        # type: () -> Any
+        """
+        Returns the Sentry "trace" context from the Propagation Context.
+        """
+        if self._propagation_context is None:
+            return None
+
+        trace_context = {
+            "trace_id": self._propagation_context["trace_id"],
+            "span_id": self._propagation_context["span_id"],
+            "parent_span_id": self._propagation_context["parent_span_id"],
+            "dynamic_sampling_context": self.get_dynamic_sampling_context(),
+        }  # type: Dict[str, Any]
+
+        return trace_context
+
+    def iter_headers(self):
+        # type: () -> Iterator[Tuple[str, str]]
+        """
+        Creates a generator which yields the `sentry-trace` and `baggage` headers from the Propagation Context.
+        """
+        if self._propagation_context is not None:
+            traceparent = self.get_traceparent()
+            if traceparent is not None:
+                yield SENTRY_TRACE_HEADER_NAME, traceparent
+
+            dsc = self.get_dynamic_sampling_context()
+            if dsc is not None:
+                baggage = Baggage(dsc).serialize()
+                yield BAGGAGE_HEADER_NAME, baggage
+
     def clear(self):
         # type: () -> None
         """Clears the entire scope."""
@@ -125,6 +324,10 @@ class Scope(object):
         self._session = None  # type: Optional[Session]
         self._force_auto_session_tracking = None  # type: Optional[bool]
 
+        self._profile = None  # type: Optional[Profile]
+
+        self._propagation_context = None
+
     @_attr_setter
     def level(self, value):
         # type: (Optional[str]) -> None
@@ -228,6 +431,17 @@ class Scope(object):
             if transaction.name:
                 self._transaction = transaction.name
 
+    @property
+    def profile(self):
+        # type: () -> Optional[Profile]
+        return self._profile
+
+    @profile.setter
+    def profile(self, profile):
+        # type: (Optional[Profile]) -> None
+
+        self._profile = profile
+
     def set_tag(
         self,
         key,  # type: str
@@ -351,13 +565,14 @@ class Scope(object):
         self,
         event,  # type: Event
         hint,  # type: Hint
+        options=None,  # type: Optional[Dict[str, Any]]
     ):
         # type: (...) -> Optional[Event]
         """Applies the information contained on the scope to the given event."""
 
-        def _drop(event, cause, ty):
-            # type: (Dict[str, Any], Any, str) -> Optional[Any]
-            logger.info("%s (%s) dropped event (%s)", ty, cause, event)
+        def _drop(cause, ty):
+            # type: (Any, str) -> Optional[Any]
+            logger.info("%s (%s) dropped event", ty, cause)
             return None
 
         is_transaction = event.get("type") == "transaction"
@@ -400,17 +615,30 @@ class Scope(object):
         if self._contexts:
             event.setdefault("contexts", {}).update(self._contexts)
 
-        if self._span is not None:
-            contexts = event.setdefault("contexts", {})
-            if not contexts.get("trace"):
+        contexts = event.setdefault("contexts", {})
+
+        if contexts.get("trace") is None:
+            if has_tracing_enabled(options) and self._span is not None:
                 contexts["trace"] = self._span.get_trace_context()
+            else:
+                contexts["trace"] = self.get_trace_context()
+
+        try:
+            replay_id = contexts["trace"]["dynamic_sampling_context"]["replay_id"]
+        except (KeyError, TypeError):
+            replay_id = None
+
+        if replay_id is not None:
+            contexts["replay"] = {
+                "replay_id": replay_id,
+            }
 
         exc_info = hint.get("exc_info")
         if exc_info is not None:
             for error_processor in self._error_processors:
                 new_event = error_processor(event, exc_info)
                 if new_event is None:
-                    return _drop(event, error_processor, "error processor")
+                    return _drop(error_processor, "error processor")
                 event = new_event
 
         for event_processor in chain(global_event_processors, self._event_processors):
@@ -418,7 +646,7 @@ class Scope(object):
             with capture_internal_exceptions():
                 new_event = event_processor(event, hint)
             if new_event is None:
-                return _drop(event, event_processor, "event processor")
+                return _drop(event_processor, "event processor")
             event = new_event
 
         return event
@@ -447,6 +675,10 @@ class Scope(object):
             self._span = scope._span
         if scope._attachments:
             self._attachments.extend(scope._attachments)
+        if scope._profile:
+            self._profile = scope._profile
+        if scope._propagation_context:
+            self._propagation_context = scope._propagation_context
 
     def update_from_kwargs(
         self,
@@ -489,6 +721,7 @@ class Scope(object):
         rv._breadcrumbs = copy(self._breadcrumbs)
         rv._event_processors = list(self._event_processors)
         rv._error_processors = list(self._error_processors)
+        rv._propagation_context = self._propagation_context
 
         rv._should_capture = self._should_capture
         rv._span = self._span
@@ -496,6 +729,8 @@ class Scope(object):
         rv._force_auto_session_tracking = self._force_auto_session_tracking
         rv._attachments = list(self._attachments)
 
+        rv._profile = self._profile
+
         return rv
 
     def __repr__(self):
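
The propagation-context additions above let a scope continue a trace handed over through the `SENTRY_TRACE`/`SENTRY_BAGGAGE` environment variables and re-emit matching outgoing headers. A rough sketch, assuming a hypothetical parent trace id and placeholder DSN:

    import os

    # Hypothetical trace id propagated by a parent process; read back by
    # Scope._load_trace_data_from_env() when the scope is created.
    # Set SENTRY_USE_ENVIRONMENT=false to opt out.
    os.environ["SENTRY_TRACE"] = "771a43a4192642f0b136d5159a501700-1234567890abcdef"

    import sentry_sdk

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder

    # The scope now carries a propagation context continuing that trace:
    for name, value in sentry_sdk.Hub.current.scope.iter_headers():
        print(name, value)  # e.g. "sentry-trace 771a43a4...-<fresh span id>"

Note that `_extract_propagation_context` keeps the incoming trace id but generates a fresh `span_id`, so the emitted `sentry-trace` header shares only its first segment with the incoming one.
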
diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py
new file mode 100644
index 0000000..838ef08
--- /dev/null
+++ b/sentry_sdk/scrubber.py
@@ -0,0 +1,130 @@
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    AnnotatedValue,
+    iter_event_frames,
+)
+from sentry_sdk._compat import string_types
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from sentry_sdk._types import Event
+    from typing import Any
+    from typing import Dict
+    from typing import List
+    from typing import Optional
+
+
+DEFAULT_DENYLIST = [
+    # stolen from relay
+    "password",
+    "passwd",
+    "secret",
+    "api_key",
+    "apikey",
+    "auth",
+    "credentials",
+    "mysql_pwd",
+    "privatekey",
+    "private_key",
+    "token",
+    "ip_address",
+    "session",
+    # django
+    "csrftoken",
+    "sessionid",
+    # wsgi
+    "remote_addr",
+    "x_csrftoken",
+    "x_forwarded_for",
+    "set_cookie",
+    "cookie",
+    "authorization",
+    "x_api_key",
+    "x_forwarded_for",
+    "x_real_ip",
+    # other common names used in the wild
+    "aiohttp_session",  # aiohttp
+    "connect.sid",  # Express
+    "csrf_token",  # Pyramid
+    "csrf",  # (this is a cookie name used in accepted answers on stack overflow)
+    "_csrf",  # Express
+    "_csrf_token",  # Bottle
+    "PHPSESSID",  # PHP
+    "_session",  # Sanic
+    "symfony",  # Symfony
+    "user_session",  # Vue
+    "_xsrf",  # Tornado
+    "XSRF-TOKEN",  # Angular, Laravel
+]
+
+
+class EventScrubber(object):
+    def __init__(self, denylist=None):
+        # type: (Optional[List[str]]) -> None
+        self.denylist = DEFAULT_DENYLIST if denylist is None else denylist
+        self.denylist = [x.lower() for x in self.denylist]
+
+    def scrub_dict(self, d):
+        # type: (Dict[str, Any]) -> None
+        if not isinstance(d, dict):
+            return
+
+        for k in d.keys():
+            if isinstance(k, string_types) and k.lower() in self.denylist:
+                d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
+
+    def scrub_request(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "request" in event:
+                if "headers" in event["request"]:
+                    self.scrub_dict(event["request"]["headers"])
+                if "cookies" in event["request"]:
+                    self.scrub_dict(event["request"]["cookies"])
+                if "data" in event["request"]:
+                    self.scrub_dict(event["request"]["data"])
+
+    def scrub_extra(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "extra" in event:
+                self.scrub_dict(event["extra"])
+
+    def scrub_user(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "user" in event:
+                self.scrub_dict(event["user"])
+
+    def scrub_breadcrumbs(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "breadcrumbs" in event:
+                if "values" in event["breadcrumbs"]:
+                    for value in event["breadcrumbs"]["values"]:
+                        if "data" in value:
+                            self.scrub_dict(value["data"])
+
+    def scrub_frames(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            for frame in iter_event_frames(event):
+                if "vars" in frame:
+                    self.scrub_dict(frame["vars"])
+
+    def scrub_spans(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "spans" in event:
+                for span in event["spans"]:
+                    if "data" in span:
+                        self.scrub_dict(span["data"])
+
+    def scrub_event(self, event):
+        # type: (Event) -> None
+        self.scrub_request(event)
+        self.scrub_extra(event)
+        self.scrub_user(event)
+        self.scrub_breadcrumbs(event)
+        self.scrub_frames(event)
+        self.scrub_spans(event)
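
For illustration, the new scrubber can also be exercised directly; the extra denylist entry below is invented for the example:

    from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST

    # Extend the shipped denylist with an application-specific key. Matching
    # is case-insensitive because __init__ lowercases every entry.
    scrubber = EventScrubber(denylist=DEFAULT_DENYLIST + ["my_internal_token"])

    event = {"extra": {"My_Internal_Token": "hunter2", "color": "blue"}}
    scrubber.scrub_event(event)
    # "My_Internal_Token" is now replaced by an AnnotatedValue placeholder;
    # "color" is left untouched.
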
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index e657f6b..7925cf5 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -8,20 +8,20 @@ from sentry_sdk.utils import (
     capture_internal_exception,
     disable_capture_event,
     format_timestamp,
-    json_dumps,
     safe_repr,
     strip_string,
 )
+from sentry_sdk._compat import (
+    text_type,
+    PY2,
+    string_types,
+    number_types,
+    iteritems,
+    binary_sequence_types,
+)
+from sentry_sdk._types import TYPE_CHECKING
 
-import sentry_sdk.utils
-
-from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
-    from datetime import timedelta
-
+if TYPE_CHECKING:
     from types import TracebackType
 
     from typing import Any
@@ -30,7 +30,6 @@ if MYPY:
     from typing import Dict
     from typing import List
     from typing import Optional
-    from typing import Tuple
     from typing import Type
     from typing import Union
 
@@ -47,7 +46,7 @@ if PY2:
     # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
     from collections import Mapping, Sequence, Set
 
-    serializable_str_types = string_types
+    serializable_str_types = string_types + binary_sequence_types
 
 else:
     # New in 3.3
@@ -55,7 +54,7 @@ else:
     from collections.abc import Mapping, Sequence, Set
 
     # Bytes are technically not strings in Python 3, but we can serialize them
-    serializable_str_types = (str, bytes)
+    serializable_str_types = string_types + binary_sequence_types
 
 
 # Maximum length of JSON-serialized event payloads that can be safely sent
@@ -68,6 +67,8 @@ else:
 # this value due to attached metadata, so keep the number conservative.
 MAX_EVENT_BYTES = 10**6
 
+# Maximum depth and breadth of databags. Excess data will be trimmed. If
+# max_request_body_size is "always", request bodies won't be trimmed.
 MAX_DATABAG_DEPTH = 5
 MAX_DATABAG_BREADTH = 10
 CYCLE_MARKER = "<cyclic>"
@@ -113,12 +114,16 @@ class Memo(object):
         self._ids.pop(id(self._objs.pop()), None)
 
 
-def serialize(event, smart_transaction_trimming=False, **kwargs):
-    # type: (Event, bool, **Any) -> Event
+def serialize(event, **kwargs):
+    # type: (Event, **Any) -> Event
     memo = Memo()
     path = []  # type: List[Segment]
     meta_stack = []  # type: List[Dict[str, Any]]
-    span_description_bytes = []  # type: List[int]
+
+    keep_request_bodies = (
+        kwargs.pop("max_request_body_size", None) == "always"
+    )  # type: bool
+    max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]
 
     def _annotate(**meta):
         # type: (**Any) -> None
@@ -184,10 +189,11 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
             if rv in (True, None):
                 return rv
 
-            p0 = path[0]
-            if p0 == "request" and path[1] == "data":
-                return True
+            is_request_body = _is_request_body()
+            if is_request_body in (True, None):
+                return is_request_body
 
+            p0 = path[0]
             if p0 == "breadcrumbs" and path[1] == "values":
                 path[2]
                 return True
@@ -200,13 +206,24 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
 
         return False
 
+    def _is_request_body():
+        # type: () -> Optional[bool]
+        try:
+            if path[0] == "request" and path[1] == "data":
+                return True
+        except IndexError:
+            return None
+
+        return False
+
     def _serialize_node(
         obj,  # type: Any
         is_databag=None,  # type: Optional[bool]
+        is_request_body=None,  # type: Optional[bool]
         should_repr_strings=None,  # type: Optional[bool]
         segment=None,  # type: Optional[Segment]
-        remaining_breadth=None,  # type: Optional[int]
-        remaining_depth=None,  # type: Optional[int]
+        remaining_breadth=None,  # type: Optional[Union[int, float]]
+        remaining_depth=None,  # type: Optional[Union[int, float]]
     ):
         # type: (...) -> Any
         if segment is not None:
@@ -220,6 +237,7 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
                 return _serialize_node_impl(
                     obj,
                     is_databag=is_databag,
+                    is_request_body=is_request_body,
                     should_repr_strings=should_repr_strings,
                     remaining_depth=remaining_depth,
                     remaining_breadth=remaining_breadth,
@@ -244,26 +262,43 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
         return obj
 
     def _serialize_node_impl(
-        obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
+        obj,
+        is_databag,
+        is_request_body,
+        should_repr_strings,
+        remaining_depth,
+        remaining_breadth,
     ):
-        # type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
+        # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any
+        if isinstance(obj, AnnotatedValue):
+            should_repr_strings = False
         if should_repr_strings is None:
             should_repr_strings = _should_repr_strings()
 
         if is_databag is None:
             is_databag = _is_databag()
 
-        if is_databag and remaining_depth is None:
-            remaining_depth = MAX_DATABAG_DEPTH
-        if is_databag and remaining_breadth is None:
-            remaining_breadth = MAX_DATABAG_BREADTH
+        if is_request_body is None:
+            is_request_body = _is_request_body()
+
+        if is_databag:
+            if is_request_body and keep_request_bodies:
+                remaining_depth = float("inf")
+                remaining_breadth = float("inf")
+            else:
+                if remaining_depth is None:
+                    remaining_depth = MAX_DATABAG_DEPTH
+                if remaining_breadth is None:
+                    remaining_breadth = MAX_DATABAG_BREADTH
 
         obj = _flatten_annotated(obj)
 
         if remaining_depth is not None and remaining_depth <= 0:
             _annotate(rem=[["!limit", "x"]])
             if is_databag:
-                return _flatten_annotated(strip_string(safe_repr(obj)))
+                return _flatten_annotated(
+                    strip_string(safe_repr(obj), max_length=max_value_length)
+                )
             return None
 
         if is_databag and global_repr_processors:
@@ -312,6 +347,7 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
                     segment=str_k,
                     should_repr_strings=should_repr_strings,
                     is_databag=is_databag,
+                    is_request_body=is_request_body,
                     remaining_depth=remaining_depth - 1
                     if remaining_depth is not None
                     else None,
@@ -338,6 +374,7 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
                         segment=i,
                         should_repr_strings=should_repr_strings,
                         is_databag=is_databag,
+                        is_request_body=is_request_body,
                         remaining_depth=remaining_depth - 1
                         if remaining_depth is not None
                         else None,
@@ -350,119 +387,29 @@ def serialize(event, smart_transaction_trimming=False, **kwargs):
         if should_repr_strings:
             obj = safe_repr(obj)
         else:
-            if isinstance(obj, bytes):
+            if isinstance(obj, bytes) or isinstance(obj, bytearray):
                 obj = obj.decode("utf-8", "replace")
 
             if not isinstance(obj, string_types):
                 obj = safe_repr(obj)
 
-        # Allow span descriptions to be longer than other strings.
-        #
-        # For database auto-instrumented spans, the description contains
-        # potentially long SQL queries that are most useful when not truncated.
-        # Because arbitrarily large events may be discarded by the server as a
-        # protection mechanism, we dynamically limit the description length
-        # later in _truncate_span_descriptions.
-        if (
-            smart_transaction_trimming
-            and len(path) == 3
-            and path[0] == "spans"
-            and path[-1] == "description"
-        ):
-            span_description_bytes.append(len(obj))
+        is_span_description = (
+            len(path) == 3 and path[0] == "spans" and path[-1] == "description"
+        )
+        if is_span_description:
             return obj
-        return _flatten_annotated(strip_string(obj))
 
-    def _truncate_span_descriptions(serialized_event, event, excess_bytes):
-        # type: (Event, Event, int) -> None
-        """
-        Modifies serialized_event in-place trying to remove excess_bytes from
-        span descriptions. The original event is used read-only to access the
-        span timestamps (represented as RFC3399-formatted strings in
-        serialized_event).
-
-        It uses heuristics to prioritize preserving the description of spans
-        that might be the most interesting ones in terms of understanding and
-        optimizing performance.
-        """
-        # When truncating a description, preserve a small prefix.
-        min_length = 10
-
-        def shortest_duration_longest_description_first(args):
-            # type: (Tuple[int, Span]) -> Tuple[timedelta, int]
-            i, serialized_span = args
-            span = event["spans"][i]
-            now = datetime.utcnow()
-            start = span.get("start_timestamp") or now
-            end = span.get("timestamp") or now
-            duration = end - start
-            description = serialized_span.get("description") or ""
-            return (duration, -len(description))
-
-        # Note: for simplicity we sort spans by exact duration and description
-        # length. If ever needed, we could have a more involved heuristic, e.g.
-        # replacing exact durations with "buckets" and/or looking at other span
-        # properties.
-        path.append("spans")
-        for i, span in sorted(
-            enumerate(serialized_event.get("spans") or []),
-            key=shortest_duration_longest_description_first,
-        ):
-            description = span.get("description") or ""
-            if len(description) <= min_length:
-                continue
-            excess_bytes -= len(description) - min_length
-            path.extend([i, "description"])
-            # Note: the last time we call strip_string we could preserve a few
-            # more bytes up to a total length of MAX_EVENT_BYTES. Since that's
-            # not strictly required, we leave it out for now for simplicity.
-            span["description"] = _flatten_annotated(
-                strip_string(description, max_length=min_length)
-            )
-            del path[-2:]
-            del meta_stack[len(path) + 1 :]
-
-            if excess_bytes <= 0:
-                break
-        path.pop()
-        del meta_stack[len(path) + 1 :]
+        return _flatten_annotated(strip_string(obj, max_length=max_value_length))
 
+    #
+    # Start of serialize() function
+    #
     disable_capture_event.set(True)
     try:
-        rv = _serialize_node(event, **kwargs)
-        if meta_stack and isinstance(rv, dict):
-            rv["_meta"] = meta_stack[0]
-
-        sum_span_description_bytes = sum(span_description_bytes)
-        if smart_transaction_trimming and sum_span_description_bytes > 0:
-            span_count = len(event.get("spans") or [])
-            # This is an upper bound of how many bytes all descriptions would
-            # consume if the usual string truncation in _serialize_node_impl
-            # would have taken place, not accounting for the metadata attached
-            # as event["_meta"].
-            descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH
-
-            # If by not truncating descriptions we ended up with more bytes than
-            # per the usual string truncation, check if the event is too large
-            # and we need to truncate some descriptions.
-            #
-            # This is guarded with an if statement to avoid JSON-encoding the
-            # event unnecessarily.
-            if sum_span_description_bytes > descriptions_budget_bytes:
-                original_bytes = len(json_dumps(rv))
-                excess_bytes = original_bytes - MAX_EVENT_BYTES
-                if excess_bytes > 0:
-                    # Event is too large, will likely be discarded by the
-                    # server. Trim it down before sending.
-                    _truncate_span_descriptions(rv, event, excess_bytes)
-
-                    # Span descriptions truncated, set or reset _meta.
-                    #
-                    # We run the same code earlier because we want to account
-                    # for _meta when calculating original_bytes, the number of
-                    # bytes in the JSON-encoded event.
-                    if meta_stack and isinstance(rv, dict):
-                        rv["_meta"] = meta_stack[0]
-        return rv
+        serialized_event = _serialize_node(event, **kwargs)
+        if meta_stack and isinstance(serialized_event, dict):
+            serialized_event["_meta"] = meta_stack[0]
+
+        return serialized_event
     finally:
         disable_capture_event.set(False)
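
The practical effect of the new `_is_request_body` helper and the `keep_request_bodies` flag is that request-body subtrees opt out of databag trimming entirely. A minimal sketch of the budget rule, with illustrative names rather than the SDK's internal API:

    MAX_DATABAG_DEPTH = 5      # default nesting budget for databags
    MAX_DATABAG_BREADTH = 10   # default per-container item budget

    def budgets(is_request_body, keep_request_bodies):
        # Request bodies get an infinite budget when the user opted in.
        # float("inf") - 1 is still inf, so the "remaining_depth <= 0"
        # cutoff in _serialize_node_impl never fires for them.
        if is_request_body and keep_request_bodies:
            return float("inf"), float("inf")
        return MAX_DATABAG_DEPTH, MAX_DATABAG_BREADTH

    assert budgets(True, True) == (float("inf"), float("inf"))
    assert budgets(True, False) == (MAX_DATABAG_DEPTH, MAX_DATABAG_BREADTH)

This is also why the `remaining_depth`/`remaining_breadth` annotations widen from `Optional[int]` to `Optional[Union[int, float]]`.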
diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py
index 98a8c72..b0c3d53 100644
--- a/sentry_sdk/session.py
+++ b/sentry_sdk/session.py
@@ -1,10 +1,10 @@
 import uuid
 from datetime import datetime
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import format_timestamp
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
     from typing import Union
     from typing import Any
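
The `MYPY` guard is renamed to `TYPE_CHECKING` here and throughout the SDK; the idiom itself is unchanged. A minimal sketch of the pattern:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by static type checkers, never at runtime, so
        # typing-only imports cost nothing and cannot create cycles.
        from typing import Optional

    def format_release(release):
        # type: (Optional[str]) -> str
        return release or "unknown"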
diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py
index 4e4d21b..520fbbc 100644
--- a/sentry_sdk/sessions.py
+++ b/sentry_sdk/sessions.py
@@ -6,10 +6,10 @@ from contextlib import contextmanager
 import sentry_sdk
 from sentry_sdk.envelope import Envelope
 from sentry_sdk.session import Session
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import format_timestamp
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -93,7 +93,7 @@ class SessionFlusher(object):
 
             envelope.add_session(session)
 
-        for (attrs, states) in pending_aggregates.items():
+        for attrs, states in pending_aggregates.items():
             if len(envelope.items) == MAX_ENVELOPE_ITEMS:
                 self.capture_func(envelope)
                 envelope = Envelope()
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 3bef18b..fa65e49 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,27 +1,32 @@
 import uuid
 import random
-import threading
-import time
 
 from datetime import datetime, timedelta
 
 import sentry_sdk
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.utils import is_valid_sample_rate, logger, nanosecond_time
+from sentry_sdk._compat import PY2
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk._types import TYPE_CHECKING
 
-from sentry_sdk.utils import logger
-from sentry_sdk._types import MYPY
 
-
-if MYPY:
+if TYPE_CHECKING:
     import typing
 
-    from typing import Optional
     from typing import Any
     from typing import Dict
+    from typing import Iterator
     from typing import List
+    from typing import Optional
     from typing import Tuple
-    from typing import Iterator
 
-    from sentry_sdk._types import SamplingContext, MeasurementUnit
+    import sentry_sdk.profiler
+    from sentry_sdk._types import Event, MeasurementUnit, SamplingContext
+
+
+BAGGAGE_HEADER_NAME = "baggage"
+SENTRY_TRACE_HEADER_NAME = "sentry-trace"
 
 
 # Transaction source
@@ -84,7 +89,7 @@ class Span(object):
         "op",
         "description",
         "start_timestamp",
-        "_start_timestamp_monotonic",
+        "_start_timestamp_monotonic_ns",
         "status",
         "timestamp",
         "_tags",
@@ -122,6 +127,7 @@ class Span(object):
         status=None,  # type: Optional[str]
         transaction=None,  # type: Optional[str] # deprecated
         containing_transaction=None,  # type: Optional[Transaction]
+        start_timestamp=None,  # type: Optional[datetime]
     ):
         # type: (...) -> None
         self.trace_id = trace_id or uuid.uuid4().hex
@@ -136,13 +142,11 @@ class Span(object):
         self._tags = {}  # type: Dict[str, str]
         self._data = {}  # type: Dict[str, Any]
         self._containing_transaction = containing_transaction
-        self.start_timestamp = datetime.utcnow()
+        self.start_timestamp = start_timestamp or datetime.utcnow()
         try:
-            # TODO: For Python 3.7+, we could use a clock with ns resolution:
-            # self._start_timestamp_monotonic = time.perf_counter_ns()
-
-            # Python 3.3+
-            self._start_timestamp_monotonic = time.perf_counter()
+            # profiling depends on this value and requires that
+            # it is measured in nanoseconds
+            self._start_timestamp_monotonic_ns = nanosecond_time()
         except AttributeError:
             pass
 
@@ -203,8 +207,8 @@ class Span(object):
         # referencing themselves)
         return self._containing_transaction
 
-    def start_child(self, **kwargs):
-        # type: (**Any) -> Span
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> Span
         """
         Start a sub-span from the current span or transaction.
 
@@ -212,6 +216,13 @@ class Span(object):
         trace id, sampling decision, transaction pointer, and span recorder are
         inherited from the current span/transaction.
         """
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         kwargs.setdefault("sampled", self.sampled)
 
         child = Span(
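
The new guard silently disables manually created spans whenever the caller's instrumenter does not match the configured one. A sketch of the behaviour, where `parent_span` stands in for any existing span:

    import sentry_sdk
    from sentry_sdk.consts import INSTRUMENTER

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        instrumenter=INSTRUMENTER.OTEL,
    )

    # start_child() defaults to instrumenter=INSTRUMENTER.SENTRY, which
    # does not match the configured OTEL instrumenter, so a NoOpSpan is
    # returned and nothing is recorded.
    child = parent_span.start_child(op="db.query")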
@@ -230,7 +241,7 @@ class Span(object):
 
     def new_span(self, **kwargs):
         # type: (**Any) -> Span
-        """Deprecated: use start_child instead."""
+        """Deprecated: use :py:meth:`sentry_sdk.tracing.Span.start_child` instead."""
         logger.warning("Deprecated: use Span.start_child instead of Span.new_span.")
         return self.start_child(**kwargs)
 
@@ -243,7 +254,7 @@ class Span(object):
         # type: (...) -> Transaction
         """
         Create a Transaction with the given params, then add in data pulled from
-        the 'sentry-trace', 'baggage' and 'tracestate' headers from the environ (if any)
+        the 'sentry-trace' and 'baggage' headers from the environ (if any)
         before returning the Transaction.
 
         This is different from `continue_from_headers` in that it assumes header
@@ -266,7 +277,7 @@ class Span(object):
         # type: (...) -> Transaction
         """
         Create a transaction with the given params (including any data pulled from
-        the 'sentry-trace', 'baggage' and 'tracestate' headers).
+        the 'sentry-trace' and 'baggage' headers).
         """
         # TODO move this to the Transaction class
         if cls is Span:
@@ -277,10 +288,12 @@ class Span(object):
 
         # TODO-neel move away from this kwargs stuff, it's confusing and opaque
         # make more explicit
-        baggage = Baggage.from_incoming_header(headers.get("baggage"))
-        kwargs.update({"baggage": baggage})
+        baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME))
+        kwargs.update({BAGGAGE_HEADER_NAME: baggage})
 
-        sentrytrace_kwargs = extract_sentrytrace_data(headers.get("sentry-trace"))
+        sentrytrace_kwargs = extract_sentrytrace_data(
+            headers.get(SENTRY_TRACE_HEADER_NAME)
+        )
 
         if sentrytrace_kwargs is not None:
             kwargs.update(sentrytrace_kwargs)
@@ -290,8 +303,6 @@ class Span(object):
             # baggage will be empty and immutable and won't be populated as head SDK.
             baggage.freeze()
 
-        kwargs.update(extract_tracestate_data(headers.get("tracestate")))
-
         transaction = Transaction(**kwargs)
         transaction.same_process_as_parent = False
 
@@ -300,26 +311,16 @@ class Span(object):
     def iter_headers(self):
         # type: () -> Iterator[Tuple[str, str]]
         """
-        Creates a generator which returns the span's `sentry-trace`, `baggage` and
-        `tracestate` headers.
-
-        If the span's containing transaction doesn't yet have a
-        `sentry_tracestate` value, this will cause one to be generated and
-        stored.
+        Creates a generator which returns the span's `sentry-trace` and `baggage` headers.
+        If the span's containing transaction doesn't yet have a `baggage` value,
+        this will cause one to be generated and stored.
         """
-        yield "sentry-trace", self.to_traceparent()
-
-        tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None
-        # `tracestate` will only be `None` if there's no client or no DSN
-        # TODO (kmclb) the above will be true once the feature is no longer
-        # behind a flag
-        if tracestate:
-            yield "tracestate", tracestate
+        yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent()
 
         if self.containing_transaction:
             baggage = self.containing_transaction.get_baggage().serialize()
             if baggage:
-                yield "baggage", baggage
+                yield BAGGAGE_HEADER_NAME, baggage
 
     @classmethod
     def from_traceparent(
@@ -329,11 +330,10 @@ class Span(object):
     ):
         # type: (...) -> Optional[Transaction]
         """
-        DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs)
-
-        Create a Transaction with the given params, then add in data pulled from
-        the given 'sentry-trace' header value before returning the Transaction.
+        DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Transaction.continue_from_headers`.
 
+        Create a `Transaction` with the given params, then add in data pulled from
+        the given 'sentry-trace' header value before returning the `Transaction`.
         """
         logger.warning(
             "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) "
@@ -343,67 +343,30 @@ class Span(object):
         if not traceparent:
             return None
 
-        return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs)
+        return cls.continue_from_headers(
+            {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs
+        )
 
     def to_traceparent(self):
         # type: () -> str
-        sampled = ""
         if self.sampled is True:
             sampled = "1"
-        if self.sampled is False:
+        elif self.sampled is False:
             sampled = "0"
-        return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
-
-    def to_tracestate(self):
-        # type: () -> Optional[str]
-        """
-        Computes the `tracestate` header value using data from the containing
-        transaction.
-
-        If the containing transaction doesn't yet have a `sentry_tracestate`
-        value, this will cause one to be generated and stored.
-
-        If there is no containing transaction, a value will be generated but not
-        stored.
-
-        Returns None if there's no client and/or no DSN.
-        """
-
-        sentry_tracestate = self.get_or_set_sentry_tracestate()
-        third_party_tracestate = (
-            self.containing_transaction._third_party_tracestate
-            if self.containing_transaction
-            else None
-        )
-
-        if not sentry_tracestate:
-            return None
-
-        header_value = sentry_tracestate
-
-        if third_party_tracestate:
-            header_value = header_value + "," + third_party_tracestate
-
-        return header_value
-
-    def get_or_set_sentry_tracestate(self):
-        # type: (Span) -> Optional[str]
-        """
-        Read sentry tracestate off of the span's containing transaction.
-
-        If the transaction doesn't yet have a `_sentry_tracestate` value,
-        compute one and store it.
-        """
-        transaction = self.containing_transaction
+        else:
+            sampled = None
 
-        if transaction:
-            if not transaction._sentry_tracestate:
-                transaction._sentry_tracestate = compute_tracestate_entry(self)
+        traceparent = "%s-%s" % (self.trace_id, self.span_id)
+        if sampled is not None:
+            traceparent += "-%s" % (sampled,)
 
-            return transaction._sentry_tracestate
+        return traceparent
 
-        # orphan span - nowhere to store the value, so just return it
-        return compute_tracestate_entry(self)
+    def to_baggage(self):
+        # type: () -> Optional[Baggage]
+        if self.containing_transaction:
+            return self.containing_transaction.get_baggage()
+        return None
 
     def set_tag(self, key, value):
         # type: (str, Any) -> None
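
`to_traceparent()` now omits the sampled flag entirely while the decision is still deferred, instead of emitting a trailing dash. The three possible header shapes (trace and span ids are illustrative hex values):

    "771a43a4192642f0b136d5159a501700-9c2a6db8c79068a2-1"  # sampled is True
    "771a43a4192642f0b136d5159a501700-9c2a6db8c79068a2-0"  # sampled is False
    "771a43a4192642f0b136d5159a501700-9c2a6db8c79068a2"    # sampled is None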
@@ -419,7 +382,10 @@ class Span(object):
 
     def set_http_status(self, http_status):
         # type: (int) -> None
-        self.set_tag("http.status_code", str(http_status))
+        self.set_tag(
+            "http.status_code", str(http_status)
+        )  # we keep this for backwards compatibility
+        self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status)
 
         if http_status < 400:
             self.set_status("ok")
@@ -454,8 +420,8 @@ class Span(object):
         # type: () -> bool
         return self.status == "ok"
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
         # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
         # to incompatible return types for Span.finish and Transaction.finish.
         if self.timestamp is not None:
@@ -465,8 +431,13 @@ class Span(object):
         hub = hub or self.hub or sentry_sdk.Hub.current
 
         try:
-            duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
-            self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+            if end_timestamp:
+                self.timestamp = end_timestamp
+            else:
+                elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns
+                self.timestamp = self.start_timestamp + timedelta(
+                    microseconds=elapsed / 1000
+                )
         except AttributeError:
             self.timestamp = datetime.utcnow()
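
Span durations are now measured on a monotonic nanosecond clock and converted back to a wall-clock end time. A self-contained sketch of the arithmetic, using `time.perf_counter_ns()` as a stand-in for the SDK's `nanosecond_time()` helper:

    import time
    from datetime import datetime, timedelta

    start_timestamp = datetime.utcnow()
    start_ns = time.perf_counter_ns()   # monotonic, nanosecond resolution

    # ... span body runs ...

    elapsed_ns = time.perf_counter_ns() - start_ns
    end_timestamp = start_timestamp + timedelta(microseconds=elapsed_ns / 1000)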
 
@@ -511,15 +482,6 @@ class Span(object):
         if self.status:
             rv["status"] = self.status
 
-        # if the transaction didn't inherit a tracestate value, and no outgoing
-        # requests - whose need for headers would have caused a tracestate value
-        # to be created - were made as part of the transaction, the transaction
-        # still won't have a tracestate value, so compute one now
-        sentry_tracestate = self.get_or_set_sentry_tracestate()
-
-        if sentry_tracestate:
-            rv["tracestate"] = sentry_tracestate
-
         if self.containing_transaction:
             rv[
                 "dynamic_sampling_context"
@@ -535,25 +497,16 @@ class Transaction(Span):
         "parent_sampled",
         # used to create baggage value for head SDKs in dynamic sampling
         "sample_rate",
-        # the sentry portion of the `tracestate` header used to transmit
-        # correlation context for server-side dynamic sampling, of the form
-        # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the
-        # correlation context data, missing trailing any =
-        "_sentry_tracestate",
-        # tracestate data from other vendors, of the form `dogs=yes,cats=maybe`
-        "_third_party_tracestate",
         "_measurements",
+        "_contexts",
         "_profile",
         "_baggage",
-        "_active_thread_id",
     )
 
     def __init__(
         self,
         name="",  # type: str
         parent_sampled=None,  # type: Optional[bool]
-        sentry_tracestate=None,  # type: Optional[str]
-        third_party_tracestate=None,  # type: Optional[str]
         baggage=None,  # type: Optional[Baggage]
         source=TRANSACTION_SOURCE_CUSTOM,  # type: str
         **kwargs  # type: Any
@@ -568,24 +521,17 @@ class Transaction(Span):
                 "instead of Span(transaction=...)."
             )
             name = kwargs.pop("transaction")
+
         Span.__init__(self, **kwargs)
+
         self.name = name
         self.source = source
         self.sample_rate = None  # type: Optional[float]
         self.parent_sampled = parent_sampled
-        # if tracestate isn't inherited and set here, it will get set lazily,
-        # either the first time an outgoing request needs it for a header or the
-        # first time an event needs it for inclusion in the captured data
-        self._sentry_tracestate = sentry_tracestate
-        self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
-        self._profile = None  # type: Optional[Dict[str, Any]]
+        self._contexts = {}  # type: Dict[str, Any]
+        self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
         self._baggage = baggage
-        # for profiling, we want to know on which thread a transaction is started
-        # to accurately show the active thread in the UI
-        self._active_thread_id = (
-            threading.current_thread().ident
-        )  # used by profiling.py
 
     def __repr__(self):
         # type: () -> str
@@ -603,6 +549,22 @@ class Transaction(Span):
             )
         )
 
+    def __enter__(self):
+        # type: () -> Transaction
+        super(Transaction, self).__enter__()
+
+        if self._profile is not None:
+            self._profile.__enter__()
+
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        if self._profile is not None:
+            self._profile.__exit__(ty, value, tb)
+
+        super(Transaction, self).__exit__(ty, value, tb)
+
     @property
     def containing_transaction(self):
         # type: () -> Transaction
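
The new `__enter__`/`__exit__` overrides tie an attached profile's lifetime to the transaction's `with` block. Usage is unchanged (empty DSN shown so nothing is actually sent):

    import sentry_sdk

    sentry_sdk.init(dsn="", traces_sample_rate=1.0)

    with sentry_sdk.start_transaction(name="nightly-sync") as txn:
        # If profiling attached a Profile to this transaction, it was
        # started on __enter__ and will be stopped on __exit__.
        txn.set_tag("job", "sync")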
@@ -612,8 +574,8 @@ class Transaction(Span):
         # reference.
         return self
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
         if self.timestamp is not None:
             # This transaction is already finished, ignore.
             return None
@@ -633,9 +595,12 @@ class Transaction(Span):
             # exclusively based on sample rate but also traces sampler, but
             # we handle this the same here.
             if client.transport and has_tracing_enabled(client.options):
-                client.transport.record_lost_event(
-                    "sample_rate", data_category="transaction"
-                )
+                if client.monitor and client.monitor.downsample_factor > 1:
+                    reason = "backpressure"
+                else:
+                    reason = "sample_rate"
+
+                client.transport.record_lost_event(reason, data_category="transaction")
 
             return None
 
@@ -645,13 +610,14 @@ class Transaction(Span):
             )
             self.name = "<unlabeled transaction>"
 
-        Span.finish(self, hub)
+        Span.finish(self, hub, end_timestamp)
 
         if not self.sampled:
             # At this point a `sampled = None` should have already been resolved
             # to a concrete decision.
             if self.sampled is None:
                 logger.warning("Discarding transaction without sampling decision.")
+
             return None
 
         finished_spans = [
@@ -666,35 +632,37 @@ class Transaction(Span):
         # to be garbage collected
         self._span_recorder = None
 
+        contexts = {}
+        contexts.update(self._contexts)
+        contexts.update({"trace": self.get_trace_context()})
+
         event = {
             "type": "transaction",
             "transaction": self.name,
             "transaction_info": {"source": self.source},
-            "contexts": {"trace": self.get_trace_context()},
+            "contexts": contexts,
             "tags": self._tags,
             "timestamp": self.timestamp,
             "start_timestamp": self.start_timestamp,
             "spans": finished_spans,
-        }
+        }  # type: Event
 
-        if hub.client is not None and self._profile is not None:
+        if self._profile is not None and self._profile.valid():
             event["profile"] = self._profile
+            self._profile = None
 
-        if has_custom_measurements_enabled():
-            event["measurements"] = self._measurements
+        event["measurements"] = self._measurements
 
         return hub.capture_event(event)
 
     def set_measurement(self, name, value, unit=""):
         # type: (str, float, MeasurementUnit) -> None
-        if not has_custom_measurements_enabled():
-            logger.debug(
-                "[Tracing] Experimental custom_measurements feature is disabled"
-            )
-            return
-
         self._measurements[name] = {"value": value, "unit": unit}
 
+    def set_context(self, key, value):
+        # type: (str, Any) -> None
+        self._contexts[key] = value
+
     def to_json(self):
         # type: () -> Dict[str, Any]
         rv = super(Transaction, self).to_json()
@@ -773,7 +741,7 @@ class Transaction(Span):
         # Since this is coming from the user (or from a function provided by the
         # user), who knows what we might get. (The only valid values are
         # booleans or numbers between 0 and 1.)
-        if not is_valid_sample_rate(sample_rate):
+        if not is_valid_sample_rate(sample_rate, source="Tracing"):
             logger.warning(
                 "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format(
                     transaction_description=transaction_description,
@@ -784,9 +752,12 @@ class Transaction(Span):
 
         self.sample_rate = float(sample_rate)
 
+        if client.monitor:
+            self.sample_rate /= client.monitor.downsample_factor
+
         # if the function returned 0 (or false), or if `traces_sample_rate` is
         # 0, it's a sign the transaction should be dropped
-        if not sample_rate:
+        if not self.sample_rate:
             logger.debug(
                 "[Tracing] Discarding {transaction_description} because {reason}".format(
                     transaction_description=transaction_description,
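
A worked example of the backpressure adjustment above (the factor value is hypothetical; it is maintained by the client's monitor):

    import random

    traces_sample_rate = 0.5   # configured by the user
    downsample_factor = 4      # hypothetical backpressure state

    effective_rate = traces_sample_rate / downsample_factor   # 0.125
    sampled = random.random() < effective_rate   # the dice roll now uses self.sample_rate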
@@ -803,7 +774,7 @@ class Transaction(Span):
         # Now we roll the dice. random.random is inclusive of 0, but not of 1,
         # so strict < is safe here. In case sample_rate is a boolean, cast it
         # to a float (True becomes 1.0 and False becomes 0.0)
-        self.sampled = random.random() < float(sample_rate)
+        self.sampled = random.random() < self.sample_rate
 
         if self.sampled:
             logger.debug(
@@ -815,22 +786,107 @@ class Transaction(Span):
             logger.debug(
                 "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format(
                     transaction_description=transaction_description,
-                    sample_rate=float(sample_rate),
+                    sample_rate=self.sample_rate,
                 )
             )
 
 
+class NoOpSpan(Span):
+    def __repr__(self):
+        # type: () -> str
+        return self.__class__.__name__
+
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> NoOpSpan
+        return NoOpSpan()
+
+    def new_span(self, **kwargs):
+        # type: (**Any) -> NoOpSpan
+        return self.start_child(**kwargs)
+
+    def to_traceparent(self):
+        # type: () -> str
+        return ""
+
+    def to_baggage(self):
+        # type: () -> Optional[Baggage]
+        return None
+
+    def iter_headers(self):
+        # type: () -> Iterator[Tuple[str, str]]
+        return iter(())
+
+    def set_tag(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_data(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_status(self, value):
+        # type: (str) -> None
+        pass
+
+    def set_http_status(self, http_status):
+        # type: (int) -> None
+        pass
+
+    def is_success(self):
+        # type: () -> bool
+        return True
+
+    def to_json(self):
+        # type: () -> Dict[str, Any]
+        return {}
+
+    def get_trace_context(self):
+        # type: () -> Any
+        return {}
+
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
+        pass
+
+
+def trace(func=None):
+    # type: (Any) -> Any
+    """
+    Decorator to start a child span under the existing current transaction.
+    If there is no current transaction, then nothing will be traced.
+
+    .. code-block::
+        :caption: Usage
+
+        import sentry_sdk
+
+        @sentry_sdk.trace
+        def my_function():
+            ...
+
+        @sentry_sdk.trace
+        async def my_async_function():
+            ...
+    """
+    if PY2:
+        from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
+    else:
+        from sentry_sdk.tracing_utils_py3 import start_child_span_decorator
+
+    # This pattern allows usage of both @sentry_sdk.trace and @sentry_sdk.trace(...)
+    # See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278
+    if func:
+        return start_child_span_decorator(func)
+    else:
+        return start_child_span_decorator
+
+
 # Circular imports
 
 from sentry_sdk.tracing_utils import (
     Baggage,
     EnvironHeaders,
-    compute_tracestate_entry,
     extract_sentrytrace_data,
-    extract_tracestate_data,
-    has_tracestate_enabled,
     has_tracing_enabled,
-    is_valid_sample_rate,
     maybe_create_breadcrumbs_from_span,
-    has_custom_measurements_enabled,
 )
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 80bbcc2..eb0d0e7 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -1,23 +1,16 @@
 import re
 import contextlib
-import json
-import math
-
-from numbers import Real
 
 import sentry_sdk
-
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     Dsn,
-    logger,
-    safe_str,
-    to_base64,
+    match_regex_list,
     to_string,
-    from_base64,
 )
 from sentry_sdk._compat import PY2, iteritems
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
 if PY2:
     from collections import Mapping
@@ -26,13 +19,13 @@ else:
     from collections.abc import Mapping
     from urllib.parse import quote, unquote
 
-if MYPY:
+if TYPE_CHECKING:
     import typing
 
-    from typing import Generator
-    from typing import Optional
     from typing import Any
     from typing import Dict
+    from typing import Generator
+    from typing import Optional
     from typing import Union
 
 
@@ -55,27 +48,6 @@ base64_stripped = (
     "([a-zA-Z0-9+/]{2,3})?"
 )
 
-# comma-delimited list of entries of the form `xxx=yyy`
-tracestate_entry = "[^=]+=[^=]+"
-TRACESTATE_ENTRIES_REGEX = re.compile(
-    # one or more xxxxx=yyyy entries
-    "^({te})+"
-    # each entry except the last must be followed by a comma
-    "(,|$)".format(te=tracestate_entry)
-)
-
-# this doesn't check that the value is valid, just that there's something there
-# of the form `sentry=xxxx`
-SENTRY_TRACESTATE_ENTRY_REGEX = re.compile(
-    # either sentry is the first entry or there's stuff immediately before it,
-    # ending in a comma (this prevents matching something like `coolsentry=xxx`)
-    "(?:^|.+,)"
-    # sentry's part, not including the potential comma
-    "(sentry=[^,]*)"
-    # either there's a comma and another vendor's entry or we end
-    "(?:,.+|$)"
-)
-
 
 class EnvironHeaders(Mapping):  # type: ignore
     def __init__(
@@ -109,47 +81,21 @@ class EnvironHeaders(Mapping):  # type: ignore
 
 
 def has_tracing_enabled(options):
-    # type: (Dict[str, Any]) -> bool
+    # type: (Optional[Dict[str, Any]]) -> bool
     """
     Returns True if either traces_sample_rate or traces_sampler is
-    defined, False otherwise.
-    """
-
-    return bool(
-        options.get("traces_sample_rate") is not None
-        or options.get("traces_sampler") is not None
-    )
-
-
-def is_valid_sample_rate(rate):
-    # type: (Any) -> bool
+    defined and enable_tracing is not explicitly set to False.
     """
-    Checks the given sample rate to make sure it is valid type and value (a
-    boolean or a number between 0 and 1, inclusive).
-    """
-
-    # both booleans and NaN are instances of Real, so a) checking for Real
-    # checks for the possibility of a boolean also, and b) we have to check
-    # separately for NaN
-    if not isinstance(rate, Real) or math.isnan(rate):
-        logger.warning(
-            "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
-                rate=rate, type=type(rate)
-            )
-        )
+    if options is None:
         return False
 
-    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
-    rate = float(rate)
-    if rate < 0 or rate > 1:
-        logger.warning(
-            "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
-                rate=rate
-            )
+    return bool(
+        options.get("enable_tracing") is not False
+        and (
+            options.get("traces_sample_rate") is not None
+            or options.get("traces_sampler") is not None
         )
-        return False
-
-    return True
+    )
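
The rewritten predicate treats `enable_tracing` as a veto rather than a prerequisite: tracing stays on unless it is explicitly `False`, provided a rate or sampler exists. Expected results:

    has_tracing_enabled({"traces_sample_rate": 1.0})          # True
    has_tracing_enabled({"traces_sampler": lambda ctx: 0.1})  # True
    has_tracing_enabled({"enable_tracing": False,
                         "traces_sample_rate": 1.0})          # False
    has_tracing_enabled({})                                   # False (no rate, no sampler)
    has_tracing_enabled(None)                                 # False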
 
 
 @contextlib.contextmanager
@@ -161,7 +107,7 @@ def record_sql_queries(
     paramstyle,  # type: Optional[str]
     executemany,  # type: bool
 ):
-    # type: (...) -> Generator[Span, None, None]
+    # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None]
 
     # TODO: Bring back capturing of params by default
     if hub.client and hub.client.options["_experiments"].get(
@@ -189,19 +135,19 @@ def record_sql_queries(
     with capture_internal_exceptions():
         hub.add_breadcrumb(message=query, category="query", data=data)
 
-    with hub.start_span(op="db", description=query) as span:
+    with hub.start_span(op=OP.DB, description=query) as span:
         for k, v in data.items():
             span.set_data(k, v)
         yield span
 
 
 def maybe_create_breadcrumbs_from_span(hub, span):
-    # type: (sentry_sdk.Hub, Span) -> None
-    if span.op == "redis":
+    # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
+    if span.op == OP.DB_REDIS:
         hub.add_breadcrumb(
             message=span.description, type="redis", category="redis", data=span._tags
         )
-    elif span.op == "http":
+    elif span.op == OP.HTTP_CLIENT:
         hub.add_breadcrumb(type="http", category="httplib", data=span._data)
     elif span.op == "subprocess":
         hub.add_breadcrumb(
@@ -213,7 +159,7 @@ def maybe_create_breadcrumbs_from_span(hub, span):
 
 
 def extract_sentrytrace_data(header):
-    # type: (Optional[str]) -> Optional[typing.Mapping[str, Union[str, bool, None]]]
+    # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
     """
     Given a `sentry-trace` header string, return a dictionary of data.
     """
@@ -244,143 +190,6 @@ def extract_sentrytrace_data(header):
     }
 
 
-def extract_tracestate_data(header):
-    # type: (Optional[str]) -> typing.Mapping[str, Optional[str]]
-    """
-    Extracts the sentry tracestate value and any third-party data from the given
-    tracestate header, returning a dictionary of data.
-    """
-    sentry_entry = third_party_entry = None
-    before = after = ""
-
-    if header:
-        # find sentry's entry, if any
-        sentry_match = SENTRY_TRACESTATE_ENTRY_REGEX.search(header)
-
-        if sentry_match:
-            sentry_entry = sentry_match.group(1)
-
-            # remove the commas after the split so we don't end up with
-            # `xxx=yyy,,zzz=qqq` (double commas) when we put them back together
-            before, after = map(lambda s: s.strip(","), header.split(sentry_entry))
-
-            # extract sentry's value from its entry and test to make sure it's
-            # valid; if it isn't, discard the entire entry so that a new one
-            # will be created
-            sentry_value = sentry_entry.replace("sentry=", "")
-            if not re.search("^{b64}$".format(b64=base64_stripped), sentry_value):
-                sentry_entry = None
-        else:
-            after = header
-
-        # if either part is invalid or empty, remove it before gluing them together
-        third_party_entry = (
-            ",".join(filter(TRACESTATE_ENTRIES_REGEX.search, [before, after])) or None
-        )
-
-    return {
-        "sentry_tracestate": sentry_entry,
-        "third_party_tracestate": third_party_entry,
-    }
-
-
-def compute_tracestate_value(data):
-    # type: (typing.Mapping[str, str]) -> str
-    """
-    Computes a new tracestate value using the given data.
-
-    Note: Returns just the base64-encoded data, NOT the full `sentry=...`
-    tracestate entry.
-    """
-
-    tracestate_json = json.dumps(data, default=safe_str)
-
-    # Base64-encoded strings always come out with a length which is a multiple
-    # of 4. In order to achieve this, the end is padded with one or more `=`
-    # signs. Because the tracestate standard calls for using `=` signs between
-    # vendor name and value (`sentry=xxx,dogsaregreat=yyy`), to avoid confusion
-    # we strip the `=`
-    return (to_base64(tracestate_json) or "").rstrip("=")
-
-
-def compute_tracestate_entry(span):
-    # type: (Span) -> Optional[str]
-    """
-    Computes a new sentry tracestate for the span. Includes the `sentry=`.
-
-    Will return `None` if there's no client and/or no DSN.
-    """
-    data = {}
-
-    hub = span.hub or sentry_sdk.Hub.current
-
-    client = hub.client
-    scope = hub.scope
-
-    if client and client.options.get("dsn"):
-        options = client.options
-        user = scope._user
-
-        data = {
-            "trace_id": span.trace_id,
-            "environment": options["environment"],
-            "release": options.get("release"),
-            "public_key": Dsn(options["dsn"]).public_key,
-        }
-
-        if user and (user.get("id") or user.get("segment")):
-            user_data = {}
-
-            if user.get("id"):
-                user_data["id"] = user["id"]
-
-            if user.get("segment"):
-                user_data["segment"] = user["segment"]
-
-            data["user"] = user_data
-
-        if span.containing_transaction:
-            data["transaction"] = span.containing_transaction.name
-
-        return "sentry=" + compute_tracestate_value(data)
-
-    return None
-
-
-def reinflate_tracestate(encoded_tracestate):
-    # type: (str) -> typing.Optional[Mapping[str, str]]
-    """
-    Given a sentry tracestate value in its encoded form, translate it back into
-    a dictionary of data.
-    """
-    inflated_tracestate = None
-
-    if encoded_tracestate:
-        # Base64-encoded strings always come out with a length which is a
-        # multiple of 4. In order to achieve this, the end is padded with one or
-        # more `=` signs. Because the tracestate standard calls for using `=`
-        # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`),
-        # to avoid confusion we strip the `=` when the data is initially
-        # encoded. Python's decoding function requires they be put back.
-        # Fortunately, it doesn't complain if there are too many, so we just
-        # attach two `=` on spec (there will never be more than 2, see
-        # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding).
-        tracestate_json = from_base64(encoded_tracestate + "==")
-
-        try:
-            assert tracestate_json is not None
-            inflated_tracestate = json.loads(tracestate_json)
-        except Exception as err:
-            logger.warning(
-                (
-                    "Unable to attach tracestate data to envelope header: {err}"
-                    + "\nTracestate value is {encoded_tracestate}"
-                ).format(err=err, encoded_tracestate=encoded_tracestate),
-            )
-
-    return inflated_tracestate
-
-
 def _format_sql(cursor, sql):
     # type: (Any, str) -> Optional[str]
 
@@ -401,40 +210,12 @@ def _format_sql(cursor, sql):
     return real_sql or to_string(sql)
 
 
-def has_tracestate_enabled(span=None):
-    # type: (Optional[Span]) -> bool
-
-    client = ((span and span.hub) or sentry_sdk.Hub.current).client
-    options = client and client.options
-
-    return bool(options and options["_experiments"].get("propagate_tracestate"))
-
-
-def has_custom_measurements_enabled():
-    # type: () -> bool
-    client = sentry_sdk.Hub.current.client
-    options = client and client.options
-    return bool(options and options["_experiments"].get("custom_measurements"))
-
-
 class Baggage(object):
     __slots__ = ("sentry_items", "third_party_items", "mutable")
 
     SENTRY_PREFIX = "sentry-"
     SENTRY_PREFIX_REGEX = re.compile("^sentry-")
 
-    # DynamicSamplingContext
-    DSC_KEYS = [
-        "trace_id",
-        "public_key",
-        "sample_rate",
-        "release",
-        "environment",
-        "transaction",
-        "user_id",
-        "user_segment",
-    ]
-
     def __init__(
         self,
         sentry_items,  # type: Dict[str, str]
@@ -472,9 +253,46 @@ class Baggage(object):
 
         return Baggage(sentry_items, third_party_items, mutable)
 
+    @classmethod
+    def from_options(cls, scope):
+        # type: (sentry_sdk.scope.Scope) -> Optional[Baggage]
+
+        sentry_items = {}  # type: Dict[str, str]
+        third_party_items = ""
+        mutable = False
+
+        client = sentry_sdk.Hub.current.client
+
+        if client is None or scope._propagation_context is None:
+            return Baggage(sentry_items)
+
+        options = client.options
+        propagation_context = scope._propagation_context
+
+        if propagation_context is not None and "trace_id" in propagation_context:
+            sentry_items["trace_id"] = propagation_context["trace_id"]
+
+        if options.get("environment"):
+            sentry_items["environment"] = options["environment"]
+
+        if options.get("release"):
+            sentry_items["release"] = options["release"]
+
+        if options.get("dsn"):
+            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
+
+        if options.get("traces_sample_rate"):
+            sentry_items["sample_rate"] = options["traces_sample_rate"]
+
+        user = (scope and scope._user) or {}
+        if user.get("segment"):
+            sentry_items["user_segment"] = user["segment"]
+
+        return Baggage(sentry_items, third_party_items, mutable)
+
     @classmethod
     def populate_from_transaction(cls, transaction):
-        # type: (Transaction) -> Baggage
+        # type: (sentry_sdk.tracing.Transaction) -> Baggage
         """
         Populate fresh baggage entry with sentry_items and make it immutable
         if this is the head SDK which originates traces.
@@ -512,6 +330,9 @@ class Baggage(object):
         if transaction.sample_rate is not None:
             sentry_items["sample_rate"] = str(transaction.sample_rate)
 
+        if transaction.sampled is not None:
+            sentry_items["sampled"] = "true" if transaction.sampled else "false"
+
         # there's an existing baggage but it was mutable,
         # which is why we are creating this new baggage.
         # However, if by chance the user put some sentry items in there, give them precedence.
@@ -528,10 +349,8 @@ class Baggage(object):
         # type: () -> Dict[str, str]
         header = {}
 
-        for key in Baggage.DSC_KEYS:
-            item = self.sentry_items.get(key)
-            if item:
-                header[key] = item
+        for key, item in iteritems(self.sentry_items):
+            header[key] = item
 
         return header
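
`dynamic_sampling_context()` used to copy only the fixed `DSC_KEYS` allow-list (deleted above); it now forwards every stored sentry item. Illustration:

    baggage = Baggage({
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "sample_rate": "0.25",
        "sampled": "true",   # newly propagated in this release
    })
    baggage.dynamic_sampling_context()
    # -> {'trace_id': '771a43a4192642f0b136d5159a501700',
    #     'sample_rate': '0.25', 'sampled': 'true'}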
 
@@ -550,8 +369,41 @@ class Baggage(object):
         return ",".join(items)
 
 
+def should_propagate_trace(hub, url):
+    # type: (sentry_sdk.Hub, str) -> bool
+    """
+    Returns True if url matches trace_propagation_targets configured in the given hub. Otherwise, returns False.
+    """
+    client = hub.client  # type: Any
+    trace_propagation_targets = client.options["trace_propagation_targets"]
+
+    if client.transport and client.transport.parsed_dsn:
+        dsn_url = client.transport.parsed_dsn.netloc
+    else:
+        dsn_url = None
+
+    is_request_to_sentry = dsn_url and dsn_url in url
+    if is_request_to_sentry:
+        return False
+
+    return match_regex_list(url, trace_propagation_targets, substring_matching=True)
+
+
+def normalize_incoming_data(incoming_data):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    """
+    Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped of known prefixes.
+    """
+    data = {}
+    for key, value in incoming_data.items():
+        if key.startswith("HTTP_"):
+            key = key[5:]
+
+        key = key.replace("_", "-").lower()
+        data[key] = value
+
+    return data
+
+
 # Circular imports
 from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES
-
-if MYPY:
-    from sentry_sdk.tracing import Span, Transaction
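
A sketch of `should_propagate_trace()` in action, assuming a client configured with `trace_propagation_targets=["api.internal"]` and a typical ingest DSN:

    hub = sentry_sdk.Hub.current

    should_propagate_trace(hub, "https://api.internal/v1/users")
    # True: substring-matched against trace_propagation_targets

    should_propagate_trace(hub, "https://o0.ingest.sentry.io/api/0/envelope/")
    # False: requests to the DSN host itself never get trace headers

    should_propagate_trace(hub, "https://third-party.example/")
    # False: no target matches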
diff --git a/sentry_sdk/tracing_utils_py2.py b/sentry_sdk/tracing_utils_py2.py
new file mode 100644
index 0000000..a251ab4
--- /dev/null
+++ b/sentry_sdk/tracing_utils_py2.py
@@ -0,0 +1,45 @@
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import get_current_span
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import logger, qualname_from_function
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+def start_child_span_decorator(func):
+    # type: (Any) -> Any
+    """
+    Decorator to add child spans for functions.
+
+    This is the Python 2 compatible version of the decorator.
+    Duplicated code from ``sentry_sdk.tracing_utils_py3.start_child_span_decorator``.
+
+    See also ``sentry_sdk.tracing.trace()``.
+    """
+
+    @wraps(func)
+    def func_with_tracing(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+
+        span = get_current_span(sentry_sdk.Hub.current)
+
+        if span is None:
+            logger.warning(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                qualname_from_function(func),
+            )
+            return func(*args, **kwargs)
+
+        with span.start_child(
+            op=OP.FUNCTION,
+            description=qualname_from_function(func),
+        ):
+            return func(*args, **kwargs)
+
+    return func_with_tracing
diff --git a/sentry_sdk/tracing_utils_py3.py b/sentry_sdk/tracing_utils_py3.py
new file mode 100644
index 0000000..d58d5f7
--- /dev/null
+++ b/sentry_sdk/tracing_utils_py3.py
@@ -0,0 +1,72 @@
+import inspect
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import get_current_span
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import logger, qualname_from_function
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+def start_child_span_decorator(func):
+    # type: (Any) -> Any
+    """
+    Decorator to add child spans for functions.
+
+    This is the Python 3 compatible version of the decorator.
+    For Python 2 there is duplicated code here: ``sentry_sdk.tracing_utils_py2.start_child_span_decorator()``.
+
+    See also ``sentry_sdk.tracing.trace()``.
+    """
+
+    # Asynchronous case
+    if inspect.iscoroutinefunction(func):
+
+        @wraps(func)
+        async def func_with_tracing(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+
+            span = get_current_span(sentry_sdk.Hub.current)
+
+            if span is None:
+                logger.warning(
+                    "Can not create a child span for %s. "
+                    "Please start a Sentry transaction before calling this function.",
+                    qualname_from_function(func),
+                )
+                return await func(*args, **kwargs)
+
+            with span.start_child(
+                op=OP.FUNCTION,
+                description=qualname_from_function(func),
+            ):
+                return await func(*args, **kwargs)
+
+    # Synchronous case
+    else:
+
+        @wraps(func)
+        def func_with_tracing(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+
+            span = get_current_span(sentry_sdk.Hub.current)
+
+            if span is None:
+                logger.warning(
+                    "Can not create a child span for %s. "
+                    "Please start a Sentry transaction before calling this function.",
+                    qualname_from_function(func),
+                )
+                return func(*args, **kwargs)
+
+            with span.start_child(
+                op=OP.FUNCTION,
+                description=qualname_from_function(func),
+            ):
+                return func(*args, **kwargs)
+
+    return func_with_tracing
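
Both branches wrap the function the same way; only the `async`/`await` plumbing differs. An end-to-end usage sketch (empty DSN so nothing is sent):

    import sentry_sdk

    sentry_sdk.init(dsn="", traces_sample_rate=1.0)

    @sentry_sdk.trace
    def resize_image(path):
        return path.upper()

    with sentry_sdk.start_transaction(name="upload"):
        resize_image("photo.jpg")   # traced: child span with op="function"

    resize_image("photo.jpg")       # no active span: warns, runs untraced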
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
index fca6fa8..73defe9 100644
--- a/sentry_sdk/transport.py
+++ b/sentry_sdk/transport.py
@@ -1,7 +1,7 @@
 from __future__ import print_function
 
 import io
-import urllib3  # type: ignore
+import urllib3
 import certifi
 import gzip
 import time
@@ -13,9 +13,9 @@ from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dump
 from sentry_sdk.worker import BackgroundWorker
 from sentry_sdk.envelope import Envelope, Item, PayloadRef
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -26,7 +26,7 @@ if MYPY:
     from typing import Union
     from typing import DefaultDict
 
-    from urllib3.poolmanager import PoolManager  # type: ignore
+    from urllib3.poolmanager import PoolManager
     from urllib3.poolmanager import ProxyManager
 
     from sentry_sdk._types import Event, EndpointType
@@ -107,6 +107,10 @@ class Transport(object):
         """
         return None
 
+    def is_healthy(self):
+        # type: () -> bool
+        return True
+
     def __del__(self):
         # type: () -> None
         try:
@@ -156,6 +160,7 @@ class HttpTransport(Transport):
             http_proxy=options["http_proxy"],
             https_proxy=options["https_proxy"],
             ca_certs=options["ca_certs"],
+            proxy_headers=options["proxy_headers"],
         )
 
         from sentry_sdk import Hub
@@ -185,7 +190,7 @@ class HttpTransport(Transport):
         self._discarded_events[data_category, reason] += quantity
 
     def _update_rate_limits(self, response):
-        # type: (urllib3.HTTPResponse) -> None
+        # type: (urllib3.BaseHTTPResponse) -> None
 
         # new sentries with more rate limit insights.  We honor this header
         # no matter of the status code to update our internal rate limits.
@@ -310,6 +315,18 @@ class HttpTransport(Transport):
 
         return _disabled(category) or _disabled(None)
 
+    def _is_rate_limited(self):
+        # type: () -> bool
+        return any(ts > datetime.utcnow() for ts in self._disabled_until.values())
+
+    def _is_worker_full(self):
+        # type: () -> bool
+        return self._worker.full()
+
+    def is_healthy(self):
+        # type: () -> bool
+        return not (self._is_worker_full() or self._is_rate_limited())
+
     def _send_event(
         self, event  # type: Event
     ):
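
`is_healthy()` folds the two checks together; it is the signal the new client-side monitor can poll to drive backpressure. A self-contained mirror of the logic:

    from datetime import datetime, timedelta

    def is_healthy(worker_full, disabled_until):
        # Healthy only when the background worker still has queue capacity
        # and no rate limit is currently in force.
        rate_limited = any(ts > datetime.utcnow() for ts in disabled_until.values())
        return not (worker_full or rate_limited)

    print(is_healthy(False, {}))  # True
    print(is_healthy(False, {"error": datetime.utcnow() + timedelta(seconds=30)}))  # False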
@@ -420,6 +437,7 @@ class HttpTransport(Transport):
         http_proxy,  # type: Optional[str]
         https_proxy,  # type: Optional[str]
         ca_certs,  # type: Optional[Any]
+        proxy_headers,  # type: Optional[Dict[str, str]]
     ):
         # type: (...) -> Union[PoolManager, ProxyManager]
         proxy = None
@@ -436,7 +454,27 @@ class HttpTransport(Transport):
         opts = self._get_pool_options(ca_certs)
 
         if proxy:
-            return urllib3.ProxyManager(proxy, **opts)
+            if proxy_headers:
+                opts["proxy_headers"] = proxy_headers
+
+            if proxy.startswith("socks"):
+                use_socks_proxy = True
+                try:
+                    # Check if the PySocks dependency is available
+                    from urllib3.contrib.socks import SOCKSProxyManager
+                except ImportError:
+                    use_socks_proxy = False
+                    logger.warning(
+                        "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.",
+                        proxy,
+                    )
+
+                if use_socks_proxy:
+                    return SOCKSProxyManager(proxy, **opts)
+                else:
+                    return urllib3.PoolManager(**opts)
+            else:
+                return urllib3.ProxyManager(proxy, **opts)
         else:
             return urllib3.PoolManager(**opts)
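
With this in place a SOCKS proxy is configured through the ordinary proxy options; the `socks` URL scheme is what routes pool creation to `SOCKSProxyManager`. A hypothetical setup (requires PySocks, e.g. `pip install "urllib3[socks]"`):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        http_proxy="socks5://localhost:1080",
    )

If PySocks is missing, the warning above fires and the SDK falls back to a direct `PoolManager` connection. The new `proxy_headers` option is likewise forwarded into the pool options when set.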
 
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 5e74885..475652c 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -2,47 +2,85 @@ import base64
 import json
 import linecache
 import logging
+import math
 import os
+import re
+import subprocess
 import sys
 import threading
-import subprocess
-import re
 import time
+from collections import namedtuple
+from copy import copy
+from decimal import Decimal
+from numbers import Real
+
+try:
+    # Python 3
+    from urllib.parse import parse_qs
+    from urllib.parse import unquote
+    from urllib.parse import urlencode
+    from urllib.parse import urlsplit
+    from urllib.parse import urlunsplit
+
+except ImportError:
+    # Python 2
+    from cgi import parse_qs  # type: ignore
+    from urllib import unquote  # type: ignore
+    from urllib import urlencode  # type: ignore
+    from urlparse import urlsplit  # type: ignore
+    from urlparse import urlunsplit  # type: ignore
+
+try:
+    # Python 3.11
+    from builtins import BaseExceptionGroup
+except ImportError:
+    # Python 3.10 and below
+    BaseExceptionGroup = None  # type: ignore
 
 from datetime import datetime
+from functools import partial
 
-import sentry_sdk
-from sentry_sdk._compat import urlparse, text_type, implements_str, PY2, PY33, PY37
+try:
+    from functools import partialmethod
 
-from sentry_sdk._types import MYPY
+    _PARTIALMETHOD_AVAILABLE = True
+except ImportError:
+    _PARTIALMETHOD_AVAILABLE = False
 
-if MYPY:
-    from types import FrameType
-    from types import TracebackType
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import ContextManager
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Tuple
-    from typing import Union
-    from typing import Type
+import sentry_sdk
+from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH
+
+if TYPE_CHECKING:
+    from types import FrameType, TracebackType
+    from typing import (
+        Any,
+        Callable,
+        ContextManager,
+        Dict,
+        Iterator,
+        List,
+        Optional,
+        Set,
+        Tuple,
+        Type,
+        Union,
+    )
 
-    from sentry_sdk._types import ExcInfo, EndpointType
+    from sentry_sdk._types import EndpointType, ExcInfo
 
 
 epoch = datetime(1970, 1, 1)
 
-
 # The logger is created here but initialized in the debug support module
 logger = logging.getLogger("sentry_sdk.errors")
 
-MAX_STRING_LENGTH = 1024
+
 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
 
+SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
+
 
 def json_dumps(data):
     # type: (Any) -> bytes
@@ -95,6 +133,40 @@ def get_default_release():
     return None
 
 
+def get_sdk_name(installed_integrations):
+    # type: (List[str]) -> str
+    """Return the SDK name including the name of the used web framework."""
+
+    # Note: We cannot use, for example, sentry_sdk.integrations.django.DjangoIntegration.identifier
+    # here because if Django is not installed the integration is not accessible.
+    framework_integrations = [
+        "django",
+        "flask",
+        "fastapi",
+        "bottle",
+        "falcon",
+        "quart",
+        "sanic",
+        "starlette",
+        "chalice",
+        "serverless",
+        "pyramid",
+        "tornado",
+        "aiohttp",
+        "aws_lambda",
+        "gcp",
+        "beam",
+        "asgi",
+        "wsgi",
+    ]
+
+    for integration in framework_integrations:
+        if integration in installed_integrations:
+            return "sentry.python.{}".format(integration)
+
+    return "sentry.python"
+
+
 class CaptureInternalException(object):
     __slots__ = ()
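
get_sdk_name() returns on the first match in the priority-ordered list above, so an application with several integrations reports the most specific framework. A quick sketch of the expected results:

from sentry_sdk.utils import get_sdk_name

print(get_sdk_name(["logging", "django", "redis"]))  # "sentry.python.django"
print(get_sdk_name(["logging", "redis"]))            # "sentry.python"
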
 
@@ -316,7 +388,7 @@ class AnnotatedValue(object):
     @classmethod
     def removed_because_over_size_limit(cls):
         # type: () -> AnnotatedValue
-        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the request_bodies sdk option)"""
+        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the max_request_body_size sdk option)"""
         return AnnotatedValue(
             value="",
             metadata={
@@ -329,8 +401,24 @@ class AnnotatedValue(object):
             },
         )
 
+    @classmethod
+    def substituted_because_contains_sensitive_data(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because it contained sensitive information."""
+        return AnnotatedValue(
+            value=SENSITIVE_DATA_SUBSTITUTE,
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
+                        "s",  # The fields original value was substituted
+                    ]
+                ]
+            },
+        )
+
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import TypeVar
 
     T = TypeVar("T")
@@ -381,6 +469,7 @@ def iter_stacks(tb):
 def get_lines_from_file(
     filename,  # type: str
     lineno,  # type: int
+    max_length=None,  # type: Optional[int]
     loader=None,  # type: Optional[Any]
     module=None,  # type: Optional[str]
 ):
@@ -409,11 +498,12 @@ def get_lines_from_file(
 
     try:
         pre_context = [
-            strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
+            strip_string(line.strip("\r\n"), max_length=max_length)
+            for line in source[lower_bound:lineno]
         ]
-        context_line = strip_string(source[lineno].strip("\r\n"))
+        context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)
         post_context = [
-            strip_string(line.strip("\r\n"))
+            strip_string(line.strip("\r\n"), max_length=max_length)
             for line in source[(lineno + 1) : upper_bound]
         ]
         return pre_context, context_line, post_context
@@ -425,6 +515,7 @@ def get_lines_from_file(
 def get_source_context(
     frame,  # type: FrameType
     tb_lineno,  # type: int
+    max_value_length=None,  # type: Optional[int]
 ):
     # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
     try:
@@ -441,7 +532,9 @@ def get_source_context(
         loader = None
     lineno = tb_lineno - 1
     if lineno is not None and abs_path:
-        return get_lines_from_file(abs_path, lineno, loader, module)
+        return get_lines_from_file(
+            abs_path, lineno, max_value_length, loader=loader, module=module
+        )
     return [], None, []
 
 
@@ -514,8 +607,14 @@ def filename_for_module(module, abs_path):
         return abs_path
 
 
-def serialize_frame(frame, tb_lineno=None, with_locals=True):
-    # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
+def serialize_frame(
+    frame,
+    tb_lineno=None,
+    include_local_variables=True,
+    include_source_context=True,
+    max_value_length=None,
+):
+    # type: (FrameType, Optional[int], bool, bool, Optional[int]) -> Dict[str, Any]
     f_code = getattr(frame, "f_code", None)
     if not f_code:
         abs_path = None
@@ -531,33 +630,45 @@ def serialize_frame(frame, tb_lineno=None, with_locals=True):
     if tb_lineno is None:
         tb_lineno = frame.f_lineno
 
-    pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
-
     rv = {
         "filename": filename_for_module(module, abs_path) or None,
         "abs_path": os.path.abspath(abs_path) if abs_path else None,
         "function": function or "<unknown>",
         "module": module,
         "lineno": tb_lineno,
-        "pre_context": pre_context,
-        "context_line": context_line,
-        "post_context": post_context,
     }  # type: Dict[str, Any]
-    if with_locals:
-        rv["vars"] = frame.f_locals
+
+    if include_source_context:
+        rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context(
+            frame, tb_lineno, max_value_length
+        )
+
+    if include_local_variables:
+        rv["vars"] = copy(frame.f_locals)
 
     return rv
 
 
-def current_stacktrace(with_locals=True):
-    # type: (bool) -> Any
+def current_stacktrace(
+    include_local_variables=True,  # type: bool
+    include_source_context=True,  # type: bool
+    max_value_length=None,  # type: Optional[int]
+):
+    # type: (...) -> Dict[str, Any]
     __tracebackhide__ = True
     frames = []
 
     f = sys._getframe()  # type: Optional[FrameType]
     while f is not None:
         if not should_hide_frame(f):
-            frames.append(serialize_frame(f, with_locals=with_locals))
+            frames.append(
+                serialize_frame(
+                    f,
+                    include_local_variables=include_local_variables,
+                    include_source_context=include_source_context,
+                    max_value_length=max_value_length,
+                )
+            )
         f = f.f_back
 
     frames.reverse()
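
The renamed options make it possible to capture a stack trace without local variables or source context and to cap string lengths. A sketch, assuming the returned dict keeps the usual {"frames": [...]} shape:

from sentry_sdk.utils import current_stacktrace

stack = current_stacktrace(
    include_local_variables=False,  # replaces the old with_locals flag
    include_source_context=False,
    max_value_length=1024,
)
# Frames still carry filename/function/module/lineno, but no "vars" and
# no pre_context/context_line/post_context entries.
print(stack["frames"][-1]["function"])
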
@@ -576,40 +687,79 @@ def single_exception_from_error_tuple(
     tb,  # type: Optional[TracebackType]
     client_options=None,  # type: Optional[Dict[str, Any]]
     mechanism=None,  # type: Optional[Dict[str, Any]]
+    exception_id=None,  # type: Optional[int]
+    parent_id=None,  # type: Optional[int]
+    source=None,  # type: Optional[str]
 ):
     # type: (...) -> Dict[str, Any]
+    """
+    Creates a dict that goes into the event's `exception.values` list and is ingestible by Sentry.
+
+    See the Exception Interface documentation for more details:
+    https://develop.sentry.dev/sdk/event-payloads/exception/
+    """
+    exception_value = {}  # type: Dict[str, Any]
+    exception_value["mechanism"] = (
+        mechanism.copy() if mechanism else {"type": "generic", "handled": True}
+    )
+    if exception_id is not None:
+        exception_value["mechanism"]["exception_id"] = exception_id
+
     if exc_value is not None:
         errno = get_errno(exc_value)
     else:
         errno = None
 
     if errno is not None:
-        mechanism = mechanism or {"type": "generic"}
-        mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
-            "number", errno
-        )
+        exception_value["mechanism"].setdefault("meta", {}).setdefault(
+            "errno", {}
+        ).setdefault("number", errno)
+
+    if source is not None:
+        exception_value["mechanism"]["source"] = source
+
+    is_root_exception = exception_id == 0
+    if not is_root_exception and parent_id is not None:
+        exception_value["mechanism"]["parent_id"] = parent_id
+        exception_value["mechanism"]["type"] = "chained"
+
+    if is_root_exception and "type" not in exception_value["mechanism"]:
+        exception_value["mechanism"]["type"] = "generic"
+
+    is_exception_group = BaseExceptionGroup is not None and isinstance(
+        exc_value, BaseExceptionGroup
+    )
+    if is_exception_group:
+        exception_value["mechanism"]["is_exception_group"] = True
+
+    exception_value["module"] = get_type_module(exc_type)
+    exception_value["type"] = get_type_name(exc_type)
+    exception_value["value"] = getattr(exc_value, "message", safe_str(exc_value))
 
     if client_options is None:
-        with_locals = True
+        include_local_variables = True
+        include_source_context = True
+        max_value_length = DEFAULT_MAX_VALUE_LENGTH  # fallback
     else:
-        with_locals = client_options["with_locals"]
+        include_local_variables = client_options["include_local_variables"]
+        include_source_context = client_options["include_source_context"]
+        max_value_length = client_options["max_value_length"]
 
     frames = [
-        serialize_frame(tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals)
+        serialize_frame(
+            tb.tb_frame,
+            tb_lineno=tb.tb_lineno,
+            include_local_variables=include_local_variables,
+            include_source_context=include_source_context,
+            max_value_length=max_value_length,
+        )
         for tb in iter_stacks(tb)
     ]
 
-    rv = {
-        "module": get_type_module(exc_type),
-        "type": get_type_name(exc_type),
-        "value": safe_str(exc_value),
-        "mechanism": mechanism,
-    }
-
     if frames:
-        rv["stacktrace"] = {"frames": frames}
+        exception_value["stacktrace"] = {"frames": frames}
 
-    return rv
+    return exception_value
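
Each entry produced for exception.values now carries its mechanism up front, including the new exception_id. A runnable sketch:

import sys

from sentry_sdk.utils import single_exception_from_error_tuple

try:
    raise ValueError("bad input")
except ValueError:
    exc_type, exc_value, tb = sys.exc_info()

entry = single_exception_from_error_tuple(exc_type, exc_value, tb, exception_id=0)
print(entry["type"], entry["value"])  # ValueError bad input
print(entry["mechanism"])  # {'type': 'generic', 'handled': True, 'exception_id': 0}
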
 
 
 HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
@@ -653,6 +803,102 @@ else:
         yield exc_info
 
 
+def exceptions_from_error(
+    exc_type,  # type: Optional[type]
+    exc_value,  # type: Optional[BaseException]
+    tb,  # type: Optional[TracebackType]
+    client_options=None,  # type: Optional[Dict[str, Any]]
+    mechanism=None,  # type: Optional[Dict[str, Any]]
+    exception_id=0,  # type: int
+    parent_id=0,  # type: int
+    source=None,  # type: Optional[str]
+):
+    # type: (...) -> Tuple[int, List[Dict[str, Any]]]
+    """
+    Creates the list of exceptions.
+    This can include chained exceptions and exceptions from an ExceptionGroup.
+
+    See the Exception Interface documentation for more details:
+    https://develop.sentry.dev/sdk/event-payloads/exception/
+    """
+
+    parent = single_exception_from_error_tuple(
+        exc_type=exc_type,
+        exc_value=exc_value,
+        tb=tb,
+        client_options=client_options,
+        mechanism=mechanism,
+        exception_id=exception_id,
+        parent_id=parent_id,
+        source=source,
+    )
+    exceptions = [parent]
+
+    parent_id = exception_id
+    exception_id += 1
+
+    should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__  # type: ignore
+    if should_suppress_context:
+        # Add the direct cause.
+        # The field `__cause__` is set when an exception is raised with the `from` keyword.
+        exception_has_cause = (
+            exc_value
+            and hasattr(exc_value, "__cause__")
+            and exc_value.__cause__ is not None
+        )
+        if exception_has_cause:
+            cause = exc_value.__cause__  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(cause),
+                exc_value=cause,
+                tb=getattr(cause, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                source="__cause__",
+            )
+            exceptions.extend(child_exceptions)
+
+    else:
+        # Add indirect cause.
+        # The field `__context__` is assigned if another exception occurs while handling the exception.
+        exception_has_context = (
+            exc_value
+            and hasattr(exc_value, "__context__")
+            and exc_value.__context__ is not None
+        )
+        if exception_has_context:
+            context = exc_value.__context__  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(context),
+                exc_value=context,
+                tb=getattr(context, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                source="__context__",
+            )
+            exceptions.extend(child_exceptions)
+
+    # Add exceptions from an ExceptionGroup.
+    is_exception_group = exc_value and hasattr(exc_value, "exceptions")
+    if is_exception_group:
+        for idx, e in enumerate(exc_value.exceptions):  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(e),
+                exc_value=e,
+                tb=getattr(e, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                parent_id=parent_id,
+                source="exceptions[%s]" % idx,
+            )
+            exceptions.extend(child_exceptions)
+
+    return (exception_id, exceptions)
+
+
 def exceptions_from_error_tuple(
     exc_info,  # type: ExcInfo
     client_options=None,  # type: Optional[Dict[str, Any]]
@@ -660,17 +906,34 @@ def exceptions_from_error_tuple(
 ):
     # type: (...) -> List[Dict[str, Any]]
     exc_type, exc_value, tb = exc_info
-    rv = []
-    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
-        rv.append(
-            single_exception_from_error_tuple(
-                exc_type, exc_value, tb, client_options, mechanism
-            )
+
+    is_exception_group = BaseExceptionGroup is not None and isinstance(
+        exc_value, BaseExceptionGroup
+    )
+
+    if is_exception_group:
+        (_, exceptions) = exceptions_from_error(
+            exc_type=exc_type,
+            exc_value=exc_value,
+            tb=tb,
+            client_options=client_options,
+            mechanism=mechanism,
+            exception_id=0,
+            parent_id=0,
         )
 
-    rv.reverse()
+    else:
+        exceptions = []
+        for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+            exceptions.append(
+                single_exception_from_error_tuple(
+                    exc_type, exc_value, tb, client_options, mechanism
+                )
+            )
+
+    exceptions.reverse()
 
-    return rv
+    return exceptions
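
exceptions_from_error() recurses through __cause__/__context__ (and, on Python 3.11+, through ExceptionGroup.exceptions), assigning each entry an exception_id and linking children to parents. A sketch with a chained exception:

import sys

from sentry_sdk.utils import exceptions_from_error

try:
    try:
        1 / 0
    except ZeroDivisionError as e:
        raise ValueError("conversion failed") from e
except ValueError:
    exc_type, exc_value, tb = sys.exc_info()

_, entries = exceptions_from_error(exc_type=exc_type, exc_value=exc_value, tb=tb)
# entries[0] is the ValueError (exception_id 0); entries[1] is the
# ZeroDivisionError reached via __cause__, so its mechanism carries
# parent_id 0, type "chained" and source "__cause__".
print([e["type"] for e in entries])  # ['ValueError', 'ZeroDivisionError']
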
 
 
 def to_string(value):
@@ -702,44 +965,54 @@ def iter_event_frames(event):
             yield frame
 
 
-def handle_in_app(event, in_app_exclude=None, in_app_include=None):
-    # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]]) -> Dict[str, Any]
+def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
+    # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]], Optional[str]) -> Dict[str, Any]
     for stacktrace in iter_event_stacktraces(event):
-        handle_in_app_impl(
+        set_in_app_in_frames(
             stacktrace.get("frames"),
             in_app_exclude=in_app_exclude,
             in_app_include=in_app_include,
+            project_root=project_root,
         )
 
     return event
 
 
-def handle_in_app_impl(frames, in_app_exclude, in_app_include):
-    # type: (Any, Optional[List[str]], Optional[List[str]]) -> Optional[Any]
+def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None):
+    # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any]
     if not frames:
         return None
 
-    any_in_app = False
     for frame in frames:
-        in_app = frame.get("in_app")
-        if in_app is not None:
-            if in_app:
-                any_in_app = True
+        # if frame has already been marked as in_app, skip it
+        current_in_app = frame.get("in_app")
+        if current_in_app is not None:
             continue
 
         module = frame.get("module")
-        if not module:
-            continue
-        elif _module_in_set(module, in_app_include):
+
+        # check if module in frame is in the list of modules to include
+        if _module_in_list(module, in_app_include):
             frame["in_app"] = True
-            any_in_app = True
-        elif _module_in_set(module, in_app_exclude):
+            continue
+
+        # check if module in frame is in the list of modules to exclude
+        if _module_in_list(module, in_app_exclude):
+            frame["in_app"] = False
+            continue
+
+        # if frame has no abs_path, skip further checks
+        abs_path = frame.get("abs_path")
+        if abs_path is None:
+            continue
+
+        if _is_external_source(abs_path):
             frame["in_app"] = False
+            continue
 
-    if not any_in_app:
-        for frame in frames:
-            if frame.get("in_app") is None:
-                frame["in_app"] = True
+        if _is_in_project_root(abs_path, project_root):
+            frame["in_app"] = True
+            continue
 
     return frames
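
Instead of defaulting every unmarked frame to in-app, frames are now classified against the include/exclude lists, site-/dist-packages paths and the project root. A sketch with made-up paths:

from sentry_sdk.utils import set_in_app_in_frames

frames = [
    {"module": "myapp.views", "abs_path": "/srv/app/myapp/views.py"},
    {"module": "django.urls", "abs_path": "/usr/lib/python3/dist-packages/django/urls.py"},
]
set_in_app_in_frames(frames, in_app_exclude=None, in_app_include=None, project_root="/srv/app")
print(frames[0]["in_app"], frames[1]["in_app"])  # True False
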
 
@@ -787,27 +1060,51 @@ def event_from_exception(
     )
 
 
-def _module_in_set(name, set):
+def _module_in_list(name, items):
     # type: (str, Optional[List[str]]) -> bool
-    if not set:
+    if name is None:
+        return False
+
+    if not items:
         return False
-    for item in set or ():
+
+    for item in items:
         if item == name or name.startswith(item + "."):
             return True
+
+    return False
+
+
+def _is_external_source(abs_path):
+    # type: (str) -> bool
+    # check if frame is in 'site-packages' or 'dist-packages'
+    external_source = (
+        re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
+    )
+    return external_source
+
+
+def _is_in_project_root(abs_path, project_root):
+    # type: (str, Optional[str]) -> bool
+    if project_root is None:
+        return False
+
+    # check if path is in the project root
+    if abs_path.startswith(project_root):
+        return True
+
     return False
 
 
 def strip_string(value, max_length=None):
     # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
-    # TODO: read max_length from config
     if not value:
         return value
 
     if max_length is None:
-        # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
-        max_length = MAX_STRING_LENGTH
+        max_length = DEFAULT_MAX_VALUE_LENGTH
 
-    length = len(value)
+    length = len(value.encode("utf-8"))
 
     if length > max_length:
         return AnnotatedValue(
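
Note that the limit is now measured in UTF-8 bytes rather than characters, so multi-byte strings can be truncated even when their character count is under the limit. A sketch, assuming DEFAULT_MAX_VALUE_LENGTH is 1024:

from sentry_sdk.utils import AnnotatedValue, strip_string

value = "é" * 600  # 600 characters, but 1200 bytes in UTF-8
print(isinstance(strip_string(value), AnnotatedValue))  # True: over the byte limit

print(strip_string("short"))  # "short", returned unchanged
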
@@ -934,9 +1231,12 @@ Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more in
 """
 
 
-def transaction_from_function(func):
+def qualname_from_function(func):
     # type: (Callable[..., Any]) -> Optional[str]
-    # Methods in Python 2
+    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
+    func_qualname = None  # type: Optional[str]
+
+    # Python 2
     try:
         return "%s.%s.%s" % (
             func.im_class.__module__,  # type: ignore
@@ -946,26 +1246,38 @@ def transaction_from_function(func):
     except Exception:
         pass
 
-    func_qualname = (
-        getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
-    )  # type: Optional[str]
-
-    if not func_qualname:
-        # No idea what it is
-        return None
+    prefix, suffix = "", ""
 
-    # Methods in Python 3
-    # Functions
-    # Classes
-    try:
-        return "%s.%s" % (func.__module__, func_qualname)
-    except Exception:
-        pass
+    if (
+        _PARTIALMETHOD_AVAILABLE
+        and hasattr(func, "_partialmethod")
+        and isinstance(func._partialmethod, partialmethod)
+    ):
+        prefix, suffix = "partialmethod(<function ", ">)"
+        func = func._partialmethod.func
+    elif isinstance(func, partial) and hasattr(func.func, "__name__"):
+        prefix, suffix = "partial(<function ", ">)"
+        func = func.func
+
+    if hasattr(func, "__qualname__"):
+        func_qualname = func.__qualname__
+    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
+        func_qualname = func.__name__
+
+    # Python 3: methods, functions, classes
+    if func_qualname is not None:
+        if hasattr(func, "__module__"):
+            func_qualname = func.__module__ + "." + func_qualname
+        func_qualname = prefix + func_qualname + suffix
 
-    # Possibly a lambda
     return func_qualname
 
 
+def transaction_from_function(func):
+    # type: (Callable[..., Any]) -> Optional[str]
+    return qualname_from_function(func)
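
qualname_from_function() now unwraps functools.partial and partialmethod objects and labels them instead of giving up on them. A sketch of the expected names when run as a script:

from functools import partial

from sentry_sdk.utils import qualname_from_function


def handler(request, retries):
    return retries


print(qualname_from_function(handler))  # "__main__.handler"
print(qualname_from_function(partial(handler, retries=3)))  # "partial(<function __main__.handler>)"
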
+
+
 disable_capture_event = ContextVar("disable_capture_event")
 
 
@@ -1052,6 +1364,183 @@ def from_base64(base64_string):
     return utf8_string
 
 
+Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])
+
+
+def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
+    # type: (str, bool, bool, bool) -> Union[str, Components]
+    """
+    Removes the authority and query parameter values from a given URL.
+    """
+    parsed_url = urlsplit(url)
+    query_params = parse_qs(parsed_url.query, keep_blank_values=True)
+
+    # strip username:password (netloc can be usr:pwd@example.com)
+    if remove_authority:
+        netloc_parts = parsed_url.netloc.split("@")
+        if len(netloc_parts) > 1:
+            netloc = "%s:%s@%s" % (
+                SENSITIVE_DATA_SUBSTITUTE,
+                SENSITIVE_DATA_SUBSTITUTE,
+                netloc_parts[-1],
+            )
+        else:
+            netloc = parsed_url.netloc
+    else:
+        netloc = parsed_url.netloc
+
+    # strip values from query string
+    if remove_query_values:
+        query_string = unquote(
+            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
+        )
+    else:
+        query_string = parsed_url.query
+
+    components = Components(
+        scheme=parsed_url.scheme,
+        netloc=netloc,
+        query=query_string,
+        path=parsed_url.path,
+        fragment=parsed_url.fragment,
+    )
+
+    if split:
+        return components
+    else:
+        return urlunsplit(components)
+
+
+ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])
+
+
+def parse_url(url, sanitize=True):
+    # type: (str, bool) -> ParsedUrl
+    """
+    Splits a URL into a base URL (including the path), a query string and a fragment. If sanitize is True,
+    the query parameter values will be sanitized to remove sensitive data. The authority (username and
+    password) in the URL will always be removed.
+    """
+    parsed_url = sanitize_url(
+        url, remove_authority=True, remove_query_values=sanitize, split=True
+    )
+
+    base_url = urlunsplit(
+        Components(
+            scheme=parsed_url.scheme,  # type: ignore
+            netloc=parsed_url.netloc,  # type: ignore
+            query="",
+            path=parsed_url.path,  # type: ignore
+            fragment="",
+        )
+    )
+
+    return ParsedUrl(
+        url=base_url,
+        query=parsed_url.query,  # type: ignore
+        fragment=parsed_url.fragment,  # type: ignore
+    )
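
parse_url() builds on sanitize_url() and splits the result into the base URL (with path), the query string and the fragment. A sketch:

from sentry_sdk.utils import parse_url

parsed = parse_url("https://example.com/search?q=secret#results")
print(parsed.url)       # https://example.com/search
print(parsed.query)     # q=[Filtered]
print(parsed.fragment)  # results
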
+
+
+def is_valid_sample_rate(rate, source):
+    # type: (Any, str) -> bool
+    """
+    Checks the given sample rate to make sure it is valid type and value (a
+    boolean or a number between 0 and 1, inclusive).
+    """
+
+    # Booleans and NaN are both instances of Real, so a) checking for Real also
+    # covers booleans, and b) NaN has to be checked for separately. Decimal does
+    # not derive from Real, so it needs its own check as well.
+    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
+        logger.warning(
+            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
+                source=source, rate=rate, type=type(rate)
+            )
+        )
+        return False
+
+    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
+    rate = float(rate)
+    if rate < 0 or rate > 1:
+        logger.warning(
+            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
+                source=source, rate=rate
+            )
+        )
+        return False
+
+    return True
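
A few examples of what passes the check (the source argument only prefixes the warning message):

from sentry_sdk.utils import is_valid_sample_rate

print(is_valid_sample_rate(0.25, source="[traces_sample_rate]"))  # True
print(is_valid_sample_rate(True, source="[traces_sample_rate]"))  # True, bool casts to 1.0
print(is_valid_sample_rate(1.5, source="[traces_sample_rate]"))   # False, out of range
print(is_valid_sample_rate(float("nan"), source="[traces_sample_rate]"))  # False, NaN rejected
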
+
+
+def match_regex_list(item, regex_list=None, substring_matching=False):
+    # type: (str, Optional[List[str]], bool) -> bool
+    if regex_list is None:
+        return False
+
+    for item_matcher in regex_list:
+        if not substring_matching and item_matcher[-1] != "$":
+            item_matcher += "$"
+
+        matched = re.search(item_matcher, item)
+        if matched:
+            return True
+
+    return False
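
Without substring_matching a trailing $ is appended to each pattern, so matches must run through the end of the string. A sketch:

from sentry_sdk.utils import match_regex_list

item = "myapp.tasks.send_email"
print(match_regex_list(item, [r"myapp\.tasks\..*"]))              # True, matches to the end
print(match_regex_list(item, ["send"]))                           # False, "send$" does not match
print(match_regex_list(item, ["send"], substring_matching=True))  # True
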
+
+
+def parse_version(version):
+    # type: (str) -> Optional[Tuple[int, ...]]
+    """
+    Parses a version string into a tuple of integers.
+    This uses the parsing logic from PEP 440:
+    https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
+    """
+    VERSION_PATTERN = r"""  # noqa: N806
+        v?
+        (?:
+            (?:(?P<epoch>[0-9]+)!)?                           # epoch
+            (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+            (?P<pre>                                          # pre-release
+                [-_\.]?
+                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+                [-_\.]?
+                (?P<pre_n>[0-9]+)?
+            )?
+            (?P<post>                                         # post release
+                (?:-(?P<post_n1>[0-9]+))
+                |
+                (?:
+                    [-_\.]?
+                    (?P<post_l>post|rev|r)
+                    [-_\.]?
+                    (?P<post_n2>[0-9]+)?
+                )
+            )?
+            (?P<dev>                                          # dev release
+                [-_\.]?
+                (?P<dev_l>dev)
+                [-_\.]?
+                (?P<dev_n>[0-9]+)?
+            )?
+        )
+        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+
+    pattern = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    try:
+        release = pattern.match(version).groupdict()["release"]  # type: ignore
+        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
+    except (TypeError, ValueError, AttributeError):
+        return None
+
+    return release_tuple
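
Only the release segment is kept, truncated to at most three components; pre/post/dev/local parts are parsed but dropped from the tuple. A sketch:

from sentry_sdk.utils import parse_version

print(parse_version("2.0.0rc1"))       # (2, 0, 0)
print(parse_version("1.4"))            # (1, 4)
print(parse_version("22.10.1.post3"))  # (22, 10, 1)
print(parse_version("not-a-version"))  # None
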
+
+
 if PY37:
 
     def nanosecond_time():
@@ -1062,12 +1551,23 @@ elif PY33:
 
     def nanosecond_time():
         # type: () -> int
-
         return int(time.perf_counter() * 1e9)
 
 else:
 
     def nanosecond_time():
         # type: () -> int
-
         raise AttributeError
+
+
+if PY2:
+
+    def now():
+        # type: () -> float
+        return time.time()
+
+else:
+
+    def now():
+        # type: () -> float
+        return time.perf_counter()
diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py
index 310ba3b..2fe81a8 100644
--- a/sentry_sdk/worker.py
+++ b/sentry_sdk/worker.py
@@ -7,9 +7,9 @@ from sentry_sdk._queue import Queue, FullError
 from sentry_sdk.utils import logger
 from sentry_sdk.consts import DEFAULT_QUEUE_SIZE
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Callable
@@ -95,6 +95,10 @@ class BackgroundWorker(object):
                 self._wait_flush(timeout, callback)
         logger.debug("background worker flushed")
 
+    def full(self):
+        # type: () -> bool
+        return self._queue.full()
+
     def _wait_flush(self, timeout, callback):
         # type: (float, Optional[Any]) -> None
         initial_timeout = min(0.1, timeout)
diff --git a/setup.py b/setup.py
index f87a9f2..f79ff91 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.10",
+    version="1.29.2",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",
@@ -36,7 +36,7 @@ setup(
     # PEP 561
     package_data={"sentry_sdk": ["py.typed"]},
     zip_safe=False,
-    license="BSD",
+    license="MIT",
     install_requires=[
         'urllib3>=1.25.7; python_version<="3.4"',
         'urllib3>=1.26.9; python_version=="3.5"',
@@ -44,14 +44,16 @@ setup(
         "certifi",
     ],
     extras_require={
-        "flask": ["flask>=0.11", "blinker>=1.1"],
+        "flask": ["flask>=0.11", "blinker>=1.1", "markupsafe"],
         "quart": ["quart>=0.16.1", "blinker>=1.1"],
         "bottle": ["bottle>=0.12.13"],
         "falcon": ["falcon>=1.4"],
         "django": ["django>=1.8"],
         "sanic": ["sanic>=0.8"],
         "celery": ["celery>=3"],
+        "huey": ["huey>=2"],
         "beam": ["apache-beam>=2.12"],
+        "arq": ["arq>=0.23"],
         "rq": ["rq>=0.6"],
         "aiohttp": ["aiohttp>=3.5"],
         "tornado": ["tornado>=5"],
@@ -61,7 +63,12 @@ setup(
         "chalice": ["chalice>=1.16.0"],
         "httpx": ["httpx>=0.16.0"],
         "starlette": ["starlette>=0.19.1"],
+        "starlite": ["starlite>=1.48"],
         "fastapi": ["fastapi>=0.79.0"],
+        "pymongo": ["pymongo>=3.1"],
+        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
+        "grpcio": ["grpcio>=1.21.1"],
+        "loguru": ["loguru>=0.5"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
diff --git a/test-requirements.txt b/test-requirements.txt
index 74332d9..4b04d1b 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,13 +1,15 @@
 pip  # always use newest pip
-mock # for testing under python < 3.3
+mock ; python_version<'3.3'
 pytest<7
 pytest-cov==2.8.1
 pytest-forked<=1.4.0
 pytest-localserver==0.5.0
 pytest-watch==4.2.0
 tox==3.7.0
-Werkzeug<2.1.0
 jsonschema==3.2.0
 pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
 executing
-asttokens
\ No newline at end of file
+asttokens
+responses
+pysocks
+ipdb
diff --git a/tests/conftest.py b/tests/conftest.py
index a239ccc..d9d8806 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,7 @@
-import os
 import json
+import os
+import socket
+from threading import Thread
 
 import pytest
 import jsonschema
@@ -14,12 +16,24 @@ try:
 except ImportError:
     eventlet = None
 
+try:
+    # Python 2
+    import BaseHTTPServer
+
+    HTTPServer = BaseHTTPServer.HTTPServer
+    BaseHTTPRequestHandler = BaseHTTPServer.BaseHTTPRequestHandler
+except Exception:
+    # Python 3
+    from http.server import BaseHTTPRequestHandler, HTTPServer
+
+
 import sentry_sdk
-from sentry_sdk._compat import reraise, string_types, iteritems
-from sentry_sdk.transport import Transport
+from sentry_sdk._compat import iteritems, reraise, string_types
 from sentry_sdk.envelope import Envelope
-from sentry_sdk.utils import capture_internal_exceptions
 from sentry_sdk.integrations import _installed_integrations  # noqa: F401
+from sentry_sdk.profiler import teardown_profiler
+from sentry_sdk.transport import Transport
+from sentry_sdk.utils import capture_internal_exceptions
 
 from tests import _warning_recorder, _warning_recorder_mgr
 
@@ -55,7 +69,7 @@ def internal_exceptions(request, monkeypatch):
 
     @request.addfinalizer
     def _():
-        # rerasise the errors so that this just acts as a pass-through (that
+        # reraise the errors so that this just acts as a pass-through (that
         # happens to keep track of the errors which pass through it)
         for e in errors:
             reraise(*e)
@@ -143,11 +157,11 @@ def monkeypatch_test_transport(monkeypatch, validate_event_schema):
 
     def check_envelope(envelope):
         with capture_internal_exceptions():
-            # Assert error events are sent without envelope to server, for compat.
-            # This does not apply if any item in the envelope is an attachment.
-            if not any(x.type == "attachment" for x in envelope.items):
-                assert not any(item.data_category == "error" for item in envelope.items)
-                assert not any(item.get_event() is not None for item in envelope.items)
+            # There used to be a check here that errors are not sent in envelopes.
+            # We changed the behaviour to send errors in envelopes when tracing is enabled.
+            # This is checked in test_client.py::test_sending_events_with_tracing
+            # and test_client.py::test_sending_events_with_no_tracing
+            pass
 
     def inner(client):
         monkeypatch.setattr(
@@ -297,20 +311,21 @@ def capture_events_forksafe(monkeypatch, capture_events, request):
         monkeypatch.setattr(test_client.transport, "capture_event", append)
         monkeypatch.setattr(test_client, "flush", flush)
 
-        return EventStreamReader(events_r)
+        return EventStreamReader(events_r, events_w)
 
     return inner
 
 
 class EventStreamReader(object):
-    def __init__(self, file):
-        self.file = file
+    def __init__(self, read_file, write_file):
+        self.read_file = read_file
+        self.write_file = write_file
 
     def read_event(self):
-        return json.loads(self.file.readline().decode("utf-8"))
+        return json.loads(self.read_file.readline().decode("utf-8"))
 
     def read_flush(self):
-        assert self.file.readline() == b"flush\n"
+        assert self.read_file.readline() == b"flush\n"
 
 
 # scope=session ensures that fixture is run earlier
@@ -554,3 +569,36 @@ def object_described_by_matcher():
             return not self.__eq__(test_obj)
 
     return ObjectDescribedBy
+
+
+@pytest.fixture
+def teardown_profiling():
+    yield
+    teardown_profiler()
+
+
+class MockServerRequestHandler(BaseHTTPRequestHandler):
+    def do_GET(self):  # noqa: N802
+        # Process an HTTP GET request and return a response with an HTTP 200 status.
+        self.send_response(200)
+        self.end_headers()
+        return
+
+
+def get_free_port():
+    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
+    s.bind(("localhost", 0))
+    _, port = s.getsockname()
+    s.close()
+    return port
+
+
+def create_mock_http_server():
+    # Start a mock server to test outgoing http requests
+    mock_server_port = get_free_port()
+    mock_server = HTTPServer(("localhost", mock_server_port), MockServerRequestHandler)
+    mock_server_thread = Thread(target=mock_server.serve_forever)
+    mock_server_thread.setDaemon(True)
+    mock_server_thread.start()
+
+    return mock_server_port
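
A sketch of how a test can use this helper to exercise outgoing HTTP instrumentation against a local server, assuming create_mock_http_server from tests/conftest.py is in scope (stdlib only):

from urllib.request import urlopen

port = create_mock_http_server()
response = urlopen("http://localhost:{}/".format(port))
assert response.status == 200  # the handler answers every GET with 200
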
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
index 7e49a28..8068365 100644
--- a/tests/integrations/aiohttp/test_aiohttp.py
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -7,6 +7,7 @@ from aiohttp import web
 from aiohttp.client import ServerDisconnectedError
 from aiohttp.web_request import Request
 
+from sentry_sdk import capture_message, start_transaction
 from sentry_sdk.integrations.aiohttp import AioHttpIntegration
 
 try:
@@ -15,7 +16,8 @@ except ImportError:
     import mock  # python < 3.3
 
 
-async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_basic(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     async def hello(request):
@@ -52,10 +54,13 @@ async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
         "Accept-Encoding": "gzip, deflate",
         "Host": host,
         "User-Agent": request["headers"]["User-Agent"],
+        "baggage": mock.ANY,
+        "sentry-trace": mock.ANY,
     }
 
 
-async def test_post_body_not_read(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_post_body_not_read(sentry_init, aiohttp_client, capture_events):
     from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE
 
     sentry_init(integrations=[AioHttpIntegration()])
@@ -84,7 +89,8 @@ async def test_post_body_not_read(sentry_init, aiohttp_client, loop, capture_eve
     assert request["data"] == BODY_NOT_READ_MESSAGE
 
 
-async def test_post_body_read(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_post_body_read(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     body = {"some": "value"}
@@ -112,7 +118,8 @@ async def test_post_body_read(sentry_init, aiohttp_client, loop, capture_events)
     assert request["data"] == json.dumps(body)
 
 
-async def test_403_not_captured(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_403_not_captured(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     async def hello(request):
@@ -130,8 +137,9 @@ async def test_403_not_captured(sentry_init, aiohttp_client, loop, capture_event
     assert not events
 
 
+@pytest.mark.asyncio
 async def test_cancelled_error_not_captured(
-    sentry_init, aiohttp_client, loop, capture_events
+    sentry_init, aiohttp_client, capture_events
 ):
     sentry_init(integrations=[AioHttpIntegration()])
 
@@ -152,7 +160,8 @@ async def test_cancelled_error_not_captured(
     assert not events
 
 
-async def test_half_initialized(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_half_initialized(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
     sentry_init()
 
@@ -171,7 +180,8 @@ async def test_half_initialized(sentry_init, aiohttp_client, loop, capture_event
     assert events == []
 
 
-async def test_tracing(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_tracing(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
 
     async def hello(request):
@@ -195,6 +205,7 @@ async def test_tracing(sentry_init, aiohttp_client, loop, capture_events):
     )
 
 
+@pytest.mark.asyncio
 @pytest.mark.parametrize(
     "url,transaction_style,expected_transaction,expected_source",
     [
@@ -245,6 +256,7 @@ async def test_transaction_style(
     assert event["transaction_info"] == {"source": expected_source}
 
 
+@pytest.mark.asyncio
 async def test_traces_sampler_gets_request_object_in_sampling_context(
     sentry_init,
     aiohttp_client,
@@ -275,3 +287,250 @@ async def test_traces_sampler_gets_request_object_in_sampling_context(
             }
         )
     )
+
+
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_enabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 500
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_disabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 500
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        error_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_enabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    # The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
+    # Get the sentry-trace header from the request so we can later compare it with the transaction events.
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
+    trace_id = sentry_trace_header.split("-")[0]
+
+    assert resp.status == 500
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_disabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    # The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
+    # Get the sentry-trace header from the request so we can later compare it with the transaction events.
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
+    trace_id = sentry_trace_header.split("-")[0]
+
+    assert resp.status == 500
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+async def test_crumb_capture(
+    sentry_init, aiohttp_raw_server, aiohttp_client, loop, capture_events
+):
+    def before_breadcrumb(crumb, hint):
+        crumb["data"]["extra"] = "foo"
+        return crumb
+
+    sentry_init(
+        integrations=[AioHttpIntegration()], before_breadcrumb=before_breadcrumb
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction():
+        events = capture_events()
+
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/")
+        assert resp.status == 200
+        capture_message("Testing!")
+
+        (event,) = events
+
+        crumb = event["breadcrumbs"]["values"][0]
+        assert crumb["type"] == "http"
+        assert crumb["category"] == "httplib"
+        assert crumb["data"] == {
+            "url": "http://127.0.0.1:{}/".format(raw_server.port),
+            "http.fragment": "",
+            "http.method": "GET",
+            "http.query": "",
+            "http.response.status_code": 200,
+            "reason": "OK",
+            "extra": "foo",
+        }
+
+
+@pytest.mark.asyncio
+async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client):
+    sentry_init(
+        integrations=[AioHttpIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        # make the trace_id differ between transactions
+        trace_id="0123456789012345678901234567890",
+    ) as transaction:
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/")
+        request_span = transaction._span_recorder.spans[-1]
+
+        assert resp.request_info.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+
+
+@pytest.mark.asyncio
+async def test_outgoing_trace_headers_append_to_baggage(
+    sentry_init, aiohttp_raw_server, aiohttp_client
+):
+    sentry_init(
+        integrations=[AioHttpIntegration()],
+        traces_sample_rate=1.0,
+        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="0123456789012345678901234567890",
+    ):
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/", headers={"bagGage": "custom=value"})
+
+        assert (
+            resp.request_info.headers["baggage"]
+            == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
+        )
diff --git a/tests/integrations/arq/__init__.py b/tests/integrations/arq/__init__.py
new file mode 100644
index 0000000..f0b4712
--- /dev/null
+++ b/tests/integrations/arq/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("arq")
diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py
new file mode 100644
index 0000000..9b224a6
--- /dev/null
+++ b/tests/integrations/arq/test_arq.py
@@ -0,0 +1,214 @@
+import asyncio
+import pytest
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.arq import ArqIntegration
+
+import arq.worker
+from arq import cron
+from arq.connections import ArqRedis
+from arq.jobs import Job
+from arq.utils import timestamp_ms
+
+from fakeredis.aioredis import FakeRedis
+
+
+def async_partial(async_fn, *args, **kwargs):
+    # asyncio.iscoroutinefunction (used in the integration code) does not detect
+    # async functions wrapped in functools.partial objects on Python < 3.8.
+    # This partial implementation returns a coroutine function instead.
+    async def wrapped(ctx):
+        return await async_fn(ctx, *args, **kwargs)
+
+    return wrapped
+
+
+@pytest.fixture(autouse=True)
+def patch_fakeredis_info_command():
+    from fakeredis._fakesocket import FakeSocket
+
+    if not hasattr(FakeSocket, "info"):
+        from fakeredis._commands import command
+        from fakeredis._helpers import SimpleString
+
+        @command((SimpleString,), name="info")
+        def info(self, section):
+            return section
+
+        FakeSocket.info = info
+
+
+@pytest.fixture
+def init_arq(sentry_init):
+    def inner(functions_=None, cron_jobs_=None, allow_abort_jobs_=False):
+        functions_ = functions_ or []
+        cron_jobs_ = cron_jobs_ or []
+
+        sentry_init(
+            integrations=[ArqIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+            debug=True,
+        )
+
+        server = FakeRedis()
+        pool = ArqRedis(pool_or_conn=server.connection_pool)
+
+        class WorkerSettings:
+            functions = functions_
+            cron_jobs = cron_jobs_
+            redis_pool = pool
+            allow_abort_jobs = allow_abort_jobs_
+
+        worker = arq.worker.create_worker(WorkerSettings)
+
+        return pool, worker
+
+    return inner
+
+
+@pytest.mark.asyncio
+async def test_job_result(init_arq):
+    async def increase(ctx, num):
+        return num + 1
+
+    increase.__qualname__ = increase.__name__
+
+    pool, worker = init_arq([increase])
+
+    job = await pool.enqueue_job("increase", 3)
+
+    assert isinstance(job, Job)
+
+    await worker.run_job(job.job_id, timestamp_ms())
+    result = await job.result()
+    job_result = await job.result_info()
+
+    assert result == 4
+    assert job_result.result == 4
+
+
+@pytest.mark.asyncio
+async def test_job_retry(capture_events, init_arq):
+    async def retry_job(ctx):
+        if ctx["job_try"] < 2:
+            raise arq.worker.Retry
+
+    retry_job.__qualname__ = retry_job.__name__
+
+    pool, worker = init_arq([retry_job])
+
+    job = await pool.enqueue_job("retry_job")
+
+    events = capture_events()
+
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    event = events.pop(0)
+    assert event["contexts"]["trace"]["status"] == "aborted"
+    assert event["transaction"] == "retry_job"
+    assert event["tags"]["arq_task_id"] == job.job_id
+    assert event["extra"]["arq-job"]["retry"] == 1
+
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    event = events.pop(0)
+    assert event["contexts"]["trace"]["status"] == "ok"
+    assert event["transaction"] == "retry_job"
+    assert event["tags"]["arq_task_id"] == job.job_id
+    assert event["extra"]["arq-job"]["retry"] == 2
+
+
+@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
+@pytest.mark.asyncio
+async def test_job_transaction(capture_events, init_arq, job_fails):
+    async def division(_, a, b=0):
+        return a / b
+
+    division.__qualname__ = division.__name__
+
+    cron_func = async_partial(division, a=1, b=int(not job_fails))
+    cron_func.__qualname__ = division.__name__
+
+    cron_job = cron(cron_func, minute=0, run_at_startup=True)
+
+    pool, worker = init_arq(functions_=[division], cron_jobs_=[cron_job])
+
+    events = capture_events()
+
+    job = await pool.enqueue_job("division", 1, b=int(not job_fails))
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    loop = asyncio.get_event_loop()
+    task = loop.create_task(worker.async_run())
+    await asyncio.sleep(1)
+
+    task.cancel()
+
+    await worker.close()
+
+    if job_fails:
+        error_func_event = events.pop(0)
+        error_cron_event = events.pop(1)
+
+        assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        func_extra = error_func_event["extra"]["arq-job"]
+        assert func_extra["task"] == "division"
+
+        assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        cron_extra = error_cron_event["extra"]["arq-job"]
+        assert cron_extra["task"] == "cron:division"
+
+    [func_event, cron_event] = events
+
+    assert func_event["type"] == "transaction"
+    assert func_event["transaction"] == "division"
+    assert func_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in func_event["tags"]
+    assert "arq_task_retry" in func_event["tags"]
+
+    func_extra = func_event["extra"]["arq-job"]
+
+    assert func_extra["task"] == "division"
+    assert func_extra["kwargs"] == {"b": int(not job_fails)}
+    assert func_extra["retry"] == 1
+
+    assert cron_event["type"] == "transaction"
+    assert cron_event["transaction"] == "cron:division"
+    assert cron_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in cron_event["tags"]
+    assert "arq_task_retry" in cron_event["tags"]
+
+    cron_extra = cron_event["extra"]["arq-job"]
+
+    assert cron_extra["task"] == "cron:division"
+    assert cron_extra["kwargs"] == {}
+    assert cron_extra["retry"] == 1
+
+
+@pytest.mark.asyncio
+async def test_enqueue_job(capture_events, init_arq):
+    async def dummy_job(_):
+        pass
+
+    pool, _ = init_arq([dummy_job])
+
+    events = capture_events()
+
+    with start_transaction() as transaction:
+        await pool.enqueue_job("dummy_job")
+
+    (event,) = events
+
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert event["contexts"]["trace"]["span_id"] == transaction.span_id
+
+    assert len(event["spans"])
+    assert event["spans"][0]["op"] == "queue.submit.arq"
+    assert event["spans"][0]["description"] == "dummy_job"
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index ce28b1e..d51293a 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -24,7 +24,7 @@ def asgi3_app():
             and "route" in scope
             and scope["route"] == "/trigger/error"
         ):
-            division_by_zero = 1 / 0  # noqa
+            1 / 0
 
         await send(
             {
@@ -59,7 +59,33 @@ def asgi3_app_with_error():
             }
         )
 
-        division_by_zero = 1 / 0  # noqa
+        1 / 0
+
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
+
+    return app
+
+
+@pytest.fixture
+def asgi3_app_with_error_and_msg():
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
+
+        capture_message("Let's try dividing by 0")
+        1 / 0
 
         await send(
             {
@@ -164,6 +190,126 @@ async def test_capture_transaction_with_error(
     assert transaction_event["request"] == error_event["request"]
 
 
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_enabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
+
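+    # The app's ZeroDivisionError propagates through the middleware and the test
+    # client, so the whole block is expected to raise.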
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_disabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init()
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_enabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
+
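+    # Build a sentry-trace header: "<trace_id>-<parent_span_id>-<sampled_flag>".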
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/", headers={"sentry-trace": sentry_trace_header})
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_disabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init()
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/", headers={"sentry-trace": sentry_trace_header})
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
 @minimum_python_36
 @pytest.mark.asyncio
 async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
diff --git a/tests/integrations/asyncio/__init__.py b/tests/integrations/asyncio/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/asyncio/test_asyncio_py3.py b/tests/integrations/asyncio/test_asyncio_py3.py
new file mode 100644
index 0000000..98106ed
--- /dev/null
+++ b/tests/integrations/asyncio/test_asyncio_py3.py
@@ -0,0 +1,172 @@
+import asyncio
+import sys
+
+import pytest
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.asyncio import AsyncioIntegration
+
+
+minimum_python_37 = pytest.mark.skipif(
+    sys.version_info < (3, 7), reason="Asyncio tests need Python >= 3.7"
+)
+
+
+async def foo():
+    await asyncio.sleep(0.01)
+
+
+async def bar():
+    await asyncio.sleep(0.01)
+
+
+async def boom():
+    1 / 0
+
+
+@pytest.fixture(scope="session")
+def event_loop(request):
+    """Create an instance of the default event loop for each test case."""
+    loop = asyncio.get_event_loop_policy().new_event_loop()
+    yield loop
+    loop.close()
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_create_task(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
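+            # AsyncioIntegration wraps created tasks, so each becomes a child span of "root".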
+            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_gather(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            await asyncio.gather(foo(), bar(), return_exceptions=True)
+
+        sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_exception(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_exception"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
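+    # boom() yields an error event followed by the transaction; only the error is inspected.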
+    (error_event, _) = events
+
+    assert error_event["transaction"] == "test_exception"
+    assert error_event["contexts"]["trace"]["op"] == "function"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_task_result(sentry_init):
+    sentry_init(
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    async def add(a, b):
+        return a + b
+
+    result = await asyncio.create_task(add(1, 2))
+    assert result == 3, result
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index 458f55b..f042125 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -25,8 +25,6 @@ import pytest
 boto3 = pytest.importorskip("boto3")
 
 LAMBDA_PRELUDE = """
-from __future__ import print_function
-
 from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
 import sentry_sdk
 import json
@@ -106,7 +104,11 @@ def lambda_client():
 
 
 @pytest.fixture(
-    params=["python3.6", "python3.7", "python3.8", "python3.9", "python2.7"]
+    params=[
+        "python3.7",
+        "python3.8",
+        "python3.9",
+    ]
 )
 def lambda_runtime(request):
     return request.param
@@ -189,7 +191,8 @@ def test_basic(run_lambda_function):
 
     assert frame1["in_app"] is True
 
-    assert exception["mechanism"] == {"type": "aws_lambda", "handled": False}
+    assert exception["mechanism"]["type"] == "aws_lambda"
+    assert not exception["mechanism"]["handled"]
 
     assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
 
@@ -283,9 +286,6 @@ def test_request_data(run_lambda_function):
 
 
 def test_init_error(run_lambda_function, lambda_runtime):
-    if lambda_runtime == "python2.7":
-        pytest.skip("initialization error not supported on Python 2.7")
-
     envelopes, events, response = run_lambda_function(
         LAMBDA_PRELUDE
         + (
@@ -327,7 +327,8 @@ def test_timeout_error(run_lambda_function):
         "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
     )
 
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
     assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
 
@@ -360,7 +361,7 @@ def test_performance_no_error(run_lambda_function):
 
     (envelope,) = envelopes
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
     assert envelope["transaction"].startswith("test_function_")
     assert envelope["transaction_info"] == {"source": "component"}
     assert envelope["transaction"] in envelope["request"]["url"]
@@ -389,7 +390,7 @@ def test_performance_error(run_lambda_function):
     (envelope,) = envelopes
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
     assert envelope["transaction"].startswith("test_function_")
     assert envelope["transaction_info"] == {"source": "component"}
     assert envelope["transaction"] in envelope["request"]["url"]
@@ -476,7 +477,7 @@ def test_non_dict_event(
 
     error_event = events[0]
     assert error_event["level"] == "error"
-    assert error_event["contexts"]["trace"]["op"] == "serverless.function"
+    assert error_event["contexts"]["trace"]["op"] == "function.aws.lambda"
 
     function_name = error_event["extra"]["lambda"]["function_name"]
     assert function_name.startswith("test_function_")
@@ -664,3 +665,139 @@ def test_serverless_no_code_instrumentation(run_lambda_function):
         assert response["Payload"]["errorMessage"] == "something went wrong"
 
         assert "sentry_handler" in response["LogResult"][3].decode("utf-8")
+
+
+def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
+    envelopes, _, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("something went wrong")
+        """
+        ),
+        payload=b'{"foo": "bar"}',
+    )
+
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
+    _, events, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("something went wrong")
+        """
+        ),
+        payload=b'{"foo": "bar"}',
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_existing_trace_context_performance_enabled(run_lambda_function):
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    envelopes, _, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("something went wrong")
+        """
+        ),
+        payload=b'{"sentry_trace": "%s"}' % sentry_trace_header.encode(),
+    )
+
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(run_lambda_function):
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    _, events, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("something went wrong")
+        """
+        ),
+        payload=b'{"sentry_trace": "%s"}' % sentry_trace_header.encode(),
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py
index 67376b5..5812c2c 100644
--- a/tests/integrations/boto3/test_s3.py
+++ b/tests/integrations/boto3/test_s3.py
@@ -1,9 +1,17 @@
+import pytest
+
+import boto3
+
 from sentry_sdk import Hub
 from sentry_sdk.integrations.boto3 import Boto3Integration
 from tests.integrations.boto3.aws_mock import MockResponse
 from tests.integrations.boto3 import read_fixture
 
-import boto3
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 session = boto3.Session(
     aws_access_key_id="-",
@@ -30,7 +38,7 @@ def test_basic(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 1
     (span,) = event["spans"]
-    assert span["op"] == "aws.request"
+    assert span["op"] == "http.client"
     assert span["description"] == "aws.s3.ListObjects"
 
 
@@ -53,11 +61,19 @@ def test_streaming(sentry_init, capture_events):
     (event,) = events
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
+
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     assert span1["description"] == "aws.s3.GetObject"
+    assert span1["data"] == {
+        "http.method": "GET",
+        "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf",
+        "http.fragment": "",
+        "http.query": "",
+    }
+
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
     assert span2["description"] == "aws.s3.GetObject"
     assert span2["parent_span_id"] == span1["span_id"]
 
@@ -80,6 +96,34 @@ def test_streaming_close(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
+
+
+@pytest.mark.tests_internal_exceptions
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
+    events = capture_events()
+
+    s3 = session.resource("s3")
+
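+    # Force parse_url to fail so the span data keeps only the HTTP method, no URL parts.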
+    with mock.patch(
+        "sentry_sdk.integrations.boto3.parse_url",
+        side_effect=ValueError,
+    ):
+        with Hub.current.start_transaction() as transaction, MockResponse(
+            s3.meta.client, 200, {}, read_fixture("s3_list.xml")
+        ):
+            bucket = s3.Bucket("bucket")
+            items = list(bucket.objects.all())
+            assert len(items) == 2
+            assert items[0].key == "foo.txt"
+            assert items[1].key == "bar.txt"
+            transaction.finish()
+
+    (event,) = events
+    assert event["spans"][0]["data"] == {
+        "http.method": "GET",
+        # no url data
+    }
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
index dfd6e52..273424e 100644
--- a/tests/integrations/bottle/test_bottle.py
+++ b/tests/integrations/bottle/test_bottle.py
@@ -8,6 +8,7 @@ pytest.importorskip("bottle")
 from io import BytesIO
 from bottle import Bottle, debug as set_debug, abort, redirect
 from sentry_sdk import capture_message
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
 
 from sentry_sdk.integrations.logging import LoggingIntegration
 from werkzeug.test import Client
@@ -210,7 +211,7 @@ def test_too_large_raw_request(
     sentry_init, input_char, capture_events, app, get_client
 ):
     sentry_init(
-        integrations=[bottle_sentry.BottleIntegration()], request_bodies="small"
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="small"
     )
 
     data = input_char * 2000
@@ -240,7 +241,7 @@ def test_too_large_raw_request(
 
 def test_files_and_form(sentry_init, capture_events, app, get_client):
     sentry_init(
-        integrations=[bottle_sentry.BottleIntegration()], request_bodies="always"
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
     )
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@@ -275,6 +276,37 @@ def test_files_and_form(sentry_init, capture_events, app, get_client):
     assert not event["request"]["data"]["file"]
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, app, get_client
+):
+    sentry_init(
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
+    )
+
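+    # Build a payload wider than MAX_DATABAG_BREADTH; with "always" it must not be truncated.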
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert bottle.request.json == data
+        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response[1] == "200 OK"
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 @pytest.mark.parametrize(
     "integrations",
     [
@@ -354,10 +386,8 @@ def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client)
     assert error is exc.value
 
     (event,) = events
-    assert event["exception"]["values"][0]["mechanism"] == {
-        "type": "bottle",
-        "handled": False,
-    }
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
+    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
 
 
 def test_500(sentry_init, capture_events, app, get_client):
diff --git a/tests/integrations/celery/__init__.py b/tests/integrations/celery/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index 2c52031..2b49640 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -5,7 +5,8 @@ import pytest
 pytest.importorskip("celery")
 
 from sentry_sdk import Hub, configure_scope, start_transaction
-from sentry_sdk.integrations.celery import CeleryIntegration
+from sentry_sdk.integrations.celery import CeleryIntegration, _get_headers
+
 from sentry_sdk._compat import text_type
 
 from celery import Celery, VERSION
@@ -84,8 +85,14 @@ def celery(init_celery):
 
 @pytest.fixture(
     params=[
-        lambda task, x, y: (task.delay(x, y), {"args": [x, y], "kwargs": {}}),
-        lambda task, x, y: (task.apply_async((x, y)), {"args": [x, y], "kwargs": {}}),
+        lambda task, x, y: (
+            task.delay(x, y),
+            {"args": [x, y], "kwargs": {}},
+        ),
+        lambda task, x, y: (
+            task.apply_async((x, y)),
+            {"args": [x, y], "kwargs": {}},
+        ),
         lambda task, x, y: (
             task.apply_async(args=(x, y)),
             {"args": [x, y], "kwargs": {}},
@@ -105,7 +112,8 @@ def celery_invocation(request):
     return request.param
 
 
-def test_simple(capture_events, celery, celery_invocation):
+def test_simple_with_performance(capture_events, init_celery, celery_invocation):
+    celery = init_celery(traces_sample_rate=1.0)
     events = capture_events()
 
     @celery.task(name="dummy_task")
@@ -113,26 +121,61 @@ def test_simple(capture_events, celery, celery_invocation):
         foo = 42  # noqa
         return x / y
 
-    with start_transaction() as transaction:
+    with start_transaction(op="unit test transaction") as transaction:
         celery_invocation(dummy_task, 1, 2)
         _, expected_context = celery_invocation(dummy_task, 1, 0)
 
-    (event,) = events
+    (_, error_event, _, _) = events
 
-    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
-    assert event["contexts"]["trace"]["span_id"] != transaction.span_id
-    assert event["transaction"] == "dummy_task"
-    assert "celery_task_id" in event["tags"]
-    assert event["extra"]["celery-job"] == dict(
+    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id
+    assert error_event["transaction"] == "dummy_task"
+    assert "celery_task_id" in error_event["tags"]
+    assert error_event["extra"]["celery-job"] == dict(
         task_name="dummy_task", **expected_context
     )
 
-    (exception,) = event["exception"]["values"]
+    (exception,) = error_event["exception"]["values"]
     assert exception["type"] == "ZeroDivisionError"
     assert exception["mechanism"]["type"] == "celery"
     assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
 
 
+def test_simple_without_performance(capture_events, init_celery, celery_invocation):
+    celery = init_celery(traces_sample_rate=None)
+    events = capture_events()
+
+    @celery.task(name="dummy_task")
+    def dummy_task(x, y):
+        foo = 42  # noqa
+        return x / y
+
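+    # Without performance, errors still carry a trace_id taken from the scope's
+    # propagation context rather than from a transaction.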
+    with configure_scope() as scope:
+        celery_invocation(dummy_task, 1, 2)
+        _, expected_context = celery_invocation(dummy_task, 1, 0)
+
+        (error_event,) = events
+
+        assert (
+            error_event["contexts"]["trace"]["trace_id"]
+            == scope._propagation_context["trace_id"]
+        )
+        assert (
+            error_event["contexts"]["trace"]["span_id"]
+            != scope._propagation_context["span_id"]
+        )
+        assert error_event["transaction"] == "dummy_task"
+        assert "celery_task_id" in error_event["tags"]
+        assert error_event["extra"]["celery-job"] == dict(
+            task_name="dummy_task", **expected_context
+        )
+
+        (exception,) = error_event["exception"]["values"]
+        assert exception["type"] == "ZeroDivisionError"
+        assert exception["mechanism"]["type"] == "celery"
+        assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
+
+
 @pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
 def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
     celery = init_celery(traces_sample_rate=1.0)
@@ -174,7 +217,7 @@ def test_transaction_events(capture_events, init_celery, celery_invocation, task
     assert submission_event["spans"] == [
         {
             "description": "dummy_task",
-            "op": "celery.submit",
+            "op": "queue.submit.celery",
             "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
             "same_process_as_parent": True,
             "span_id": submission_event["spans"][0]["span_id"],
@@ -316,7 +359,7 @@ def test_retry(celery, capture_events):
 # TODO: This test is hanging when running test with `tox --parallel auto`. Find out why and fix it!
 @pytest.mark.skip
 @pytest.mark.forked
-def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir):
+def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
     celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
 
     events = capture_events_forksafe()
@@ -347,7 +390,7 @@ def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, t
         submit_transaction["spans"]
     ), 4  # Because redis integration was auto enabled
     span = submit_transaction["spans"][0]
-    assert span["op"] == "celery.submit"
+    assert span["op"] == "queue.submit.celery"
     assert span["description"] == "dummy_task"
 
     event = events.read_event()
@@ -437,3 +480,49 @@ def test_abstract_task(capture_events, celery, celery_invocation):
         celery_invocation(dummy_task, 1, 0)
 
     assert not events
+
+
+def test_task_headers(celery):
+    """
+    Test that the headers set by the Celery Beat auto-instrumentation are passed through to the Celery signal handlers.
+    """
+    sentry_crons_setup = {
+        "sentry-monitor-slug": "some-slug",
+        "sentry-monitor-config": {"some": "config"},
+        "sentry-monitor-check-in-id": "123abc",
+    }
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self, x, y):
+        return _get_headers(self)
+
+    # This is how the Celery Beat auto-instrumentation starts a task
+    # in the monkey patched version of `apply_async`
+    # in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
+    result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)
+    assert result.get() == sentry_crons_setup
+
+
+def test_baggage_propagation(init_celery):
+    celery = init_celery(traces_sample_rate=1.0, release="abcdef")
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self, x, y):
+        return _get_headers(self)
+
+    with start_transaction() as transaction:
+        result = dummy_task.apply_async(
+            args=(1, 0),
+            headers={"baggage": "custom=value"},
+        ).get()
+
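+        # The SDK's dynamic sampling context is merged into the incoming baggage header.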
+        assert sorted(result["baggage"].split(",")) == sorted(
+            [
+                "sentry-release=abcdef",
+                "sentry-trace_id={}".format(transaction.trace_id),
+                "sentry-environment=production",
+                "sentry-sample_rate=1.0",
+                "sentry-sampled=true",
+                "custom=value",
+            ]
+        )
diff --git a/tests/integrations/celery/test_celery_beat_crons.py b/tests/integrations/celery/test_celery_beat_crons.py
new file mode 100644
index 0000000..ab1ceea
--- /dev/null
+++ b/tests/integrations/celery/test_celery_beat_crons.py
@@ -0,0 +1,335 @@
+import pytest
+
+pytest.importorskip("celery")
+
+from sentry_sdk.integrations.celery import (
+    _get_headers,
+    _get_humanized_interval,
+    _get_monitor_config,
+    _patch_beat_apply_entry,
+    crons_task_success,
+    crons_task_failure,
+    crons_task_retry,
+)
+from sentry_sdk.crons import MonitorStatus
+from celery.schedules import crontab, schedule
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+
+def test_get_headers():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "bla": "blub",
+        "foo": "bar",
+    }
+
+    assert _get_headers(fake_task) == {}
+
+    fake_task.request.update(
+        {
+            "headers": {
+                "bla": "blub",
+            },
+        }
+    )
+
+    assert _get_headers(fake_task) == {"bla": "blub"}
+
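+    # A nested "headers" dict is merged into the top-level headers.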
+    fake_task.request.update(
+        {
+            "headers": {
+                "headers": {
+                    "tri": "blub",
+                    "bar": "baz",
+                },
+                "bla": "blub",
+            },
+        }
+    )
+
+    assert _get_headers(fake_task) == {"bla": "blub", "tri": "blub", "bar": "baz"}
+
+
+@pytest.mark.parametrize(
+    "seconds, expected_tuple",
+    [
+        (0, (0, "second")),
+        (1, (1, "second")),
+        (0.00001, (0, "second")),
+        (59, (59, "second")),
+        (60, (1, "minute")),
+        (100, (1, "minute")),
+        (1000, (16, "minute")),
+        (10000, (2, "hour")),
+        (100000, (1, "day")),
+        (100000000, (1157, "day")),
+    ],
+)
+def test_get_humanized_interval(seconds, expected_tuple):
+    assert _get_humanized_interval(seconds) == expected_tuple
+
+
+def test_crons_task_success():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
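+            # Expected duration = now - start timestamp: 500.5 - 200.1 = 300.4 seconds.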
+            crons_task_success(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.OK,
+            )
+
+
+def test_crons_task_failure():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
+            crons_task_failure(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.ERROR,
+            )
+
+
+def test_crons_task_retry():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
+            crons_task_retry(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.ERROR,
+            )
+
+
+def test_get_monitor_config_crontab():
+    app = MagicMock()
+    app.conf = MagicMock()
+    app.conf.timezone = "Europe/Vienna"
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "crontab",
+            "value": "*/10 12 3 * *",
+        },
+        "timezone": "Europe/Vienna",
+    }
+    assert "unit" not in monitor_config["schedule"]
+
+
+def test_get_monitor_config_seconds():
+    app = MagicMock()
+    app.conf = MagicMock()
+    app.conf.timezone = "Europe/Vienna"
+
+    celery_schedule = schedule(run_every=3)  # seconds
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.logger.warning"
+    ) as mock_logger_warning:
+        monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+        mock_logger_warning.assert_called_with(
+            "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+            "foo",
+            3,
+        )
+        assert monitor_config == {}
+
+
+def test_get_monitor_config_minutes():
+    app = MagicMock()
+    app.conf = MagicMock()
+    app.conf.timezone = "Europe/Vienna"
+
+    celery_schedule = schedule(run_every=60)  # seconds
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "interval",
+            "value": 1,
+            "unit": "minute",
+        },
+        "timezone": "Europe/Vienna",
+    }
+
+
+def test_get_monitor_config_unknown():
+    app = MagicMock()
+    app.conf = MagicMock()
+    app.conf.timezone = "Europe/Vienna"
+
+    unknown_celery_schedule = MagicMock()
+    monitor_config = _get_monitor_config(unknown_celery_schedule, app, "foo")
+    assert monitor_config == {}
+
+
+def test_get_monitor_config_default_timezone():
+    app = MagicMock()
+    app.conf = MagicMock()
+    app.conf.timezone = None
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+
+    assert monitor_config["timezone"] == "UTC"
+
+
+@pytest.mark.parametrize(
+    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
+    [
+        ["some_task_name", ["xxx", "some_task.*"], True],
+        ["some_task_name", ["xxx", "some_other_task.*"], False],
+    ],
+)
+def test_exclude_beat_tasks_option(
+    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
+):
+    """
+    Test excluding Celery Beat tasks from automatic instrumentation.
+    """
+    fake_apply_entry = MagicMock()
+
+    fake_scheduler = MagicMock()
+    fake_scheduler.apply_entry = fake_apply_entry
+
+    fake_integration = MagicMock()
+    fake_integration.exclude_beat_tasks = exclude_beat_tasks
+
+    fake_schedule_entry = MagicMock()
+    fake_schedule_entry.name = task_name
+
+    fake_get_monitor_config = MagicMock()
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.Scheduler", fake_scheduler
+    ) as Scheduler:  # noqa: N806
+        with mock.patch(
+            "sentry_sdk.integrations.celery.Hub.current.get_integration",
+            return_value=fake_integration,
+        ):
+            with mock.patch(
+                "sentry_sdk.integrations.celery._get_monitor_config",
+                fake_get_monitor_config,
+            ) as _get_monitor_config:
+                # Mimic CeleryIntegration patching of Scheduler.apply_entry()
+                _patch_beat_apply_entry()
+                # Mimic Celery Beat calling a task from the Beat schedule
+                Scheduler.apply_entry(fake_scheduler, fake_schedule_entry)
+
+                if task_in_excluded_beat_tasks:
+                    # Only the original Scheduler.apply_entry() is called; _get_monitor_config is NOT called.
+                    assert fake_apply_entry.call_count == 1
+                    _get_monitor_config.assert_not_called()
+
+                else:
+                    # The original Scheduler.apply_entry() is called, AND _get_monitor_config is called.
+                    assert fake_apply_entry.call_count == 1
+                    assert _get_monitor_config.call_count == 1
diff --git a/tests/integrations/cloud_resource_context/__init__.py b/tests/integrations/cloud_resource_context/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integrations/cloud_resource_context/test_cloud_resource_context.py b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
new file mode 100644
index 0000000..b36f795
--- /dev/null
+++ b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
@@ -0,0 +1,410 @@
+import json
+
+import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+from sentry_sdk.integrations.cloud_resource_context import (
+    CLOUD_PLATFORM,
+    CLOUD_PROVIDER,
+)
+
+AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD = {
+    "accountId": "298817902971",
+    "architecture": "x86_64",
+    "availabilityZone": "us-east-1b",
+    "billingProducts": None,
+    "devpayProductCodes": None,
+    "marketplaceProductCodes": None,
+    "imageId": "ami-00874d747dde344fa",
+    "instanceId": "i-07d3301297fe0a55a",
+    "instanceType": "t2.small",
+    "kernelId": None,
+    "pendingTime": "2023-02-08T07:54:05Z",
+    "privateIp": "171.131.65.115",
+    "ramdiskId": None,
+    "region": "us-east-1",
+    "version": "2017-09-30",
+}
+
+try:
+    # Python 3
+    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
+        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
+    )
+except TypeError:
+    # Python 2
+    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
+        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD)
+    ).encode("utf-8")
+
+GCP_GCE_EXAMPLE_METADATA_PAYLOAD = {
+    "instance": {
+        "attributes": {},
+        "cpuPlatform": "Intel Broadwell",
+        "description": "",
+        "disks": [
+            {
+                "deviceName": "tests-cloud-contexts-in-python-sdk",
+                "index": 0,
+                "interface": "SCSI",
+                "mode": "READ_WRITE",
+                "type": "PERSISTENT-BALANCED",
+            }
+        ],
+        "guestAttributes": {},
+        "hostname": "tests-cloud-contexts-in-python-sdk.c.client-infra-internal.internal",
+        "id": 1535324527892303790,
+        "image": "projects/debian-cloud/global/images/debian-11-bullseye-v20221206",
+        "licenses": [{"id": "2853224013536823851"}],
+        "machineType": "projects/542054129475/machineTypes/e2-medium",
+        "maintenanceEvent": "NONE",
+        "name": "tests-cloud-contexts-in-python-sdk",
+        "networkInterfaces": [
+            {
+                "accessConfigs": [
+                    {"externalIp": "134.30.53.15", "type": "ONE_TO_ONE_NAT"}
+                ],
+                "dnsServers": ["169.254.169.254"],
+                "forwardedIps": [],
+                "gateway": "10.188.0.1",
+                "ip": "10.188.0.3",
+                "ipAliases": [],
+                "mac": "42:01:0c:7c:00:13",
+                "mtu": 1460,
+                "network": "projects/544954029479/networks/default",
+                "subnetmask": "255.255.240.0",
+                "targetInstanceIps": [],
+            }
+        ],
+        "preempted": "FALSE",
+        "remainingCpuTime": -1,
+        "scheduling": {
+            "automaticRestart": "TRUE",
+            "onHostMaintenance": "MIGRATE",
+            "preemptible": "FALSE",
+        },
+        "serviceAccounts": {},
+        "tags": ["http-server", "https-server"],
+        "virtualClock": {"driftToken": "0"},
+        "zone": "projects/142954069479/zones/northamerica-northeast2-b",
+    },
+    "oslogin": {"authenticate": {"sessions": {}}},
+    "project": {
+        "attributes": {},
+        "numericProjectId": 204954049439,
+        "projectId": "my-project-internal",
+    },
+}
+
+try:
+    # Python 3
+    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
+        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD), "utf-8"
+    )
+except TypeError:
+    # Python 2
+    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
+        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD)
+    ).encode("utf-8")
+
+
+def test_is_aws_http_error():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 405
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_aws() is False
+    assert CloudResourceContextIntegration.aws_token == ""
+
+
+def test_is_aws_ok():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 200
+    response.data = b"something"
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_aws() is True
+    assert CloudResourceContextIntegration.aws_token == "something"
+
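+    # A failing metadata request must flip detection back to "not AWS".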
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+    assert CloudResourceContextIntegration._is_aws() is False
+
+
+def test_is_aws_exception():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+
+    assert CloudResourceContextIntegration._is_aws() is False
+
+
+@pytest.mark.parametrize(
+    "http_status, response_data, expected_context",
+    [
+        [
+            405,
+            b"",
+            {
+                "cloud.provider": CLOUD_PROVIDER.AWS,
+                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+            },
+        ],
+        [
+            200,
+            b"something-but-not-json",
+            {
+                "cloud.provider": CLOUD_PROVIDER.AWS,
+                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+            },
+        ],
+        [
+            200,
+            AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES,
+            {
+                "cloud.provider": "aws",
+                "cloud.platform": "aws_ec2",
+                "cloud.account.id": "298817902971",
+                "cloud.availability_zone": "us-east-1b",
+                "cloud.region": "us-east-1",
+                "host.id": "i-07d3301297fe0a55a",
+                "host.type": "t2.small",
+            },
+        ],
+    ],
+)
+def test_get_aws_context(http_status, response_data, expected_context):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = http_status
+    response.data = response_data
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._get_aws_context() == expected_context
+
+
+def test_is_gcp_http_error():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 405
+    response.data = b'{"some": "json"}'
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_gcp() is False
+    assert CloudResourceContextIntegration.gcp_metadata is None
+
+
+def test_is_gcp_ok():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 200
+    response.data = b'{"some": "json"}'
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_gcp() is True
+    assert CloudResourceContextIntegration.gcp_metadata == {"some": "json"}
+
+
+def test_is_gcp_exception():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+    assert CloudResourceContextIntegration._is_gcp() is False
+
+
+@pytest.mark.parametrize(
+    "http_status, response_data, expected_context",
+    [
+        [
+            405,
+            None,
+            {
+                "cloud.provider": CLOUD_PROVIDER.GCP,
+                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+            },
+        ],
+        [
+            200,
+            b"something-but-not-json",
+            {
+                "cloud.provider": CLOUD_PROVIDER.GCP,
+                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+            },
+        ],
+        [
+            200,
+            GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES,
+            {
+                "cloud.provider": "gcp",
+                "cloud.platform": "gcp_compute_engine",
+                "cloud.account.id": "my-project-internal",
+                "cloud.availability_zone": "northamerica-northeast2-b",
+                "host.id": 1535324527892303790,
+            },
+        ],
+    ],
+)
+def test_get_gcp_context(http_status, response_data, expected_context):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.gcp_metadata = None
+
+    response = MagicMock()
+    response.status = http_status
+    response.data = response_data
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._get_gcp_context() == expected_context
+
+
+@pytest.mark.parametrize(
+    "is_aws, is_gcp, expected_provider",
+    [
+        [False, False, ""],
+        [False, True, CLOUD_PROVIDER.GCP],
+        [True, False, CLOUD_PROVIDER.AWS],
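+        # When both probes succeed, AWS takes precedence: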
+        [True, True, CLOUD_PROVIDER.AWS],
+    ],
+)
+def test_get_cloud_provider(is_aws, is_gcp, expected_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._is_aws = MagicMock(return_value=is_aws)
+    CloudResourceContextIntegration._is_gcp = MagicMock(return_value=is_gcp)
+
+    assert CloudResourceContextIntegration._get_cloud_provider() == expected_provider
+
+
+@pytest.mark.parametrize(
+    "cloud_provider",
+    [
+        CLOUD_PROVIDER.ALIBABA,
+        CLOUD_PROVIDER.AZURE,
+        CLOUD_PROVIDER.IBM,
+        CLOUD_PROVIDER.TENCENT,
+    ],
+)
+def test_get_cloud_resource_context_unsupported_providers(cloud_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
+        return_value=cloud_provider
+    )
+
+    assert CloudResourceContextIntegration._get_cloud_resource_context() == {}
+
+
+@pytest.mark.parametrize(
+    "cloud_provider",
+    [
+        CLOUD_PROVIDER.AWS,
+        CLOUD_PROVIDER.GCP,
+    ],
+)
+def test_get_cloud_resource_context_supported_providers(cloud_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
+        return_value=cloud_provider
+    )
+
+    assert CloudResourceContextIntegration._get_cloud_resource_context() != {}
+
+
+@pytest.mark.parametrize(
+    "cloud_provider, cloud_resource_context, warning_called, set_context_called",
+    [
+        ["", {}, False, False],
+        [CLOUD_PROVIDER.AWS, {}, False, False],
+        [CLOUD_PROVIDER.GCP, {}, False, False],
+        [CLOUD_PROVIDER.AZURE, {}, True, False],
+        [CLOUD_PROVIDER.ALIBABA, {}, True, False],
+        [CLOUD_PROVIDER.IBM, {}, True, False],
+        [CLOUD_PROVIDER.TENCENT, {}, True, False],
+        ["", {"some": "context"}, False, True],
+        [CLOUD_PROVIDER.AWS, {"some": "context"}, False, True],
+        [CLOUD_PROVIDER.GCP, {"some": "context"}, False, True],
+    ],
+)
+def test_setup_once(
+    cloud_provider, cloud_resource_context, warning_called, set_context_called
+):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.cloud_provider = cloud_provider
+    CloudResourceContextIntegration._get_cloud_resource_context = MagicMock(
+        return_value=cloud_resource_context
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.cloud_resource_context.set_context"
+    ) as fake_set_context:
+        with mock.patch(
+            "sentry_sdk.integrations.cloud_resource_context.logger.warning"
+        ) as fake_warning:
+            CloudResourceContextIntegration.setup_once()
+
+            if set_context_called:
+                fake_set_context.assert_called_once_with(
+                    "cloud_resource", cloud_resource_context
+                )
+            else:
+                fake_set_context.assert_not_called()
+
+            if warning_called:
+                assert fake_warning.call_count == 1
+            else:
+                fake_warning.assert_not_called()
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 2b3382b..85921cf 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -1,3 +1,5 @@
+import json
+
 import django
 import pytest
 from channels.testing import HttpCommunicator
@@ -5,6 +7,11 @@ from sentry_sdk import capture_message
 from sentry_sdk.integrations.django import DjangoIntegration
 from tests.integrations.django.myapp.asgi import channels_application
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 APPS = [channels_application]
 if django.VERSION >= (3, 0):
     from tests.integrations.django.myapp.asgi import asgi_application
@@ -70,11 +77,47 @@ async def test_async_views(sentry_init, capture_events, application):
     }
 
 
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
 @pytest.mark.asyncio
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
-async def test_async_views_concurrent_execution(sentry_init, capture_events, settings):
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
+    with mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0):
+        sentry_init(
+            integrations=[DjangoIntegration()],
+            traces_sample_rate=1.0,
+            _experiments={"profiles_sample_rate": 1.0},
+        )
+
+        envelopes = capture_envelopes()
+
+        comm = HttpCommunicator(application, "GET", endpoint)
+        response = await comm.get_response()
+        assert response["status"] == 200, response["body"]
+
+        await comm.wait()
+
+        data = json.loads(response["body"])
+
+        envelopes = list(envelopes)
+        assert len(envelopes) == 1
+
+        profiles = [item for item in envelopes[0].items if item.type == "profile"]
+        assert len(profiles) == 1
+
+        for profile in profiles:
+            transactions = profile.payload.json["transactions"]
+            assert len(transactions) == 1
+            assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_views_concurrent_execution(sentry_init, settings):
     import asyncio
     import time
 
@@ -108,7 +151,7 @@ async def test_async_views_concurrent_execution(sentry_init, capture_events, set
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
 async def test_async_middleware_that_is_function_concurrent_execution(
-    sentry_init, capture_events, settings
+    sentry_init, settings
 ):
     import asyncio
     import time
@@ -175,15 +218,138 @@ async def test_async_middleware_spans(
         render_span_tree(transaction)
         == """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
-          - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-          - op="django.view": description="async_message"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.signals": description="django.core.cache.close_caches"
-  - op="django.signals": description="django.core.handlers.base.reset_urlconf\""""
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
+          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+          - op="view.render": description="async_message"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="event.django": description="django.core.cache.close_caches"
+  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
+    )
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_has_trace_if_performance_enabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    # ASGI Django does not create transactions by default,
+    # so we do not have a transaction_event here.
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
     )
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_has_trace_if_performance_disabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+
+    events = capture_events()
+
+    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    comm = HttpCommunicator(
+        asgi_application,
+        "GET",
+        "/view-exc-with-msg",
+        headers=[(b"sentry-trace", sentry_trace_header.encode())],
+    )
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    # ASGI Django does not create transactions by default,
+    # so we do not have a transaction_event here.
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    comm = HttpCommunicator(
+        asgi_application,
+        "GET",
+        "/view-exc-with-msg",
+        headers=[(b"sentry-trace", sentry_trace_header.encode())],
+    )
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
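
For readers unfamiliar with the header these tests construct: a sketch of the
sentry-trace header anatomy, using the exact values from the tests above. It
is a dash-separated triple of trace id, parent span id, and sampled flag:

    trace_id = "582b43a4192642f0b136d5159a501701"  # 32 hex characters
    parent_span_id = "6e8f22c393e68f19"            # 16 hex characters
    sampled = 1                                    # 1 = sampled, 0 = not sampled
    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, sampled)
    # -> "582b43a4192642f0b136d5159a501701-6e8f22c393e68f19-1"
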
diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py
index b575554..30cab96 100644
--- a/tests/integrations/django/myapp/routing.py
+++ b/tests/integrations/django/myapp/routing.py
@@ -1,11 +1,18 @@
 import channels
-
-from channels.http import AsgiHandler
 from channels.routing import ProtocolTypeRouter
 
-if channels.__version__ < "3.0.0":
-    channels_handler = AsgiHandler
-else:
-    channels_handler = AsgiHandler()
+try:
+    from channels.http import AsgiHandler
+
+    if channels.__version__ < "3.0.0":
+        django_asgi_app = AsgiHandler
+    else:
+        django_asgi_app = AsgiHandler()
+
+except ModuleNotFoundError:
+    # Since channels 4.0, ASGI handling is done by Django itself
+    from django.core.asgi import get_asgi_application
+
+    django_asgi_app = get_asgi_application()
 
-application = ProtocolTypeRouter({"http": channels_handler})
+application = ProtocolTypeRouter({"http": django_asgi_app})
diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py
index cc4d249..0d41618 100644
--- a/tests/integrations/django/myapp/settings.py
+++ b/tests/integrations/django/myapp/settings.py
@@ -122,11 +122,11 @@ try:
     import psycopg2  # noqa
 
     DATABASES["postgres"] = {
-        "ENGINE": "django.db.backends.postgresql_psycopg2",
+        "ENGINE": "django.db.backends.postgresql",
         "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
         "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
         "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
-        "HOST": "localhost",
+        "HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
         "PORT": 5432,
     }
 except (ImportError, KeyError):
diff --git a/tests/integrations/django/myapp/templates/trace_meta.html b/tests/integrations/django/myapp/templates/trace_meta.html
new file mode 100644
index 0000000..139fd16
--- /dev/null
+++ b/tests/integrations/django/myapp/templates/trace_meta.html
@@ -0,0 +1 @@
+{{ sentry_trace_meta }}
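
The new template renders only the sentry_trace_meta variable. Going by the
regex asserted in test_template_tracing_meta (in test_basic.py further down in
this diff), it expands to two adjacent meta tags; a sketch of the expected
shape, with per-request placeholder values:

    # Expected rendering of {{ sentry_trace_meta }}, per the assertions in
    # tests/integrations/django/test_basic.py below:
    expected = (
        '<meta name="sentry-trace" content="{traceparent}">'
        '<meta name="baggage" content="{baggage}">'
    )
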
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 8e43460..2a4535e 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -28,6 +28,14 @@ from . import views
 
 urlpatterns = [
     path("view-exc", views.view_exc, name="view_exc"),
+    path("view-exc-with-msg", views.view_exc_with_msg, name="view_exc_with_msg"),
+    path("cached-view", views.cached_view, name="cached_view"),
+    path("not-cached-view", views.not_cached_view, name="not_cached_view"),
+    path(
+        "view-with-cached-template-fragment",
+        views.view_with_cached_template_fragment,
+        name="view_with_cached_template_fragment",
+    ),
     path(
         "read-body-and-view-exc",
         views.read_body_and_view_exc,
@@ -47,6 +55,7 @@ urlpatterns = [
     path("template-exc", views.template_exc, name="template_exc"),
     path("template-test", views.template_test, name="template_test"),
     path("template-test2", views.template_test2, name="template_test2"),
+    path("template-test3", views.template_test3, name="template_test3"),
     path("postgres-select", views.postgres_select, name="postgres_select"),
     path(
         "permission-denied-exc",
@@ -58,6 +67,7 @@ urlpatterns = [
         views.csrf_hello_not_exempt,
         name="csrf_hello_not_exempt",
     ),
+    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
 ]
 
 # async views
@@ -67,6 +77,11 @@ if views.async_message is not None:
 if views.my_async_view is not None:
     urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
 
+if views.thread_ids_async is not None:
+    urlpatterns.append(
+        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
+    )
+
 # rest framework
 try:
     urlpatterns.append(
@@ -80,6 +95,9 @@ try:
         )
     )
     urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
+    urlpatterns.append(
+        path("rest-json-response", views.rest_json_response, name="rest_json_response")
+    )
     urlpatterns.append(
         path(
             "rest-permission-denied-exc",
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index 02c67ca..1e909f2 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -1,16 +1,22 @@
+import json
+import threading
+
 from django import VERSION
 from django.contrib.auth import login
 from django.contrib.auth.models import User
 from django.core.exceptions import PermissionDenied
 from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
 from django.shortcuts import render
+from django.template import Context, Template
 from django.template.response import TemplateResponse
 from django.utils.decorators import method_decorator
+from django.views.decorators.cache import cache_page
 from django.views.decorators.csrf import csrf_exempt
 from django.views.generic import ListView
 
 try:
     from rest_framework.decorators import api_view
+    from rest_framework.response import Response
 
     @api_view(["POST"])
     def rest_framework_exc(request):
@@ -29,11 +35,16 @@ try:
     def rest_permission_denied_exc(request):
         raise PermissionDenied("bye")
 
+    @api_view(["GET"])
+    def rest_json_response(request):
+        return Response(dict(ok=True))
+
 except ImportError:
     pass
 
 
 import sentry_sdk
+from sentry_sdk import capture_message
 
 
 @csrf_exempt
@@ -41,6 +52,34 @@ def view_exc(request):
     1 / 0
 
 
+@csrf_exempt
+def view_exc_with_msg(request):
+    capture_message("oops")
+    1 / 0
+
+
+@cache_page(60)
+def cached_view(request):
+    return HttpResponse("ok")
+
+
+def not_cached_view(request):
+    return HttpResponse("ok")
+
+
+def view_with_cached_template_fragment(request):
+    template = Template(
+        """{% load cache %}
+        Not cached content goes here.
+        {% cache 500 some_identifier %}
+            And here some cached content.
+        {% endcache %}
+        """
+    )
+    rendered = template.render(Context({}))
+    return HttpResponse(rendered)
+
+
 # This is a "class based view" as previously found in the sentry codebase. The
 # interesting property of this one is that csrf_exempt, as a class attribute,
 # is not in __dict__, so regular use of functools.wraps will not forward the
@@ -136,6 +175,15 @@ def template_test2(request, *args, **kwargs):
     )
 
 
+@csrf_exempt
+def template_test3(request, *args, **kwargs):
+    from sentry_sdk import Hub
+
+    hub = Hub.current
+    capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+    return render(request, "trace_meta.html", {})
+
+
 @csrf_exempt
 def postgres_select(request, *args, **kwargs):
     from django.db import connections
@@ -154,6 +202,16 @@ def csrf_hello_not_exempt(*args, **kwargs):
     return HttpResponse("ok")
 
 
+def thread_ids_sync(*args, **kwargs):
+    response = json.dumps(
+        {
+            "main": threading.main_thread().ident,
+            "active": threading.current_thread().ident,
+        }
+    )
+    return HttpResponse(response)
+
+
 if VERSION >= (3, 1):
     # Use exec to produce valid Python 2
     exec(
@@ -168,6 +226,16 @@ if VERSION >= (3, 1):
     await asyncio.sleep(1)
     return HttpResponse('Hello World')"""
     )
+
+    exec(
+        """async def thread_ids_async(request):
+    response = json.dumps({
+        "main": threading.main_thread().ident,
+        "active": threading.current_thread().ident,
+    })
+    return HttpResponse(response)"""
+    )
 else:
     async_message = None
     my_async_view = None
+    thread_ids_async = None
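
The exec wrappers above exist only so this module still parses on Python 2,
where async def is a syntax error. A sketch of the equivalent plain definition
once Python 2 support is not a concern:

    import json
    import threading

    from django.http import HttpResponse

    async def thread_ids_async(request):
        # Same payload as the exec'd version above.
        return HttpResponse(json.dumps({
            "main": threading.main_thread().ident,
            "active": threading.current_thread().ident,
        }))
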
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index a62f1bb..78cd16a 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -1,11 +1,14 @@
 from __future__ import absolute_import
 
 import json
+import os
+import random
+import re
 import pytest
-import pytest_django
 from functools import partial
 
 from werkzeug.test import Client
+
 from django import VERSION as DJANGO_VERSION
 from django.contrib.auth.models import User
 from django.core.management import execute_from_command_line
@@ -16,30 +19,17 @@ try:
 except ImportError:
     from django.core.urlresolvers import reverse
 
-from sentry_sdk._compat import PY2
+from sentry_sdk._compat import PY2, PY310
 from sentry_sdk import capture_message, capture_exception, configure_scope
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.django import DjangoIntegration
 from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
+from sentry_sdk.integrations.django.caching import _get_span_description
 from sentry_sdk.integrations.executing import ExecutingIntegration
-
 from tests.integrations.django.myapp.wsgi import application
+from tests.integrations.django.utils import pytest_mark_django_db_decorator
 
-# Hack to prevent from experimental feature introduced in version `4.3.0` in `pytest-django` that
-# requires explicit database allow from failing the test
-pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
-try:
-    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
-    if pytest_version > (4, 2, 0):
-        pytest_mark_django_db_decorator = partial(
-            pytest.mark.django_db, databases="__all__"
-        )
-except ValueError:
-    if "dev" in pytest_django.__version__:
-        pytest_mark_django_db_decorator = partial(
-            pytest.mark.django_db, databases="__all__"
-        )
-except AttributeError:
-    pass
+DJANGO_VERSION = DJANGO_VERSION[:2]
 
 
 @pytest.fixture
@@ -47,6 +37,36 @@ def client():
     return Client(application)
 
 
+@pytest.fixture
+def use_django_caching(settings):
+    settings.CACHES = {
+        "default": {
+            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
+        }
+    }
+
+
+@pytest.fixture
+def use_django_caching_with_middlewares(settings):
+    settings.CACHES = {
+        "default": {
+            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
+        }
+    }
+    if hasattr(settings, "MIDDLEWARE"):
+        middleware = settings.MIDDLEWARE
+    elif hasattr(settings, "MIDDLEWARE_CLASSES"):
+        middleware = settings.MIDDLEWARE_CLASSES
+    else:
+        middleware = None
+
+    if middleware is not None:
+        middleware.insert(0, "django.middleware.cache.UpdateCacheMiddleware")
+        middleware.append("django.middleware.cache.FetchFromCacheMiddleware")
+
+
 def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     exceptions = capture_exceptions()
@@ -144,6 +164,112 @@ def test_transaction_with_class_view(sentry_init, client, capture_events):
     assert event["message"] == "hi"
 
 
+def test_has_trace_if_performance_enabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    client.head(reverse("view_exc_with_msg"))
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_has_trace_if_performance_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+    )
+    events = capture_events()
+    client.head(reverse("view_exc_with_msg"))
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    client.head(
+        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
+    )
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+def test_trace_from_headers_if_performance_disabled(
+    sentry_init, client, capture_events
+):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+    )
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    client.head(
+        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
+    )
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
 @pytest.mark.forked
 @pytest.mark.django_db
 def test_user_captured(sentry_init, client, capture_events):
@@ -300,6 +426,25 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     assert crumb["data"]["db.params"] == {"my_foo": 10}
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_response_trace(sentry_init, client, capture_events, render_span_tree):
+    pytest.importorskip("rest_framework")
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+    content, status, headers = client.get(reverse("rest_json_response"))
+    assert status == "200 OK"
+
+    assert (
+        '- op="view.response.render": description="serialize response"'
+        in render_span_tree(events[0])
+    )
+
+
 @pytest.mark.parametrize(
     "query",
     [
@@ -428,14 +573,19 @@ def test_django_connect_trace(sentry_init, client, capture_events, render_span_t
     content, status, headers = client.get(reverse("postgres_select"))
     assert status == "200 OK"
 
-    assert '- op="db": description="connect"' in render_span_tree(events[0])
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db":
+            data = span.get("data")
+            assert data.get(SPANDATA.DB_SYSTEM) == "postgresql"
+
+    assert '- op="db": description="connect"' in render_span_tree(event)
 
 
 @pytest.mark.forked
 @pytest_mark_django_db_decorator(transaction=True)
-def test_django_connect_breadcrumbs(
-    sentry_init, client, capture_events, render_span_tree
-):
+def test_django_connect_breadcrumbs(sentry_init, capture_events):
     """
     Verify we record a breadcrumb when opening a new database.
     """
@@ -469,6 +619,43 @@ def test_django_connect_breadcrumbs(
     ]
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_db_connection_span_data(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+    )
+    from django.db import connections
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing connection to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    content, status, headers = client.get(reverse("postgres_select"))
+    assert status == "200 OK"
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db":
+            data = span.get("data")
+            assert data.get(SPANDATA.DB_SYSTEM) == "postgresql"
+            assert (
+                data.get(SPANDATA.DB_NAME)
+                == connections["postgres"].get_connection_params()["database"]
+            )
+            assert data.get(SPANDATA.SERVER_ADDRESS) == os.environ.get(
+                "SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"
+            )
+            assert data.get(SPANDATA.SERVER_PORT) == 5432
+
+
 @pytest.mark.parametrize(
     "transaction_style,client_url,expected_transaction,expected_source,expected_response",
     [
@@ -556,6 +743,29 @@ def test_read_request(sentry_init, client, capture_events):
     assert "data" not in event["request"]
 
 
+def test_template_tracing_meta(sentry_init, client, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+    events = capture_events()
+
+    content, _, _ = client.get(reverse("template_test3"))
+    rendered_meta = b"".join(content).decode("utf-8")
+
+    traceparent, baggage = events[0]["message"].split("\n")
+    assert traceparent != ""
+    assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">\n',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Python 2 does not preserve dict insertion order, so compare the baggage entries sorted
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+
+
 @pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
 def test_template_exception(
     sentry_init, client, capture_events, with_executing_integration
@@ -582,7 +792,6 @@ def test_template_exception(
 
     assert template_frame["post_context"] == ["11\n", "12\n", "13\n", "14\n", "15\n"]
     assert template_frame["lineno"] == 10
-    assert template_frame["in_app"]
     assert template_frame["filename"].endswith("error.html")
 
     filenames = [
@@ -652,7 +861,7 @@ def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
     sentry_init(integrations=[DjangoIntegration()])
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse(endpoint))
+    _, status, _ = client.get(reverse(endpoint))
     assert status.lower() == "403 forbidden"
 
     assert not events
@@ -666,95 +875,137 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree):
     views_tests = [
         (
             reverse("template_test2"),
-            '- op="django.template.render": description="[user_name.html, ...]"',
+            '- op="template.render": description="[user_name.html, ...]"',
         ),
     ]
     if DJANGO_VERSION >= (1, 7):
         views_tests.append(
             (
                 reverse("template_test"),
-                '- op="django.template.render": description="user_name.html"',
+                '- op="template.render": description="user_name.html"',
             ),
         )
 
     for url, expected_line in views_tests:
         events = capture_events()
-        _content, status, _headers = client.get(url)
+        client.get(url)
         transaction = events[0]
         assert expected_line in render_span_tree(transaction)
 
 
+if DJANGO_VERSION >= (1, 10):
+    EXPECTED_MIDDLEWARE_SPANS = """\
+- op="http.server": description=null
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
+          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
+            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+            - op="view.render": description="message"\
+"""
+else:
+    EXPECTED_MIDDLEWARE_SPANS = """\
+- op="http.server": description=null
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
+  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+  - op="view.render": description="message"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+"""
+
+
 def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
     sentry_init(
-        integrations=[DjangoIntegration()],
+        integrations=[
+            DjangoIntegration(signals_spans=False),
+        ],
         traces_sample_rate=1.0,
-        _experiments={"record_sql_params": True},
     )
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse("message"))
+    client.get(reverse("message"))
 
     message, transaction = events
 
     assert message["message"] == "hi"
+    assert render_span_tree(transaction) == EXPECTED_MIDDLEWARE_SPANS
 
-    if DJANGO_VERSION >= (1, 10):
-        assert (
-            render_span_tree(transaction)
-            == """\
+
+def test_middleware_spans_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(middleware_spans=False, signals_spans=False),
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("message"))
+
+    message, transaction = events
+
+    assert message["message"] == "hi"
+    assert not len(transaction["spans"])
+
+
+if DJANGO_VERSION >= (1, 10):
+    EXPECTED_SIGNALS_SPANS = """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
-          - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
-            - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-            - op="django.view": description="message"\
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"\
 """
-        )
-
-    else:
-        assert (
-            render_span_tree(transaction)
-            == """\
+else:
+    EXPECTED_SIGNALS_SPANS = """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
-  - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-  - op="django.view": description="message"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"\
 """
-        )
 
 
-def test_middleware_spans_disabled(sentry_init, client, capture_events):
+def test_signals_spans(sentry_init, client, capture_events, render_span_tree):
     sentry_init(
-        integrations=[DjangoIntegration(middleware_spans=False)], traces_sample_rate=1.0
+        integrations=[
+            DjangoIntegration(middleware_spans=False),
+        ],
+        traces_sample_rate=1.0,
     )
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse("message"))
+    client.get(reverse("message"))
 
     message, transaction = events
 
     assert message["message"] == "hi"
+    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS
 
-    assert len(transaction["spans"]) == 2
-
-    assert transaction["spans"][0]["op"] == "django.signals"
+    assert transaction["spans"][0]["op"] == "event.django"
     assert transaction["spans"][0]["description"] == "django.db.reset_queries"
 
-    assert transaction["spans"][1]["op"] == "django.signals"
+    assert transaction["spans"][1]["op"] == "event.django"
     assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
 
 
+def test_signals_spans_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(middleware_spans=False, signals_spans=False),
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("message"))
+
+    message, transaction = events
+
+    assert message["message"] == "hi"
+    assert not transaction["spans"]
+
+
 def test_csrf(sentry_init, client):
     """
     Assert that CSRF view decorator works even with the view wrapped in our own
@@ -834,4 +1085,244 @@ def test_get_receiver_name():
 
     a_partial = partial(dummy)
     name = _get_receiver_name(a_partial)
-    assert name == str(a_partial)
+    if PY310:
+        assert name == "functools.partial(<function " + a_partial.func.__name__ + ">)"
+    else:
+        assert name == "partial(<function " + a_partial.func.__name__ + ">)"
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_middleware(
+    sentry_init, client, capture_events, use_django_caching_with_middlewares
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("not_cached_view"))
+    client.get(reverse("not_cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_decorator(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("cached_view"))
+    client.get(reverse("cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_templatetag(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("view_with_cached_template_fragment"))
+    client.get(reverse("view_with_cached_template_fragment"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_middleware(
+    sentry_init, client, capture_events, use_django_caching_with_middlewares
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+
+    client.application.load_middleware()
+    events = capture_events()
+
+    client.get(reverse("not_cached_view"))
+    client.get(reverse("not_cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert first_event["spans"][0]["data"] == {"cache.hit": False}
+
+    assert len(second_event["spans"]) == 2
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert second_event["spans"][0]["data"] == {"cache.hit": False}
+
+    assert second_event["spans"][1]["op"] == "cache.get_item"
+    assert second_event["spans"][1]["description"].startswith(
+        "get views.decorators.cache.cache_page."
+    )
+    assert second_event["spans"][1]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][1]["data"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_caching):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("cached_view"))
+    client.get(reverse("cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert first_event["spans"][0]["data"] == {"cache.hit": False}
+
+    assert len(second_event["spans"]) == 2
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert second_event["spans"][0]["data"] == {"cache.hit": False}
+
+    assert second_event["spans"][1]["op"] == "cache.get_item"
+    assert second_event["spans"][1]["description"].startswith(
+        "get views.decorators.cache.cache_page."
+    )
+    assert second_event["spans"][1]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][1]["data"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_templatetag(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("view_with_cached_template_fragment"))
+    client.get(reverse("view_with_cached_template_fragment"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get template.cache.some_identifier."
+    )
+    assert first_event["spans"][0]["data"] == {"cache.hit": False}
+
+    assert len(second_event["spans"]) == 1
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get template.cache.some_identifier."
+    )
+    assert second_event["spans"][0]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][0]["data"]
+
+
+@pytest.mark.parametrize(
+    "method_name, args, kwargs, expected_description",
+    [
+        ("get", None, None, "get "),
+        ("get", [], {}, "get "),
+        ("get", ["bla", "blub", "foo"], {}, "get bla"),
+        (
+            "get_many",
+            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
+            {},
+            "get_many ['bla 1', 'bla 2', 'bla 3']",
+        ),
+        (
+            "get_many",
+            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
+            {"key": "bar"},
+            "get_many ['bla 1', 'bla 2', 'bla 3']",
+        ),
+        ("get", [], {"key": "bar"}, "get bar"),
+        (
+            "get",
+            "something",
+            {},
+            "get s",
+        ),  # This should never happen; just making sure we do not raise an exception in that case.
+    ],
+)
+def test_cache_spans_get_span_description(
+    method_name, args, kwargs, expected_description
+):
+    assert _get_span_description(method_name, args, kwargs) == expected_description
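
The new cache tests above toggle three independent DjangoIntegration flags. A
minimal sketch of enabling cache instrumentation in an application, using the
option names exactly as the tests pass them (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[
            DjangoIntegration(
                cache_spans=True,       # emit cache.get_item spans
                middleware_spans=True,  # emit middleware.django spans
                signals_spans=True,     # emit event.django signal spans
            )
        ],
    )
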
diff --git a/tests/integrations/django/test_data_scrubbing.py b/tests/integrations/django/test_data_scrubbing.py
new file mode 100644
index 0000000..b3e5311
--- /dev/null
+++ b/tests/integrations/django/test_data_scrubbing.py
@@ -0,0 +1,83 @@
+import pytest
+
+from werkzeug.test import Client
+
+from sentry_sdk.integrations.django import DjangoIntegration
+from tests.integrations.django.myapp.wsgi import application
+from tests.integrations.django.utils import pytest_mark_django_db_decorator
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_removed(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert "cookies" not in event["request"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "sessionid": "[Filtered]",
+        "csrftoken": "[Filtered]",
+        "foo": "bar",
+    }
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_custom_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+    settings,
+):
+    settings.SESSION_COOKIE_NAME = "my_sess"
+    settings.CSRF_COOKIE_NAME = "csrf_secret"
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "my_sess", "123")
+    client.set_cookie("localhost", "csrf_secret", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "my_sess": "[Filtered]",
+        "csrf_secret": "[Filtered]",
+        "foo": "bar",
+    }
diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py
index 6f16d88..160da92 100644
--- a/tests/integrations/django/test_transactions.py
+++ b/tests/integrations/django/test_transactions.py
@@ -24,6 +24,9 @@ example_url_conf = (
     url(r"^api/(?P<version>(v1|v2))/author/$", lambda x: ""),
     url(r"^report/", lambda x: ""),
     url(r"^example/", include(included_url_conf)),
+    url(
+        r"^(?P<slug>[$\\-_.+!*(),\\w//]+)/$", lambda x: ""
+    ),  # example of complex regex from django-cms
 )
 
 
@@ -53,6 +56,16 @@ def test_legacy_resolver_included_match():
     assert result == "/example/foo/bar/{param}"
 
 
+def test_complex_regex_from_django_cms():
+    """
+    Reference: https://github.com/getsentry/sentry-python/issues/1527
+    """
+
+    resolver = RavenResolver()
+    result = resolver.resolve("/,/", example_url_conf)
+    assert result == "/{slug}/"
+
+
 @pytest.mark.skipif(django.VERSION < (2, 0), reason="Requires Django > 2.0")
 def test_legacy_resolver_newstyle_django20_urlconf():
     from django.urls import path
diff --git a/tests/integrations/django/utils.py b/tests/integrations/django/utils.py
new file mode 100644
index 0000000..8f68c8f
--- /dev/null
+++ b/tests/integrations/django/utils.py
@@ -0,0 +1,22 @@
+from functools import partial
+
+import pytest
+import pytest_django
+
+
+# Hack to prevent the experimental feature introduced in `pytest-django` 4.3.0,
+# which requires explicitly allowing database access, from failing the tests.
+pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
+try:
+    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
+    if pytest_version > (4, 2, 0):
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except ValueError:
+    if "dev" in pytest_django.__version__:
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except AttributeError:
+    pass
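
A usage sketch of the shared decorator, mirroring how the Django tests in this
diff apply it:

    import pytest

    from tests.integrations.django.utils import pytest_mark_django_db_decorator

    @pytest.mark.forked
    @pytest_mark_django_db_decorator(transaction=True)
    def test_example(sentry_init, capture_events):
        # On pytest-django >= 4.3.0 this expands to
        # pytest.mark.django_db(transaction=True, databases="__all__").
        pass
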
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
index bc61cfc..86e7a61 100644
--- a/tests/integrations/fastapi/test_fastapi.py
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -1,14 +1,23 @@
+import json
+import logging
+import threading
+
 import pytest
 from sentry_sdk.integrations.fastapi import FastApiIntegration
 
 fastapi = pytest.importorskip("fastapi")
 
-from fastapi import FastAPI
+from fastapi import FastAPI, Request
 from fastapi.testclient import TestClient
 from sentry_sdk import capture_message
 from sentry_sdk.integrations.starlette import StarletteIntegration
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 def fastapi_app_factory():
     app = FastAPI()
@@ -23,6 +32,20 @@ def fastapi_app_factory():
         capture_message("Hi")
         return {"message": "Hi"}
 
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
     return app
 
 
@@ -135,3 +158,63 @@ def test_legacy_setup(
 
     (event,) = events
     assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+@pytest.mark.asyncio
+async def test_original_request_not_scrubbed(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = FastAPI()
+
+    @app.post("/error")
+    async def _error(request: Request):
+        logging.critical("Oh no!")
+        assert request.headers["Authorization"] == "Bearer ohno"
+        assert await request.json() == {"password": "secret"}
+
+        return {"error": "Oh no!"}
+
+    events = capture_events()
+
+    client = TestClient(app)
+    client.post(
+        "/error", json={"password": "secret"}, headers={"Authorization": "Bearer ohno"}
+    )
+
+    event = events[0]
+    assert event["request"]["data"] == {"password": "[Filtered]"}
+    assert event["request"]["headers"]["authorization"] == "[Filtered]"
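
test_active_thread_id above builds the ASGI app by wrapping the FastAPI app in
SentryAsgiMiddleware directly. A minimal sketch of that setup on its own:

    import sentry_sdk
    from fastapi import FastAPI
    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    sentry_sdk.init(traces_sample_rate=1.0)

    app = FastAPI()
    # Serve asgi_app instead of app so requests pass through the middleware.
    asgi_app = SentryAsgiMiddleware(app)
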
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index 8983c4e..ae93d13 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -1,4 +1,5 @@
 import json
+import re
 import pytest
 import logging
 
@@ -28,6 +29,7 @@ from sentry_sdk import (
 )
 from sentry_sdk.integrations.logging import LoggingIntegration
 import sentry_sdk.integrations.flask as flask_sentry
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
 
 
 login_manager = LoginManager()
@@ -392,7 +394,9 @@ def test_flask_formdata_request_appear_transaction_body(
 
 @pytest.mark.parametrize("input_char", ["a", b"a"])
 def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
-    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small")
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="small"
+    )
 
     data = input_char * 2000
 
@@ -419,7 +423,9 @@ def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, ap
 
 
 def test_flask_files_and_form(sentry_init, capture_events, app):
-    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="always")
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
+    )
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
 
@@ -447,6 +453,34 @@ def test_flask_files_and_form(sentry_init, capture_events, app):
     assert not event["request"]["data"]["file"]
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, app
+):
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
+    )
+
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.get_json() == data
+        assert request.get_data() == json.dumps(data).encode("ascii")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 @pytest.mark.parametrize(
     "integrations",
     [
@@ -738,6 +772,25 @@ def test_tracing_error(sentry_init, capture_events, app):
     assert exception["type"] == "ZeroDivisionError"
 
 
+def test_error_has_trace_context_if_tracing_disabled(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    events = capture_events()
+
+    @app.route("/error")
+    def error():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        with app.test_client() as client:
+            response = client.get("/error")
+            assert response.status_code == 500
+
+    (error_event,) = events
+
+    assert error_event["contexts"]["trace"]
+
+
 def test_class_based_views(sentry_init, app, capture_events):
     sentry_init(integrations=[flask_sentry.FlaskIntegration()])
     events = capture_events()
@@ -760,22 +813,38 @@ def test_class_based_views(sentry_init, app, capture_events):
     assert event["transaction"] == "hello_class"
 
 
-def test_sentry_trace_context(sentry_init, app, capture_events):
+@pytest.mark.parametrize(
+    "template_string", ["{{ sentry_trace }}", "{{ sentry_trace_meta }}"]
+)
+def test_template_tracing_meta(sentry_init, app, capture_events, template_string):
     sentry_init(integrations=[flask_sentry.FlaskIntegration()])
     events = capture_events()
 
     @app.route("/")
     def index():
-        sentry_span = Hub.current.scope.span
-        capture_message(sentry_span.to_traceparent())
-        return render_template_string("{{ sentry_trace }}")
+        hub = Hub.current
+        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+        return render_template_string(template_string)
 
     with app.test_client() as client:
         response = client.get("/")
         assert response.status_code == 200
-        assert response.data.decode(
-            "utf-8"
-        ) == '<meta name="sentry-trace" content="%s" />' % (events[0]["message"],)
+
+        rendered_meta = response.data.decode("utf-8")
+        traceparent, baggage = events[0]["message"].split("\n")
+        assert traceparent != ""
+        assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Baggage entry order is not guaranteed (dicts are unordered on older Pythons), so compare sorted
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
 
 
 def test_dont_override_sentry_trace_context(sentry_init, app):
@@ -789,3 +858,57 @@ def test_dont_override_sentry_trace_context(sentry_init, app):
         response = client.get("/")
         assert response.status_code == 200
         assert response.data == b"hi"
+
+
+def test_request_not_modified_by_reference(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    @app.route("/", methods=["POST"])
+    def index():
+        logging.critical("oops")
+        assert request.get_json() == {"password": "ohno"}
+        assert request.headers["Authorization"] == "Bearer ohno"
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    client.post(
+        "/", json={"password": "ohno"}, headers={"Authorization": "Bearer ohno"}
+    )
+
+    (event,) = events
+
+    assert event["request"]["data"]["password"] == "[Filtered]"
+    assert event["request"]["headers"]["Authorization"] == "[Filtered]"
+
+
+@pytest.mark.parametrize("traces_sample_rate", [None, 1.0])
+def test_replay_event_context(sentry_init, capture_events, app, traces_sample_rate):
+    """
+    Tests that the replay context is added to the event context.
+    This is not strictly a Flask integration test, but it's the easiest way to test this.
+    """
+    sentry_init(traces_sample_rate=traces_sample_rate)
+
+    @app.route("/error")
+    def error():
+        return 1 / 0
+
+    events = capture_events()
+
+    client = app.test_client()
+    headers = {
+        "baggage": "other-vendor-value-1=foo;bar;baz,sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,other-vendor-value-2=foo;bar,sentry-replay_id=12312012123120121231201212312012",
+        "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
+    }
+    with pytest.raises(ZeroDivisionError):
+        client.get("/error", headers=headers)
+
+    event = events[0]
+
+    assert event["contexts"]
+    assert event["contexts"]["replay"]
+    assert (
+        event["contexts"]["replay"]["replay_id"] == "12312012123120121231201212312012"
+    )
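
The Flask test changes above track the rename of the request_bodies option to max_request_body_size. A minimal sketch of the new spelling, with a placeholder DSN:

    import sentry_sdk
    from sentry_sdk.integrations.flask import FlaskIntegration

    # "always" attaches request bodies of any size to events;
    # this option was previously called request_bodies.
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[FlaskIntegration()],
        max_request_body_size="always",
    )
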
diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py
index 5f41300..678219d 100644
--- a/tests/integrations/gcp/test_gcp.py
+++ b/tests/integrations/gcp/test_gcp.py
@@ -93,9 +93,8 @@ def init_sdk(timeout_warning=False, **extra_init_args):
 @pytest.fixture
 def run_cloud_function():
     def inner(code, subprocess_kwargs=()):
-
-        event = []
-        envelope = []
+        events = []
+        envelopes = []
         return_value = None
 
         # STEP : Create a zip of cloud function
@@ -133,10 +132,10 @@ def run_cloud_function():
                 print("GCP:", line)
                 if line.startswith("EVENT: "):
                     line = line[len("EVENT: ") :]
-                    event = json.loads(line)
+                    events.append(json.loads(line))
                 elif line.startswith("ENVELOPE: "):
                     line = line[len("ENVELOPE: ") :]
-                    envelope = json.loads(line)
+                    envelopes.append(json.loads(line))
                 elif line.startswith("RETURN VALUE: "):
                     line = line[len("RETURN VALUE: ") :]
                     return_value = json.loads(line)
@@ -145,13 +144,13 @@ def run_cloud_function():
 
             stream.close()
 
-        return envelope, event, return_value
+        return envelopes, events, return_value
 
     return inner
 
 
 def test_handled_exception(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, return_value = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -168,16 +167,17 @@ def test_handled_exception(run_cloud_function):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "Exception"
     assert exception["value"] == "something went wrong"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_unhandled_exception(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -195,16 +195,17 @@ def test_unhandled_exception(run_cloud_function):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "ZeroDivisionError"
     assert exception["value"] == "division by zero"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_timeout_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -222,19 +223,20 @@ def test_timeout_error(run_cloud_function):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "ServerlessTimeoutWarning"
     assert (
         exception["value"]
         == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
     )
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_performance_no_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    envelopes, _, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -252,15 +254,15 @@ def test_performance_no_error(run_cloud_function):
         )
     )
 
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("Google Cloud function")
-    assert envelope["transaction_info"] == {"source": "component"}
-    assert envelope["transaction"] in envelope["request"]["url"]
+    assert envelopes[0]["type"] == "transaction"
+    assert envelopes[0]["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelopes[0]["transaction"].startswith("Google Cloud function")
+    assert envelopes[0]["transaction_info"] == {"source": "component"}
+    assert envelopes[0]["transaction"] in envelopes[0]["request"]["url"]
 
 
 def test_performance_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    envelopes, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -278,16 +280,18 @@ def test_performance_error(run_cloud_function):
         )
     )
 
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("Google Cloud function")
-    assert envelope["transaction"] in envelope["request"]["url"]
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert envelopes[0]["level"] == "error"
+    (exception,) = envelopes[0]["exception"]["values"]
 
     assert exception["type"] == "Exception"
     assert exception["value"] == "something went wrong"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
+
+    assert envelopes[1]["type"] == "transaction"
+    assert envelopes[1]["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelopes[1]["transaction"].startswith("Google Cloud function")
+    assert envelopes[1]["transaction"] in envelopes[0]["request"]["url"]
 
 
 def test_traces_sampler_gets_correct_values_in_sampling_context(
@@ -367,3 +371,184 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(
     )
 
     assert return_value["AssertionError raised"] is False
+
+
+def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
+    """
+    Check if a 'trace' context is added to errors and transactions when performance monitoring is enabled.
+    """
+    envelopes, _, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
+    """
+    Check if a 'trace' context is added to errors and transactions when performance monitoring is disabled.
+    """
+    _, events, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_existing_trace_context_performance_enabled(run_cloud_function):
+    """
+    Check if a 'trace' context is added to errors and transactions
+    from the incoming 'sentry-trace' header when performance monitoring is enabled.
+    """
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    envelopes, _, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+
+        from collections import namedtuple
+        GCPEvent = namedtuple("GCPEvent", ["headers"])
+        event = GCPEvent(headers={"sentry-trace": "%s"})
+
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+            % sentry_trace_header
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(run_cloud_function):
+    """
+    Check if a 'trace' context is added to errors and transactions
+    from the incoming 'sentry-trace' header when performance monitoring is disabled.
+    """
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    _, events, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+
+        from collections import namedtuple
+        GCPEvent = namedtuple("GCPEvent", ["headers"])
+        event = GCPEvent(headers={"sentry-trace": "%s"})
+
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+            % sentry_trace_header
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None),  # this is the default, just added for clarity
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
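
The trace-continuation tests above construct the incoming sentry-trace header by hand. For reference, the header is "<trace_id>-<parent_span_id>-<sampled>", as in this sketch built from the same values the tests use:

    # sentry-trace header layout: 32-hex trace id, 16-hex parent span id, sampled flag
    trace_id = "471a43a4192642f0b136d5159a501701"
    parent_span_id = "6e8f22c393e68f19"
    parent_sampled = 1
    header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
    assert header == "471a43a4192642f0b136d5159a501701-6e8f22c393e68f19-1"
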
diff --git a/tests/integrations/grpc/__init__.py b/tests/integrations/grpc/__init__.py
new file mode 100644
index 0000000..88a0a20
--- /dev/null
+++ b/tests/integrations/grpc/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("grpc")
diff --git a/tests/integrations/grpc/grpc_test_service.proto b/tests/integrations/grpc/grpc_test_service.proto
new file mode 100644
index 0000000..43497c7
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package grpc_test_server;
+
+service gRPCTestService {
+  rpc TestServe(gRPCTestMessage) returns (gRPCTestMessage);
+}
+
+message gRPCTestMessage {
+  string text = 1;
+}
diff --git a/tests/integrations/grpc/grpc_test_service_pb2.py b/tests/integrations/grpc/grpc_test_service_pb2.py
new file mode 100644
index 0000000..94765da
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: grpc_test_service.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
+    b'\n\x17grpc_test_service.proto\x12\x10grpc_test_server"\x1f\n\x0fgRPCTestMessage\x12\x0c\n\x04text\x18\x01 \x01(\t2d\n\x0fgRPCTestService\x12Q\n\tTestServe\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessageb\x06proto3'
+)
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "grpc_test_service_pb2", globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+    DESCRIPTOR._options = None
+    _GRPCTESTMESSAGE._serialized_start = 45
+    _GRPCTESTMESSAGE._serialized_end = 76
+    _GRPCTESTSERVICE._serialized_start = 78
+    _GRPCTESTSERVICE._serialized_end = 178
+# @@protoc_insertion_point(module_scope)
diff --git a/tests/integrations/grpc/grpc_test_service_pb2.pyi b/tests/integrations/grpc/grpc_test_service_pb2.pyi
new file mode 100644
index 0000000..02a0b70
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2.pyi
@@ -0,0 +1,32 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import google.protobuf.descriptor
+import google.protobuf.message
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class gRPCTestMessage(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    TEXT_FIELD_NUMBER: builtins.int
+    text: builtins.str
+    def __init__(
+        self,
+        *,
+        text: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(
+        self, field_name: typing_extensions.Literal["text", b"text"]
+    ) -> None: ...
+
+global___gRPCTestMessage = gRPCTestMessage
diff --git a/tests/integrations/grpc/grpc_test_service_pb2_grpc.py b/tests/integrations/grpc/grpc_test_service_pb2_grpc.py
new file mode 100644
index 0000000..73b7d94
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2_grpc.py
@@ -0,0 +1,79 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import tests.integrations.grpc.grpc_test_service_pb2 as grpc__test__service__pb2
+
+
+class gRPCTestServiceStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.TestServe = channel.unary_unary(
+            "/grpc_test_server.gRPCTestService/TestServe",
+            request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+        )
+
+
+class gRPCTestServiceServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def TestServe(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details("Method not implemented!")
+        raise NotImplementedError("Method not implemented!")
+
+
+def add_gRPCTestServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        "TestServe": grpc.unary_unary_rpc_method_handler(
+            servicer.TestServe,
+            request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+            response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        "grpc_test_server.gRPCTestService", rpc_method_handlers
+    )
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class gRPCTestService(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def TestServe(
+        request,
+        target,
+        options=(),
+        channel_credentials=None,
+        call_credentials=None,
+        insecure=False,
+        compression=None,
+        wait_for_ready=None,
+        timeout=None,
+        metadata=None,
+    ):
+        return grpc.experimental.unary_unary(
+            request,
+            target,
+            "/grpc_test_server.gRPCTestService/TestServe",
+            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            grpc__test__service__pb2.gRPCTestMessage.FromString,
+            options,
+            channel_credentials,
+            insecure,
+            call_credentials,
+            compression,
+            wait_for_ready,
+            timeout,
+            metadata,
+        )
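
The three grpc_test_service files above are compiler output. A sketch of regenerating them from the .proto, assuming grpcio-tools is installed (the .pyi stub additionally requires the mypy-protobuf plugin and a --mypy_out flag):

    # Run from tests/integrations/grpc/; assumes grpcio-tools is installed.
    from grpc_tools import protoc

    protoc.main(
        [
            "grpc_tools.protoc",
            "-I.",
            "--python_out=.",       # writes grpc_test_service_pb2.py
            "--grpc_python_out=.",  # writes grpc_test_service_pb2_grpc.py
            "grpc_test_service.proto",
        ]
    )
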
diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py
new file mode 100644
index 0000000..92883e9
--- /dev/null
+++ b/tests/integrations/grpc/test_grpc.py
@@ -0,0 +1,189 @@
+from __future__ import absolute_import
+
+import os
+
+from concurrent import futures
+
+import grpc
+import pytest
+
+from sentry_sdk import Hub, start_transaction
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.grpc.client import ClientInterceptor
+from sentry_sdk.integrations.grpc.server import ServerInterceptor
+from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
+from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
+    gRPCTestServiceServicer,
+    add_gRPCTestServiceServicer_to_server,
+    gRPCTestServiceStub,
+)
+
+PORT = 50051
+PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel
+
+
+@pytest.mark.forked
+def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel(f"localhost:{PORT}") as channel:
+        stub = gRPCTestServiceStub(channel)
+        stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    event = events.read_event()
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert span["op"] == "test"
+
+
+@pytest.mark.forked
+def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel(f"localhost:{PORT}") as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction() as transaction:
+            metadata = (
+                (
+                    "baggage",
+                    "sentry-trace_id={trace_id},sentry-environment=test,"
+                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
+                        trace_id=transaction.trace_id
+                    ),
+                ),
+                (
+                    "sentry-trace",
+                    "{trace_id}-{parent_span_id}-{sampled}".format(
+                        trace_id=transaction.trace_id,
+                        parent_span_id=transaction.span_id,
+                        sampled=1,
+                    ),
+                ),
+            )
+            stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    event = events.read_event()
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert span["op"] == "test"
+
+
+@pytest.mark.forked
+def test_grpc_client_starts_span(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events_forksafe()
+    interceptors = [ClientInterceptor()]
+
+    server = _set_up()
+
+    with grpc.insecure_channel(f"localhost:{PORT}") as channel:
+        channel = grpc.intercept_channel(channel, *interceptors)
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    events.read_event()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
+    )
+    assert span["data"] == {
+        "type": "unary unary",
+        "method": "/grpc_test_server.gRPCTestService/TestServe",
+        "code": "OK",
+    }
+
+
+@pytest.mark.forked
+def test_grpc_client_and_servers_interceptors_integration(
+    sentry_init, capture_events_forksafe
+):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events_forksafe()
+    interceptors = [ClientInterceptor()]
+
+    server = _set_up()
+
+    with grpc.insecure_channel(f"localhost:{PORT}") as channel:
+        channel = grpc.intercept_channel(channel, *interceptors)
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    server_transaction = events.read_event()
+    local_transaction = events.read_event()
+
+    assert (
+        server_transaction["contexts"]["trace"]["trace_id"]
+        == local_transaction["contexts"]["trace"]["trace_id"]
+    )
+
+
+def _set_up():
+    server = grpc.server(
+        futures.ThreadPoolExecutor(max_workers=2),
+        interceptors=[ServerInterceptor(find_name=_find_name)],
+    )
+
+    add_gRPCTestServiceServicer_to_server(TestService, server)
+    server.add_insecure_port(f"[::]:{PORT}")
+    server.start()
+
+    return server
+
+
+def _tear_down(server: grpc.Server):
+    server.stop(None)
+
+
+def _find_name(request):
+    return request.__class__
+
+
+class TestService(gRPCTestServiceServicer):
+    events = []
+
+    @staticmethod
+    def TestServe(request, context):  # noqa: N802
+        hub = Hub.current
+        with hub.start_span(op="test", description="test"):
+            pass
+
+        return gRPCTestMessage(text=request.text)
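
test_grpc.py above wires the new gRPC integration manually on both sides of the channel. A condensed sketch of that wiring, using the interceptors imported in the test (the port number is arbitrary):

    from concurrent import futures

    import grpc
    from sentry_sdk.integrations.grpc.client import ClientInterceptor
    from sentry_sdk.integrations.grpc.server import ServerInterceptor

    # Server side: register the Sentry interceptor when building the server.
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=2),
        interceptors=[ServerInterceptor()],
    )

    # Client side: wrap an existing channel so outgoing calls get spans
    # and sentry-trace/baggage headers.
    channel = grpc.intercept_channel(
        grpc.insecure_channel("localhost:50051"), ClientInterceptor()
    )
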
diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py
index 4623f13..e141faa 100644
--- a/tests/integrations/httpx/test_httpx.py
+++ b/tests/integrations/httpx/test_httpx.py
@@ -1,66 +1,299 @@
 import asyncio
 
+import pytest
 import httpx
+import responses
 
 from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import MATCH_ALL, SPANDATA
 from sentry_sdk.integrations.httpx import HttpxIntegration
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
-def test_crumb_capture_and_hint(sentry_init, capture_events):
+
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client):
     def before_breadcrumb(crumb, hint):
         crumb["data"]["extra"] = "foo"
         return crumb
 
     sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)
-    clients = (httpx.Client(), httpx.AsyncClient())
-    for i, c in enumerate(clients):
-        with start_transaction():
-            events = capture_events()
-
-            url = "https://httpbin.org/status/200"
-            if not asyncio.iscoroutinefunction(c.get):
-                response = c.get(url)
-            else:
-                response = asyncio.get_event_loop().run_until_complete(c.get(url))
-
-            assert response.status_code == 200
-            capture_message("Testing!")
-
-            (event,) = events
-            # send request twice so we need get breadcrumb by index
-            crumb = event["breadcrumbs"]["values"][i]
-            assert crumb["type"] == "http"
-            assert crumb["category"] == "httplib"
-            assert crumb["data"] == {
-                "url": url,
-                "method": "GET",
-                "status_code": 200,
-                "reason": "OK",
-                "extra": "foo",
-            }
-
-
-def test_outgoing_trace_headers(sentry_init):
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
+    with start_transaction():
+        events = capture_events()
+
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url)
+            )
+        else:
+            response = httpx_client.get(url)
+
+        assert response.status_code == 200
+        capture_message("Testing!")
+
+        (event,) = events
+
+        crumb = event["breadcrumbs"]["values"][0]
+        assert crumb["type"] == "http"
+        assert crumb["category"] == "httplib"
+        assert crumb["data"] == {
+            "url": url,
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_FRAGMENT: "",
+            SPANDATA.HTTP_QUERY: "",
+            SPANDATA.HTTP_STATUS_CODE: 200,
+            "reason": "OK",
+            "extra": "foo",
+        }
+
+
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_outgoing_trace_headers(sentry_init, httpx_client):
     sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])
-    clients = (httpx.Client(), httpx.AsyncClient())
-    for i, c in enumerate(clients):
-        with start_transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-            # make trace_id difference between transactions
-            trace_id=f"012345678901234567890123456789{i}",
-        ) as transaction:
-            url = "https://httpbin.org/status/200"
-            if not asyncio.iscoroutinefunction(c.get):
-                response = c.get(url)
-            else:
-                response = asyncio.get_event_loop().run_until_complete(c.get(url))
-
-            request_span = transaction._span_recorder.spans[-1]
-            assert response.request.headers[
-                "sentry-trace"
-            ] == "{trace_id}-{parent_span_id}-{sampled}".format(
-                trace_id=transaction.trace_id,
-                parent_span_id=request_span.span_id,
-                sampled=1,
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="01234567890123456789012345678901",
+    ) as transaction:
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url)
             )
+        else:
+            response = httpx_client.get(url)
+
+        request_span = transaction._span_recorder.spans[-1]
+        assert response.request.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+
+
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[HttpxIntegration()],
+        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
+    )
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="01234567890123456789012345678901",
+    ) as transaction:
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url, headers={"baGGage": "custom=data"})
+            )
+        else:
+            response = httpx_client.get(url, headers={"baGGage": "custom=data"})
+
+        request_span = transaction._span_recorder.spans[-1]
+        assert response.request.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert (
+            response.request.headers["baggage"]
+            == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
+        )
+
+
+@pytest.mark.parametrize(
+    "httpx_client,trace_propagation_targets,url,trace_propagated",
+    [
+        [
+            httpx.Client(),
+            None,
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.Client(),
+            [],
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.Client(),
+            [MATCH_ALL],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com/"],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com/"],
+            "https://example.com",
+            False,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com"],
+            "https://example.com",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://example.net",
+            False,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net/some/thing",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            None,
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            [],
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            [MATCH_ALL],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com/"],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com/"],
+            "https://example.com",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com"],
+            "https://example.com",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://example.net",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net/some/thing",
+            True,
+        ],
+    ],
+)
+def test_option_trace_propagation_targets(
+    sentry_init,
+    httpx_client,
+    httpx_mock,  # this comes from pytest-httpx
+    trace_propagation_targets,
+    url,
+    trace_propagated,
+):
+    httpx_mock.add_response()
+
+    sentry_init(
+        release="test",
+        trace_propagation_targets=trace_propagation_targets,
+        traces_sample_rate=1.0,
+        integrations=[HttpxIntegration()],
+    )
+
+    if asyncio.iscoroutinefunction(httpx_client.get):
+        asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
+    else:
+        httpx_client.get(url)
+
+    request_headers = httpx_mock.get_request().headers
+
+    if trace_propagated:
+        assert "sentry-trace" in request_headers
+    else:
+        assert "sentry-trace" not in request_headers
+
+
+@pytest.mark.tests_internal_exceptions
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(integrations=[HttpxIntegration()])
+
+    httpx_client = httpx.Client()
+    url = "http://example.com"
+    responses.add(responses.GET, url, status=200)
+
+    events = capture_events()
+    with mock.patch(
+        "sentry_sdk.integrations.httpx.parse_url",
+        side_effect=ValueError,
+    ):
+        response = httpx_client.get(url)
+
+    assert response.status_code == 200
+    capture_message("Testing!")
+
+    (event,) = events
+    assert event["breadcrumbs"]["values"][0]["data"] == {
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_STATUS_CODE: 200,
+        "reason": "OK",
+        # no url related data
+    }
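
The parametrized cases above exercise the new trace_propagation_targets option, which controls which outgoing httpx requests receive sentry-trace and baggage headers. A minimal sketch with a placeholder DSN:

    import sentry_sdk
    from sentry_sdk.consts import MATCH_ALL
    from sentry_sdk.integrations.httpx import HttpxIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[HttpxIntegration()],
        traces_sample_rate=1.0,
        # Propagate tracing headers to every host; pass a list of strings or
        # regexes instead to restrict propagation to matching URLs.
        trace_propagation_targets=[MATCH_ALL],
    )
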
diff --git a/tests/integrations/huey/__init__.py b/tests/integrations/huey/__init__.py
new file mode 100644
index 0000000..448a7eb
--- /dev/null
+++ b/tests/integrations/huey/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("huey")
diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py
new file mode 100644
index 0000000..29e4d37
--- /dev/null
+++ b/tests/integrations/huey/test_huey.py
@@ -0,0 +1,168 @@
+import pytest
+from decimal import DivisionByZero
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.huey import HueyIntegration
+
+from huey.api import MemoryHuey, Result
+from huey.exceptions import RetryTask
+
+
+@pytest.fixture
+def init_huey(sentry_init):
+    def inner():
+        sentry_init(
+            integrations=[HueyIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+            debug=True,
+        )
+
+        return MemoryHuey(name="sentry_sdk")
+
+    return inner
+
+
+@pytest.fixture(autouse=True)
+def flush_huey_tasks(init_huey):
+    huey = init_huey()
+    huey.flush()
+
+
+def execute_huey_task(huey, func, *args, **kwargs):
+    exceptions = kwargs.pop("exceptions", None)
+    result = func(*args, **kwargs)
+    task = huey.dequeue()
+    if exceptions is not None:
+        try:
+            huey.execute(task)
+        except exceptions:
+            pass
+    else:
+        huey.execute(task)
+    return result
+
+
+def test_task_result(init_huey):
+    huey = init_huey()
+
+    @huey.task()
+    def increase(num):
+        return num + 1
+
+    result = increase(3)
+
+    assert isinstance(result, Result)
+    assert len(huey) == 1
+    task = huey.dequeue()
+    assert huey.execute(task) == 4
+    assert result.get() == 4
+
+
+@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
+def test_task_transaction(capture_events, init_huey, task_fails):
+    huey = init_huey()
+
+    @huey.task()
+    def division(a, b):
+        return a / b
+
+    events = capture_events()
+    execute_huey_task(
+        huey, division, 1, int(not task_fails), exceptions=(DivisionByZero,)
+    )
+
+    if task_fails:
+        error_event = events.pop(0)
+        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_event["exception"]["values"][0]["mechanism"]["type"] == "huey"
+
+    (event,) = events
+    assert event["type"] == "transaction"
+    assert event["transaction"] == "division"
+    assert event["transaction_info"] == {"source": "task"}
+
+    if task_fails:
+        assert event["contexts"]["trace"]["status"] == "internal_error"
+    else:
+        assert event["contexts"]["trace"]["status"] == "ok"
+
+    assert "huey_task_id" in event["tags"]
+    assert "huey_task_retry" in event["tags"]
+
+
+def test_task_retry(capture_events, init_huey):
+    huey = init_huey()
+    context = {"retry": True}
+
+    @huey.task()
+    def retry_task(context):
+        if context["retry"]:
+            context["retry"] = False
+            raise RetryTask()
+
+    events = capture_events()
+    result = execute_huey_task(huey, retry_task, context)
+    (event,) = events
+
+    assert event["transaction"] == "retry_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert len(huey) == 1
+
+    task = huey.dequeue()
+    huey.execute(task)
+    (event, _) = events
+
+    assert event["transaction"] == "retry_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert len(huey) == 0
+
+
+@pytest.mark.parametrize("lock_name", ["lock.a", "lock.b"], ids=["locked", "unlocked"])
+def test_task_lock(capture_events, init_huey, lock_name):
+    huey = init_huey()
+
+    task_lock_name = "lock.a"
+    should_be_locked = task_lock_name == lock_name
+
+    @huey.task()
+    @huey.lock_task(task_lock_name)
+    def maybe_locked_task():
+        pass
+
+    events = capture_events()
+
+    with huey.lock_task(lock_name):
+        assert huey.is_locked(task_lock_name) == should_be_locked
+        result = execute_huey_task(huey, maybe_locked_task)
+
+    (event,) = events
+
+    assert event["transaction"] == "maybe_locked_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert (
+        event["contexts"]["trace"]["status"] == "aborted" if should_be_locked else "ok"
+    )
+    assert len(huey) == 0
+
+
+def test_huey_enqueue(init_huey, capture_events):
+    huey = init_huey()
+
+    @huey.task(name="different_task_name")
+    def dummy_task():
+        pass
+
+    events = capture_events()
+
+    with start_transaction() as transaction:
+        dummy_task()
+
+    (event,) = events
+
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert event["contexts"]["trace"]["span_id"] == transaction.span_id
+
+    assert len(event["spans"])
+    assert event["spans"][0]["op"] == "queue.submit.huey"
+    assert event["spans"][0]["description"] == "different_task_name"
diff --git a/tests/integrations/loguru/__init__.py b/tests/integrations/loguru/__init__.py
new file mode 100644
index 0000000..9d67fb3
--- /dev/null
+++ b/tests/integrations/loguru/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("loguru")
diff --git a/tests/integrations/loguru/test_loguru.py b/tests/integrations/loguru/test_loguru.py
new file mode 100644
index 0000000..48133aa
--- /dev/null
+++ b/tests/integrations/loguru/test_loguru.py
@@ -0,0 +1,117 @@
+import pytest
+from loguru import logger
+
+import sentry_sdk
+from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels
+
+logger.remove(0)  # don't print to console
+
+
+@pytest.mark.parametrize(
+    "level,created_event",
+    [
+        # None - no breadcrumb
+        # False - no event
+        # True - event created
+        (LoggingLevels.TRACE, None),
+        (LoggingLevels.DEBUG, None),
+        (LoggingLevels.INFO, False),
+        (LoggingLevels.SUCCESS, False),
+        (LoggingLevels.WARNING, False),
+        (LoggingLevels.ERROR, True),
+        (LoggingLevels.CRITICAL, True),
+    ],
+)
+@pytest.mark.parametrize("disable_breadcrumbs", [True, False])
+@pytest.mark.parametrize("disable_events", [True, False])
+def test_just_log(
+    sentry_init,
+    capture_events,
+    level,
+    created_event,
+    disable_breadcrumbs,
+    disable_events,
+):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=None if disable_breadcrumbs else LoggingLevels.INFO.value,
+                event_level=None if disable_events else LoggingLevels.ERROR.value,
+            )
+        ],
+        default_integrations=False,
+    )
+    events = capture_events()
+
+    getattr(logger, level.name.lower())("test")
+
+    formatted_message = (
+        " | "
+        + "{:9}".format(level.name.upper())
+        + "| tests.integrations.loguru.test_loguru:test_just_log:46 - test"
+    )
+
+    if not created_event:
+        assert not events
+
+        breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+        if (
+            not disable_breadcrumbs and created_event is not None
+        ):  # not None == not TRACE or DEBUG level
+            (breadcrumb,) = breadcrumbs
+            assert breadcrumb["level"] == level.name.lower()
+            assert breadcrumb["category"] == "tests.integrations.loguru.test_loguru"
+            assert breadcrumb["message"][23:] == formatted_message
+        else:
+            assert not breadcrumbs
+
+        return
+
+    if disable_events:
+        assert not events
+        return
+
+    (event,) = events
+    assert event["level"] == (level.name.lower())
+    assert event["logger"] == "tests.integrations.loguru.test_loguru"
+    assert event["logentry"]["message"][23:] == formatted_message
+
+
+def test_breadcrumb_format(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=LoggingLevels.INFO.value,
+                event_level=None,
+                breadcrumb_format="{message}",
+            )
+        ],
+        default_integrations=False,
+    )
+
+    logger.info("test")
+    formatted_message = "test"
+
+    breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+    (breadcrumb,) = breadcrumbs
+    assert breadcrumb["message"] == formatted_message
+
+
+def test_event_format(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=None,
+                event_level=LoggingLevels.ERROR.value,
+                event_format="{message}",
+            )
+        ],
+        default_integrations=False,
+    )
+    events = capture_events()
+
+    logger.error("test")
+    formatted_message = "test"
+
+    (event,) = events
+    assert event["logentry"]["message"] == formatted_message
diff --git a/tests/integrations/modules/test_modules.py b/tests/integrations/modules/test_modules.py
index 3f4d7bd..c709797 100644
--- a/tests/integrations/modules/test_modules.py
+++ b/tests/integrations/modules/test_modules.py
@@ -1,6 +1,22 @@
+import pytest
+import re
 import sentry_sdk
 
-from sentry_sdk.integrations.modules import ModulesIntegration
+from sentry_sdk.integrations.modules import (
+    ModulesIntegration,
+    _get_installed_modules,
+)
+
+
+def _normalize_distribution_name(name):
+    # type: (str) -> str
+    """Normalize distribution name according to PEP-0503.
+
+    See:
+    https://peps.python.org/pep-0503/#normalized-names
+    for more details.
+    """
+    return re.sub(r"[-_.]+", "-", name).lower()
 
 
 def test_basic(sentry_init, capture_events):
@@ -12,3 +28,44 @@ def test_basic(sentry_init, capture_events):
     (event,) = events
     assert "sentry-sdk" in event["modules"]
     assert "pytest" in event["modules"]
+
+
+def test_installed_modules():
+    try:
+        from importlib.metadata import distributions, version
+
+        importlib_available = True
+    except ImportError:
+        importlib_available = False
+
+    try:
+        import pkg_resources
+
+        pkg_resources_available = True
+    except ImportError:
+        pkg_resources_available = False
+
+    installed_distributions = {
+        _normalize_distribution_name(dist): version
+        for dist, version in _get_installed_modules().items()
+    }
+
+    if importlib_available:
+        importlib_distributions = {
+            _normalize_distribution_name(dist.metadata["Name"]): version(
+                dist.metadata["Name"]
+            )
+            for dist in distributions()
+            if dist.metadata["Name"] is not None
+            and version(dist.metadata["Name"]) is not None
+        }
+        assert installed_distributions == importlib_distributions
+
+    elif pkg_resources_available:
+        pkg_resources_distributions = {
+            _normalize_distribution_name(dist.key): dist.version
+            for dist in pkg_resources.working_set
+        }
+        assert installed_distributions == pkg_resources_distributions
+    else:
+        pytest.fail("Neither importlib nor pkg_resources is available")
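
The helper above applies PEP 503 name normalization so distributions reported by different backends compare equal. A worked example of what the regex does:

    import re

    def normalize(name):
        # PEP 503: collapse runs of "-", "_" and "." to a single "-", lowercased.
        return re.sub(r"[-_.]+", "-", name).lower()

    assert normalize("Sentry_SDK") == "sentry-sdk"
    assert normalize("zope.interface") == "zope-interface"
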
diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000..39ecc61
--- /dev/null
+++ b/tests/integrations/opentelemetry/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("opentelemetry")
diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py
new file mode 100644
index 0000000..510118f
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_propagator.py
@@ -0,0 +1,252 @@
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+from opentelemetry.context import get_current
+from opentelemetry.trace.propagation import get_current_span
+from opentelemetry.trace import (
+    set_span_in_context,
+    TraceFlags,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_extract_no_context_no_sentry_trace_header():
+    """
+    No context and NO Sentry trace data in getter.
+    Extract should return empty context.
+    """
+    carrier = None
+    context = None
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == {}
+
+
+def test_extract_context_no_sentry_trace_header():
+    """
+    Context but NO Sentry trace data in getter.
+    Extract should return context as is.
+    """
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == context
+
+
+def test_extract_empty_context_sentry_trace_header_no_baggage():
+    """
+    Empty context but Sentry trace data but NO Baggage in getter.
+    Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    carrier = None
+    context = {}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        None,
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 3
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_extract_context_sentry_trace_header_baggage():
+    """
+    Empty context but Sentry trace data and Baggage in getter.
+    Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    baggage_header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        [baggage_header],
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 4
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_inject_empty_otel_span_map():
+    """
+    Empty otel_span_map.
+    So there is no sentry_span to be found in inject()
+    and the function is returned early and no setters are called.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_not_called()
+
+
+def test_inject_sentry_span_no_baggage():
+    """
+    Inject a sentry span with no baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_called_once_with(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+
+def test_inject_sentry_span_baggage():
+    """
+    Inject a sentry span with baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_items = {
+        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
+        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
+        "sentry-sample_rate": 0.01337,
+        "sentry-user_id": "Amélie",
+    }
+    baggage = Baggage(sentry_items=sentry_items)
+    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_any_call(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+        setter.set.assert_any_call(
+            carrier,
+            "baggage",
+            baggage.serialize(),
+        )
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
new file mode 100644
index 0000000..679e51e
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -0,0 +1,528 @@
+from datetime import datetime
+import time
+import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock
+    from mock import MagicMock  # python < 3.3
+
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+    link_trace_context_to_error_event,
+)
+from sentry_sdk.tracing import Span, Transaction
+
+from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
+
+
+def test_is_sentry_span():
+    otel_span = MagicMock()
+
+    hub = MagicMock()
+    hub.client = None
+
+    span_processor = SentrySpanProcessor()
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    client = MagicMock()
+    client.options = {"instrumenter": "otel"}
+    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    hub.client = client
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://example.com",
+    }
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
+    }
+    assert span_processor._is_sentry_span(hub, otel_span)
+
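+
+# Hedged reading of the check above (not the SDK source): a span counts as
+# one of Sentry's own when its http.url targets the DSN's ingest endpoint,
+# so the SDK skips instrumenting its own envelope requests.
+def _is_sentry_span_sketch(dsn_host, otel_span):
+    url = (otel_span.attributes or {}).get("http.url", "")
+    return dsn_host in url
+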
+
+def test_get_otel_context():
+    otel_span = MagicMock()
+    otel_span.attributes = {"foo": "bar"}
+    otel_span.resource = MagicMock()
+    otel_span.resource.attributes = {"baz": "qux"}
+
+    span_processor = SentrySpanProcessor()
+    otel_context = span_processor._get_otel_context(otel_span)
+
+    assert otel_context == {
+        "attributes": {"foo": "bar"},
+        "resource": {"baz": "qux"},
+    }
+
+
+def test_get_trace_data_with_span_and_trace():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = None
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] is None
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_span_and_trace_and_parent():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is True
+        assert sentry_trace_data["baggage"] is None
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is False
+        assert sentry_trace_data["baggage"] is None
+
+
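+# The sentry-trace header parsed above has the shape
+# "<trace_id>-<parent_span_id>-<sampled flag>"; a minimal hedged sketch of
+# that parse (not the SDK's extract_sentrytrace_data):
+def _parse_sentrytrace_sketch(header):
+    trace_id, parent_span_id, sampled = header.split("-")
+    return {
+        "trace_id": trace_id,
+        "parent_span_id": parent_span_id,
+        "parent_sampled": sampled == "1",
+    }
+
+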
+def test_get_trace_data_with_sentry_trace_and_baggage():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    baggage = (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            baggage,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"]
+        assert sentry_trace_data["baggage"] == baggage
+
+
+def test_update_span_with_otel_data_http_method():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.CLIENT
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "net.peer.name": "example.com",
+        "http.target": "/",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.client"
+    assert sentry_span.description == "GET example.com /"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.response.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert sentry_span._data["net.peer.name"] == "example.com"
+    assert sentry_span._data["http.target"] == "/"
+
+
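+# Hedged sketch of the op/description mapping verified above (not the SDK
+# source): SpanKind and http.method determine op, and method, peer and
+# target are joined into the description. The HTTP status code is separately
+# translated into Sentry's status vocabulary (429 -> "resource_exhausted").
+def _http_span_sketch(kind, attributes):
+    from opentelemetry.trace import SpanKind
+
+    op = "http.client" if kind == SpanKind.CLIENT else "http.server"
+    description = "{} {} {}".format(
+        attributes["http.method"],
+        attributes["net.peer.name"],
+        attributes["http.target"],
+    )
+    return op, description
+
+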
+@pytest.mark.parametrize(
+    "otel_status, expected_status",
+    [
+        pytest.param(Status(StatusCode.UNSET), None, id="unset"),
+        pytest.param(Status(StatusCode.OK), "ok", id="ok"),
+        pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"),
+    ],
+)
+def test_update_span_with_otel_status(otel_status, expected_status):
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.INTERNAL
+    otel_span.status = otel_status
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_status(sentry_span, otel_span)
+
+    assert sentry_span.get_trace_context().get("status") == expected_status
+
+
+def test_update_span_with_otel_data_http_method2():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.SERVER
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.server"
+    assert sentry_span.description == "GET https://example.com/status/403"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.response.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert (
+        sentry_span._data["http.url"]
+        == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
+    )
+
+
+def test_update_span_with_otel_data_db_query():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.attributes = {
+        "db.system": "postgresql",
+        "db.statement": "SELECT * FROM table where pwd = '123456'",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "db"
+    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"
+
+    assert sentry_span._data["db.system"] == "postgresql"
+    assert (
+        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
+    )
+
+
+def test_on_start_transaction():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        span_processor = SentrySpanProcessor()
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_hub.current.start_transaction.assert_called_once_with(
+            name="Sample OTel Span",
+            span_id="1234567890abcdef",
+            parent_span_id="abcdef1234567890",
+            trace_id="1234567890abcdef1234567890abcdef",
+            baggage=None,
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 1
+        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"
+
+
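+# Hedged sketch of the on_start branching verified by the surrounding tests
+# (names follow the tests loosely, not necessarily the SDK source): with a
+# mapped parent Sentry span a child is started, otherwise a new transaction.
+def _on_start_sketch(hub, otel_span, otel_span_map):
+    from datetime import datetime
+
+    start = datetime.fromtimestamp(otel_span.start_time / 1e9)
+    parent_id = (
+        format(otel_span.parent.span_id, "016x") if otel_span.parent else None
+    )
+    parent_span = otel_span_map.get(parent_id)
+    if parent_span is not None:
+        return parent_span.start_child(
+            description=otel_span.name, start_timestamp=start
+        )
+    return hub.start_transaction(name=otel_span.name, start_timestamp=start)
+
+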
+def test_on_start_child():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        fake_span = MagicMock()
+
+        span_processor = SentrySpanProcessor()
+        span_processor.otel_span_map["abcdef1234567890"] = fake_span
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_span.start_child.assert_called_once_with(
+            span_id="1234567890abcdef",
+            description="Sample OTel Span",
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 2
+        assert "abcdef1234567890" in span_processor.otel_span_map.keys()
+        assert "1234567890abcdef" in span_processor.otel_span_map.keys()
+
+
+def test_on_end_no_sentry_span():
+    """
+    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map = {}
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+
+    span_processor.on_end(otel_span)
+
+    span_processor._get_otel_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_not_called()
+
+
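+# Hedged sketch of the guard verified above (not the SDK source): on_end
+# looks the OTel span id up in otel_span_map and bails out when nothing is
+# mapped, so no context or data updates happen.
+def _on_end_guard_sketch(otel_span, otel_span_map):
+    span_id = format(otel_span.get_span_context().span_id, "016x")
+    sentry_span = otel_span_map.pop(span_id, None)
+    if sentry_span is None:
+        return
+    sentry_span.finish()
+
+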
+def test_on_end_sentry_transaction():
+    """
+    Test on_end for a sentry Transaction.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.status = Status(StatusCode.OK)
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    fake_sentry_span = MagicMock(spec=Transaction)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_called_once()
+    span_processor._update_span_with_otel_data.assert_not_called()
+    fake_sentry_span.set_status.assert_called_once_with("ok")
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_on_end_sentry_span():
+    """
+    Test on_end for a sentry Span.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.status = Status(StatusCode.OK)
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    fake_sentry_span = MagicMock(spec=Span)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_called_once_with(
+        fake_sentry_span, otel_span
+    )
+    fake_sentry_span.set_status.assert_called_once_with("ok")
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_link_trace_context_to_error_event():
+    """
+    Test that the trace context is added to the error event.
+    """
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    span_id = "1234567890abcdef"
+    trace_id = "1234567890abcdef1234567890abcdef"
+
+    fake_trace_context = {
+        "bla": "blub",
+        "foo": "bar",
+        "baz": 123,
+    }
+
+    sentry_span = MagicMock()
+    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)
+
+    otel_span_map = {
+        span_id: sentry_span,
+    }
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        is_remote=True,
+    )
+    otel_span = MagicMock()
+    otel_span.get_span_context = MagicMock(return_value=span_context)
+
+    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
+        return_value=otel_span,
+    ):
+        event = link_trace_context_to_error_event(fake_event, otel_span_map)
+
+        assert event
+        assert event == fake_event  # the event is changed in place inside the function
+        assert "contexts" in event
+        assert "trace" in event["contexts"]
+        assert event["contexts"]["trace"] == fake_trace_context
diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py
index e7da025..2d1a920 100644
--- a/tests/integrations/pure_eval/test_pure_eval.py
+++ b/tests/integrations/pure_eval/test_pure_eval.py
@@ -8,8 +8,8 @@ from sentry_sdk.integrations.pure_eval import PureEvalIntegration
 
 
 @pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
-def test_with_locals_enabled(sentry_init, capture_events, integrations):
-    sentry_init(with_locals=True, integrations=integrations)
+def test_include_local_variables_enabled(sentry_init, capture_events, integrations):
+    sentry_init(include_local_variables=True, integrations=integrations)
     events = capture_events()
 
     def foo():
diff --git a/tests/integrations/pymongo/__init__.py b/tests/integrations/pymongo/__init__.py
new file mode 100644
index 0000000..91223b0
--- /dev/null
+++ b/tests/integrations/pymongo/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pymongo")
diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py
new file mode 100644
index 0000000..89701c9
--- /dev/null
+++ b/tests/integrations/pymongo/test_pymongo.py
@@ -0,0 +1,424 @@
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii
+
+from mockupdb import MockupDB, OpQuery
+from pymongo import MongoClient
+import pytest
+
+
+@pytest.fixture(scope="session")
+def mongo_server():
+    server = MockupDB(verbose=True)
+    server.autoresponds("ismaster", maxWireVersion=6)
+    server.run()
+    server.autoresponds(
+        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
+    )
+    # The find query format changed somewhere between PyMongo 3.1 and 3.12.
+    # This autoresponder answers "find" queries from older PyMongo versions
+    # the same way as the one above.
+    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
+    server.autoresponds({"insert": "test_collection"}, ok=1)
+    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
+    yield server
+    server.stop()
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    with start_transaction():
+        list(
+            connection["test_db"]["test_collection"].find({"foobar": 1})
+        )  # force query execution
+        connection["test_db"]["test_collection"].insert_one({"foo": 2})
+        try:
+            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
+            pytest.fail("Request should raise")
+        except Exception:
+            pass
+
+    (event,) = events
+    (find, insert_success, insert_fail) = event["spans"]
+
+    common_tags = {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+    for span in find, insert_success, insert_fail:
+        assert span["data"][SPANDATA.DB_SYSTEM] == "mongodb"
+        assert span["data"][SPANDATA.DB_NAME] == "test_db"
+        assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
+        assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port
+        for field, value in common_tags.items():
+            assert span["tags"][field] == value
+
+    assert find["op"] == "db.query"
+    assert insert_success["op"] == "db.query"
+    assert insert_fail["op"] == "db.query"
+
+    assert find["tags"]["db.operation"] == "find"
+    assert insert_success["tags"]["db.operation"] == "insert"
+    assert insert_fail["tags"]["db.operation"] == "insert"
+
+    assert find["description"].startswith("find {")
+    assert insert_success["description"].startswith("insert {")
+    assert insert_fail["description"].startswith("insert {")
+    if with_pii:
+        assert "1" in find["description"]
+        assert "2" in insert_success["description"]
+        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
+    else:
+        # All values in filter replaced by "%s"
+        assert "1" not in find["description"]
+        # All values below the top level replaced by "%s"
+        assert "2" not in insert_success["description"]
+        assert (
+            "3" not in insert_fail["description"]
+            and "4" not in insert_fail["description"]
+        )
+
+    assert find["tags"]["status"] == "ok"
+    assert insert_success["tags"]["status"] == "ok"
+    assert insert_fail["tags"]["status"] == "internal_error"
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    list(
+        connection["test_db"]["test_collection"].find({"foobar": 1})
+    )  # force query execution
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb["category"] == "query"
+    assert crumb["message"].startswith("find {")
+    if with_pii:
+        assert "1" in crumb["message"]
+    else:
+        assert "1" not in crumb["message"]
+    assert crumb["type"] == "db.query"
+    assert crumb["data"] == {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "db.operation": "find",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "anton2",
+                        "email": "anton@somewhere.io",
+                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf2",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "indiana4",
+                        "email": "indy@jones.org",
+                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
+                        "_id": "635bc7403cb4f8a736f61cf3",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"username": "notthere"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {"username": "%s"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "userx1",
+                        "email": "x@somewhere.io",
+                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf4",
+                    },
+                    {
+                        "username": "userx2",
+                        "email": "x@somewhere.io",
+                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf5",
+                    },
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"email": "ada@lovelace.com"},
+            },
+            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "x@somewhere.io"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "%s"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+            "command_stripped": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton@somewhere.io"}),
+                    (
+                        "u",
+                        {
+                            "email": "anton2@somwehre.io",
+                            "extra_field": "extra_content",
+                            "new": "bla",
+                        },
+                    ),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton2@somwehre.io"}),
+                    ("u", {"$rename": {"new": "new_field"}}),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "x@somewhere.io"}),
+                    ("u", {"$rename": {"password": "pwd"}}),
+                    ("multi", True),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "ada@lovelace.com"},
+                "new": False,
+                "remove": True,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "remove": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton2@somewhere.io"},
+                "new": False,
+                "update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"email": "%s", "extra_field": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton3@somewhere.io"},
+                "new": False,
+                "update": {"$rename": {"extra_field": "extra_field2"}},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"$rename": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+            "command_stripped": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+        },
+        {
+            "command": {"drop": "new_collection"},
+            "command_stripped": {"drop": "new_collection"},
+        },
+    ],
+)
+def test_strip_pii(testcase):
+    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
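+
+
+# For orientation, the simplest rule exercised by the cases above (filter
+# values masked, structural keys kept) can be sketched as follows. This is a
+# hedged illustration of one case, not the SDK's _strip_pii implementation:
+def _mask_filter_values_sketch(filter_doc):
+    return {key: "%s" for key in filter_doc}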
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index 0f8755a..1f93a52 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -1,8 +1,6 @@
 import json
 import logging
-import pkg_resources
 import pytest
-
 from io import BytesIO
 
 import pyramid.testing
@@ -12,13 +10,23 @@ from pyramid.response import Response
 
 from sentry_sdk import capture_message, add_breadcrumb
 from sentry_sdk.integrations.pyramid import PyramidIntegration
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
 
 from werkzeug.test import Client
 
 
-PYRAMID_VERSION = tuple(
-    map(int, pkg_resources.get_distribution("pyramid").version.split("."))
-)
+try:
+    from importlib.metadata import version
+
+    PYRAMID_VERSION = tuple(map(int, version("pyramid").split(".")))
+
+except ImportError:
+    # < py3.8
+    import pkg_resources
+
+    PYRAMID_VERSION = tuple(
+        map(int, pkg_resources.get_distribution("pyramid").version.split("."))
+    )
 
 
 def hi(request):
@@ -89,7 +97,10 @@ def test_view_exceptions(
     (event,) = events
     (breadcrumb,) = event["breadcrumbs"]["values"]
     assert breadcrumb["message"] == "hi2"
-    assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid"
+    # Check only the last value in the exceptions list, because Pyramid >= 1.9
+    # reports a chained exception while earlier versions report a single one.
+    assert event["exception"]["values"][-1]["mechanism"]["type"] == "pyramid"
+    assert event["exception"]["values"][-1]["type"] == "ZeroDivisionError"
 
 
 def test_has_context(route, get_client, sentry_init, capture_events):
@@ -192,8 +203,33 @@ def test_flask_empty_json_request(sentry_init, capture_events, route, get_client
     assert event["request"]["data"] == data
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, route, get_client
+):
+    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
+
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @route("/")
+    def index(request):
+        assert request.json == data
+        assert request.text == json.dumps(data)
+        capture_message("hi")
+        return Response("ok")
+
+    events = capture_events()
+
+    client = get_client()
+    client.post("/", content_type="application/json", data=json.dumps(data))
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 def test_files_and_form(sentry_init, capture_events, route, get_client):
-    sentry_init(integrations=[PyramidIntegration()], request_bodies="always")
+    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
 
diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py
index 6d2c590..e3b1c87 100644
--- a/tests/integrations/quart/test_quart.py
+++ b/tests/integrations/quart/test_quart.py
@@ -1,13 +1,9 @@
+import json
+import threading
+
 import pytest
 import pytest_asyncio
 
-quart = pytest.importorskip("quart")
-
-from quart import Quart, Response, abort, stream_with_context
-from quart.views import View
-
-from quart_auth import AuthManager, AuthUser, login_user
-
 from sentry_sdk import (
     set_tag,
     configure_scope,
@@ -18,8 +14,21 @@ from sentry_sdk import (
 from sentry_sdk.integrations.logging import LoggingIntegration
 import sentry_sdk.integrations.quart as quart_sentry
 
+quart = pytest.importorskip("quart")
+
+from quart import Quart, Response, abort, stream_with_context
+from quart.views import View
+
+from quart_auth import AuthUser, login_user
+
+try:
+    from quart_auth import QuartAuth
+
+    auth_manager = QuartAuth()
+except ImportError:
+    from quart_auth import AuthManager
 
-auth_manager = AuthManager()
+    auth_manager = AuthManager()
 
 
 @pytest_asyncio.fixture
@@ -41,6 +50,20 @@ async def app():
         capture_message("hi with id")
         return "ok with id"
 
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
     return app
 
 
@@ -523,3 +546,30 @@ async def test_class_based_views(sentry_init, app, capture_events):
 
     assert event["message"] == "hi"
     assert event["transaction"] == "hello_class"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, app):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    async with app.test_client() as client:
+        response = await client.get(endpoint)
+        assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/redis/asyncio/__init__.py b/tests/integrations/redis/asyncio/__init__.py
new file mode 100644
index 0000000..bd93246
--- /dev/null
+++ b/tests/integrations/redis/asyncio/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("fakeredis.aioredis")
diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py
new file mode 100644
index 0000000..f97960f
--- /dev/null
+++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py
@@ -0,0 +1,75 @@
+import pytest
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.integrations.redis import RedisIntegration
+
+from fakeredis.aioredis import FakeRedis
+
+
+@pytest.mark.asyncio
+async def test_async_basic(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    connection = FakeRedis()
+
+    await connection.get("foobar")
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": {
+            "db.operation": "GET",
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": False,
+        },
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
+
+
+@pytest.mark.parametrize(
+    "is_transaction, send_default_pii, expected_first_ten",
+    [
+        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+@pytest.mark.asyncio
+async def test_async_redis_pipeline(
+    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    connection = FakeRedis()
+    with start_transaction():
+        pipeline = connection.pipeline(transaction=is_transaction)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        await pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == {
+        "redis.commands": {
+            "count": 3,
+            "first_ten": expected_first_ten,
+        }
+    }
+    assert span["tags"] == {
+        "redis.transaction": is_transaction,
+        "redis.is_cluster": False,
+    }
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index 4b3f2a7..e5d760b 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -1,8 +1,15 @@
+import pytest
+
 from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.redis import RedisIntegration
 
 from fakeredis import FakeStrictRedis
-import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
 
 def test_basic(sentry_init, capture_events):
@@ -24,20 +31,32 @@ def test_basic(sentry_init, capture_events):
             "redis.key": "foobar",
             "redis.command": "GET",
             "redis.is_cluster": False,
+            "db.operation": "GET",
         },
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
 
 
-@pytest.mark.parametrize("is_transaction", [False, True])
-def test_redis_pipeline(sentry_init, capture_events, is_transaction):
-    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+@pytest.mark.parametrize(
+    "is_transaction, send_default_pii, expected_first_ten",
+    [
+        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+def test_redis_pipeline(
+    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
     events = capture_events()
 
     connection = FakeStrictRedis()
     with start_transaction():
-
         pipeline = connection.pipeline(transaction=is_transaction)
         pipeline.get("foo")
         pipeline.set("bar", 1)
@@ -46,15 +65,180 @@ def test_redis_pipeline(sentry_init, capture_events, is_transaction):
 
     (event,) = events
     (span,) = event["spans"]
-    assert span["op"] == "redis"
+    assert span["op"] == "db.redis"
     assert span["description"] == "redis.pipeline.execute"
     assert span["data"] == {
         "redis.commands": {
             "count": 3,
-            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
-        }
+            "first_ten": expected_first_ten,
+        },
+        SPANDATA.DB_SYSTEM: "redis",
     }
     assert span["tags"] == {
         "redis.transaction": is_transaction,
         "redis.is_cluster": False,
     }
+
+
+def test_sensitive_data(sentry_init, capture_events):
+    # fakeredis does not support the AUTH command, so we need to mock it
+    with mock.patch(
+        "sentry_sdk.integrations.redis._COMMANDS_INCLUDING_SENSITIVE_DATA", ["get"]
+    ):
+        sentry_init(
+            integrations=[RedisIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+        )
+        events = capture_events()
+
+        connection = FakeStrictRedis()
+        with start_transaction():
+            connection.get(
+                "this is super secret"
+            )  # GET stands in for AUTH, which fakeredis does not support
+
+        (event,) = events
+        spans = event["spans"]
+        assert spans[0]["op"] == "db.redis"
+        assert spans[0]["description"] == "GET [Filtered]"
+
+
+def test_pii_data_redacted(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        connection.set("somekey1", "my secret string1")
+        connection.set("somekey2", "my secret string2")
+        connection.get("somekey2")
+        connection.delete("somekey1", "somekey2")
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' [Filtered]"
+    assert spans[1]["description"] == "SET 'somekey2' [Filtered]"
+    assert spans[2]["description"] == "GET 'somekey2'"
+    assert spans[3]["description"] == "DEL 'somekey1' [Filtered]"
+
+
+def test_pii_data_sent(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        connection.set("somekey1", "my secret string1")
+        connection.set("somekey2", "my secret string2")
+        connection.get("somekey2")
+        connection.delete("somekey1", "somekey2")
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'"
+    assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'"
+    assert spans[2]["description"] == "GET 'somekey2'"
+    assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'"
+
+
+def test_data_truncation(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        long_string = "a" * 100000
+        connection.set("somekey1", long_string)
+        short_string = "b" * 10
+        connection.set("somekey2", short_string)
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
+        long_string[: 1024 - len("...") - len("SET 'somekey1' '")],
+    )
+    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
+
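+
+# Hypothetical helper for illustration (not part of the SDK): the span
+# description is cut so that prefix, kept payload and the trailing "..."
+# together fit within max_data_size (1024 by default here, 30 in the custom
+# test below).
+def _expected_truncated_description(prefix, value, max_data_size=1024):
+    budget = max_data_size - len("...") - len(prefix)
+    return prefix + value[:budget] + "..."
+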
+
+def test_data_truncation_custom(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration(max_data_size=30)],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        long_string = "a" * 100000
+        connection.set("somekey1", long_string)
+        short_string = "b" * 10
+        connection.set("somekey2", short_string)
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
+        long_string[: 30 - len("...") - len("SET 'somekey1' '")],
+    )
+    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
+
+
+def test_breadcrumbs(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration(max_data_size=30)],
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+
+    long_string = "a" * 100000
+    connection.set("somekey1", long_string)
+    short_string = "b" * 10
+    connection.set("somekey2", short_string)
+
+    capture_message("hi")
+
+    (event,) = events
+    crumbs = event["breadcrumbs"]["values"]
+
+    assert crumbs[0] == {
+        "message": "SET 'somekey1' 'aaaaaaaaaaa...",
+        "type": "redis",
+        "category": "redis",
+        "data": {
+            "db.operation": "SET",
+            "redis.is_cluster": False,
+            "redis.command": "SET",
+            "redis.key": "somekey1",
+        },
+        "timestamp": crumbs[0]["timestamp"],
+    }
+    assert crumbs[1] == {
+        "message": "SET 'somekey2' 'bbbbbbbbbb'",
+        "type": "redis",
+        "category": "redis",
+        "data": {
+            "db.operation": "SET",
+            "redis.is_cluster": False,
+            "redis.command": "SET",
+            "redis.key": "somekey2",
+        },
+        "timestamp": crumbs[1]["timestamp"],
+    }
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 62923cf..32eb8c4 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -1,5 +1,6 @@
 import pytest
 from sentry_sdk import capture_message
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.api import start_transaction
 from sentry_sdk.integrations.redis import RedisIntegration
 
@@ -13,7 +14,6 @@ if hasattr(rediscluster, "StrictRedisCluster"):
 
 @pytest.fixture(autouse=True)
 def monkeypatch_rediscluster_classes(reset_integrations):
-
     try:
         pipeline_cls = rediscluster.pipeline.ClusterPipeline
     except AttributeError:
@@ -42,6 +42,7 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
         "category": "redis",
         "message": "GET 'foobar'",
         "data": {
+            "db.operation": "GET",
             "redis.key": "foobar",
             "redis.command": "GET",
             "redis.is_cluster": True,
@@ -51,8 +52,21 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
     }
 
 
-def test_rediscluster_pipeline(sentry_init, capture_events):
-    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+@pytest.mark.parametrize(
+    "send_default_pii, expected_first_ten",
+    [
+        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+def test_rediscluster_pipeline(
+    sentry_init, capture_events, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
     events = capture_events()
 
     rc = rediscluster.RedisCluster(connection_pool=True)
@@ -65,13 +79,14 @@ def test_rediscluster_pipeline(sentry_init, capture_events):
 
     (event,) = events
     (span,) = event["spans"]
-    assert span["op"] == "redis"
+    assert span["op"] == "db.redis"
     assert span["description"] == "redis.pipeline.execute"
     assert span["data"] == {
         "redis.commands": {
             "count": 3,
-            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
-        }
+            "first_ten": expected_first_ten,
+        },
+        SPANDATA.DB_SYSTEM: "redis",
     }
     assert span["tags"] == {
         "redis.transaction": False,  # For Cluster, this is always False
diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py
index 02c6636..aecf647 100644
--- a/tests/integrations/requests/test_requests.py
+++ b/tests/integrations/requests/test_requests.py
@@ -1,16 +1,27 @@
 import pytest
+import responses
 
 requests = pytest.importorskip("requests")
 
 from sentry_sdk import capture_message
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.stdlib import StdlibIntegration
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 def test_crumb_capture(sentry_init, capture_events):
     sentry_init(integrations=[StdlibIntegration()])
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
     events = capture_events()
 
-    response = requests.get("https://httpbin.org/status/418")
+    response = requests.get(url)
     capture_message("Testing!")
 
     (event,) = events
@@ -18,8 +29,36 @@ def test_crumb_capture(sentry_init, capture_events):
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
-        "url": "https://httpbin.org/status/418",
-        "method": "GET",
-        "status_code": response.status_code,
+        "url": url,
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_FRAGMENT: "",
+        SPANDATA.HTTP_QUERY: "",
+        SPANDATA.HTTP_STATUS_CODE: response.status_code,
+        "reason": response.reason,
+    }
+
+
+@pytest.mark.tests_internal_exceptions
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(integrations=[StdlibIntegration()])
+
+    url = "https://example.com"
+    responses.add(responses.GET, url, status=200)
+
+    events = capture_events()
+
+    with mock.patch(
+        "sentry_sdk.integrations.stdlib.parse_url",
+        side_effect=ValueError,
+    ):
+        response = requests.get(url)
+
+    capture_message("Testing!")
+
+    (event,) = events
+    assert event["breadcrumbs"]["values"][0]["data"] == {
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_STATUS_CODE: response.status_code,
         "reason": response.reason,
+        # no url related data
     }
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index 651bf22..270a92e 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -1,5 +1,6 @@
 import pytest
 from fakeredis import FakeStrictRedis
+from sentry_sdk import configure_scope, start_transaction
 from sentry_sdk.integrations.rq import RqIntegration
 
 import rq
@@ -58,13 +59,18 @@ def test_basic(sentry_init, capture_events):
     assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
 
     assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
-    assert event["extra"]["rq-job"] == {
-        "args": [],
-        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
-        "func": "tests.integrations.rq.test_rq.crashing_job",
-        "job_id": event["extra"]["rq-job"]["job_id"],
-        "kwargs": {"foo": 42},
-    }
+
+    extra = event["extra"]["rq-job"]
+    assert extra["args"] == []
+    assert extra["kwargs"] == {"foo": 42}
+    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
+    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert "job_id" in extra
+    assert "enqueued_at" in extra
+
+    # older versions don't persist started_at correctly
+    if tuple(map(int, rq.VERSION.split("."))) >= (0, 9):
+        assert "started_at" in extra
 
 
 def test_transport_shutdown(sentry_init, capture_events_forksafe):
@@ -88,7 +94,6 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe):
 def test_transaction_with_error(
     sentry_init, capture_events, DictionaryContaining  # noqa:N803
 ):
-
     sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
     events = capture_events()
 
@@ -101,7 +106,7 @@ def test_transaction_with_error(
     error_event, envelope = events
 
     assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
-    assert error_event["contexts"]["trace"]["op"] == "rq.task"
+    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
     assert error_event["exception"]["values"][0]["type"] == "Exception"
     assert (
         error_event["exception"]["values"][0]["value"]
@@ -121,6 +126,71 @@ def test_transaction_with_error(
     )
 
 
+def test_error_has_trace_context_if_tracing_disabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(crashing_job, foo=None)
+    worker.work(burst=True)
+
+    (error_event,) = events
+
+    assert error_event["contexts"]["trace"]
+
+
+def test_tracing_enabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    with start_transaction(op="rq transaction") as transaction:
+        queue.enqueue(crashing_job, foo=None)
+        worker.work(burst=True)
+
+    error_event, envelope, _ = events
+
+    assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+
+    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
+
+
+def test_tracing_disabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
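+    # Even with tracing disabled, the SDK keeps a propagation context on the
+    # scope; the captured error should carry its trace_id.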
+    with configure_scope() as scope:
+        queue.enqueue(crashing_job, foo=None)
+        worker.work(burst=True)
+
+        (error_event,) = events
+
+        assert (
+            error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+        )
+        assert (
+            error_event["contexts"]["trace"]["trace_id"]
+            == scope._propagation_context["trace_id"]
+        )
+
+
 def test_transaction_no_error(
     sentry_init, capture_events, DictionaryContaining  # noqa:N803
 ):
@@ -136,7 +206,7 @@ def test_transaction_no_error(
     envelope = events[0]
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "rq.task"
+    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
     assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
     assert envelope["extra"]["rq-job"] == DictionaryContaining(
         {
diff --git a/tests/integrations/socket/__init__.py b/tests/integrations/socket/__init__.py
new file mode 100644
index 0000000..893069b
--- /dev/null
+++ b/tests/integrations/socket/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("socket")
diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py
new file mode 100644
index 0000000..914ba0b
--- /dev/null
+++ b/tests/integrations/socket/test_socket.py
@@ -0,0 +1,51 @@
+import socket
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.socket import SocketIntegration
+
+
+def test_getaddrinfo_trace(sentry_init, capture_events):
+    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction():
+        socket.getaddrinfo("example.com", 443)
+
+    (event,) = events
+    (span,) = event["spans"]
+
+    assert span["op"] == "socket.dns"
+    assert span["description"] == "example.com:443"
+    assert span["data"] == {
+        "host": "example.com",
+        "port": 443,
+    }
+
+
+def test_create_connection_trace(sentry_init, capture_events):
+    timeout = 10
+
+    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction():
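+        # positional arguments: address, timeout, source_address (None)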
+        socket.create_connection(("example.com", 443), timeout, None)
+
+    (event,) = events
+    (connect_span, dns_span) = event["spans"]
+    # create_connection calls getaddrinfo internally, so the event also contains a DNS span
+
+    assert connect_span["op"] == "socket.connection"
+    assert connect_span["description"] == "example.com:443"
+    assert connect_span["data"] == {
+        "address": ["example.com", 443],
+        "timeout": timeout,
+        "source_address": None,
+    }
+
+    assert dns_span["op"] == "socket.dns"
+    assert dns_span["description"] == "example.com:443"
+    assert dns_span["data"] == {
+        "host": "example.com",
+        "port": 443,
+    }
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index e9d8c4e..eb1792b 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -5,11 +5,13 @@ from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import relationship, sessionmaker
+from sqlalchemy import text
 
 from sentry_sdk import capture_message, start_transaction, configure_scope
+from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
 from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
-from sentry_sdk.utils import json_dumps, MAX_STRING_LENGTH
 from sentry_sdk.serializer import MAX_EVENT_BYTES
+from sentry_sdk.utils import json_dumps
 
 
 def test_orm_queries(sentry_init, capture_events):
@@ -74,7 +76,6 @@ def test_orm_queries(sentry_init, capture_events):
     sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
 )
 def test_transactions(sentry_init, capture_events, render_span_tree):
-
     sentry_init(
         integrations=[SqlalchemyIntegration()],
         _experiments={"record_sql_params": True},
@@ -119,6 +120,12 @@ def test_transactions(sentry_init, capture_events, render_span_tree):
 
     (event,) = events
 
+    for span in event["spans"]:
+        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
+        assert span["data"][SPANDATA.DB_NAME] == ":memory:"
+        assert SPANDATA.SERVER_ADDRESS not in span["data"]
+        assert SPANDATA.SERVER_PORT not in span["data"]
+
     assert (
         render_span_tree(event)
         == """\
@@ -143,14 +150,13 @@ def test_long_sql_query_preserved(sentry_init, capture_events):
     sentry_init(
         traces_sample_rate=1,
         integrations=[SqlalchemyIntegration()],
-        _experiments={"smart_transaction_trimming": True},
     )
     events = capture_events()
 
     engine = create_engine("sqlite:///:memory:")
     with start_transaction(name="test"):
         with engine.connect() as con:
-            con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+            con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))
 
     (event,) = events
     description = event["spans"][0]["description"]
@@ -158,15 +164,14 @@ def test_long_sql_query_preserved(sentry_init, capture_events):
     assert description.endswith("SELECT 98 UNION SELECT 99")
 
 
-def test_too_large_event_truncated(sentry_init, capture_events):
+def test_large_event_not_truncated(sentry_init, capture_events):
     sentry_init(
         traces_sample_rate=1,
         integrations=[SqlalchemyIntegration()],
-        _experiments={"smart_transaction_trimming": True},
     )
     events = capture_events()
 
-    long_str = "x" * (MAX_STRING_LENGTH + 10)
+    long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)
 
     with configure_scope() as scope:
 
@@ -178,41 +183,45 @@ def test_too_large_event_truncated(sentry_init, capture_events):
     engine = create_engine("sqlite:///:memory:")
     with start_transaction(name="test"):
         with engine.connect() as con:
-            for _ in range(2000):
-                con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+            for _ in range(1500):
+                con.execute(
+                    text(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+                )
 
     (event,) = events
 
-    # Because of attached metadata in the "_meta" key, we may send out a little
-    # bit more than MAX_EVENT_BYTES.
-    max_bytes = 1.2 * MAX_EVENT_BYTES
-    assert len(json_dumps(event)) < max_bytes
+    assert len(json_dumps(event)) > MAX_EVENT_BYTES
 
     # Some spans are discarded.
     assert len(event["spans"]) == 1000
 
-    for i, span in enumerate(event["spans"]):
-        description = span["description"]
-
-        assert description.startswith("SELECT ")
-        if str(i) in event["_meta"]["spans"]:
-            # Description must have been truncated
-            assert len(description) == 10
-            assert description.endswith("...")
-        else:
-            # Description was not truncated, check for original length
-            assert len(description) == 1583
-            assert description.endswith("SELECT 98 UNION SELECT 99")
-
-    # Smoke check the meta info for one of the spans.
-    assert next(iter(event["_meta"]["spans"].values())) == {
-        "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}}
-    }
+    # Span descriptions are not truncated.
+    description = event["spans"][0]["description"]
+    assert len(description) == 1583
+    assert description.startswith("SELECT 0")
+    assert description.endswith("SELECT 98 UNION SELECT 99")
+
+    description = event["spans"][999]["description"]
+    assert len(description) == 1583
+    assert description.startswith("SELECT 0")
+    assert description.endswith("SELECT 98 UNION SELECT 99")
 
     # Smoke check that truncation of other fields has not changed.
-    assert len(event["message"]) == MAX_STRING_LENGTH
+    assert len(event["message"]) == DEFAULT_MAX_VALUE_LENGTH
 
     # The _meta for other truncated fields should be there as well.
     assert event["_meta"]["message"] == {
         "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
     }
+
+
+def test_engine_name_not_string(sentry_init):
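+    # Regression check: a non-string (bytes) dialect name must not crash the
+    # integration when a query is executed.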
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+    )
+
+    engine = create_engine("sqlite:///:memory:")
+    engine.dialect.name = b"sqlite"
+
+    with engine.connect() as con:
+        con.execute(text("SELECT 0"))
diff --git a/tests/integrations/starlette/templates/trace_meta.html b/tests/integrations/starlette/templates/trace_meta.html
new file mode 100644
index 0000000..139fd16
--- /dev/null
+++ b/tests/integrations/starlette/templates/trace_meta.html
@@ -0,0 +1 @@
+{{ sentry_trace_meta }}
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 5908eba..cb2f4a8 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -1,12 +1,17 @@
 import asyncio
 import base64
+import functools
 import json
+import logging
 import os
+import re
+import threading
 
 import pytest
 
 from sentry_sdk import last_event_id, capture_exception
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.utils import parse_version
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -18,7 +23,6 @@ from sentry_sdk.integrations.starlette import (
     StarletteIntegration,
     StarletteRequestExtractor,
 )
-from sentry_sdk.utils import AnnotatedValue
 
 starlette = pytest.importorskip("starlette")
 from starlette.authentication import (
@@ -31,6 +35,8 @@ from starlette.middleware import Middleware
 from starlette.middleware.authentication import AuthenticationMiddleware
 from starlette.testclient import TestClient
 
+STARLETTE_VERSION = parse_version(starlette.__version__)
+
 PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")
 
 BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}
@@ -39,6 +45,16 @@ BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="usern
     "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
 )
 
+FORM_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+JSON_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
 PARSED_FORM = starlette.datastructures.FormData(
     [
         ("username", "Jane"),
@@ -48,16 +64,10 @@ PARSED_FORM = starlette.datastructures.FormData(
             starlette.datastructures.UploadFile(
                 filename="photo.jpg",
                 file=open(PICTURE, "rb"),
-                content_type="image/jpeg",
             ),
         ),
     ]
 )
-PARSED_BODY = {
-    "username": "Jane",
-    "password": "hello123",
-    "photo": AnnotatedValue("", {"rem": [["!raw", "x"]]}),
-}
 
 # Dummy ASGI scope for creating mock Starlette requests
 SCOPE = {
@@ -81,7 +91,20 @@ SCOPE = {
 }
 
 
+async def _mock_receive(msg):
+    return msg
+
+
+from sentry_sdk import Hub
+from starlette.templating import Jinja2Templates
+
+
 def starlette_app_factory(middleware=None, debug=True):
+    template_dir = os.path.join(
+        os.getcwd(), "tests", "integrations", "starlette", "templates"
+    )
+    templates = Jinja2Templates(directory=template_dir)
+
     async def _homepage(request):
         1 / 0
         return starlette.responses.JSONResponse({"status": "ok"})
@@ -97,6 +120,32 @@ def starlette_app_factory(middleware=None, debug=True):
         capture_message("hi")
         return starlette.responses.JSONResponse({"status": "ok"})
 
+    def _thread_ids_sync(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _thread_ids_async(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _render_template(request):
+        hub = Hub.current
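+        # Capture the traceparent and baggage in a message so the test can
+        # compare them with the <meta> tags rendered into the template.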
+        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+
+        template_context = {
+            "request": request,
+            "msg": "Hello Template World!",
+        }
+        return templates.TemplateResponse("trace_meta.html", template_context)
+
     app = starlette.applications.Starlette(
         debug=debug,
         routes=[
@@ -104,6 +153,9 @@ def starlette_app_factory(middleware=None, debug=True):
             starlette.routing.Route("/custom_error", _custom_error),
             starlette.routing.Route("/message", _message),
             starlette.routing.Route("/message/{message_id}", _message_with_id),
+            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
+            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
+            starlette.routing.Route("/render_template", _render_template),
         ],
         middleware=middleware,
     )
@@ -152,20 +204,75 @@ class AsyncIterator:
             raise StopAsyncIteration
 
 
+class SampleMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        # only handle http requests
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
+
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
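+        # Wrap the no-op callables in functools.partial so the integration's
+        # handling of partial receive/send callbacks is exercised.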
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_content_length(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        scope = SCOPE.copy()
-        scope["headers"] = [
-            [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
-        ]
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        assert await extractor.content_length() == len(json.dumps(BODY_JSON))
+    assert await extractor.content_length() == len(json.dumps(BODY_JSON))
 
 
 @pytest.mark.asyncio
@@ -181,82 +288,82 @@ async def test_starlettrequestextractor_cookies(sentry_init):
 
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_json(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
-
-        assert extractor.is_json()
-        assert await extractor.json() == BODY_JSON
+    starlette_request = starlette.requests.Request(SCOPE)
 
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-@pytest.mark.asyncio
-async def test_starlettrequestextractor_parsed_body_json(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        parsed_body = await extractor.parsed_body()
-        assert parsed_body == BODY_JSON
+    assert extractor.is_json()
+    assert await extractor.json() == BODY_JSON
 
 
 @pytest.mark.asyncio
-async def test_starlettrequestextractor_parsed_body_form(sentry_init):
+async def test_starlettrequestextractor_form(sentry_init):
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
     ]
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    # TODO add test for content-type: "application/x-www-form-urlencoded"
+
+    starlette_request = starlette.requests.Request(scope)
 
-        parsed_body = await extractor.parsed_body()
-        assert parsed_body.keys() == PARSED_BODY.keys()
-        assert parsed_body["username"] == PARSED_BODY["username"]
-        assert parsed_body["password"] == PARSED_BODY["password"]
-        assert parsed_body["photo"].metadata == PARSED_BODY["photo"].metadata
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    form_data = await extractor.form()
+    assert form_data.keys() == PARSED_FORM.keys()
+    assert form_data["username"] == PARSED_FORM["username"]
+    assert form_data["password"] == PARSED_FORM["password"]
+    assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+
+    # Make sure we can still read the body
+    # after already reading it with extractor.form() above.
+    body = await extractor.request.body()
+    assert body
 
 
 @pytest.mark.asyncio
-async def test_starlettrequestextractor_form(sentry_init):
+async def test_starlettrequestextractor_body_consumed_twice(
+    sentry_init, capture_events
+):
+    """
+    Starlette caches the request data when you read it via `request.json()`
+    or `request.body()`, but it does NOT cache it when using `request.form()`.
+    This creates an edge case: the Sentry Starlette integration reads the body
+    using `.form()`, and the user then wants to read it using `.body()`, but
+    the underlying stream cannot be consumed twice and is not cached.
+
+    We have fixed this in `StarletteRequestExtractor.form()` by consuming the
+    body first with `.body()` (to put it into the `_body` cache) and then
+    consuming it with `.form()`.
+
+    If this behavior changes in Starlette and `request.form()` also starts
+    caching the body, this test will fail.
+
+    See also https://github.com/encode/starlette/discussions/1933
+    """
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
     ]
-    # TODO add test for content-type: "application/x-www-form-urlencoded"
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
 
-        form_data = await extractor.form()
-        assert form_data.keys() == PARSED_FORM.keys()
-        assert form_data["username"] == PARSED_FORM["username"]
-        assert form_data["password"] == PARSED_FORM["password"]
-        assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
+    extractor = StarletteRequestExtractor(starlette_request)
 
-@pytest.mark.asyncio
-async def test_starlettrequestextractor_raw_data(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
+    await extractor.request.form()
 
-        assert await extractor.raw_data() == bytes(json.dumps(BODY_JSON), "utf-8")
+    with pytest.raises(RuntimeError):
+        await extractor.request.body()
 
 
 @pytest.mark.asyncio
@@ -271,22 +378,23 @@ async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init
         [b"content-length", str(len(BODY_FORM)).encode()],
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
-
-        request_info = await extractor.extract_request_info()
-
-        assert request_info
-        assert request_info["cookies"] == {
-            "tasty_cookie": "strawberry",
-            "yummy_cookie": "choco",
-        }
-        # Because request is too big only the AnnotatedValue is extracted.
-        assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    # Because request is too big only the AnnotatedValue is extracted.
+    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
 
 
 @pytest.mark.asyncio
@@ -302,21 +410,22 @@ async def test_starlettrequestextractor_extract_request_info(sentry_init):
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
 
-        request_info = await extractor.extract_request_info()
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-        assert request_info
-        assert request_info["cookies"] == {
-            "tasty_cookie": "strawberry",
-            "yummy_cookie": "choco",
-        }
-        assert request_info["data"] == BODY_JSON
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    assert request_info["data"] == BODY_JSON
 
 
 @pytest.mark.asyncio
@@ -332,18 +441,19 @@ async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init)
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
 
-        request_info = await extractor.extract_request_info()
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-        assert request_info
-        assert "cookies" not in request_info
-        assert request_info["data"] == BODY_JSON
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert "cookies" not in request_info
+    assert request_info["data"] == BODY_JSON
 
 
 @pytest.mark.parametrize(
@@ -540,12 +650,179 @@ def test_middleware_spans(sentry_init, capture_events):
 
     idx = 0
     for span in transaction_event["spans"]:
-        if span["op"] == "starlette.middleware":
+        if span["op"] == "middleware.starlette":
             assert span["description"] == expected[idx]
             assert span["tags"]["starlette.middleware_name"] == expected[idx]
             idx += 1
 
 
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SampleMiddleware",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__.<locals>.do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.<locals>._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send.<locals>.send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request.<locals>.send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__.<locals>.do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.<locals>._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send.<locals>.send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request.<locals>.send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SampleReceiveSendMiddleware)]
+    )
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.receive",
+            "description": "_ASGIAdapter.send.<locals>.receive"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request.<locals>.receive",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.<locals>._send",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send.<locals>.send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request.<locals>.send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(<function SamplePartialReceiveSendMiddleware.__call__.<locals>.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(<function SamplePartialReceiveSendMiddleware.__call__.<locals>.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
 def test_last_event_id(sentry_init, capture_events):
     sentry_init(
         integrations=[StarletteIntegration()],
@@ -588,3 +865,93 @@ def test_legacy_setup(
 
     (event,) = events
     assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
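+# Patch the minimum sample count to 0 so a profile is emitted even if the
+# request finishes before the profiler collects the usual minimum of samples.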
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
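+    # The profile is attached to the envelope as an item of type "profile".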
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+def test_original_request_not_scrubbed(sentry_init, capture_events):
+    sentry_init(integrations=[StarletteIntegration()])
+
+    events = capture_events()
+
+    async def _error(request):
+        logging.critical("Oh no!")
+        assert request.headers["Authorization"] == "Bearer ohno"
+        assert await request.json() == {"password": "ohno"}
+        return starlette.responses.JSONResponse({"status": "Oh no!"})
+
+    app = starlette.applications.Starlette(
+        routes=[
+            starlette.routing.Route("/error", _error, methods=["POST"]),
+        ],
+    )
+
+    client = TestClient(app)
+    client.post(
+        "/error",
+        json={"password": "ohno"},
+        headers={"Authorization": "Bearer ohno"},
+    )
+
+    event = events[0]
+    assert event["request"]["data"] == {"password": "[Filtered]"}
+    assert event["request"]["headers"]["authorization"] == "[Filtered]"
+
+
+@pytest.mark.skipif(STARLETTE_VERSION < (0, 24), reason="Requires Starlette >= 0.24")
+def test_template_tracing_meta(sentry_init, capture_events):
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[StarletteIntegration()],
+    )
+    events = capture_events()
+
+    app = starlette_app_factory()
+
+    client = TestClient(app)
+    response = client.get("/render_template")
+    assert response.status_code == 200
+
+    rendered_meta = response.text
+    traceparent, baggage = events[0]["message"].split("\n")
+    assert traceparent != ""
+    assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Item order in the baggage header is not guaranteed (e.g. on Python 2),
+    # so compare the sorted items.
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
diff --git a/tests/integrations/starlite/__init__.py b/tests/integrations/starlite/__init__.py
new file mode 100644
index 0000000..4c10376
--- /dev/null
+++ b/tests/integrations/starlite/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlite")
diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py
new file mode 100644
index 0000000..603697c
--- /dev/null
+++ b/tests/integrations/starlite/test_starlite.py
@@ -0,0 +1,325 @@
+import functools
+
+import pytest
+
+from sentry_sdk import capture_exception, capture_message, last_event_id
+from sentry_sdk.integrations.starlite import StarliteIntegration
+
+starlite = pytest.importorskip("starlite")
+
+from typing import Any, Dict
+
+from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
+from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
+from starlite.middleware.session.memory_backend import MemoryBackendConfig
+from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
+from starlite.testing import TestClient
+
+
+class SampleMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send) -> None:
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
+    class MyController(Controller):
+        path = "/controller"
+
+        @get("/error")
+        async def controller_error(self) -> None:
+            raise Exception("Whoa")
+
+    @get("/some_url")
+    async def homepage_handler() -> Dict[str, Any]:
+        1 / 0
+        return {"status": "ok"}
+
+    @get("/custom_error", name="custom_name")
+    async def custom_error() -> Any:
+        raise Exception("Too Hot")
+
+    @get("/message")
+    async def message() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @get("/message/{message_id:str}")
+    async def message_with_id() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    logging_config = LoggingConfig()
+
+    app = Starlite(
+        route_handlers=[
+            homepage_handler,
+            custom_error,
+            message,
+            message_with_id,
+            MyController,
+        ],
+        debug=debug,
+        middleware=middleware,
+        logging_config=logging_config,
+        exception_handlers=exception_handlers,
+    )
+
+    return app
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message,expected_tx_name",
+    [
+        (
+            "/some_url",
+            ZeroDivisionError,
+            "division by zero",
+            "tests.integrations.starlite.test_starlite.starlite_app_factory.<locals>.homepage_handler",
+        ),
+        (
+            "/custom_error",
+            Exception,
+            "Too Hot",
+            "custom_name",
+        ),
+        (
+            "/controller/error",
+            Exception,
+            "Whoa",
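+            # Starlite wraps controller methods in functools.partial, which
+            # shows up in the transaction name.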
+            "partial(<function tests.integrations.starlite.test_starlite.starlite_app_factory.<locals>.MyController.controller_error>)",
+        ),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+    expected_tx_name,
+):
+    sentry_init(integrations=[StarliteIntegration()])
+    starlite_app = starlite_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlite_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"
+    assert event["transaction"] == expected_tx_name
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+
+    logging_config = LoggingMiddlewareConfig()
+    session_config = MemoryBackendConfig()
+    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))
+
+    starlite_app = starlite_app_factory(
+        middleware=[
+            session_config.middleware,
+            logging_config.middleware,
+            rate_limit_config.middleware,
+        ]
+    )
+    events = capture_events()
+
+    client = TestClient(
+        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
+    )
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = ["SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlite":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlite.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleMiddleware])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SampleMiddleware",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+    ]
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message")
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(
+        middleware=[SamplePartialReceiveSendMiddleware]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.receive",
+            "description": "TestClientTransport.create_receive.<locals>.receive",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarliteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlite.response.Response(last_event_id(), status_code=500)
+
+    app = starlite_app_factory(
+        debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
+    )
+
+    client = TestClient(app, raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+    event = events[-1]
+    assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index 839dc01..e40f522 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -1,6 +1,5 @@
-import platform
-import sys
 import random
+
 import pytest
 
 try:
@@ -12,39 +11,48 @@ except ImportError:
 
 try:
     # py2
-    from httplib import HTTPSConnection
+    from httplib import HTTPConnection, HTTPSConnection
 except ImportError:
     # py3
-    from http.client import HTTPSConnection
+    from http.client import HTTPConnection, HTTPSConnection
 
 try:
     from unittest import mock  # python 3.3 and above
 except ImportError:
     import mock  # python < 3.3
 
+
 from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import MATCH_ALL, SPANDATA
 from sentry_sdk.tracing import Transaction
 from sentry_sdk.integrations.stdlib import StdlibIntegration
 
+from tests.conftest import create_mock_http_server
+
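+# Start a local mock HTTP server for the whole module; the tests below hit
+# http://localhost:<PORT> instead of the real network.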
+PORT = create_mock_http_server()
+
 
 def test_crumb_capture(sentry_init, capture_events):
     sentry_init(integrations=[StdlibIntegration()])
     events = capture_events()
 
-    url = "https://httpbin.org/status/200"
-    response = urlopen(url)
-    assert response.getcode() == 200
+    url = "http://localhost:{}/some/random/url".format(PORT)
+    urlopen(url)
+
     capture_message("Testing!")
 
     (event,) = events
     (crumb,) = event["breadcrumbs"]["values"]
+
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
         "url": url,
-        "method": "GET",
-        "status_code": 200,
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_STATUS_CODE: 200,
         "reason": "OK",
+        SPANDATA.HTTP_FRAGMENT: "",
+        SPANDATA.HTTP_QUERY: "",
     }
 
 
@@ -56,9 +64,9 @@ def test_crumb_capture_hint(sentry_init, capture_events):
     sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
     events = capture_events()
 
-    url = "https://httpbin.org/status/200"
-    response = urlopen(url)
-    assert response.getcode() == 200
+    url = "http://localhost:{}/some/random/url".format(PORT)
+    urlopen(url)
+
     capture_message("Testing!")
 
     (event,) = events
@@ -67,14 +75,23 @@ def test_crumb_capture_hint(sentry_init, capture_events):
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
         "url": url,
-        "method": "GET",
-        "status_code": 200,
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_STATUS_CODE: 200,
         "reason": "OK",
         "extra": "foo",
+        SPANDATA.HTTP_FRAGMENT: "",
+        SPANDATA.HTTP_QUERY: "",
     }
 
-    if platform.python_implementation() != "PyPy":
-        assert sys.getrefcount(response) == 2
+
+def test_empty_realurl(sentry_init, capture_events):
+    """
+    Ensure that after calling sentry_sdk.init you can still call
+    putrequest with a None URL.
+    """
+
+    sentry_init(dsn="")
+    HTTPConnection("example.com", port=443).putrequest("POST", None)
 
 
 def test_httplib_misuse(sentry_init, capture_events, request):
@@ -90,19 +107,19 @@ def test_httplib_misuse(sentry_init, capture_events, request):
     sentry_init()
     events = capture_events()
 
-    conn = HTTPSConnection("httpbin.org", 443)
+    conn = HTTPConnection("localhost", PORT)
 
     # make sure we release the resource, even if the test fails
     request.addfinalizer(conn.close)
 
-    conn.request("GET", "/anything/foo")
+    conn.request("GET", "/200")
 
     with pytest.raises(Exception):
         # This raises an exception, because we didn't call `getresponse` for
         # the previous request yet.
         #
         # This call should not affect our breadcrumb.
-        conn.request("POST", "/anything/bar")
+        conn.request("POST", "/200")
 
     response = conn.getresponse()
     assert response._method == "GET"
@@ -115,10 +132,12 @@ def test_httplib_misuse(sentry_init, capture_events, request):
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
-        "url": "https://httpbin.org/anything/foo",
-        "method": "GET",
-        "status_code": 200,
+        "url": "http://localhost:{}/200".format(PORT),
+        SPANDATA.HTTP_METHOD: "GET",
+        SPANDATA.HTTP_STATUS_CODE: 200,
         "reason": "OK",
+        SPANDATA.HTTP_FRAGMENT: "",
+        SPANDATA.HTTP_QUERY: "",
     }
 
 
@@ -146,7 +165,6 @@ def test_outgoing_trace_headers(sentry_init, monkeypatch):
         op="greeting.sniff",
         trace_id="12312012123120121231201212312012",
     ) as transaction:
-
         HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
 
         (request_str,) = mock_send.call_args[0]
@@ -210,6 +228,7 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
         expected_outgoing_baggage_items = [
             "sentry-trace_id=%s" % transaction.trace_id,
             "sentry-sample_rate=0.5",
+            "sentry-sampled=%s" % ("true" if transaction.sampled else "false"),
             "sentry-release=foo",
             "sentry-environment=production",
         ]
@@ -217,3 +236,108 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
         assert sorted(request_headers["baggage"].split(",")) == sorted(
             expected_outgoing_baggage_items
         )
+
+
+@pytest.mark.parametrize(
+    "trace_propagation_targets,host,path,trace_propagated",
+    [
+        [
+            [],
+            "example.com",
+            "/",
+            False,
+        ],
+        [
+            None,
+            "example.com",
+            "/",
+            False,
+        ],
+        [
+            [MATCH_ALL],
+            "example.com",
+            "/",
+            True,
+        ],
+        [
+            ["https://example.com/"],
+            "example.com",
+            "/",
+            True,
+        ],
+        [
+            ["https://example.com/"],
+            "example.com",
+            "",
+            False,
+        ],
+        [
+            ["https://example.com"],
+            "example.com",
+            "",
+            True,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "example.net",
+            "",
+            False,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "good.example.net",
+            "",
+            True,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "good.example.net",
+            "/some/thing",
+            True,
+        ],
+    ],
+)
+def test_option_trace_propagation_targets(
+    sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated
+):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    sentry_init(
+        trace_propagation_targets=trace_propagation_targets,
+        traces_sample_rate=1.0,
+    )
+
+    headers = {
+        "baggage": (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        )
+    }
+
+    transaction = Transaction.continue_from_headers(headers)
+
+    with start_transaction(
+        transaction=transaction,
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="12312012123120121231201212312012",
+    ) as transaction:
+        HTTPSConnection(host).request("GET", path)
+
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        if trace_propagated:
+            assert "sentry-trace" in request_headers
+            assert "baggage" in request_headers
+        else:
+            assert "sentry-trace" not in request_headers
+            assert "baggage" not in request_headers
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
index 67b79e2..912717d 100644
--- a/tests/integrations/threading/test_threading.py
+++ b/tests/integrations/threading/test_threading.py
@@ -1,5 +1,5 @@
 import gc
-
+import sys
 from threading import Thread
 
 import pytest
@@ -7,6 +7,9 @@ import pytest
 from sentry_sdk import configure_scope, capture_message
 from sentry_sdk.integrations.threading import ThreadingIntegration
 
+original_start = Thread.start
+original_run = Thread.run
+
 
 @pytest.mark.forked
 @pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
@@ -26,7 +29,8 @@ def test_handles_exceptions(sentry_init, capture_events, integrations):
 
         (exception,) = event["exception"]["values"]
         assert exception["type"] == "ZeroDivisionError"
-        assert exception["mechanism"] == {"type": "threading", "handled": False}
+        assert exception["mechanism"]["type"] == "threading"
+        assert not exception["mechanism"]["handled"]
     else:
         assert not events
 
@@ -60,7 +64,8 @@ def test_propagates_hub(sentry_init, capture_events, propagate_hub):
     (exception,) = event["exception"]["values"]
 
     assert exception["type"] == "ZeroDivisionError"
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
     if propagate_hub:
         assert event["tags"]["stage1"] == "true"
@@ -114,3 +119,47 @@ def test_double_patching(sentry_init, capture_events):
     for event in events:
         (exception,) = event["exception"]["values"]
         assert exception["type"] == "ZeroDivisionError"
+
+
+@pytest.mark.skipif(sys.version_info < (3, 2), reason="no __qualname__ in older python")
+def test_wrapper_attributes(sentry_init):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+
+    def target():
+        assert t.run.__name__ == "run"
+        assert t.run.__qualname__ == original_run.__qualname__
+
+    t = Thread(target=target)
+    t.start()
+    t.join()
+
+    assert Thread.start.__name__ == "start"
+    assert Thread.start.__qualname__ == original_start.__qualname__
+    assert t.start.__name__ == "start"
+    assert t.start.__qualname__ == original_start.__qualname__
+
+    assert Thread.run.__name__ == "run"
+    assert Thread.run.__qualname__ == original_run.__qualname__
+    assert t.run.__name__ == "run"
+    assert t.run.__qualname__ == original_run.__qualname__
+
+
+@pytest.mark.skipif(
+    sys.version_info >= (3, 0),
+    reason="simpler test for py2.7 without py3 only __qualname__",
+)
+def test_wrapper_attributes_no_qualname(sentry_init):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+
+    def target():
+        assert t.run.__name__ == "run"
+
+    t = Thread(target=target)
+    t.start()
+    t.join()
+
+    assert Thread.start.__name__ == "start"
+    assert t.start.__name__ == "start"
+
+    assert Thread.run.__name__ == "run"
+    assert t.run.__name__ == "run"
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
index c0dac2d..2160154 100644
--- a/tests/integrations/tornado/test_tornado.py
+++ b/tests/integrations/tornado/test_tornado.py
@@ -2,7 +2,7 @@ import json
 
 import pytest
 
-from sentry_sdk import configure_scope, start_transaction
+from sentry_sdk import configure_scope, start_transaction, capture_message
 from sentry_sdk.integrations.tornado import TornadoIntegration
 
 from tornado.web import RequestHandler, Application, HTTPError
@@ -46,6 +46,12 @@ class CrashingHandler(RequestHandler):
         1 / 0
 
 
+class CrashingWithMessageHandler(RequestHandler):
+    def get(self):
+        capture_message("hi")
+        1 / 0
+
+
 class HelloHandler(RequestHandler):
     async def get(self):
         with configure_scope() as scope:
@@ -292,3 +298,145 @@ def test_json(tornado_testcase, sentry_init, capture_events):
     assert exception["value"] == "[]"
     assert event
     assert event["request"]["data"] == {"foo": {"bar": 42}}
+
+
+def test_error_has_new_trace_context_performance_enabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check if a 'trace' context is added to errors and transactions when performance monitoring is enabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi")
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check if a 'trace' context is added to errors when performance monitoring is disabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=None,  # this is the default, just added for clarity
+    )
+    events = capture_events()
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi")
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_existing_trace_context_performance_enabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check if a 'trace' context is added to errors and transactions
+    from the incoming 'sentry-trace' header when performance monitoring is enabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    headers = {"sentry-trace": sentry_trace_header}
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi", headers=headers)
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check if a 'trace' context is added to errors
+    from the incoming 'sentry-trace' header when performance monitoring is disabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=None,  # this is the default, just added for clarity
+    )
+    events = capture_events()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    headers = {"sentry-trace": sentry_trace_header}
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi", headers=headers)
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
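
All four tests build the incoming header by hand, which documents the `sentry-trace` wire format: a 32-hex-character trace id, a 16-hex-character span id, and a sampled flag, joined by dashes. A small illustrative helper (`make_sentry_trace` is not an SDK function):

```python
def make_sentry_trace(trace_id, parent_span_id, sampled):
    # "<32 hex trace_id>-<16 hex span_id>-<0 or 1>"
    return "{}-{}-{}".format(trace_id, parent_span_id, int(sampled))

header = make_sentry_trace(
    "471a43a4192642f0b136d5159a501701", "6e8f22c393e68f19", True
)
assert header == "471a43a4192642f0b136d5159a501701-6e8f22c393e68f19-1"
```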
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 4bf4e66..3616c7c 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,12 +1,13 @@
+import sys
+
 from werkzeug.test import Client
 
 import pytest
 
 import sentry_sdk
+from sentry_sdk import capture_message
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.profiler import teardown_profiler
 from collections import Counter
-from sentry_sdk.utils import PY33
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -140,6 +141,8 @@ def test_transaction_with_error(
     assert error_event["transaction"] == "generic WSGI request"
     assert error_event["contexts"]["trace"]["op"] == "http.server"
     assert error_event["exception"]["values"][0]["type"] == "Exception"
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
     assert (
         error_event["exception"]["values"][0]["value"]
         == "Fetch aborted. The ball was not returned."
@@ -180,8 +183,139 @@ def test_transaction_no_error(
     )
 
 
+def test_has_trace_if_performance_enabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise Exception("Fetch aborted. The ball was not returned.")
+
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(Exception):
+        client.get("http://dogs.are.great/sit/stay/rollover/")
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_has_trace_if_performance_disabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise Exception("Fetch aborted. The ball was not returned.")
+
+    sentry_init()
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(Exception):
+        client.get("http://dogs.are.great/sit/stay/rollover/")
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+
+def test_trace_from_headers_if_performance_enabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise Exception("Fetch aborted. The ball was not returned.")
+
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(Exception):
+        client.get(
+            "http://dogs.are.great/sit/stay/rollover/",
+            headers={"sentry-trace": sentry_trace_header},
+        )
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+def test_trace_from_headers_if_performance_disabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise Exception("Fetch aborted. The ball was not returned.")
+
+    sentry_init()
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(Exception):
+        client.get(
+            "http://dogs.are.great/sit/stay/rollover/",
+            headers={"sentry-trace": sentry_trace_header},
+        )
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
 def test_traces_sampler_gets_correct_values_in_sampling_context(
-    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
+    sentry_init,
+    DictionaryContaining,  # noqa:N803
 ):
     def app(environ, start_response):
         start_response("200 OK", [])
@@ -284,38 +418,31 @@ def test_auto_session_tracking_with_aggregates(sentry_init, capture_envelopes):
     assert len(session_aggregates) == 1
 
 
-if PY33:
-
-    @pytest.fixture
-    def profiling():
-        yield
-        teardown_profiler()
+@pytest.mark.skipif(
+    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profile_sent(
+    sentry_init,
+    capture_envelopes,
+    teardown_profiling,
+):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
 
-    @pytest.mark.parametrize(
-        "profiles_sample_rate,should_send",
-        [(1.0, True), (0.75, True), (0.25, False), (None, False)],
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
     )
-    def test_profile_sent_when_profiling_enabled(
-        capture_envelopes, sentry_init, profiling, profiles_sample_rate, should_send
-    ):
-        def test_app(environ, start_response):
-            start_response("200 OK", [])
-            return ["Go get the ball! Good dog!"]
-
-        sentry_init(
-            traces_sample_rate=1.0,
-            _experiments={"profiles_sample_rate": profiles_sample_rate},
-        )
-        app = SentryWsgiMiddleware(test_app)
-        envelopes = capture_envelopes()
-
-        with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
-            client = Client(app)
-            client.get("/")
-
-        profile_sent = False
-        for item in envelopes[0].items:
-            if item.headers["type"] == "profile":
-                profile_sent = True
-                break
-        assert profile_sent == should_send
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    client = Client(app)
+    client.get("/")
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
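
For reference, the pattern every test in this file follows: wrap a plain WSGI callable in `SentryWsgiMiddleware` and drive it with werkzeug's test `Client`. A self-contained sketch (assumes `sentry_sdk.init()` was called elsewhere):

```python
from werkzeug.test import Client
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]  # WSGI response bodies are iterables of bytes

client = Client(SentryWsgiMiddleware(app))
client.get("/")  # message/error/transaction events are captured around this call
```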
diff --git a/tests/test_api.py b/tests/test_api.py
new file mode 100644
index 0000000..1adb909
--- /dev/null
+++ b/tests/test_api.py
@@ -0,0 +1,115 @@
+from sentry_sdk import (
+    configure_scope,
+    continue_trace,
+    get_baggage,
+    get_current_span,
+    get_traceparent,
+    start_transaction,
+)
+from sentry_sdk.hub import Hub
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def test_get_current_span():
+    fake_hub = mock.MagicMock()
+    fake_hub.scope = mock.MagicMock()
+
+    fake_hub.scope.span = mock.MagicMock()
+    assert get_current_span(fake_hub) == fake_hub.scope.span
+
+    fake_hub.scope.span = None
+    assert get_current_span(fake_hub) is None
+
+
+def test_get_current_span_default_hub(sentry_init):
+    sentry_init()
+
+    assert get_current_span() is None
+
+    with configure_scope() as scope:
+        fake_span = mock.MagicMock()
+        scope.span = fake_span
+
+        assert get_current_span() == fake_span
+
+
+def test_get_current_span_default_hub_with_transaction(sentry_init):
+    sentry_init()
+
+    assert get_current_span() is None
+
+    with start_transaction() as new_transaction:
+        assert get_current_span() == new_transaction
+
+
+def test_traceparent_with_tracing_enabled(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    with start_transaction() as transaction:
+        expected_traceparent = "%s-%s-1" % (
+            transaction.trace_id,
+            transaction.span_id,
+        )
+        assert get_traceparent() == expected_traceparent
+
+
+def test_traceparent_with_tracing_disabled(sentry_init):
+    sentry_init()
+
+    propagation_context = Hub.current.scope._propagation_context
+    expected_traceparent = "%s-%s" % (
+        propagation_context["trace_id"],
+        propagation_context["span_id"],
+    )
+    assert get_traceparent() == expected_traceparent
+
+
+def test_baggage_with_tracing_disabled(sentry_init):
+    sentry_init(release="1.0.0", environment="dev")
+    propagation_context = Hub.current.scope._propagation_context
+    expected_baggage = (
+        "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format(
+            propagation_context["trace_id"]
+        )
+    )
+    # order not guaranteed in older python versions
+    assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+
+
+def test_baggage_with_tracing_enabled(sentry_init):
+    sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev")
+    with start_transaction() as transaction:
+        expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format(
+            transaction.trace_id, "true" if transaction.sampled else "false"
+        )
+        # order not guaranteed in older python versions
+        assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+
+
+def test_continue_trace(sentry_init):
+    sentry_init()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    transaction = continue_trace(
+        {
+            "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled),
+            "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19",
+        },
+        name="some name",
+    )
+    with start_transaction(transaction):
+        assert transaction.name == "some name"
+
+        propagation_context = Hub.current.scope._propagation_context
+        assert propagation_context["trace_id"] == transaction.trace_id == trace_id
+        assert propagation_context["parent_span_id"] == parent_span_id
+        assert propagation_context["parent_sampled"] == parent_sampled
+        assert propagation_context["dynamic_sampling_context"] == {
+            "trace_id": "566e3688a61d4bc888951642d6f14a19"
+        }
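
The baggage assertions above compare sorted `sentry-<key>=<value>` pairs because item order is not guaranteed. A hypothetical helper showing the shape being asserted (`make_baggage` is illustrative, not an SDK function):

```python
def make_baggage(items):
    # Dynamic sampling context entries are serialized as
    # comma-separated "sentry-<key>=<value>" pairs.
    return ",".join("sentry-{}={}".format(k, v) for k, v in sorted(items.items()))

baggage = make_baggage(
    {
        "trace_id": "471a43a4192642f0b136d5159a501701",
        "release": "1.0.0",
        "environment": "dev",
    }
)
assert baggage == (
    "sentry-environment=dev,sentry-release=1.0.0,"
    "sentry-trace_id=471a43a4192642f0b136d5159a501701"
)
```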
diff --git a/tests/test_basics.py b/tests/test_basics.py
index 1e2feaf..751b0a6 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -1,6 +1,7 @@
+import logging
 import os
 import sys
-import logging
+import time
 
 import pytest
 
@@ -16,7 +17,6 @@ from sentry_sdk import (
     last_event_id,
     Hub,
 )
-
 from sentry_sdk._compat import reraise
 from sentry_sdk.integrations import _AUTO_ENABLING_INTEGRATIONS
 from sentry_sdk.integrations.logging import LoggingIntegration
@@ -24,6 +24,8 @@ from sentry_sdk.scope import (  # noqa: F401
     add_global_event_processor,
     global_event_processors,
 )
+from sentry_sdk.utils import get_sdk_name
+from sentry_sdk.tracing_utils import has_tracing_enabled
 
 
 def test_processors(sentry_init, capture_events):
@@ -90,7 +92,93 @@ def test_event_id(sentry_init, capture_events):
     assert Hub.current.last_event_id() == event_id
 
 
-def test_option_callback(sentry_init, capture_events, monkeypatch):
+def test_generic_mechanism(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "generic"
+    assert event["exception"]["values"][0]["mechanism"]["handled"]
+
+
+def test_option_before_send(sentry_init, capture_events):
+    def before_send(event, hint):
+        event["extra"] = {"before_send_called": True}
+        return event
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send)
+    events = capture_events()
+
+    do_this()
+
+    (event,) = events
+    assert event["extra"] == {"before_send_called": True}
+
+
+def test_option_before_send_discard(sentry_init, capture_events):
+    def before_send_discard(event, hint):
+        return None
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send_discard)
+    events = capture_events()
+
+    do_this()
+
+    assert len(events) == 0
+
+
+def test_option_before_send_transaction(sentry_init, capture_events):
+    def before_send_transaction(event, hint):
+        assert event["type"] == "transaction"
+        event["extra"] = {"before_send_transaction_called": True}
+        return event
+
+    sentry_init(
+        before_send_transaction=before_send_transaction,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    (event,) = events
+    assert event["transaction"] == "foo"
+    assert event["extra"] == {"before_send_transaction_called": True}
+
+
+def test_option_before_send_transaction_discard(sentry_init, capture_events):
+    def before_send_transaction_discard(event, hint):
+        return None
+
+    sentry_init(
+        before_send_transaction=before_send_transaction_discard,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    assert len(events) == 0
+
+
+def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
     drop_events = False
     drop_breadcrumbs = False
     reports = []
@@ -142,6 +230,32 @@ def test_option_callback(sentry_init, capture_events, monkeypatch):
     assert crumb["type"] == "default"
 
 
+@pytest.mark.parametrize(
+    "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate",
+    [
+        (None, None, False, None),
+        (False, 0.0, False, 0.0),
+        (False, 1.0, False, 1.0),
+        (None, 1.0, True, 1.0),
+        (True, 1.0, True, 1.0),
+        (None, 0.0, True, 0.0),  # tracing is configured but explicitly turned off
+        (True, 0.0, True, 0.0),  # tracing is configured but explicitly turned off
+        (True, None, True, 1.0),
+    ],
+)
+def test_option_enable_tracing(
+    sentry_init,
+    enable_tracing,
+    traces_sample_rate,
+    tracing_enabled,
+    updated_traces_sample_rate,
+):
+    sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate)
+    options = Hub.current.client.options
+    assert has_tracing_enabled(options) is tracing_enabled
+    assert options["traces_sample_rate"] == updated_traces_sample_rate
+
+
 def test_breadcrumb_arguments(sentry_init, capture_events):
     assert_hint = {"bar": 42}
 
@@ -437,3 +551,136 @@ def test_event_processor_drop_records_client_report(
     assert reports == [("event_processor", "error"), ("event_processor", "transaction")]
 
     global_event_processors.pop()
+
+
+@pytest.mark.parametrize(
+    "installed_integrations, expected_name",
+    [
+        # integrations with own name
+        (["django"], "sentry.python.django"),
+        (["flask"], "sentry.python.flask"),
+        (["fastapi"], "sentry.python.fastapi"),
+        (["bottle"], "sentry.python.bottle"),
+        (["falcon"], "sentry.python.falcon"),
+        (["quart"], "sentry.python.quart"),
+        (["sanic"], "sentry.python.sanic"),
+        (["starlette"], "sentry.python.starlette"),
+        (["chalice"], "sentry.python.chalice"),
+        (["serverless"], "sentry.python.serverless"),
+        (["pyramid"], "sentry.python.pyramid"),
+        (["tornado"], "sentry.python.tornado"),
+        (["aiohttp"], "sentry.python.aiohttp"),
+        (["aws_lambda"], "sentry.python.aws_lambda"),
+        (["gcp"], "sentry.python.gcp"),
+        (["beam"], "sentry.python.beam"),
+        (["asgi"], "sentry.python.asgi"),
+        (["wsgi"], "sentry.python.wsgi"),
+        # integrations without name
+        (["argv"], "sentry.python"),
+        (["atexit"], "sentry.python"),
+        (["boto3"], "sentry.python"),
+        (["celery"], "sentry.python"),
+        (["dedupe"], "sentry.python"),
+        (["excepthook"], "sentry.python"),
+        (["executing"], "sentry.python"),
+        (["modules"], "sentry.python"),
+        (["pure_eval"], "sentry.python"),
+        (["redis"], "sentry.python"),
+        (["rq"], "sentry.python"),
+        (["sqlalchemy"], "sentry.python"),
+        (["stdlib"], "sentry.python"),
+        (["threading"], "sentry.python"),
+        (["trytond"], "sentry.python"),
+        (["logging"], "sentry.python"),
+        (["gnu_backtrace"], "sentry.python"),
+        (["httpx"], "sentry.python"),
+        # precedence of frameworks
+        (["flask", "django", "celery"], "sentry.python.django"),
+        (["fastapi", "flask", "redis"], "sentry.python.flask"),
+        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
+        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
+        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
+        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
+        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
+        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
+        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
+        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
+        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
+        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
+        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
+        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
+        (["beam", "gcp", "argv"], "sentry.python.gcp"),
+        (["asgi", "beam", "stdtlib"], "sentry.python.beam"),
+        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
+        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
+    ],
+)
+def test_get_sdk_name(installed_integrations, expected_name):
+    assert get_sdk_name(installed_integrations) == expected_name
+
+
+def _hello_world(word):
+    return "Hello, {}".format(word)
+
+
+def test_functions_to_trace(sentry_init, capture_events):
+    functions_to_trace = [
+        {"qualified_name": "tests.test_basics._hello_world"},
+        {"qualified_name": "time.sleep"},
+    ]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        functions_to_trace=functions_to_trace,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="something"):
+        time.sleep(0)
+
+        for word in ["World", "You"]:
+            _hello_world(word)
+
+    assert len(events) == 1
+
+    (event,) = events
+
+    assert len(event["spans"]) == 3
+    assert event["spans"][0]["description"] == "time.sleep"
+    assert event["spans"][1]["description"] == "tests.test_basics._hello_world"
+    assert event["spans"][2]["description"] == "tests.test_basics._hello_world"
+
+
+class WorldGreeter:
+    def __init__(self, word):
+        self.word = word
+
+    def greet(self, new_word=None):
+        return "Hello, {}".format(new_word if new_word else self.word)
+
+
+def test_functions_to_trace_with_class(sentry_init, capture_events):
+    functions_to_trace = [
+        {"qualified_name": "tests.test_basics.WorldGreeter.greet"},
+    ]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        functions_to_trace=functions_to_trace,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="something"):
+        wg = WorldGreeter("World")
+        wg.greet()
+        wg.greet("You")
+
+    assert len(events) == 1
+
+    (event,) = events
+
+    assert len(event["spans"]) == 2
+    assert event["spans"][0]["description"] == "tests.test_basics.WorldGreeter.greet"
+    assert event["spans"][1]["description"] == "tests.test_basics.WorldGreeter.greet"
diff --git a/tests/test_client.py b/tests/test_client.py
index 5523647..83257ab 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -22,8 +22,14 @@ from sentry_sdk.integrations.executing import ExecutingIntegration
 from sentry_sdk.transport import Transport
 from sentry_sdk._compat import reraise, text_type, PY2
 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
+from sentry_sdk.utils import logger
 from sentry_sdk.serializer import MAX_DATABAG_BREADTH
-from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS
+from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
 if PY2:
     # Importing ABCs from collections is deprecated, and will stop working in 3.8
@@ -227,6 +233,16 @@ def test_transport_option(monkeypatch):
             "arg_https_proxy": "https://localhost/123",
             "expected_proxy_scheme": "https",
         },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+            "arg_proxy_headers": {"Test-Header": "foo-bar"},
+        },
     ],
 )
 def test_proxy(monkeypatch, testcase):
@@ -236,17 +252,97 @@ def test_proxy(monkeypatch, testcase):
         monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
     if testcase.get("env_no_proxy") is not None:
         monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])
+
     kwargs = {}
+
     if testcase["arg_http_proxy"] is not None:
         kwargs["http_proxy"] = testcase["arg_http_proxy"]
     if testcase["arg_https_proxy"] is not None:
         kwargs["https_proxy"] = testcase["arg_https_proxy"]
+    if testcase.get("arg_proxy_headers") is not None:
+        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]
+
     client = Client(testcase["dsn"], **kwargs)
+
     if testcase["expected_proxy_scheme"] is None:
         assert client.transport._pool.proxy is None
     else:
         assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]
 
+        if testcase.get("arg_proxy_headers") is not None:
+            assert client.transport._pool.proxy_headers == testcase["arg_proxy_headers"]
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "<class 'urllib3.poolmanager.ProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks4a://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks4://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks5h://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks5://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks4a://localhost/123",
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks4://localhost/123",
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks5h://localhost/123",
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks5://localhost/123",
+            "expected_proxy_class": "<class 'urllib3.contrib.socks.SOCKSProxyManager'>",
+        },
+    ],
+)
+def test_socks_proxy(testcase):
+    kwargs = {}
+
+    if testcase["arg_http_proxy"] is not None:
+        kwargs["http_proxy"] = testcase["arg_http_proxy"]
+    if testcase["arg_https_proxy"] is not None:
+        kwargs["https_proxy"] = testcase["arg_https_proxy"]
+
+    client = Client(testcase["dsn"], **kwargs)
+    assert str(type(client.transport._pool)) == testcase["expected_proxy_class"]
+
 
 def test_simple_transport(sentry_init):
     events = []
@@ -276,8 +372,62 @@ def test_ignore_errors(sentry_init, capture_events):
     pytest.raises(EventCapturedError, lambda: e(ValueError()))
 
 
-def test_with_locals_enabled(sentry_init, capture_events):
-    sentry_init(with_locals=True)
+def test_with_locals_deprecation_enabled(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(with_locals=True)
+
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert client.options["include_local_variables"]
+
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+        )
+
+
+def test_with_locals_deprecation_disabled(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(with_locals=False)
+
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert not client.options["include_local_variables"]
+
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+        )
+
+
+def test_include_local_variables_deprecation(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(include_local_variables=False)
+
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert not client.options["include_local_variables"]
+
+        fake_warning.assert_not_called()
+
+
+def test_request_bodies_deprecation(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(request_bodies="small")
+
+        client = Hub.current.client
+        assert "request_bodies" not in client.options
+        assert "max_request_body_size" in client.options
+        assert client.options["max_request_body_size"] == "small"
+
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
+        )
+
+
+def test_include_local_variables_enabled(sentry_init, capture_events):
+    sentry_init(include_local_variables=True)
     events = capture_events()
     try:
         1 / 0
@@ -292,8 +442,8 @@ def test_with_locals_enabled(sentry_init, capture_events):
     )
 
 
-def test_with_locals_disabled(sentry_init, capture_events):
-    sentry_init(with_locals=False)
+def test_include_local_variables_disabled(sentry_init, capture_events):
+    sentry_init(include_local_variables=False)
     events = capture_events()
     try:
         1 / 0
@@ -308,6 +458,38 @@ def test_with_locals_disabled(sentry_init, capture_events):
     )
 
 
+def test_include_source_context_enabled(sentry_init, capture_events):
+    sentry_init(include_source_context=True)
+    events = capture_events()
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+
+    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
+    assert "post_context" in frame
+    assert "pre_context" in frame
+    assert "context_line" in frame
+
+
+def test_include_source_context_disabled(sentry_init, capture_events):
+    sentry_init(include_source_context=False)
+    events = capture_events()
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+
+    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
+    assert "post_context" not in frame
+    assert "pre_context" not in frame
+    assert "context_line" not in frame
+
+
 @pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
 def test_function_names(sentry_init, capture_events, integrations):
     sentry_init(integrations=integrations)
@@ -357,7 +539,7 @@ def test_attach_stacktrace_enabled(sentry_init, capture_events):
 
 
 def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
-    sentry_init(attach_stacktrace=True, with_locals=False)
+    sentry_init(attach_stacktrace=True, include_local_variables=False)
     events = capture_events()
 
     def foo():
@@ -386,7 +568,6 @@ def test_attach_stacktrace_in_app(sentry_init, capture_events):
     pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
     assert pytest_frames
     assert all(f["in_app"] is False for f in pytest_frames)
-    assert any(f["in_app"] for f in frames)
 
 
 def test_attach_stacktrace_disabled(sentry_init, capture_events):
@@ -830,7 +1011,52 @@ def test_init_string_types(dsn, sentry_init):
     )
 
 
-def test_envelope_types():
+def test_sending_events_with_tracing():
+    """
+    Tests for calling the right transport method (capture_event vs
+    capture_envelope) from the SDK client for different data types.
+    """
+
+    envelopes = []
+    events = []
+
+    class CustomTransport(Transport):
+        def capture_envelope(self, envelope):
+            envelopes.append(envelope)
+
+        def capture_event(self, event):
+            events.append(event)
+
+    with Hub(Client(enable_tracing=True, transport=CustomTransport())):
+        try:
+            1 / 0
+        except Exception:
+            event_id = capture_exception()
+
+        # Assert error events get passed in via capture_envelope
+        assert not events
+        envelope = envelopes.pop()
+        (item,) = envelope.items
+        assert item.data_category == "error"
+        assert item.headers.get("type") == "event"
+        assert item.get_event()["event_id"] == event_id
+
+        with start_transaction(name="foo"):
+            pass
+
+        # Assert transactions get passed in via capture_envelope
+        assert not events
+        envelope = envelopes.pop()
+
+        (item,) = envelope.items
+        assert item.data_category == "transaction"
+        assert item.headers.get("type") == "transaction"
+
+    assert not envelopes
+    assert not events
+
+
+def test_sending_events_with_no_tracing():
     """
     Tests for calling the right transport method (capture_event vs
     capture_envelope) from the SDK client for different data types.
@@ -846,8 +1072,11 @@ def test_envelope_types():
         def capture_event(self, event):
             events.append(event)
 
-    with Hub(Client(traces_sample_rate=1.0, transport=CustomTransport())):
-        event_id = capture_message("hello")
+    with Hub(Client(enable_tracing=False, transport=CustomTransport())):
+        try:
+            1 / 0
+        except Exception:
+            event_id = capture_exception()
 
         # Assert error events get passed in via capture_event
         assert not envelopes
@@ -861,11 +1090,7 @@ def test_envelope_types():
 
         # Assert transactions get passed in via capture_envelope
         assert not events
-        envelope = envelopes.pop()
-
-        (item,) = envelope.items
-        assert item.data_category == "transaction"
-        assert item.headers.get("type") == "transaction"
+        assert not envelopes
 
     assert not envelopes
     assert not events
@@ -893,3 +1118,21 @@ def test_multiple_positional_args(sentry_init):
     with pytest.raises(TypeError) as exinfo:
         sentry_init(1, None)
     assert "Only single positional argument is expected" in str(exinfo.value)
+
+
+@pytest.mark.parametrize(
+    "sdk_options, expected_data_length",
+    [
+        ({}, DEFAULT_MAX_VALUE_LENGTH),
+        ({"max_value_length": 1800}, 1800),
+    ],
+)
+def test_max_value_length_option(
+    sentry_init, capture_events, sdk_options, expected_data_length
+):
+    sentry_init(sdk_options)
+    events = capture_events()
+
+    capture_message("a" * 2000)
+
+    assert len(events[0]["message"]) == expected_data_length
diff --git a/tests/test_conftest.py b/tests/test_conftest.py
index 8a2d4ce..1b006ed 100644
--- a/tests/test_conftest.py
+++ b/tests/test_conftest.py
@@ -24,7 +24,6 @@ import pytest
 def test_string_containing(
     test_string, expected_result, StringContaining  # noqa: N803
 ):
-
     assert (test_string == StringContaining("dogs")) is expected_result
 
 
@@ -49,7 +48,6 @@ def test_string_containing(
 def test_dictionary_containing(
     test_dict, expected_result, DictionaryContaining  # noqa: N803
 ):
-
     assert (
         test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
     ) is expected_result
@@ -98,7 +96,6 @@ def test_object_described_by(
     attrs_only_result,
     ObjectDescribedBy,  # noqa: N803
 ):
-
     assert (
         test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
     ) is type_and_attrs_result
diff --git a/tests/test_crons.py b/tests/test_crons.py
new file mode 100644
index 0000000..5bdeb6c
--- /dev/null
+++ b/tests/test_crons.py
@@ -0,0 +1,163 @@
+import pytest
+import uuid
+
+import sentry_sdk
+from sentry_sdk.crons import capture_checkin
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+@sentry_sdk.monitor(monitor_slug="abc123")
+def _hello_world(name):
+    return "Hello, {}".format(name)
+
+
+@sentry_sdk.monitor(monitor_slug="def456")
+def _break_world(name):
+    1 / 0
+    return "Hello, {}".format(name)
+
+
+def test_decorator(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checking:
+        result = _hello_world("Grace")
+        assert result == "Hello, Grace"
+
+        # Check for initial checkin
+        fake_capture_checking.assert_has_calls(
+            [
+                mock.call(monitor_slug="abc123", status="in_progress"),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checking.call_args[1]["monitor_slug"] == "abc123"
+        assert fake_capture_checking.call_args[1]["status"] == "ok"
+        assert fake_capture_checking.call_args[1]["duration"]
+        assert fake_capture_checking.call_args[1]["check_in_id"]
+
+
+def test_decorator_error(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checking:
+        with pytest.raises(Exception):
+            result = _break_world("Grace")
+
+        assert "result" not in locals()
+
+        # Check for initial checkin
+        fake_capture_checking.assert_has_calls(
+            [
+                mock.call(monitor_slug="def456", status="in_progress"),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checking.call_args[1]["monitor_slug"] == "def456"
+        assert fake_capture_checking.call_args[1]["status"] == "error"
+        assert fake_capture_checking.call_args[1]["duration"]
+        assert fake_capture_checking.call_args[1]["check_in_id"]
+
+
+def test_capture_checkin_simple(sentry_init):
+    sentry_init()
+
+    check_in_id = capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        status=None,
+        duration=None,
+    )
+    assert check_in_id == "112233"
+
+
+def test_sample_rate_doesnt_affect_crons(sentry_init, capture_envelopes):
+    sentry_init(sample_rate=0)
+    envelopes = capture_envelopes()
+
+    capture_checkin(check_in_id="112233")
+
+    assert len(envelopes) == 1
+
+    check_in = envelopes[0].items[0].payload.json
+    assert check_in["check_in_id"] == "112233"
+
+
+def test_capture_checkin_new_id(sentry_init):
+    sentry_init()
+
+    with mock.patch("uuid.uuid4") as mock_uuid:
+        mock_uuid.return_value = uuid.UUID("a8098c1a-f86e-11da-bd1a-00112444be1e")
+        check_in_id = capture_checkin(
+            monitor_slug="abc123",
+            check_in_id=None,
+            status=None,
+            duration=None,
+        )
+
+        assert check_in_id == "a8098c1af86e11dabd1a00112444be1e"
+
+
+def test_end_to_end(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        duration=123,
+        status="ok",
+    )
+
+    check_in = envelopes[0].items[0].payload.json
+
+    # Check for final checkin
+    assert check_in["check_in_id"] == "112233"
+    assert check_in["monitor_slug"] == "abc123"
+    assert check_in["status"] == "ok"
+    assert check_in["duration"] == 123
+
+
+def test_monitor_config(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    monitor_config = {
+        "schedule": {"type": "crontab", "value": "0 0 * * *"},
+    }
+
+    capture_checkin(monitor_slug="abc123", monitor_config=monitor_config)
+    check_in = envelopes[0].items[0].payload.json
+
+    # Check for final checkin
+    assert check_in["monitor_slug"] == "abc123"
+    assert check_in["monitor_config"] == monitor_config
+
+    # Without passing a monitor_config the field is not in the checkin
+    capture_checkin(monitor_slug="abc123")
+    check_in = envelopes[1].items[0].payload.json
+
+    assert check_in["monitor_slug"] == "abc123"
+    assert "monitor_config" not in check_in
+
+
+def test_capture_checkin_sdk_not_initialized():
+    # Tests that capture_checkin does not raise an error when the Sentry SDK is not initialized.
+    # sentry_init() is intentionally omitted.
+    check_in_id = capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        status=None,
+        duration=None,
+    )
+    assert check_in_id == "112233"
diff --git a/tests/test_envelope.py b/tests/test_envelope.py
index b6a3ddf..a8b3ac1 100644
--- a/tests/test_envelope.py
+++ b/tests/test_envelope.py
@@ -1,16 +1,8 @@
 from sentry_sdk.envelope import Envelope
 from sentry_sdk.session import Session
 from sentry_sdk import capture_event
-from sentry_sdk.tracing_utils import compute_tracestate_value
 import sentry_sdk.client
 
-import pytest
-
-try:
-    from unittest import mock  # python 3.3 and above
-except ImportError:
-    import mock  # python < 3.3
-
 
 def generate_transaction_item():
     return {
@@ -26,16 +18,15 @@ def generate_transaction_item():
                 "parent_span_id": None,
                 "description": "<OrganizationContext>",
                 "op": "greeting.sniff",
-                "tracestate": compute_tracestate_value(
-                    {
-                        "trace_id": "12312012123120121231201212312012",
-                        "environment": "dogpark",
-                        "release": "off.leash.park",
-                        "public_key": "dogsarebadatkeepingsecrets",
-                        "user": {"id": 12312013, "segment": "bigs"},
-                        "transaction": "/interactions/other-dogs/new-dog",
-                    }
-                ),
+                "dynamic_sampling_context": {
+                    "trace_id": "12312012123120121231201212312012",
+                    "sample_rate": "1.0",
+                    "environment": "dogpark",
+                    "release": "off.leash.park",
+                    "public_key": "dogsarebadatkeepingsecrets",
+                    "user_segment": "bigs",
+                    "transaction": "/interactions/other-dogs/new-dog",
+                },
             }
         },
         "spans": [
@@ -88,25 +79,16 @@ def test_add_and_get_session():
             assert item.payload.json == expected.to_json()
 
 
-# TODO (kmclb) remove this parameterization once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False])
-def test_envelope_headers(
-    sentry_init, capture_envelopes, monkeypatch, tracestate_enabled
-):
+def test_envelope_headers(sentry_init, capture_envelopes, monkeypatch):
     monkeypatch.setattr(
         sentry_sdk.client,
         "format_timestamp",
         lambda x: "2012-11-21T12:31:12.415908Z",
     )
 
-    monkeypatch.setattr(
-        sentry_sdk.client,
-        "has_tracestate_enabled",
-        mock.Mock(return_value=tracestate_enabled),
-    )
-
     sentry_init(
         dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        traces_sample_rate=1.0,
     )
     envelopes = capture_envelopes()
 
@@ -114,24 +96,19 @@ def test_envelope_headers(
 
     assert len(envelopes) == 1
 
-    if tracestate_enabled:
-        assert envelopes[0].headers == {
-            "event_id": "15210411201320122115110420122013",
-            "sent_at": "2012-11-21T12:31:12.415908Z",
-            "trace": {
-                "trace_id": "12312012123120121231201212312012",
-                "environment": "dogpark",
-                "release": "off.leash.park",
-                "public_key": "dogsarebadatkeepingsecrets",
-                "user": {"id": 12312013, "segment": "bigs"},
-                "transaction": "/interactions/other-dogs/new-dog",
-            },
-        }
-    else:
-        assert envelopes[0].headers == {
-            "event_id": "15210411201320122115110420122013",
-            "sent_at": "2012-11-21T12:31:12.415908Z",
-        }
+    assert envelopes[0].headers == {
+        "event_id": "15210411201320122115110420122013",
+        "sent_at": "2012-11-21T12:31:12.415908Z",
+        "trace": {
+            "trace_id": "12312012123120121231201212312012",
+            "sample_rate": "1.0",
+            "environment": "dogpark",
+            "release": "off.leash.park",
+            "public_key": "dogsarebadatkeepingsecrets",
+            "user_segment": "bigs",
+            "transaction": "/interactions/other-dogs/new-dog",
+        },
+    }
 
 
 def test_envelope_with_sized_items():
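
The envelope header shape asserted above can be reproduced directly: `Envelope` accepts a `headers` dict, and the dynamic sampling context travels under its `trace` key. A minimal sketch reusing values from the test:

```python
from sentry_sdk.envelope import Envelope

envelope = Envelope(
    headers={
        "event_id": "15210411201320122115110420122013",
        "trace": {
            "trace_id": "12312012123120121231201212312012",
            "sample_rate": "1.0",
            "environment": "dogpark",
        },
    }
)
assert envelope.headers["trace"]["sample_rate"] == "1.0"
```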
diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py
new file mode 100644
index 0000000..4c7afc5
--- /dev/null
+++ b/tests/test_exceptiongroup.py
@@ -0,0 +1,308 @@
+import sys
+import pytest
+
+from sentry_sdk.utils import event_from_exception
+
+
+try:
+    # Python 3.11
+    from builtins import ExceptionGroup  # type: ignore
+except ImportError:
+    # Python 3.10 and below
+    ExceptionGroup = None
+
+
+minimum_python_311 = pytest.mark.skipif(
+    sys.version_info < (3, 11), reason="ExceptionGroup tests need Python >= 3.11"
+)
+
+
+@minimum_python_311
+def test_exceptiongroup():
+    exception_group = None
+
+    try:
+        try:
+            raise RuntimeError("something")
+        except RuntimeError:
+            raise ExceptionGroup(
+                "nested",
+                [
+                    ValueError(654),
+                    ExceptionGroup(
+                        "imports",
+                        [
+                            ImportError("no_such_module"),
+                            ModuleNotFoundError("another_module"),
+                        ],
+                    ),
+                    TypeError("int"),
+                ],
+            )
+    except ExceptionGroup as e:
+        exception_group = e
+
+    (event, _) = event_from_exception(
+        exception_group,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    values = event["exception"]["values"]
+
+    # For this test the stacktrace and the module are not important
+    for x in values:
+        if "stacktrace" in x:
+            del x["stacktrace"]
+        if "module" in x:
+            del x["module"]
+
+    expected_values = [
+        {
+            "mechanism": {
+                "exception_id": 6,
+                "handled": False,
+                "parent_id": 0,
+                "source": "exceptions[2]",
+                "type": "chained",
+            },
+            "type": "TypeError",
+            "value": "int",
+        },
+        {
+            "mechanism": {
+                "exception_id": 5,
+                "handled": False,
+                "parent_id": 3,
+                "source": "exceptions[1]",
+                "type": "chained",
+            },
+            "type": "ModuleNotFoundError",
+            "value": "another_module",
+        },
+        {
+            "mechanism": {
+                "exception_id": 4,
+                "handled": False,
+                "parent_id": 3,
+                "source": "exceptions[0]",
+                "type": "chained",
+            },
+            "type": "ImportError",
+            "value": "no_such_module",
+        },
+        {
+            "mechanism": {
+                "exception_id": 3,
+                "handled": False,
+                "is_exception_group": True,
+                "parent_id": 0,
+                "source": "exceptions[1]",
+                "type": "chained",
+            },
+            "type": "ExceptionGroup",
+            "value": "imports",
+        },
+        {
+            "mechanism": {
+                "exception_id": 2,
+                "handled": False,
+                "parent_id": 0,
+                "source": "exceptions[0]",
+                "type": "chained",
+            },
+            "type": "ValueError",
+            "value": "654",
+        },
+        {
+            "mechanism": {
+                "exception_id": 1,
+                "handled": False,
+                "parent_id": 0,
+                "source": "__context__",
+                "type": "chained",
+            },
+            "type": "RuntimeError",
+            "value": "something",
+        },
+        {
+            "mechanism": {
+                "exception_id": 0,
+                "handled": False,
+                "is_exception_group": True,
+                "type": "test_suite",
+            },
+            "type": "ExceptionGroup",
+            "value": "nested",
+        },
+    ]
+
+    assert values == expected_values
+
+
+@minimum_python_311
+def test_exceptiongroup_simple():
+    exception_group = None
+
+    try:
+        raise ExceptionGroup(
+            "simple",
+            [
+                RuntimeError("something strange's going on"),
+            ],
+        )
+    except ExceptionGroup as e:
+        exception_group = e
+
+    (event, _) = event_from_exception(
+        exception_group,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    exception_values = event["exception"]["values"]
+
+    assert len(exception_values) == 2
+
+    assert exception_values[0]["type"] == "RuntimeError"
+    assert exception_values[0]["value"] == "something strange's going on"
+    assert exception_values[0]["mechanism"] == {
+        "type": "chained",
+        "handled": False,
+        "exception_id": 1,
+        "source": "exceptions[0]",
+        "parent_id": 0,
+    }
+
+    assert exception_values[1]["type"] == "ExceptionGroup"
+    assert exception_values[1]["value"] == "simple"
+    assert exception_values[1]["mechanism"] == {
+        "type": "test_suite",
+        "handled": False,
+        "exception_id": 0,
+        "is_exception_group": True,
+    }
+    frame = exception_values[1]["stacktrace"]["frames"][0]
+    assert frame["module"] == "tests.test_exceptiongroup"
+    assert frame["context_line"] == "        raise ExceptionGroup("
+
+
+@minimum_python_311
+def test_exception_chain_cause():
+    exception_chain_cause = ValueError("Exception with cause")
+    exception_chain_cause.__context__ = TypeError("Exception in __context__")
+    exception_chain_cause.__cause__ = TypeError(
+        "Exception in __cause__"
+    )  # this implicitly sets exception_chain_cause.__suppress_context__=True
+
+    (event, _) = event_from_exception(
+        exception_chain_cause,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "TypeError",
+            "value": "Exception in __cause__",
+        },
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "Exception with cause",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
+
+
+@minimum_python_311
+def test_exception_chain_context():
+    exception_chain_context = ValueError("Exception with context")
+    exception_chain_context.__context__ = TypeError("Exception in __context__")
+
+    (event, _) = event_from_exception(
+        exception_chain_context,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "TypeError",
+            "value": "Exception in __context__",
+        },
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "Exception with context",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
+
+
+@minimum_python_311
+def test_simple_exception():
+    simple_exception = ValueError("A simple exception")
+
+    (event, _) = event_from_exception(
+        simple_exception,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "A simple exception",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
diff --git a/tests/test_lru_cache.py b/tests/test_lru_cache.py
new file mode 100644
index 0000000..5343e76
--- /dev/null
+++ b/tests/test_lru_cache.py
@@ -0,0 +1,37 @@
+import pytest
+
+from sentry_sdk._lru_cache import LRUCache
+
+
+@pytest.mark.parametrize("max_size", [-10, -1, 0])
+def test_illegal_size(max_size):
+    with pytest.raises(AssertionError):
+        LRUCache(max_size=max_size)
+
+
+def test_simple_set_get():
+    cache = LRUCache(1)
+    assert cache.get(1) is None
+    cache.set(1, 1)
+    assert cache.get(1) == 1
+
+
+def test_overwrite():
+    cache = LRUCache(1)
+    assert cache.get(1) is None
+    cache.set(1, 1)
+    assert cache.get(1) == 1
+    cache.set(1, 2)
+    assert cache.get(1) == 2
+
+
+def test_cache_eviction():
+    cache = LRUCache(3)
+    cache.set(1, 1)
+    cache.set(2, 2)
+    cache.set(3, 3)
+    assert cache.get(1) == 1
+    assert cache.get(2) == 2
+    cache.set(4, 4)
+    assert cache.get(3) is None
+    assert cache.get(4) == 4
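The eviction test encodes that get() refreshes recency: keys 1 and 2 are read
before 4 is inserted, so 3 is the least recently used and gets evicted. A
minimal sketch of those semantics on top of OrderedDict (illustrative only;
sentry_sdk._lru_cache may be implemented differently):

    from collections import OrderedDict

    class SketchLRU:
        def __init__(self, max_size):
            assert max_size > 0  # mirrors the AssertionError in test_illegal_size
            self.max_size = max_size
            self.data = OrderedDict()

        def get(self, key):
            if key not in self.data:
                return None
            self.data.move_to_end(key)  # a hit refreshes recency
            return self.data[key]

        def set(self, key, value):
            self.data[key] = value
            self.data.move_to_end(key)
            if len(self.data) > self.max_size:
                self.data.popitem(last=False)  # evict the least recently used
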
diff --git a/tests/test_monitor.py b/tests/test_monitor.py
new file mode 100644
index 0000000..db405b9
--- /dev/null
+++ b/tests/test_monitor.py
@@ -0,0 +1,87 @@
+import random
+
+from sentry_sdk import Hub, start_transaction
+from sentry_sdk.transport import Transport
+
+
+class HealthyTestTransport(Transport):
+    def _send_event(self, event):
+        pass
+
+    def _send_envelope(self, envelope):
+        pass
+
+    def is_healthy(self):
+        return True
+
+
+class UnhealthyTestTransport(HealthyTestTransport):
+    def is_healthy(self):
+        return False
+
+
+def test_no_monitor_if_disabled(sentry_init):
+    sentry_init(transport=HealthyTestTransport())
+    assert Hub.current.client.monitor is None
+
+
+def test_monitor_if_enabled(sentry_init):
+    sentry_init(
+        transport=HealthyTestTransport(),
+        _experiments={"enable_backpressure_handling": True},
+    )
+
+    monitor = Hub.current.client.monitor
+    assert monitor is not None
+    assert monitor._thread is None
+
+    assert monitor.is_healthy() is True
+    assert monitor.downsample_factor == 1
+    assert monitor._thread is not None
+    assert monitor._thread.name == "sentry.monitor"
+
+
+def test_monitor_unhealthy(sentry_init):
+    sentry_init(
+        transport=UnhealthyTestTransport(),
+        _experiments={"enable_backpressure_handling": True},
+    )
+
+    monitor = Hub.current.client.monitor
+    monitor.interval = 0.1
+
+    assert monitor.is_healthy() is True
+    monitor.run()
+    assert monitor.is_healthy() is False
+    assert monitor.downsample_factor == 2
+    monitor.run()
+    assert monitor.downsample_factor == 4
+
+
+def test_transaction_uses_downsampled_rate(
+    sentry_init, capture_client_reports, monkeypatch
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        transport=UnhealthyTestTransport(),
+        _experiments={"enable_backpressure_handling": True},
+    )
+
+    reports = capture_client_reports()
+
+    monitor = Hub.current.client.monitor
+    monitor.interval = 0.1
+
+    # make sure rng doesn't sample
+    monkeypatch.setattr(random, "random", lambda: 0.9)
+
+    assert monitor.is_healthy() is True
+    monitor.run()
+    assert monitor.is_healthy() is False
+    assert monitor.downsample_factor == 2
+
+    with start_transaction(name="foobar") as transaction:
+        assert transaction.sampled is False
+        assert transaction.sample_rate == 0.5
+
+    assert reports == [("backpressure", "transaction")]
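The monitor tests fix the backpressure arithmetic: every failed health check
doubles downsample_factor, and the effective transaction sample rate is
traces_sample_rate divided by that factor. The same numbers, spelled out:

    traces_sample_rate = 1.0
    downsample_factor = 1

    downsample_factor *= 2  # first unhealthy check: 1 -> 2
    assert traces_sample_rate / downsample_factor == 0.5  # sample_rate asserted above

    downsample_factor *= 2  # second unhealthy check: 2 -> 4
    assert traces_sample_rate / downsample_factor == 0.25
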
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
new file mode 100644
index 0000000..70110e1
--- /dev/null
+++ b/tests/test_profiler.py
@@ -0,0 +1,834 @@
+import inspect
+import os
+import sys
+import threading
+import time
+
+import pytest
+
+from collections import defaultdict
+from sentry_sdk import start_transaction
+from sentry_sdk.profiler import (
+    GeventScheduler,
+    Profile,
+    Scheduler,
+    ThreadScheduler,
+    extract_frame,
+    extract_stack,
+    frame_id,
+    get_current_thread_id,
+    get_frame_name,
+    setup_profiler,
+)
+from sentry_sdk.tracing import Transaction
+from sentry_sdk._lru_cache import LRUCache
+from sentry_sdk._queue import Queue
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+
+def requires_python_version(major, minor, reason=None):
+    if reason is None:
+        reason = "Requires Python {}.{}".format(major, minor)
+    return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)
+
+
+requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+
+
+def process_test_sample(sample):
+    # insert a mock hashable for the stack
+    return [(tid, (stack, stack)) for tid, stack in sample]
+
+
+def non_experimental_options(mode=None, sample_rate=None):
+    return {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
+
+
+def experimental_options(mode=None, sample_rate=None):
+    return {
+        "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
+    }
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("foo"),
+        pytest.param(
+            "gevent",
+            marks=pytest.mark.skipif(gevent is not None, reason="gevent is enabled"),
+        ),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_invalid_mode(mode, make_options, teardown_profiling):
+    with pytest.raises(ValueError):
+        setup_profiler(make_options(mode))
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("sleep"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_valid_mode(mode, make_options, teardown_profiling):
+    # should not raise any exceptions
+    setup_profiler(make_options(mode))
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_setup_twice(make_options, teardown_profiling):
+    # setting up the first time should return True to indicate success
+    assert setup_profiler(make_options())
+    # setting up the second time should return False to indicate no-op
+    assert not setup_profiler(make_options())
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    ("profiles_sample_rate", "profile_count"),
+    [
+        pytest.param(1.00, 1, id="profiler sampled at 1.00"),
+        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(0.25, 0, id="profiler sampled at 0.25"),
+        pytest.param(0.00, 0, id="profiler sampled at 0.00"),
+        pytest.param(None, 0, id="profiler not enabled"),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profiles_sample_rate(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+    profiles_sample_rate,
+    profile_count,
+    make_options,
+    mode,
+):
+    options = make_options(mode=mode, sample_rate=profiles_sample_rate)
+    sentry_init(
+        traces_sample_rate=1.0,
+        profiler_mode=options.get("profiler_mode"),
+        profiles_sample_rate=options.get("profiles_sample_rate"),
+        _experiments=options.get("_experiments", {}),
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        with start_transaction(name="profiling"):
+            pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == profile_count
+    if profiles_sample_rate is None or profiles_sample_rate == 0:
+        assert reports == []
+    elif profile_count:
+        assert reports == []
+    else:
+        assert reports == [("sample_rate", "profile")]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    ("profiles_sampler", "profile_count"),
+    [
+        pytest.param(lambda _: 1.00, 1, id="profiler sampled at 1.00"),
+        pytest.param(lambda _: 0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(lambda _: 0.25, 0, id="profiler sampled at 0.25"),
+        pytest.param(lambda _: 0.00, 0, id="profiler sampled at 0.00"),
+        pytest.param(lambda _: None, 0, id="profiler not enabled"),
+        pytest.param(
+            lambda ctx: 1 if ctx["transaction_context"]["name"] == "profiling" else 0,
+            1,
+            id="profiler sampled for transaction name",
+        ),
+        pytest.param(
+            lambda ctx: 0 if ctx["transaction_context"]["name"] == "profiling" else 1,
+            0,
+            id="profiler not sampled for transaction name",
+        ),
+        pytest.param(
+            lambda _: "1", 0, id="profiler not sampled because string sample rate"
+        ),
+        pytest.param(lambda _: True, 1, id="profiler sampled at True"),
+        pytest.param(lambda _: False, 0, id="profiler sampled at False"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profiles_sampler(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+    profiles_sampler,
+    profile_count,
+    mode,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        profiles_sampler=profiles_sampler,
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        with start_transaction(name="profiling"):
+            pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == profile_count
+    if profile_count:
+        assert reports == []
+    else:
+        assert reports == [("sample_rate", "profile")]
+
+
+@requires_python_version(3, 3)
+def test_minimum_unique_samples_required(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
+    with start_transaction(name="profiling"):
+        pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    # because we don't leave any time for the profiler to
+    # take any samples, it should not be sent
+    assert len(items["profile"]) == 0
+    assert reports == [("insufficient_data", "profile")]
+
+
+@requires_python_version(3, 3)
+def test_profile_captured(
+    sentry_init,
+    capture_envelopes,
+    teardown_profiling,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    with start_transaction(name="profiling"):
+        time.sleep(0.05)
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == 1
+
+
+def get_frame(depth=1):
+    """
+    This function is not exactly true to its name. Depending on
+    how it is called, the true depth of the stack can be deeper
+    than the argument implies.
+    """
+    if depth <= 0:
+        raise ValueError("only positive integers allowed")
+    if depth > 1:
+        return get_frame(depth=depth - 1)
+    return inspect.currentframe()
+
+
+class GetFrameBase:
+    def inherited_instance_method(self):
+        return inspect.currentframe()
+
+    def inherited_instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def inherited_class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def inherited_class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def inherited_static_method():
+        return inspect.currentframe()
+
+
+class GetFrame(GetFrameBase):
+    def instance_method(self):
+        return inspect.currentframe()
+
+    def instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def static_method():
+        return inspect.currentframe()
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("frame", "frame_name"),
+    [
+        pytest.param(
+            get_frame(),
+            "get_frame",
+            id="function",
+        ),
+        pytest.param(
+            (lambda: inspect.currentframe())(),
+            "<lambda>",
+            id="lambda",
+        ),
+        pytest.param(
+            GetFrame().instance_method(),
+            "GetFrame.instance_method",
+            id="instance_method",
+        ),
+        pytest.param(
+            GetFrame().instance_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.instance_method_wrapped.<locals>.wrapped",
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().class_method(),
+            "GetFrame.class_method",
+            id="class_method",
+        ),
+        pytest.param(
+            GetFrame().class_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.class_method_wrapped.<locals>.wrapped",
+            id="class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().static_method(),
+            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
+            id="static_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method(),
+            "GetFrameBase.inherited_instance_method",
+            id="inherited_instance_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_instance_method_wrapped.<locals>.wrapped",
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method(),
+            "GetFrameBase.inherited_class_method",
+            id="inherited_class_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_class_method_wrapped.<locals>.wrapped",
+            id="inherited_class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_static_method(),
+            "inherited_static_method"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_static_method",
+            id="inherited_static_method",
+        ),
+    ],
+)
+def test_get_frame_name(frame, frame_name):
+    assert get_frame_name(frame) == frame_name
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("get_frame", "function"),
+    [
+        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
+    ],
+)
+def test_extract_frame(get_frame, function):
+    cwd = os.getcwd()
+    frame = get_frame()
+    extracted_frame = extract_frame(frame_id(frame), frame, cwd)
+
+    # the abs_path should be equal to the normalized path of the co_filename
+    assert extracted_frame["abs_path"] == os.path.normpath(frame.f_code.co_filename)
+
+    # the module should be pulled from this test module
+    assert extracted_frame["module"] == __name__
+
+    # the filename should be the file starting after the cwd
+    assert extracted_frame["filename"] == __file__[len(cwd) + 1 :]
+
+    assert extracted_frame["function"] == function
+
+    # the lineno will shift over time as this file is modified so just check
+    # that it is an int
+    assert isinstance(extracted_frame["lineno"], int)
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("depth", "max_stack_depth", "actual_depth"),
+    [
+        pytest.param(1, 128, 1, id="less than"),
+        pytest.param(256, 128, 128, id="greater than"),
+        pytest.param(128, 128, 128, id="equals"),
+    ],
+)
+def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
+    # introduce a lambda that we'll be looking for in the stack
+    frame = (lambda: get_frame(depth=depth))()
+
+    # plus 1 because we introduced a lambda intentionally that we'll
+    # look for in the final stack to make sure it's in the right position
+    base_stack_depth = len(inspect.stack()) + 1
+
+    # increase the max_depth by the `base_stack_depth` to account
+    # for the extra frames pytest will add
+    _, frame_ids, frames = extract_stack(
+        frame, LRUCache(max_size=1), max_stack_depth=max_stack_depth + base_stack_depth
+    )
+    assert len(frame_ids) == base_stack_depth + actual_depth
+    assert len(frames) == base_stack_depth + actual_depth
+
+    for i in range(actual_depth):
+        assert frames[i]["function"] == "get_frame", i
+
+    # index 0 contains the innermost frame on the stack, so the lambda
+    # should be at index `actual_depth`
+    if sys.version_info >= (3, 11):
+        assert (
+            frames[actual_depth]["function"]
+            == "test_extract_stack_with_max_depth.<locals>.<lambda>"
+        ), actual_depth
+    else:
+        assert frames[actual_depth]["function"] == "<lambda>", actual_depth
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("frame", "depth"),
+    [(get_frame(depth=1), len(inspect.stack()))],
+)
+def test_extract_stack_with_cache(frame, depth):
+    # make sure cache has enough room or this test will fail
+    cache = LRUCache(max_size=depth)
+    _, _, frames1 = extract_stack(frame, cache)
+    _, _, frames2 = extract_stack(frame, cache)
+
+    assert len(frames1) > 0
+    assert len(frames2) > 0
+    assert len(frames1) == len(frames2)
+    for i, (frame1, frame2) in enumerate(zip(frames1, frames2)):
+        # DO NOT use `==` for the assertion here since we are
+        # testing for identity, and using `==` would test for
+        # equality, which would always pass since we're extracting
+        # the same stack.
+        assert frame1 is frame2, i
+
+
+@requires_python_version(3, 3)
+def test_get_current_thread_id_explicit_thread():
+    results = Queue(maxsize=1)
+
+    def target1():
+        pass
+
+    def target2():
+        results.put(get_current_thread_id(thread1))
+
+    thread1 = threading.Thread(target=target1)
+    thread1.start()
+
+    thread2 = threading.Thread(target=target2)
+    thread2.start()
+
+    thread2.join()
+    thread1.join()
+
+    assert thread1.ident == results.get(timeout=1)
+
+
+@requires_python_version(3, 3)
+@requires_gevent
+def test_get_current_thread_id_gevent_in_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        job = gevent.spawn(get_current_thread_id)
+        job.join()
+        results.put(job.value)
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert thread.ident == results.get(timeout=1)
+
+
+@requires_python_version(3, 3)
+def test_get_current_thread_id_running_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        results.put(get_current_thread_id())
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert thread.ident == results.get(timeout=1)
+
+
+@requires_python_version(3, 3)
+def test_get_current_thread_id_main_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        # mock that somehow the current thread doesn't exist
+        with mock.patch("threading.current_thread", side_effect=[None]):
+            results.put(get_current_thread_id())
+
+    thread_id = threading.main_thread().ident if sys.version_info >= (3, 4) else None
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert thread_id == results.get(timeout=1)
+
+
+def get_scheduler_threads(scheduler):
+    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(
+            GeventScheduler,
+            marks=[
+                requires_gevent,
+                pytest.mark.skip(
+                    reason="cannot find this thread via threading.enumerate()"
+                ),
+            ],
+            id="gevent scheduler",
+        ),
+    ],
+)
+def test_thread_scheduler_single_background_thread(scheduler_class):
+    scheduler = scheduler_class(frequency=1000)
+
+    # not yet set up, no scheduler threads yet
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.setup()
+
+    # set up but no profiles started, so still no threads
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.ensure_running()
+
+    # the scheduler will always start 1 thread
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    scheduler.ensure_running()
+
+    # the scheduler still only has 1 thread
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    scheduler.teardown()
+
+    # once finished, the thread should stop
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 1)
+def test_max_profile_duration_reached(scheduler_class):
+    sample = [("1", extract_stack(get_frame(), LRUCache(max_size=1)))]
+
+    with scheduler_class(frequency=1000) as scheduler:
+        transaction = Transaction(sampled=True)
+        with Profile(transaction, scheduler=scheduler) as profile:
+            # profile just started, it's active
+            assert profile.active
+
+            # write a sample at the start time, so still active
+            profile.write(profile.start_ns + 0, sample)
+            assert profile.active
+
+            # write a sample at max time, so still active
+            profile.write(profile.start_ns + 1, sample)
+            assert profile.active
+
+            # write a sample PAST the max time, so now inactive
+            profile.write(profile.start_ns + 2, sample)
+            assert not profile.active
+
+
+class NoopScheduler(Scheduler):
+    def setup(self):
+        # type: () -> None
+        pass
+
+    def teardown(self):
+        # type: () -> None
+        pass
+
+    def ensure_running(self):
+        # type: () -> None
+        pass
+
+
+current_thread = threading.current_thread()
+thread_metadata = {
+    str(current_thread.ident): {
+        "name": str(current_thread.name),
+    },
+}
+
+
+sample_stacks = [
+    extract_stack(get_frame(), LRUCache(max_size=1), max_stack_depth=1),
+    extract_stack(get_frame(), LRUCache(max_size=1), max_stack_depth=2),
+]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("samples", "expected"),
+    [
+        pytest.param(
+            [],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="empty",
+        ),
+        pytest.param(
+            [(6, [("1", sample_stacks[0])])],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample out of range",
+        ),
+        pytest.param(
+            [(0, [("1", sample_stacks[0])])],
+            {
+                "frames": [sample_stacks[0][2][0]],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample in range",
+        ),
+        pytest.param(
+            [
+                (0, [("1", sample_stacks[0])]),
+                (1, [("1", sample_stacks[0])]),
+            ],
+            {
+                "frames": [sample_stacks[0][2][0]],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical stacks",
+        ),
+        pytest.param(
+            [
+                (0, [("1", sample_stacks[0])]),
+                (1, [("1", sample_stacks[1])]),
+            ],
+            {
+                "frames": [
+                    sample_stacks[0][2][0],
+                    sample_stacks[1][2][0],
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [[0], [1, 0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical stacks",
+        ),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 5)
+def test_profile_processing(
+    DictionaryContaining,  # noqa: N803
+    samples,
+    expected,
+):
+    with NoopScheduler(frequency=1000) as scheduler:
+        transaction = Transaction(sampled=True)
+        with Profile(transaction, scheduler=scheduler) as profile:
+            for ts, sample in samples:
+                # force the sample to be written at a time relative to the
+                # start of the profile
+                now = profile.start_ns + ts
+                profile.write(now, sample)
+
+            processed = profile.process()
+
+            assert processed["thread_metadata"] == DictionaryContaining(
+                expected["thread_metadata"]
+            )
+            assert processed["frames"] == expected["frames"]
+            assert processed["stacks"] == expected["stacks"]
+            assert processed["samples"] == expected["samples"]
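The profiler tests exercise two spellings of the same options. A minimal init
sketch showing both (placeholder DSN; profiles are only collected for sampled
transactions, hence the traces_sample_rate):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
        profiles_sample_rate=1.0,  # or: _experiments={"profiles_sample_rate": 1.0}
    )
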
diff --git a/tests/test_scope.py b/tests/test_scope.py
index d90a89f..8bdd46e 100644
--- a/tests/test_scope.py
+++ b/tests/test_scope.py
@@ -1,7 +1,14 @@
 import copy
+import os
+import pytest
 from sentry_sdk import capture_exception
 from sentry_sdk.scope import Scope
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 def test_copying():
     s1 = Scope()
@@ -62,3 +69,91 @@ def test_common_args():
     assert s2._extras == {"k": "v", "foo": "bar"}
     assert s2._tags == {"a": "b", "x": "y"}
     assert s2._contexts == {"os": {"name": "Blafasel"}, "device": {"a": "b"}}
+
+
+BAGGAGE_VALUE = (
+    "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+    "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+    "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+)
+
+SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
+
+
+@pytest.mark.parametrize(
+    "env,excepted_value",
+    [
+        (
+            {
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "True",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "no",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            None,
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "True",
+                "MY_OTHER_VALUE": "asdf",
+                "SENTRY_RELEASE": "1.0.0",
+            },
+            None,
+        ),
+    ],
+)
+def test_load_trace_data_from_env(env, expected_value):
+    new_env = os.environ.copy()
+    new_env.update(env)
+
+    with mock.patch.dict(os.environ, new_env):
+        s = Scope()
+        incoming_trace_data = s._load_trace_data_from_env()
+        assert incoming_trace_data == expected_value
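Per the table above, trace continuation data is read from the process
environment unless SENTRY_USE_ENVIRONMENT is set to a false-y string such as
"no". A minimal sketch using the same private helper the test calls:

    import os
    from sentry_sdk.scope import Scope

    os.environ["SENTRY_TRACE"] = (
        "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
    )

    incoming = Scope()._load_trace_data_from_env()
    assert incoming == {"sentry-trace": os.environ["SENTRY_TRACE"]}
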
diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py
new file mode 100644
index 0000000..4b2dfff
--- /dev/null
+++ b/tests/test_scrubber.py
@@ -0,0 +1,171 @@
+import sys
+import logging
+
+from sentry_sdk import capture_exception, capture_event, start_transaction, start_span
+from sentry_sdk.utils import event_from_exception
+from sentry_sdk.scrubber import EventScrubber
+
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+
+
+def test_request_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        1 / 0
+    except ZeroDivisionError:
+        ev, _hint = event_from_exception(sys.exc_info())
+
+        ev["request"] = {
+            "headers": {
+                "COOKIE": "secret",
+                "authorization": "Bearer bla",
+                "ORIGIN": "google.com",
+            },
+            "cookies": {
+                "sessionid": "secret",
+                "foo": "bar",
+            },
+            "data": {
+                "token": "secret",
+                "foo": "bar",
+            },
+        }
+
+        capture_event(ev)
+
+    (event,) = events
+
+    assert event["request"] == {
+        "headers": {
+            "COOKIE": "[Filtered]",
+            "authorization": "[Filtered]",
+            "ORIGIN": "google.com",
+        },
+        "cookies": {"sessionid": "[Filtered]", "foo": "bar"},
+        "data": {"token": "[Filtered]", "foo": "bar"},
+    }
+
+    assert event["_meta"]["request"] == {
+        "headers": {
+            "COOKIE": {"": {"rem": [["!config", "s"]]}},
+            "authorization": {"": {"rem": [["!config", "s"]]}},
+        },
+        "cookies": {"sessionid": {"": {"rem": [["!config", "s"]]}}},
+        "data": {"token": {"": {"rem": [["!config", "s"]]}}},
+    }
+
+
+def test_stack_var_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        password = "supersecret"  # noqa
+        api_key = "1231231231"  # noqa
+        safe = "keepthis"  # noqa
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["password"] == "[Filtered]"
+    assert frame["vars"]["api_key"] == "[Filtered]"
+    assert frame["vars"]["safe"] == "'keepthis'"
+
+    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
+        "vars"
+    ]
+    assert meta == {
+        "password": {"": {"rem": [["!config", "s"]]}},
+        "api_key": {"": {"rem": [["!config", "s"]]}},
+    }
+
+
+def test_breadcrumb_extra_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    logger.info("bread", extra=dict(foo=42, password="secret"))
+    logger.critical("whoops", extra=dict(bar=69, auth="secret"))
+
+    (event,) = events
+
+    assert event["extra"]["bar"] == 69
+    assert event["extra"]["auth"] == "[Filtered]"
+
+    assert event["breadcrumbs"]["values"][0]["data"] == {
+        "foo": 42,
+        "password": "[Filtered]",
+    }
+
+    assert event["_meta"]["extra"]["auth"] == {"": {"rem": [["!config", "s"]]}}
+    assert event["_meta"]["breadcrumbs"] == {
+        "values": {"0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}}
+    }
+
+
+def test_span_data_scrubbing(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction(name="hi"):
+        with start_span(op="foo", description="bar") as span:
+            span.set_data("password", "secret")
+            span.set_data("datafoo", "databar")
+
+    (event,) = events
+    assert event["spans"][0]["data"] == {"password": "[Filtered]", "datafoo": "databar"}
+    assert event["_meta"]["spans"] == {
+        "0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}
+    }
+
+
+def test_custom_denylist(sentry_init, capture_events):
+    sentry_init(event_scrubber=EventScrubber(denylist=["my_sensitive_var"]))
+    events = capture_events()
+
+    try:
+        my_sensitive_var = "secret"  # noqa
+        safe = "keepthis"  # noqa
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["my_sensitive_var"] == "[Filtered]"
+    assert frame["vars"]["safe"] == "'keepthis'"
+
+    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
+        "vars"
+    ]
+    assert meta == {
+        "my_sensitive_var": {"": {"rem": [["!config", "s"]]}},
+    }
+
+
+def test_scrubbing_doesnt_affect_local_vars(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        password = "cat123"
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["password"] == "[Filtered]"
+    assert password == "cat123"
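test_custom_denylist passes a denylist that replaces the default one, which is
why only my_sensitive_var is scrubbed there. To add keys on top of the stock
list, extending it is the safer pattern; a sketch assuming DEFAULT_DENYLIST is
exported by sentry_sdk.scrubber:

    import sentry_sdk
    from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST  # assumed export

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        event_scrubber=EventScrubber(denylist=DEFAULT_DENYLIST + ["my_sensitive_var"]),
    )
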
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index f5ecc75..ddc65c9 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -1,7 +1,8 @@
+import re
 import sys
 import pytest
 
-from sentry_sdk.serializer import serialize
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH, MAX_DATABAG_DEPTH, serialize
 
 try:
     from hypothesis import given
@@ -39,14 +40,24 @@ def message_normalizer(validate_event_schema):
 
 @pytest.fixture
 def extra_normalizer(validate_event_schema):
-    def inner(message, **kwargs):
-        event = serialize({"extra": {"foo": message}}, **kwargs)
+    def inner(extra, **kwargs):
+        event = serialize({"extra": {"foo": extra}}, **kwargs)
         validate_event_schema(event)
         return event["extra"]["foo"]
 
     return inner
 
 
+@pytest.fixture
+def body_normalizer(validate_event_schema):
+    def inner(body, **kwargs):
+        event = serialize({"request": {"data": body}}, **kwargs)
+        validate_event_schema(event)
+        return event["request"]["data"]
+
+    return inner
+
+
 def test_bytes_serialization_decode(message_normalizer):
     binary = b"abc123\x80\xf0\x9f\x8d\x95"
     result = message_normalizer(binary, should_repr_strings=False)
@@ -62,6 +73,27 @@ def test_bytes_serialization_repr(message_normalizer):
     assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"
 
 
+def test_bytearray_serialization_decode(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    # fmt: off
+    assert result == u"abc123\ufffd\U0001f355"
+    # fmt: on
+
+
+@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
+def test_bytearray_serialization_repr(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=True)
+    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"
+
+
+def test_memoryview_serialization_repr(message_normalizer):
+    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert re.match(r"^<memory at 0x\w+>$", result)
+
+
 def test_serialize_sets(extra_normalizer):
     result = extra_normalizer({1, 2, 3})
     assert result == [1, 2, 3]
@@ -86,3 +118,46 @@ def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
     m = mock.Mock()
     extra_normalizer(m)
     assert len(m.mock_calls) == 0
+
+
+def test_trim_databag_breadth(body_normalizer):
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    result = body_normalizer(data)
+
+    assert len(result) == MAX_DATABAG_BREADTH
+    for key, value in result.items():
+        assert data.get(key) == value
+
+
+def test_no_trimming_if_max_request_body_size_is_always(body_normalizer):
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+    curr = data
+    for _ in range(MAX_DATABAG_DEPTH + 5):
+        curr["nested"] = {}
+        curr = curr["nested"]
+
+    result = body_normalizer(data, max_request_body_size="always")
+
+    assert result == data
+
+
+def test_max_value_length_default(body_normalizer):
+    data = {"key": "a" * 2000}
+
+    result = body_normalizer(data)
+
+    assert len(result["key"]) == 1024  # fallback max length
+
+
+def test_max_value_length(body_normalizer):
+    data = {"key": "a" * 2000}
+
+    max_value_length = 1800
+    result = body_normalizer(data, max_value_length=max_value_length)
+
+    assert len(result["key"]) == max_value_length
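The new databag tests bound request bodies at MAX_DATABAG_BREADTH keys and
MAX_DATABAG_DEPTH nesting levels unless max_request_body_size="always". The
breadth cap, exercised directly through the same serialize() entry point the
fixtures use:

    from sentry_sdk.serializer import MAX_DATABAG_BREADTH, serialize

    data = {"key{}".format(i): i for i in range(MAX_DATABAG_BREADTH + 10)}
    event = serialize({"request": {"data": data}})
    assert len(event["request"]["data"]) == MAX_DATABAG_BREADTH
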
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 0000000..47460d3
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,425 @@
+import pytest
+import re
+import sys
+
+from sentry_sdk.utils import (
+    Components,
+    is_valid_sample_rate,
+    logger,
+    match_regex_list,
+    parse_url,
+    parse_version,
+    sanitize_url,
+    serialize_frame,
+)
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+@pytest.mark.parametrize(
+    ("url", "expected_result"),
+    [
+        ("http://localhost:8000", "http://localhost:8000"),
+        ("http://example.com", "http://example.com"),
+        ("https://example.com", "https://example.com"),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            "example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "http://example.com?token=abc&sessionid=123&save=true",
+            "http://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            "https://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "http://localhost:8000/?token=abc&sessionid=123&save=true",
+            "http://localhost:8000/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
+            "ftp://[Filtered]:[Filtered]@ftp.example.com:9876/bla/blub#foo",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            "https://[Filtered]:[Filtered]@example.com/bla/blub?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]#fragment",
+        ),
+        ("bla/blub/foo", "bla/blub/foo"),
+        ("/bla/blub/foo/", "/bla/blub/foo/"),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            "bla/blub/foo?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            "/bla/blub/foo/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+    ],
+)
+def test_sanitize_url(url, expected_result):
+    # sort parts because old Python versions (<3.6) don't preserve order
+    sanitized_url = sanitize_url(url)
+    parts = sorted(re.split(r"\&|\?|\#", sanitized_url))
+    expected_parts = sorted(re.split(r"\&|\?|\#", expected_result))
+
+    assert parts == expected_parts
+
+
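The sanitize_url cases above boil down to two rules: credentials in the netloc
and values of known-sensitive query parameters are both replaced with
"[Filtered]". One of the tabulated cases, inline:

    from sentry_sdk.utils import sanitize_url

    assert (
        sanitize_url("ftp://username:password@ftp.example.com:9876/bla/blub#foo")
        == "ftp://[Filtered]:[Filtered]@ftp.example.com:9876/bla/blub#foo"
    )
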
+@pytest.mark.parametrize(
+    ("url", "expected_result"),
+    [
+        (
+            "http://localhost:8000",
+            Components(
+                scheme="http", netloc="localhost:8000", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "http://example.com",
+            Components(
+                scheme="http", netloc="example.com", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "https://example.com",
+            Components(
+                scheme="https", netloc="example.com", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="example.com",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "http://example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="http",
+                netloc="example.com",
+                path="",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="https",
+                netloc="example.com",
+                path="",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "http://localhost:8000/?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="http",
+                netloc="localhost:8000",
+                path="/",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
+            Components(
+                scheme="ftp",
+                netloc="[Filtered]:[Filtered]@ftp.example.com:9876",
+                path="/bla/blub",
+                query="",
+                fragment="foo",
+            ),
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            Components(
+                scheme="https",
+                netloc="[Filtered]:[Filtered]@example.com",
+                path="/bla/blub",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="fragment",
+            ),
+        ),
+        (
+            "bla/blub/foo",
+            Components(
+                scheme="", netloc="", path="bla/blub/foo", query="", fragment=""
+            ),
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="bla/blub/foo",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="/bla/blub/foo/",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+    ],
+)
+def test_sanitize_url_and_split(url, expected_result):
+    sanitized_url = sanitize_url(url, split=True)
+    # sort query because old Python versions (<3.6) don't preserve order
+    query = sorted(sanitized_url.query.split("&"))
+    expected_query = sorted(expected_result.query.split("&"))
+
+    assert sanitized_url.scheme == expected_result.scheme
+    assert sanitized_url.netloc == expected_result.netloc
+    assert query == expected_query
+    assert sanitized_url.path == expected_result.path
+    assert sanitized_url.fragment == expected_result.fragment
+
+
+@pytest.mark.parametrize(
+    ("url", "sanitize", "expected_url", "expected_query", "expected_fragment"),
+    [
+        # Test with sanitize=True
+        (
+            "https://example.com",
+            True,
+            "https://example.com",
+            "",
+            "",
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            True,
+            "example.com",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            True,
+            "https://example.com",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            True,
+            "https://[Filtered]:[Filtered]@example.com/bla/blub",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "fragment",
+        ),
+        (
+            "bla/blub/foo",
+            True,
+            "bla/blub/foo",
+            "",
+            "",
+        ),
+        (
+            "/bla/blub/foo/#baz",
+            True,
+            "/bla/blub/foo/",
+            "",
+            "baz",
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            True,
+            "bla/blub/foo",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            True,
+            "/bla/blub/foo/",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        # Test with sanitize=False
+        (
+            "https://example.com",
+            False,
+            "https://example.com",
+            "",
+            "",
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            False,
+            "example.com",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            False,
+            "https://example.com",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            False,
+            "https://[Filtered]:[Filtered]@example.com/bla/blub",
+            "token=abc&sessionid=123&save=true",
+            "fragment",
+        ),
+        (
+            "bla/blub/foo",
+            False,
+            "bla/blub/foo",
+            "",
+            "",
+        ),
+        (
+            "/bla/blub/foo/#baz",
+            False,
+            "/bla/blub/foo/",
+            "",
+            "baz",
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            False,
+            "bla/blub/foo",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            False,
+            "/bla/blub/foo/",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+    ],
+)
+def test_parse_url(url, sanitize, expected_url, expected_query, expected_fragment):
+    assert parse_url(url, sanitize=sanitize).url == expected_url
+    assert parse_url(url, sanitize=sanitize).fragment == expected_fragment
+
+    # sort parts because old Python versions (<3.6) don't preserve order
+    sanitized_query = parse_url(url, sanitize=sanitize).query
+    query_parts = sorted(re.split(r"\&|\?|\#", sanitized_query))
+    expected_query_parts = sorted(re.split(r"\&|\?|\#", expected_query))
+
+    assert query_parts == expected_query_parts
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [0.0, 0.1231, 1.0, True, False],
+)
+def test_accepts_valid_sample_rate(rate):
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate, source="Testing")
+        assert logger.warning.called is False
+        assert result is True
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [
+        "dogs are great",  # wrong type
+        (0, 1),  # wrong type
+        {"Maisey": "Charllie"},  # wrong type
+        [True, True],  # wrong type
+        {0.2012},  # wrong type
+        float("NaN"),  # wrong type
+        None,  # wrong type
+        -1.121,  # wrong value
+        1.231,  # wrong value
+    ],
+)
+def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate, source="Testing")
+        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
+        assert result is False
+
+
+@pytest.mark.parametrize(
+    "include_source_context",
+    [True, False],
+)
+def test_include_source_context_when_serializing_frame(include_source_context):
+    frame = sys._getframe()
+    result = serialize_frame(frame, include_source_context=include_source_context)
+
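+    # Note: `a ^ b ^ True` equals `a == b` for booleans, so each key below is
+    # asserted to be present exactly when include_source_context is True.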
+    assert include_source_context ^ ("pre_context" in result) ^ True
+    assert include_source_context ^ ("context_line" in result) ^ True
+    assert include_source_context ^ ("post_context" in result) ^ True
+
+
+@pytest.mark.parametrize(
+    "item,regex_list,expected_result",
+    [
+        ["", [], False],
+        [None, [], False],
+        ["", None, False],
+        [None, None, False],
+        ["some-string", [], False],
+        ["some-string", None, False],
+        ["some-string", ["some-string"], True],
+        ["some-string", ["some"], False],
+        ["some-string", ["some$"], False],  # same as above
+        ["some-string", ["some.*"], True],
+        ["some-string", ["Some"], False],  # we do case sensitive matching
+        ["some-string", [".*string$"], True],
+    ],
+)
+def test_match_regex_list(item, regex_list, expected_result):
+    assert match_regex_list(item, regex_list) == expected_result
+
+
+@pytest.mark.parametrize(
+    "version,expected_result",
+    [
+        ["3.5.15", (3, 5, 15)],
+        ["2.0.9", (2, 0, 9)],
+        ["2.0.0", (2, 0, 0)],
+        ["0.6.0", (0, 6, 0)],
+        ["2.0.0.post1", (2, 0, 0)],
+        ["2.0.0rc3", (2, 0, 0)],
+        ["2.0.0rc2", (2, 0, 0)],
+        ["2.0.0rc1", (2, 0, 0)],
+        ["2.0.0b4", (2, 0, 0)],
+        ["2.0.0b3", (2, 0, 0)],
+        ["2.0.0b2", (2, 0, 0)],
+        ["2.0.0b1", (2, 0, 0)],
+        ["0.6beta3", (0, 6)],
+        ["0.6beta2", (0, 6)],
+        ["0.6beta1", (0, 6)],
+        ["0.4.2b", (0, 4, 2)],
+        ["0.4.2a", (0, 4, 2)],
+        ["0.0.1", (0, 0, 1)],
+        ["0.0.0", (0, 0, 0)],
+        ["1", (1,)],
+        ["1.0", (1, 0)],
+        ["1.0.0", (1, 0, 0)],
+        [" 1.0.0 ", (1, 0, 0)],
+        ["  1.0.0   ", (1, 0, 0)],
+        ["x1.0.0", None],
+        ["1.0.0x", None],
+        ["x1.0.0x", None],
+    ],
+)
+def test_parse_version(version, expected_result):
+    assert parse_version(version) == expected_result
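
For reference, the filtering contract the sanitize_url cases above encode can be sketched with just the standard library. This is an approximation, not the shipped sentry_sdk.utils implementation (which also supports split=True returning Components):

    from urllib.parse import urlsplit, urlunsplit

    def sanitize_url_sketch(url):
        # Approximation: mask userinfo and every query value with "[Filtered]".
        parts = urlsplit(url)
        query = "&".join(
            "%s=[Filtered]" % pair.split("=", 1)[0]
            for pair in parts.query.split("&")
            if pair
        )
        netloc = parts.netloc
        if "@" in netloc:
            host = netloc.rsplit("@", 1)[1]
            netloc = "[Filtered]:[Filtered]@" + host
        return urlunsplit((parts.scheme, netloc, parts.path, query, parts.fragment))

    assert (
        sanitize_url_sketch("https://user:pw@example.com/bla?token=abc&save=true")
        == "https://[Filtered]:[Filtered]@example.com/bla?token=[Filtered]&save=[Filtered]"
    )
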
diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py
index 185a085..fa856e0 100644
--- a/tests/tracing/test_baggage.py
+++ b/tests/tracing/test_baggage.py
@@ -23,7 +23,7 @@ def test_mixed_baggage():
     header = (
         "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
         "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
-        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+        "sentry-user_id=Am%C3%A9lie, sentry-foo=bar, other-vendor-value-2=foo;bar;"
     )
 
     baggage = Baggage.from_incoming_header(header)
@@ -35,6 +35,7 @@ def test_mixed_baggage():
         "trace_id": "771a43a4192642f0b136d5159a501700",
         "user_id": "Amélie",
         "sample_rate": "0.01337",
+        "foo": "bar",
     }
 
     assert (
@@ -47,13 +48,15 @@ def test_mixed_baggage():
         "trace_id": "771a43a4192642f0b136d5159a501700",
         "user_id": "Amélie",
         "sample_rate": "0.01337",
+        "foo": "bar",
     }
 
     assert sorted(baggage.serialize().split(",")) == sorted(
         (
             "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
             "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
-            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
+            "sentry-foo=bar"
         ).split(",")
     )
 
@@ -61,7 +64,7 @@ def test_mixed_baggage():
         (
             "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
             "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
-            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
             "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
         ).split(",")
     )
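
The sentry-foo=bar additions above pin down that unknown sentry-* entries survive the round trip. A rough sketch of the split this relies on (the real logic is Baggage.from_incoming_header in sentry_sdk.tracing_utils):

    from urllib.parse import unquote

    def split_baggage_sketch(header):
        sentry_items, third_party = {}, []
        for item in header.split(","):
            item = item.strip()
            key, _, value = item.partition("=")
            if key.startswith("sentry-"):
                # Strip the prefix and URL-decode the value.
                sentry_items[key[len("sentry-"):]] = unquote(value)
            else:
                third_party.append(item)
        return sentry_items, third_party

    items, rest = split_baggage_sketch(
        "sentry-user_id=Am%C3%A9lie, sentry-foo=bar, other-vendor-value-2=foo;bar;"
    )
    assert items == {"user_id": "Amélie", "foo": "bar"}
    assert rest == ["other-vendor-value-2=foo;bar;"]
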
diff --git a/tests/tracing/test_decorator_py2.py b/tests/tracing/test_decorator_py2.py
new file mode 100644
index 0000000..9969786
--- /dev/null
+++ b/tests/tracing/test_decorator_py2.py
@@ -0,0 +1,54 @@
+from sentry_sdk.tracing_utils_py2 import (
+    start_child_span_decorator as start_child_span_decorator_py2,
+)
+from sentry_sdk.utils import logger
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def my_example_function():
+    return "return_of_sync_function"
+
+
+def test_trace_decorator_py2():
+    fake_start_child = mock.MagicMock()
+    fake_transaction = mock.MagicMock()
+    fake_transaction.start_child = fake_start_child
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py2.get_current_span",
+        return_value=fake_transaction,
+    ):
+        result = my_example_function()
+        fake_start_child.assert_not_called()
+        assert result == "return_of_sync_function"
+
+        result2 = start_child_span_decorator_py2(my_example_function)()
+        fake_start_child.assert_called_once_with(
+            op="function", description="test_decorator_py2.my_example_function"
+        )
+        assert result2 == "return_of_sync_function"
+
+
+def test_trace_decorator_py2_no_trx():
+    fake_transaction = None
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py2.get_current_span",
+        return_value=fake_transaction,
+    ):
+        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+            result = my_example_function()
+            fake_warning.assert_not_called()
+            assert result == "return_of_sync_function"
+
+            result2 = start_child_span_decorator_py2(my_example_function)()
+            fake_warning.assert_called_once_with(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                "test_decorator_py2.my_example_function",
+            )
+            assert result2 == "return_of_sync_function"
diff --git a/tests/tracing/test_decorator_py3.py b/tests/tracing/test_decorator_py3.py
new file mode 100644
index 0000000..c458e8a
--- /dev/null
+++ b/tests/tracing/test_decorator_py3.py
@@ -0,0 +1,103 @@
+from unittest import mock
+import pytest
+import sys
+
+from sentry_sdk.tracing_utils_py3 import (
+    start_child_span_decorator as start_child_span_decorator_py3,
+)
+from sentry_sdk.utils import logger
+
+if sys.version_info < (3, 6):
+    pytest.skip("Async decorator only works on Python 3.6+", allow_module_level=True)
+
+
+def my_example_function():
+    return "return_of_sync_function"
+
+
+async def my_async_example_function():
+    return "return_of_async_function"
+
+
+def test_trace_decorator_sync_py3():
+    fake_start_child = mock.MagicMock()
+    fake_transaction = mock.MagicMock()
+    fake_transaction.start_child = fake_start_child
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py3.get_current_span",
+        return_value=fake_transaction,
+    ):
+        result = my_example_function()
+        fake_start_child.assert_not_called()
+        assert result == "return_of_sync_function"
+
+        result2 = start_child_span_decorator_py3(my_example_function)()
+        fake_start_child.assert_called_once_with(
+            op="function", description="test_decorator_py3.my_example_function"
+        )
+        assert result2 == "return_of_sync_function"
+
+
+def test_trace_decorator_sync_py3_no_trx():
+    fake_transaction = None
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py3.get_current_span",
+        return_value=fake_transaction,
+    ):
+        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+            result = my_example_function()
+            fake_warning.assert_not_called()
+            assert result == "return_of_sync_function"
+
+            result2 = start_child_span_decorator_py3(my_example_function)()
+            fake_warning.assert_called_once_with(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                "test_decorator_py3.my_example_function",
+            )
+            assert result2 == "return_of_sync_function"
+
+
+@pytest.mark.asyncio
+async def test_trace_decorator_async_py3():
+    fake_start_child = mock.MagicMock()
+    fake_transaction = mock.MagicMock()
+    fake_transaction.start_child = fake_start_child
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py3.get_current_span",
+        return_value=fake_transaction,
+    ):
+        result = await my_async_example_function()
+        fake_start_child.assert_not_called()
+        assert result == "return_of_async_function"
+
+        result2 = await start_child_span_decorator_py3(my_async_example_function)()
+        fake_start_child.assert_called_once_with(
+            op="function", description="test_decorator_py3.my_async_example_function"
+        )
+        assert result2 == "return_of_async_function"
+
+
+@pytest.mark.asyncio
+async def test_trace_decorator_async_py3_no_trx():
+    fake_transaction = None
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py3.get_current_span",
+        return_value=fake_transaction,
+    ):
+        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+            result = await my_async_example_function()
+            fake_warning.assert_not_called()
+            assert result == "return_of_async_function"
+
+            result2 = await start_child_span_decorator_py3(my_async_example_function)()
+            fake_warning.assert_called_once_with(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                "test_decorator_py3.my_async_example_function",
+            )
+            assert result2 == "return_of_async_function"
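
Both new test modules (the py2 and py3 variants) assert the same contract for the trace decorator: wrap the function, open a child span on the current span if one exists, otherwise log a warning and call straight through. A hypothetical sketch, with the span lookup and logger injected as parameters to keep it self-contained (the shipped decorators live in sentry_sdk.tracing_utils_py2/_py3 and also use the module-qualified function name and support async functions):

    import functools

    def start_child_span_decorator_sketch(func, get_current_span, logger):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            span = get_current_span()
            if span is None:
                # No active transaction: warn, but still run the function.
                logger.warning(
                    "Can not create a child span for %s. "
                    "Please start a Sentry transaction before calling this function.",
                    func.__name__,
                )
                return func(*args, **kwargs)
            with span.start_child(op="function", description=func.__name__):
                return func(*args, **kwargs)
        return wrapper
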
diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py
index 3db967b..443bb16 100644
--- a/tests/tracing/test_http_headers.py
+++ b/tests/tracing/test_http_headers.py
@@ -1,16 +1,7 @@
-import json
-
 import pytest
 
-import sentry_sdk
-from sentry_sdk.tracing import Transaction, Span
-from sentry_sdk.tracing_utils import (
-    compute_tracestate_value,
-    extract_sentrytrace_data,
-    extract_tracestate_data,
-    reinflate_tracestate,
-)
-from sentry_sdk.utils import from_base64, to_base64
+from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
 
 
 try:
@@ -19,142 +10,8 @@ except ImportError:
     import mock  # python < 3.3
 
 
-def test_tracestate_computation(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-        trace_id="12312012123120121231201212312012",
-    )
-
-    # force lazy computation to create a value
-    transaction.to_tracestate()
-
-    computed_value = transaction._sentry_tracestate.replace("sentry=", "")
-    # we have to decode and reinflate the data because we can guarantee that the
-    # order of the entries in the jsonified dict will be the same here as when
-    # the tracestate is computed
-    reinflated_trace_data = json.loads(from_base64(computed_value))
-
-    assert reinflated_trace_data == {
-        "trace_id": "12312012123120121231201212312012",
-        "environment": "dogpark",
-        "release": "off.leash.park",
-        "public_key": "dogsarebadatkeepingsecrets",
-        "user": {"id": 12312013, "segment": "bigs"},
-        "transaction": "/interactions/other-dogs/new-dog",
-    }
-
-
-def test_doesnt_add_new_tracestate_to_transaction_when_none_given(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-        # sentry_tracestate=< value would be passed here >
-    )
-
-    assert transaction._sentry_tracestate is None
-
-
-def test_adds_tracestate_to_transaction_when_to_traceparent_called(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-    )
-
-    # no inherited tracestate, and none created in Transaction constructor
-    assert transaction._sentry_tracestate is None
-
-    transaction.to_tracestate()
-
-    assert transaction._sentry_tracestate is not None
-
-
-def test_adds_tracestate_to_transaction_when_getting_trace_context(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-    )
-
-    # no inherited tracestate, and none created in Transaction constructor
-    assert transaction._sentry_tracestate is None
-
-    transaction.get_trace_context()
-
-    assert transaction._sentry_tracestate is not None
-
-
-@pytest.mark.parametrize(
-    "set_by", ["inheritance", "to_tracestate", "get_trace_context"]
-)
-def test_tracestate_is_immutable_once_set(sentry_init, monkeypatch, set_by):
-    monkeypatch.setattr(
-        sentry_sdk.tracing,
-        "compute_tracestate_entry",
-        mock.Mock(return_value="sentry=doGsaREgReaT"),
-    )
-
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    # for each scenario, get to the point where tracestate has been set
-    if set_by == "inheritance":
-        transaction = Transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-            sentry_tracestate=("sentry=doGsaREgReaT"),
-        )
-    else:
-        transaction = Transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-        )
-
-        if set_by == "to_tracestate":
-            transaction.to_tracestate()
-        if set_by == "get_trace_context":
-            transaction.get_trace_context()
-
-    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
-
-    # user data would be included in tracestate if it were recomputed at this point
-    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
-
-    # value hasn't changed
-    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
-
-
 @pytest.mark.parametrize("sampled", [True, False, None])
-def test_to_traceparent(sentry_init, sampled):
-
+def test_to_traceparent(sampled):
     transaction = Transaction(
         name="/interactions/other-dogs/new-dog",
         op="greeting.sniff",
@@ -164,56 +21,13 @@ def test_to_traceparent(sentry_init, sampled):
 
     traceparent = transaction.to_traceparent()
 
-    trace_id, parent_span_id, parent_sampled = traceparent.split("-")
-    assert trace_id == "12312012123120121231201212312012"
-    assert parent_span_id == transaction.span_id
-    assert parent_sampled == (
-        "1" if sampled is True else "0" if sampled is False else ""
-    )
-
-
-def test_to_tracestate(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    # it correctly uses the value from the transaction itself or the span's
-    # containing transaction
-    transaction_no_third_party = Transaction(
-        trace_id="12312012123120121231201212312012",
-        sentry_tracestate="sentry=doGsaREgReaT",
-    )
-    non_orphan_span = Span()
-    non_orphan_span._containing_transaction = transaction_no_third_party
-    assert transaction_no_third_party.to_tracestate() == "sentry=doGsaREgReaT"
-    assert non_orphan_span.to_tracestate() == "sentry=doGsaREgReaT"
-
-    # it combines sentry and third-party values correctly
-    transaction_with_third_party = Transaction(
-        trace_id="12312012123120121231201212312012",
-        sentry_tracestate="sentry=doGsaREgReaT",
-        third_party_tracestate="maisey=silly",
-    )
-    assert (
-        transaction_with_third_party.to_tracestate()
-        == "sentry=doGsaREgReaT,maisey=silly"
-    )
-
-    # it computes a tracestate from scratch for orphan transactions
-    orphan_span = Span(
-        trace_id="12312012123120121231201212312012",
-    )
-    assert orphan_span._containing_transaction is None
-    assert orphan_span.to_tracestate() == "sentry=" + compute_tracestate_value(
-        {
-            "trace_id": "12312012123120121231201212312012",
-            "environment": "dogpark",
-            "release": "off.leash.park",
-            "public_key": "dogsarebadatkeepingsecrets",
-        }
-    )
+    parts = traceparent.split("-")
+    assert parts[0] == "12312012123120121231201212312012"  # trace_id
+    assert parts[1] == transaction.span_id  # parent_span_id
+    if sampled is None:
+        assert len(parts) == 2
+    else:
+        assert parts[2] == "1" if sampled is True else "0"  # sampled
 
 
 @pytest.mark.parametrize("sampling_decision", [True, False])
@@ -228,78 +42,12 @@ def test_sentrytrace_extraction(sampling_decision):
     }
 
 
-@pytest.mark.parametrize(
-    ("incoming_header", "expected_sentry_value", "expected_third_party"),
-    [
-        # sentry only
-        ("sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
-        # sentry only, invalid (`!` isn't a valid base64 character)
-        ("sentry=doGsaREgReaT!", None, None),
-        # stuff before
-        ("maisey=silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", "maisey=silly"),
-        # stuff after
-        ("sentry=doGsaREgReaT,maisey=silly", "sentry=doGsaREgReaT", "maisey=silly"),
-        # stuff before and after
-        (
-            "charlie=goofy,sentry=doGsaREgReaT,maisey=silly",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple before
-        (
-            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple after
-        (
-            "sentry=doGsaREgReaT,charlie=goofy,maisey=silly",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple before and after
-        (
-            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT,bodhi=floppy,cory=loyal",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly,bodhi=floppy,cory=loyal",
-        ),
-        # only third-party data
-        ("maisey=silly", None, "maisey=silly"),
-        # invalid third-party data, valid sentry data
-        ("maisey_is_silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
-        # valid third-party data, invalid sentry data
-        ("maisey=silly,sentry=doGsaREgReaT!", None, "maisey=silly"),
-        # nothing valid at all
-        ("maisey_is_silly,sentry=doGsaREgReaT!", None, None),
-    ],
-)
-def test_tracestate_extraction(
-    incoming_header, expected_sentry_value, expected_third_party
-):
-    assert extract_tracestate_data(incoming_header) == {
-        "sentry_tracestate": expected_sentry_value,
-        "third_party_tracestate": expected_third_party,
-    }
-
-
-# TODO (kmclb) remove this parameterization once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False])
-def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
+def test_iter_headers(monkeypatch):
     monkeypatch.setattr(
         Transaction,
         "to_traceparent",
         mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
     )
-    monkeypatch.setattr(
-        Transaction,
-        "to_tracestate",
-        mock.Mock(return_value="sentry=doGsaREgReaT,charlie=goofy"),
-    )
-    monkeypatch.setattr(
-        sentry_sdk.tracing,
-        "has_tracestate_enabled",
-        mock.Mock(return_value=tracestate_enabled),
-    )
 
     transaction = Transaction(
         name="/interactions/other-dogs/new-dog",
@@ -310,23 +58,3 @@ def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
     assert (
         headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
     )
-    if tracestate_enabled:
-        assert "tracestate" in headers
-        assert headers["tracestate"] == "sentry=doGsaREgReaT,charlie=goofy"
-    else:
-        assert "tracestate" not in headers
-
-
-@pytest.mark.parametrize(
-    "data",
-    [  # comes out with no trailing `=`
-        {"name": "Maisey", "birthday": "12/31/12"},
-        # comes out with one trailing `=`
-        {"dogs": "yes", "cats": "maybe"},
-        # comes out with two trailing `=`
-        {"name": "Charlie", "birthday": "11/21/12"},
-    ],
-)
-def test_tracestate_reinflation(data):
-    encoded_tracestate = to_base64(json.dumps(data)).strip("=")
-    assert reinflate_tracestate(encoded_tracestate) == data
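
The slimmed-down test_to_traceparent above fixes the header shape: `trace_id-span_id`, with a third `-0`/`-1` segment only when a sampling decision was made. A minimal sketch of that shape (illustrative, not the Transaction.to_traceparent implementation):

    def to_traceparent_sketch(trace_id, span_id, sampled=None):
        header = "%s-%s" % (trace_id, span_id)
        if sampled is not None:
            # Append the sampling decision only when one exists.
            header += "-1" if sampled else "-0"
        return header

    assert to_traceparent_sketch("12312012123120121231201212312012", "0415201309082013", None).count("-") == 1
    assert to_traceparent_sketch("12312012123120121231201212312012", "0415201309082013", False).endswith("-0")
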
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
index f42df10..0fe8117 100644
--- a/tests/tracing/test_integration_tests.py
+++ b/tests/tracing/test_integration_tests.py
@@ -63,13 +63,9 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r
     envelopes = capture_envelopes()
 
     # make a parent transaction (normally this would be in a different service)
-    with start_transaction(
-        name="hi", sampled=True if sample_rate == 0 else None
-    ) as parent_transaction:
+    with start_transaction(name="hi", sampled=True if sample_rate == 0 else None):
         with start_span() as old_span:
             old_span.sampled = sampled
-            tracestate = parent_transaction._sentry_tracestate
-
             headers = dict(Hub.current.iter_trace_propagation_headers(old_span))
             headers["baggage"] = (
                 "other-vendor-value-1=foo;bar;baz, "
@@ -79,8 +75,7 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r
                 "other-vendor-value-2=foo;bar;"
             )
 
-    # child transaction, to prove that we can read 'sentry-trace' and
-    # `tracestate` header data correctly
+    # child transaction, to prove that we can read 'sentry-trace' header data correctly
     child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
     assert child_transaction is not None
     assert child_transaction.parent_sampled == sampled
@@ -88,7 +83,6 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r
     assert child_transaction.same_process_as_parent is False
     assert child_transaction.parent_span_id == old_span.span_id
     assert child_transaction.span_id != old_span.span_id
-    assert child_transaction._sentry_tracestate == tracestate
 
     baggage = child_transaction._baggage
     assert baggage
@@ -178,13 +172,14 @@ def test_dynamic_sampling_head_sdk_creates_dsc(
         "environment": "production",
         "release": "foo",
         "sample_rate": str(sample_rate),
+        "sampled": "true" if transaction.sampled else "false",
         "transaction": "Head SDK tx",
         "trace_id": trace_id,
     }
 
     expected_baggage = (
-        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s"
-        % (sample_rate, trace_id)
+        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s,sentry-sampled=%s"
+        % (sample_rate, trace_id, "true" if transaction.sampled else "false")
     )
     assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))
 
@@ -194,6 +189,7 @@ def test_dynamic_sampling_head_sdk_creates_dsc(
         "environment": "production",
         "release": "foo",
         "sample_rate": str(sample_rate),
+        "sampled": "true" if transaction.sampled else "false",
         "transaction": "Head SDK tx",
         "trace_id": trace_id,
     }
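
The new `sampled` key shown above is carried in the dynamic sampling context and serialized into baggage like the other entries. A sketch of that mapping, assuming plain percent-encoding of values:

    from urllib.parse import quote

    dsc = {
        "environment": "production",
        "release": "foo",
        "sample_rate": "1.0",
        "sampled": "true",  # the key added in this release
        "transaction": "Head SDK tx",
    }
    baggage = ",".join("sentry-%s=%s" % (k, quote(v)) for k, v in dsc.items())
    assert "sentry-sampled=true" in baggage
    assert "sentry-transaction=Head%20SDK%20tx" in baggage
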
diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py
index b51b5dc..49b1f53 100644
--- a/tests/tracing/test_misc.py
+++ b/tests/tracing/test_misc.py
@@ -4,14 +4,18 @@ import uuid
 import os
 
 import sentry_sdk
-from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk import Hub, start_span, start_transaction, set_measurement
+from sentry_sdk.consts import MATCH_ALL
 from sentry_sdk.tracing import Span, Transaction
-from sentry_sdk.tracing_utils import has_tracestate_enabled
+from sentry_sdk.tracing_utils import should_propagate_trace
+from sentry_sdk.utils import Dsn
 
 try:
     from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
 except ImportError:
     import mock  # python < 3.3
+    from mock import MagicMock
 
 
 def test_span_trimming(sentry_init, capture_events):
@@ -232,24 +236,8 @@ def test_circular_references(monkeypatch, sentry_init, request):
     assert gc.collect() == 0
 
 
-# TODO (kmclb) remove this test once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False, None])
-def test_has_tracestate_enabled(sentry_init, tracestate_enabled):
-    experiments = (
-        {"propagate_tracestate": tracestate_enabled}
-        if tracestate_enabled is not None
-        else {}
-    )
-    sentry_init(_experiments=experiments)
-
-    if tracestate_enabled is True:
-        assert has_tracestate_enabled() is True
-    else:
-        assert has_tracestate_enabled() is False
-
-
 def test_set_meaurement(sentry_init, capture_events):
-    sentry_init(traces_sample_rate=1.0, _experiments={"custom_measurements": True})
+    sentry_init(traces_sample_rate=1.0)
 
     events = capture_events()
 
@@ -274,3 +262,94 @@ def test_set_meaurement(sentry_init, capture_events):
     assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
     assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"}
     assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"}
+
+
+def test_set_meaurement_public_api(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    with start_transaction(name="measuring stuff"):
+        set_measurement("metric.foo", 123)
+        set_measurement("metric.bar", 456, unit="second")
+
+    (event,) = events
+    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
+    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
+
+
+@pytest.mark.parametrize(
+    "trace_propagation_targets,url,expected_propagation_decision",
+    [
+        (None, "http://example.com", False),
+        ([], "http://example.com", False),
+        ([MATCH_ALL], "http://example.com", True),
+        (["localhost"], "localhost:8443/api/users", True),
+        (["localhost"], "http://localhost:8443/api/users", True),
+        (["localhost"], "mylocalhost:8080/api/users", True),
+        ([r"^/api"], "/api/envelopes", True),
+        ([r"^/api"], "/backend/api/envelopes", False),
+        ([r"myApi.com/v[2-4]"], "myApi.com/v2/projects", True),
+        ([r"myApi.com/v[2-4]"], "myApi.com/v1/projects", False),
+        ([r"https:\/\/.*"], "https://example.com", True),
+        (
+            [r"https://.*"],
+            "https://example.com",
+            True,
+        ),  # to show escaping is not needed
+        ([r"https://.*"], "http://example.com/insecure/", False),
+    ],
+)
+def test_should_propagate_trace(
+    trace_propagation_targets, url, expected_propagation_decision
+):
+    hub = MagicMock()
+    hub.client = MagicMock()
+    hub.client.options = {"trace_propagation_targets": trace_propagation_targets}
+    hub.client.transport = MagicMock()
+    hub.client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")
+
+    assert should_propagate_trace(hub, url) == expected_propagation_decision
+
+
+@pytest.mark.parametrize(
+    "dsn,url,expected_propagation_decision",
+    [
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://example.com",
+            True,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            False,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://squirrelchasers.ingest.sentry.io/12312012",
+            False,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://ingest.sentry.io/12312012",
+            True,
+        ),
+        (
+            "https://abc@localsentry.example.com/12312012",
+            "http://localsentry.example.com",
+            False,
+        ),
+    ],
+)
+def test_should_propagate_trace_to_sentry(
+    sentry_init, dsn, url, expected_propagation_decision
+):
+    sentry_init(
+        dsn=dsn,
+        traces_sample_rate=1.0,
+    )
+
+    Hub.current.client.transport.parsed_dsn = Dsn(dsn)
+
+    assert should_propagate_trace(Hub.current, url) == expected_propagation_decision
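
Taken together, the two parametrizations above encode a two-part decision: propagate trace headers only when the outgoing URL matches one of trace_propagation_targets, and never when the request targets the configured DSN's own host. A rough sketch of that rule (the real should_propagate_trace lives in sentry_sdk.tracing_utils):

    import re

    def should_propagate_sketch(targets, url, dsn_host=None):
        if dsn_host is not None and dsn_host in url:
            # Don't attach trace headers to requests going to Sentry itself.
            return False
        if not targets:
            return False
        return any(re.search(target, url) for target in targets)

    assert should_propagate_sketch([r"myApi.com/v[2-4]"], "myApi.com/v2/projects") is True
    assert should_propagate_sketch([r"myApi.com/v[2-4]"], "myApi.com/v1/projects") is False
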
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
new file mode 100644
index 0000000..9896afb
--- /dev/null
+++ b/tests/tracing/test_noop_span.py
@@ -0,0 +1,52 @@
+import sentry_sdk
+from sentry_sdk.tracing import NoOpSpan
+
+# These tests make sure that the examples from the documentation [1]
+# work when OTel (OpenTelemetry) instrumentation is turned on,
+# and that Sentry tracing therefore does not do anything.
+#
+# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/
+
+
+def test_noop_start_transaction(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_transaction(
+        op="task", name="test_transaction_name"
+    ) as transaction:
+        assert isinstance(transaction, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is transaction
+
+        transaction.name = "new name"
+
+
+def test_noop_start_span(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_span(op="http", description="GET /") as span:
+        assert isinstance(span, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is span
+
+        span.set_tag("http.response.status_code", 418)
+        span.set_data("http.entity_type", "teapot")
+
+
+def test_noop_transaction_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    transaction = sentry_sdk.start_transaction(name="task")
+    assert isinstance(transaction, NoOpSpan)
+
+    with transaction.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
+
+
+def test_noop_span_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+    span = sentry_sdk.start_span(name="task")
+    assert isinstance(span, NoOpSpan)
+
+    with span.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
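
A usage sketch of what these tests guard: with `instrumenter="otel"`, the SDK's performance APIs hand back inert NoOpSpan objects so that OpenTelemetry instrumentation owns span creation:

    import sentry_sdk

    sentry_sdk.init(instrumenter="otel")

    # Neither call records anything in Sentry; they only satisfy the API so
    # existing custom-instrumentation code keeps working under OTel.
    with sentry_sdk.start_transaction(op="task", name="my task") as transaction:
        with transaction.start_child(op="child_task"):
            pass
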
diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py
index 9975aba..6101a94 100644
--- a/tests/tracing/test_sampling.py
+++ b/tests/tracing/test_sampling.py
@@ -2,9 +2,8 @@ import random
 
 import pytest
 
-from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk import Hub, start_span, start_transaction, capture_exception
 from sentry_sdk.tracing import Transaction
-from sentry_sdk.tracing_utils import is_valid_sample_rate
 from sentry_sdk.utils import logger
 
 try:
@@ -51,38 +50,6 @@ def test_no_double_sampling(sentry_init, capture_events):
     assert len(events) == 1
 
 
-@pytest.mark.parametrize(
-    "rate",
-    [0.0, 0.1231, 1.0, True, False],
-)
-def test_accepts_valid_sample_rate(rate):
-    with mock.patch.object(logger, "warning", mock.Mock()):
-        result = is_valid_sample_rate(rate)
-        assert logger.warning.called is False
-        assert result is True
-
-
-@pytest.mark.parametrize(
-    "rate",
-    [
-        "dogs are great",  # wrong type
-        (0, 1),  # wrong type
-        {"Maisey": "Charllie"},  # wrong type
-        [True, True],  # wrong type
-        {0.2012},  # wrong type
-        float("NaN"),  # wrong type
-        None,  # wrong type
-        -1.121,  # wrong value
-        1.231,  # wrong value
-    ],
-)
-def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
-    with mock.patch.object(logger, "warning", mock.Mock()):
-        result = is_valid_sample_rate(rate)
-        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
-        assert result is False
-
-
 @pytest.mark.parametrize("sampling_decision", [True, False])
 def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
     sentry_init, sampling_decision
@@ -109,7 +76,6 @@ def test_uses_traces_sample_rate_correctly(
     sentry_init(traces_sample_rate=traces_sample_rate)
 
     with mock.patch.object(random, "random", return_value=0.5):
-
         transaction = start_transaction(name="dogpark")
         assert transaction.sampled is expected_decision
 
@@ -126,7 +92,6 @@ def test_uses_traces_sampler_return_value_correctly(
     sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))
 
     with mock.patch.object(random, "random", return_value=0.5):
-
         transaction = start_transaction(name="dogpark")
         assert transaction.sampled is expected_decision
 
@@ -261,6 +226,18 @@ def test_passes_custom_samling_context_from_start_transaction_to_traces_sampler(
     )
 
 
+def test_sample_rate_affects_errors(sentry_init, capture_events):
+    sentry_init(sample_rate=0)
+    events = capture_events()
+
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    assert len(events) == 0
+
+
 @pytest.mark.parametrize(
     "traces_sampler_return_value",
     [
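
The new test_sample_rate_affects_errors above pins down that `sample_rate` (error-event sampling) is independent of `traces_sample_rate` (transaction sampling). In practice:

    import sentry_sdk

    sentry_sdk.init(sample_rate=0)  # drop every error event client-side

    try:
        1 / 0
    except ZeroDivisionError:
        sentry_sdk.capture_exception()  # sampled away; nothing is sent
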
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
index b85975b..6f53de3 100644
--- a/tests/utils/test_general.py
+++ b/tests/utils/test_general.py
@@ -11,10 +11,12 @@ from sentry_sdk.utils import (
     safe_repr,
     exceptions_from_error_tuple,
     filename_for_module,
-    handle_in_app_impl,
     iter_event_stacktraces,
     to_base64,
     from_base64,
+    set_in_app_in_frames,
+    strip_string,
+    AnnotatedValue,
 )
 from sentry_sdk._compat import text_type, string_types
 
@@ -131,25 +133,376 @@ def test_parse_invalid_dsn(dsn):
         dsn = Dsn(dsn)
 
 
-@pytest.mark.parametrize("empty", [None, []])
-def test_in_app(empty):
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=["foo"],
-        in_app_exclude=empty,
-    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
-
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=["foo"],
-        in_app_exclude=["foo"],
-    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
+@pytest.mark.parametrize(
+    "frame,in_app_include,in_app_exclude,project_root,resulting_frame",
+    [
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        # include
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,  # because there is no module set
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,  # because there is no module set
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        # exclude
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            None,
+            ["fastapi"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "in_app": False,
+            },
+        ],
+        # with project_root set
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["main"],
+            None,
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": False,
+            },
+        ],
+    ],
+)
+def test_set_in_app_in_frames(
+    frame, in_app_include, in_app_exclude, project_root, resulting_frame
+):
+    new_frames = set_in_app_in_frames(
+        [frame],
+        in_app_include=in_app_include,
+        in_app_exclude=in_app_exclude,
+        project_root=project_root,
+    )
 
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=empty,
-        in_app_exclude=["foo"],
-    ) == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}]
+    assert new_frames[0] == resulting_frame
 
 
 def test_iter_stacktraces():
@@ -217,3 +570,24 @@ def test_failed_base64_conversion(input):
     # failures
     if type(input) not in string_types:
         assert to_base64(input) is None
+
+
+def test_strip_string():
+    # If value is None, returns None.
+    assert strip_string(None) is None
+
+    # If max_length is not passed, returns the full text (up to 1024 bytes).
+    text_1024_long = "a" * 1024
+    assert strip_string(text_1024_long).count("a") == 1024
+
+    # If value exceeds the max_length, returns an AnnotatedValue.
+    text_1025_long = "a" * 1025
+    stripped_text = strip_string(text_1025_long)
+    assert isinstance(stripped_text, AnnotatedValue)
+    assert stripped_text.value.count("a") == 1021  # + '...' is 1024
+
+    # For text with unicode characters, the limit counts bytes, not the number of characters.
+    # fmt: off
+    text_with_unicode_character = u"éê"
+    assert strip_string(text_with_unicode_character, max_length=2).value == u"é..."
+    # fmt: on
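
A character-based sketch of the truncation contract test_strip_string asserts; it deliberately glosses over the byte counting that the unicode case above exercises and returns a plain string instead of an AnnotatedValue:

    def strip_string_sketch(value, max_length=1024):
        # Approximation of sentry_sdk.utils.strip_string.
        if value is None or len(value) <= max_length:
            return value
        # Reserve three characters for the "..." marker.
        return value[: max_length - 3] + "..."

    assert strip_string_sketch(None) is None
    assert strip_string_sketch("a" * 1024) == "a" * 1024
    assert strip_string_sketch("a" * 1025) == "a" * 1021 + "..."
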
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
index e1aa123..bfb87f4 100644
--- a/tests/utils/test_transaction.py
+++ b/tests/utils/test_transaction.py
@@ -1,5 +1,15 @@
+import sys
+from functools import partial
+
+import pytest
+
 from sentry_sdk.utils import transaction_from_function
 
+try:
+    from functools import partialmethod
+except ImportError:
+    pass
+
 
 class MyClass:
     def myfunc(self):
@@ -10,6 +20,16 @@ def myfunc():
     pass
 
 
+@partial
+def my_partial():
+    pass
+
+
+my_lambda = lambda: None
+
+my_partial_lambda = partial(lambda: None)
+
+
 def test_transaction_from_function():
     x = transaction_from_function
     assert x(MyClass) == "tests.utils.test_transaction.MyClass"
@@ -18,3 +38,26 @@ def test_transaction_from_function():
     assert x(None) is None
     assert x(42) is None
     assert x(lambda: None).endswith("<lambda>")
+    assert x(my_lambda) == "tests.utils.test_transaction.<lambda>"
+    assert (
+        x(my_partial) == "partial(<function tests.utils.test_transaction.my_partial>)"
+    )
+    assert (
+        x(my_partial_lambda)
+        == "partial(<function tests.utils.test_transaction.<lambda>>)"
+    )
+
+
+@pytest.mark.skipif(sys.version_info < (3, 4), reason="Require python 3.4 or higher")
+def test_transaction_from_function_partialmethod():
+    x = transaction_from_function
+
+    class MyPartialClass:
+        @partialmethod
+        def my_partial_method(self):
+            pass
+
+    assert (
+        x(MyPartialClass.my_partial_method)
+        == "partialmethod(<function tests.utils.test_transaction.test_transaction_from_function_partialmethod.<locals>.MyPartialClass.my_partial_method>)"
+    )
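
A quick usage sketch of the new partial handling (the module path in the output depends on where the function is defined):

    from functools import partial

    from sentry_sdk.utils import transaction_from_function

    def handler():
        pass

    # e.g. "partial(<function __main__.handler>)"
    print(transaction_from_function(partial(handler)))
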
diff --git a/tox.ini b/tox.ini
index 2b26d2f..6746077 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,96 +5,158 @@
 
 [tox]
 envlist =
-    # === Core ===
-    py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10}
+    # === Common ===
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-common
 
     # === Integrations ===
-    # General format is {pythonversion}-{integrationname}-{frameworkversion}
+    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
     # 1 blank line between different integrations
     # Each framework version should only be mentioned once. I.e:
-    #   {py3.7,py3.10}-django-{3.2}
-    #   {py3.10}-django-{4.0}
+    #   {py3.7,py3.10}-django-v{3.2}
+    #   {py3.10}-django-v{4.0}
     # instead of:
-    #   {py3.7}-django-{3.2}
-    #   {py3.7,py3.10}-django-{3.2,4.0}
+    #   {py3.7}-django-v{3.2}
+    #   {py3.7,py3.10}-django-v{3.2,4.0}
 
-    # Django 1.x
-    {py2.7,py3.5}-django-{1.8,1.9,1.10}
-    {py2.7,py3.5,py3.6,py3.7}-django-{1.11}
-    # Django 2.x
-    {py3.5,py3.6,py3.7}-django-{2.0,2.1}
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-{2.2}
-    # Django 3.x
-    {py3.6,py3.7,py3.8,py3.9}-django-{3.0,3.1}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-django-{3.2}
-    # Django 4.x
-    {py3.8,py3.9,py3.10}-django-{4.0,4.1}
+    # AIOHTTP
+    {py3.7}-aiohttp-v{3.5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-aiohttp-v{3.6}
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
-    {py3.6,py3.8,py3.9,py3.10}-flask-2.0
+    # Arq
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-arq
 
-    {py3.7,py3.8,py3.9,py3.10}-asgi
+    # Asgi
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-asgi
 
-    {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20}
+    # AWS Lambda
+    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
+    {py3.7}-aws_lambda
 
-    {py3.7,py3.8,py3.9,py3.10}-fastapi
+    # Beam
+    {py3.7}-beam-v{2.12,2.13,2.32,2.33}
 
-    {py3.7,py3.8,py3.9,py3.10}-quart
+    # Boto3
+    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12
+    # Bottle
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-bottle-v{0.12}
 
-    {py2.7,py3.5,py3.6,py3.7}-falcon-1.4
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0
+    # Celery
+    {py2.7}-celery-v{3}
+    {py2.7,py3.5,py3.6}-celery-v{4.1,4.2}
+    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4}
+    {py3.6,py3.7,py3.8}-celery-v{5.0}
+    {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2}
+    # TODO: enable when celery is ready {py3.7,py3.8,py3.9,py3.10,py3.11}-celery-v{5.3}
 
-    {py3.5,py3.6,py3.7}-sanic-{0.8,18}
-    {py3.6,py3.7}-sanic-19
-    {py3.6,py3.7,py3.8}-sanic-20
-    {py3.7,py3.8,py3.9,py3.10}-sanic-21
-    {py3.7,py3.8,py3.9,py3.10}-sanic-22
+    # Chalice
+    {py3.6,py3.7,py3.8}-chalice-v{1.18,1.20,1.22,1.24}
 
-    {py2.7}-celery-3
-    {py2.7,py3.5,py3.6}-celery-{4.1,4.2}
-    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
-    {py3.6,py3.7,py3.8}-celery-{5.0}
-    {py3.7,py3.8,py3.9,py3.10}-celery-{5.1,5.2}
+    # Cloud Resource Context
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-cloud_resource_context
 
-    py3.7-beam-{2.12,2.13,2.32,2.33}
+    # Django
+    # - Django 1.x
+    {py2.7,py3.5}-django-v{1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7}-django-v{1.11}
+    # - Django 2.x
+    {py3.5,py3.6,py3.7}-django-v{2.0,2.1}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2}
+    # - Django 3.x
+    {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{3.2}
+    # - Django 4.x
+    {py3.8,py3.9,py3.10,py3.11}-django-v{4.0,4.1}
 
-    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
-    py3.7-aws_lambda
+    # Falcon
+    {py2.7,py3.5,py3.6,py3.7}-falcon-v{1.4}
+    {py2.7,py3.5,py3.6,py3.7}-falcon-v{2.0}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-falcon-v{3.0}
+
+    # FastAPI
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-fastapi
+
+    # Flask
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1.1}
+    {py3.6,py3.8,py3.9,py3.10,py3.11}-flask-v{2.0}
+
+    # Gevent
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent
+
+    # GCP
+    {py3.7}-gcp
+
+    # Grpc
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-grpc-v{1.21.1,1.22.1,1.23.1,1.24.1,1.25.0,1.26.0,1.27.1,1.28.1,1.29.0,1.30.0,1.31.0,1.32.0,1.33.1,1.34.0,1.36.0,1.37.0,1.38.0,1.39.0,1.40.0,1.41.1,1.43.0,1.44.0,1.46.1,1.48.1,1.51.3,1.53.0}
 
-    py3.7-gcp
+    # HTTPX
+    {py3.6,py3.7,py3.8,py3.9}-httpx-v{0.16,0.17,0.18}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.19,0.20,0.21,0.22}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-httpx-v{0.23}
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10}
+    # Huey
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-huey-2
 
-    {py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5}
+    # Loguru
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-loguru-v{0.5,0.6,0.7}
 
-    py3.7-aiohttp-3.5
-    {py3.7,py3.8,py3.9,py3.10}-aiohttp-3.6
+    # OpenTelemetry (OTel)
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-opentelemetry
 
-    {py3.7,py3.8,py3.9}-tornado-{5}
-    {py3.7,py3.8,py3.9,py3.10}-tornado-{6}
+    # pure_eval
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pure_eval
 
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-{5.4}
+    # PyMongo (Mongo DB)
+    {py2.7,py3.6}-pymongo-v{3.1}
+    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.0}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.1,4.2}
 
+    # Pyramid
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
+
+    # Quart
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-quart-v{0.16,0.17,0.18}
+
+    # Redis
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-redis
+
+    # Redis Cluster
+    {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
+
+    # Requests
     {py2.7,py3.8,py3.9}-requests
 
-    {py2.7,py3.7,py3.8,py3.9}-redis
-    {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2.1.0,2}
+    # RQ (Redis Queue)
+    {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-rq-v{1.4,1.5}
+
+    # Sanic
+    {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
+    {py3.6,py3.7}-sanic-v{19}
+    {py3.6,py3.7,py3.8}-sanic-v{20}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{21}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{22}
 
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3}
+    # Starlette
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-starlette-v{0.20,0.22,0.24,0.26,0.28}
 
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
+    # Starlite
+    {py3.8,py3.9,py3.10,py3.11}-starlite
 
-    {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20}
+    # SQL Alchemy
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-sqlalchemy-v{1.2,1.3,1.4}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sqlalchemy-v{2.0}
 
-    {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+    # Tornado
+    {py3.7,py3.8,py3.9}-tornado-v{5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-tornado-v{6}
 
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17}
+    # Trytond
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-trytond-v{5.4}
 
 [testenv]
 deps =
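
Every entry in the envlist hunk above is a generative name: tox expands each brace group as a cartesian product, so a single line such as {py3.7,py3.8,py3.9,py3.10,py3.11}-starlette-v{0.20,0.22,0.24,0.26,0.28} declares 25 concrete environments. A rough Python sketch of that expansion (illustrative only, not tox's actual parser):

    from itertools import product

    pythons = ["py3.7", "py3.8", "py3.9", "py3.10", "py3.11"]
    versions = ["0.20", "0.22", "0.24", "0.26", "0.28"]

    envs = ["{}-starlette-v{}".format(py, v) for py, v in product(pythons, versions)]
    # 25 names: "py3.7-starlette-v0.20", ..., "py3.11-starlette-v0.28"
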
@@ -103,210 +165,327 @@ deps =
     # with the -r flag
     -r test-requirements.txt
 
-    py3.4: colorama==0.4.1
-    py3.4: watchdog==0.10.7
-
-    django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
-
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
-
-    django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
-    django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0
-    django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
-
-    django-{4.0,4.1}: djangorestframework
-    django-{4.0,4.1}: pytest-asyncio
-    django-{4.0,4.1}: psycopg2-binary
-    django-{4.0,4.1}: pytest-django
-    django-{4.0,4.1}: Werkzeug
-
-    django-1.8: Django>=1.8,<1.9
-    django-1.9: Django>=1.9,<1.10
-    django-1.10: Django>=1.10,<1.11
-    django-1.11: Django>=1.11,<1.12
-    django-2.0: Django>=2.0,<2.1
-    django-2.1: Django>=2.1,<2.2
-    django-2.2: Django>=2.2,<2.3
-    django-3.0: Django>=3.0,<3.1
-    django-3.1: Django>=3.1,<3.2
-    django-3.2: Django>=3.2,<3.3
-    django-4.0: Django>=4.0,<4.1
-    django-4.1: Django>=4.1,<4.2
-
-    flask: flask-login
-    flask-0.11: Flask>=0.11,<0.12
-    flask-0.12: Flask>=0.12,<0.13
-    flask-1.0: Flask>=1.0,<1.1
-    flask-1.1: Flask>=1.1,<1.2
-    flask-2.0: Flask>=2.0,<2.1
-
-    asgi: pytest-asyncio
-    asgi: async-asgi-testclient
-
-    quart: quart>=0.16.1
-    quart: quart-auth
-    quart: pytest-asyncio
+    py3.8-common: hypothesis
 
-    starlette: pytest-asyncio
-    starlette: python-multipart
-    starlette: requests
-    starlette-0.19.1: starlette==0.19.1
-    starlette-0.20: starlette>=0.20.0,<0.21.0
+    linters: -r linter-requirements.txt
+    linters: werkzeug<2.3.0
 
-    fastapi: fastapi
-    fastapi: pytest-asyncio
-    fastapi: python-multipart
-    fastapi: requests
+    # Common
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-common: pytest-asyncio
 
-    bottle-0.12: bottle>=0.12,<0.13
+    # AIOHTTP
+    aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0
+    aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0
+    aiohttp: pytest-aiohttp
 
-    falcon-1.4: falcon>=1.4,<1.5
-    falcon-2.0: falcon>=2.0.0rc3,<3.0
+    # Arq
+    arq: arq>=0.23.0
+    arq: fakeredis>=2.2.0,<2.8
+    arq: pytest-asyncio
+    arq: async-timeout
 
-    sanic-0.8: sanic>=0.8,<0.9
-    sanic-18: sanic>=18.0,<19.0
-    sanic-19: sanic>=19.0,<20.0
-    sanic-20: sanic>=20.0,<21.0
-    sanic-21: sanic>=21.0,<22.0
-    sanic-22: sanic>=22.0,<22.9.0
+    # Asgi
+    asgi: pytest-asyncio
+    asgi: async-asgi-testclient
 
-    sanic: aiohttp
-    sanic-21: sanic_testing<22
-    sanic-22: sanic_testing<22.9.0
-    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
-    py3.5-sanic: ujson<4
+    # AWS Lambda
+    aws_lambda: boto3
 
-    beam-2.12: apache-beam>=2.12.0, <2.13.0
-    beam-2.13: apache-beam>=2.13.0, <2.14.0
-    beam-2.32: apache-beam>=2.32.0, <2.33.0
-    beam-2.33: apache-beam>=2.33.0, <2.34.0
+    # Beam
+    beam-v2.12: apache-beam>=2.12.0, <2.13.0
+    beam-v2.13: apache-beam>=2.13.0, <2.14.0
+    beam-v2.32: apache-beam>=2.32.0, <2.33.0
+    beam-v2.33: apache-beam>=2.33.0, <2.34.0
     beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
 
+    # Boto3
+    boto3-v1.9: boto3>=1.9,<1.10
+    boto3-v1.10: boto3>=1.10,<1.11
+    boto3-v1.11: boto3>=1.11,<1.12
+    boto3-v1.12: boto3>=1.12,<1.13
+    boto3-v1.13: boto3>=1.13,<1.14
+    boto3-v1.14: boto3>=1.14,<1.15
+    boto3-v1.15: boto3>=1.15,<1.16
+    boto3-v1.16: boto3>=1.16,<1.17
+
+    # Bottle
+    bottle: Werkzeug<2.1.0
+    bottle-v0.12: bottle>=0.12,<0.13
+
+    # Celery
     celery: redis
-    celery-3: Celery>=3.1,<4.0
-    celery-4.1: Celery>=4.1,<4.2
-    celery-4.2: Celery>=4.2,<4.3
-    celery-4.3: Celery>=4.3,<4.4
+    celery-v3: Celery>=3.1,<4.0
+    celery-v4.1: Celery>=4.1,<4.2
+    celery-v4.2: Celery>=4.2,<4.3
+    celery-v4.3: Celery>=4.3,<4.4
     # https://github.com/celery/vine/pull/29#issuecomment-689498382
     celery-4.3: vine<5.0.0
     # https://github.com/celery/celery/issues/6153
-    celery-4.4: Celery>=4.4,<4.5,!=4.4.4
-    celery-5.0: Celery>=5.0,<5.1
-    celery-5.1: Celery>=5.1,<5.2
-    celery-5.2: Celery>=5.2,<5.3
+    celery-v4.4: Celery>=4.4,<4.5,!=4.4.4
+    celery-v5.0: Celery>=5.0,<5.1
+    celery-v5.1: Celery>=5.1,<5.2
+    celery-v5.2: Celery>=5.2,<5.3
 
-    py3.5-celery: newrelic<6.0.0
+    {py3.5}-celery: newrelic<6.0.0
     {py3.7}-celery: importlib-metadata<5.0
-    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
-
-    requests: requests>=2.0
-
-    aws_lambda: boto3
-
-    pyramid-1.6: pyramid>=1.6,<1.7
-    pyramid-1.7: pyramid>=1.7,<1.8
-    pyramid-1.8: pyramid>=1.8,<1.9
-    pyramid-1.9: pyramid>=1.9,<1.10
-    pyramid-1.10: pyramid>=1.10,<1.11
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
 
-    # https://github.com/jamesls/fakeredis/issues/245
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
-    rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
-
-    rq-0.6: rq>=0.6,<0.7
-    rq-0.7: rq>=0.7,<0.8
-    rq-0.8: rq>=0.8,<0.9
-    rq-0.9: rq>=0.9,<0.10
-    rq-0.10: rq>=0.10,<0.11
-    rq-0.11: rq>=0.11,<0.12
-    rq-0.12: rq>=0.12,<0.13
-    rq-0.13: rq>=0.13,<0.14
-    rq-1.0: rq>=1.0,<1.1
-    rq-1.1: rq>=1.1,<1.2
-    rq-1.2: rq>=1.2,<1.3
-    rq-1.3: rq>=1.3,<1.4
-    rq-1.4: rq>=1.4,<1.5
-    rq-1.5: rq>=1.5,<1.6
-
-    aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
-    aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
-    aiohttp: pytest-aiohttp
-
-    tornado-5: tornado>=5,<6
-    tornado-6: tornado>=6.0a1
-
-    trytond-5.4: trytond>=5.4,<5.5
-    trytond-5.2: trytond>=5.2,<5.3
-    trytond-5.0: trytond>=5.0,<5.1
-    trytond-4.6: trytond>=4.6,<4.7
-
-    trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
-
-    redis: fakeredis<1.7.4
+    # Chalice
+    chalice-v1.18: chalice>=1.18.0,<1.19.0
+    chalice-v1.20: chalice>=1.20.0,<1.21.0
+    chalice-v1.22: chalice>=1.22.0,<1.23.0
+    chalice-v1.24: chalice>=1.24.0,<1.25.0
+    chalice: pytest-chalice==0.0.5
 
-    rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0
-    rediscluster-2.1.0: redis-py-cluster>=2.0.0,<2.1.1
-    rediscluster-2: redis-py-cluster>=2.1.1,<3.0.0
+    {py3.7}-chalice: botocore~=1.31
+    {py3.8}-chalice: botocore~=1.31
+
+    # Django
+    django: Werkzeug<2.1.0
+    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
+
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
+
+    django-v{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
+    django-v{2.2,3.0,3.1,3.2}: pytest-django>=4.0
+    django-v{2.2,3.0,3.1,3.2}: Werkzeug<2.0
+
+    django-v{4.0,4.1}: djangorestframework
+    django-v{4.0,4.1}: pytest-asyncio
+    django-v{4.0,4.1}: psycopg2-binary
+    django-v{4.0,4.1}: pytest-django
+    django-v{4.0,4.1}: Werkzeug
+
+    django-v1.8: Django>=1.8,<1.9
+    django-v1.9: Django>=1.9,<1.10
+    django-v1.10: Django>=1.10,<1.11
+    django-v1.11: Django>=1.11,<1.12
+    django-v2.0: Django>=2.0,<2.1
+    django-v2.1: Django>=2.1,<2.2
+    django-v2.2: Django>=2.2,<2.3
+    django-v3.0: Django>=3.0,<3.1
+    django-v3.1: Django>=3.1,<3.2
+    django-v3.2: Django>=3.2,<3.3
+    django-v4.0: Django>=4.0,<4.1
+    django-v4.1: Django>=4.1,<4.2
+
+    # Falcon
+    falcon-v1.4: falcon>=1.4,<1.5
+    falcon-v2.0: falcon>=2.0.0rc3,<3.0
+    falcon-v3.0: falcon>=3.0.0,<3.1.0
+
+    # FastAPI
+    fastapi: fastapi
+    fastapi: httpx
+    fastapi: pytest-asyncio
+    fastapi: python-multipart
+    fastapi: requests
 
-    sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
-    sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
+    # Flask
+    flask: flask-login
+    flask: Werkzeug<2.1.0
+    flask-v0.11: Flask>=0.11,<0.12
+    flask-v0.12: Flask>=0.12,<0.13
+    flask-v1.0: Flask>=1.0,<1.1
+    flask-v1.1: Flask>=1.1,<1.2
+    flask-v2.0: Flask>=2.0,<2.1
+
+    # Gevent
+    # See http://www.gevent.org/install.html#older-versions-of-python
+    # for justification of the versions pinned below
+    py3.5-gevent: gevent==20.9.0
+    # See https://stackoverflow.com/questions/51496550/runtime-warning-greenlet-greenlet-size-changed
+    # for justification why greenlet is pinned here
+    py3.5-gevent: greenlet==0.4.17
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0
+
+    # Grpc
+    grpc: grpcio-tools
+    grpc: protobuf
+    grpc: mypy-protobuf
+    grpc: types-protobuf
+
+    # HTTPX
+    httpx: pytest-httpx
+    httpx-v0.16: httpx>=0.16,<0.17
+    httpx-v0.17: httpx>=0.17,<0.18
+    httpx-v0.18: httpx>=0.18,<0.19
+    httpx-v0.19: httpx>=0.19,<0.20
+    httpx-v0.20: httpx>=0.20,<0.21
+    httpx-v0.21: httpx>=0.21,<0.22
+    httpx-v0.22: httpx>=0.22,<0.23
+    httpx-v0.23: httpx>=0.23,<0.24
+
+    # Huey
+    huey-2: huey>=2.0
+
+    # Loguru
+    loguru-v0.5: loguru>=0.5.0,<0.6.0
+    loguru-v0.6: loguru>=0.6.0,<0.7.0
+    loguru-v0.7: loguru>=0.7.0,<0.8.0
+
+    # OpenTelemetry (OTel)
+    opentelemetry: opentelemetry-distro
+
+    # pure_eval
+    pure_eval: pure_eval
 
-    linters: -r linter-requirements.txt
+    # PyMongo (MongoDB)
+    pymongo: mockupdb
+    pymongo-v3.1: pymongo>=3.1,<3.2
+    pymongo-v3.12: pymongo>=3.12,<4.0
+    pymongo-v4.0: pymongo>=4.0,<4.1
+    pymongo-v4.1: pymongo>=4.1,<4.2
+    pymongo-v4.2: pymongo>=4.2,<4.3
+
+    # Pyramid
+    pyramid: Werkzeug<2.1.0
+    pyramid-v1.6: pyramid>=1.6,<1.7
+    pyramid-v1.7: pyramid>=1.7,<1.8
+    pyramid-v1.8: pyramid>=1.8,<1.9
+    pyramid-v1.9: pyramid>=1.9,<1.10
+    pyramid-v1.10: pyramid>=1.10,<1.11
+
+    # Quart
+    quart: quart-auth
+    quart: pytest-asyncio
+    quart-v0.16: blinker<1.6
+    quart-v0.16: jinja2<3.1.0
+    quart-v0.16: Werkzeug<2.1.0
+    quart-v0.17: blinker<1.6
+    quart-v0.16: quart>=0.16.1,<0.17.0
+    quart-v0.17: quart>=0.17.0,<0.18.0
+    quart-v0.18: quart>=0.18.0,<0.19.0
+
+    # Requests
+    requests: requests>=2.0
 
-    py3.8: hypothesis
+    # Redis
+    redis: fakeredis!=1.7.4
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-redis: pytest-asyncio
 
-    pure_eval: pure_eval
-    chalice-1.16: chalice>=1.16.0,<1.17.0
-    chalice-1.17: chalice>=1.17.0,<1.18.0
-    chalice-1.18: chalice>=1.18.0,<1.19.0
-    chalice-1.19: chalice>=1.19.0,<1.20.0
-    chalice-1.20: chalice>=1.20.0,<1.21.0
-    chalice: pytest-chalice==0.0.5
+    # Redis Cluster
+    rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0
+    rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1
+    rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0
 
-    boto3-1.9: boto3>=1.9,<1.10
-    boto3-1.10: boto3>=1.10,<1.11
-    boto3-1.11: boto3>=1.11,<1.12
-    boto3-1.12: boto3>=1.12,<1.13
-    boto3-1.13: boto3>=1.13,<1.14
-    boto3-1.14: boto3>=1.14,<1.15
-    boto3-1.15: boto3>=1.15,<1.16
-    boto3-1.16: boto3>=1.16,<1.17
+    # RQ (Redis Queue)
+    # https://github.com/jamesls/fakeredis/issues/245
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
+    rq-v{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
+
+    rq-v0.6: rq>=0.6,<0.7
+    rq-v0.7: rq>=0.7,<0.8
+    rq-v0.8: rq>=0.8,<0.9
+    rq-v0.9: rq>=0.9,<0.10
+    rq-v0.10: rq>=0.10,<0.11
+    rq-v0.11: rq>=0.11,<0.12
+    rq-v0.12: rq>=0.12,<0.13
+    rq-v0.13: rq>=0.13,<0.14
+    rq-v1.0: rq>=1.0,<1.1
+    rq-v1.1: rq>=1.1,<1.2
+    rq-v1.2: rq>=1.2,<1.3
+    rq-v1.3: rq>=1.3,<1.4
+    rq-v1.4: rq>=1.4,<1.5
+    rq-v1.5: rq>=1.5,<1.6
+
+    # Sanic
+    sanic-v0.8: sanic>=0.8,<0.9
+    sanic-v18: sanic>=18.0,<19.0
+    sanic-v19: sanic>=19.0,<20.0
+    sanic-v20: sanic>=20.0,<21.0
+    sanic-v21: sanic>=21.0,<22.0
+    sanic-v22: sanic>=22.0,<22.9.0
+
+    sanic: websockets<11.0
+    sanic: aiohttp
+    sanic-v21: sanic_testing<22
+    sanic-v22: sanic_testing<22.9.0
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    {py3.5}-sanic: ujson<4
 
-    httpx-0.16: httpx>=0.16,<0.17
-    httpx-0.17: httpx>=0.17,<0.18
+    # Starlette
+    starlette: pytest-asyncio
+    starlette: python-multipart
+    starlette: requests
+    starlette: httpx
+    starlette: jinja2
+    starlette-v0.20: starlette>=0.20.0,<0.21.0
+    starlette-v0.22: starlette>=0.22.0,<0.23.0
+    starlette-v0.24: starlette>=0.24.0,<0.25.0
+    starlette-v0.26: starlette>=0.26.0,<0.27.0
+    starlette-v0.28: starlette>=0.28.0,<0.29.0
+
+    # Starlite
+    starlite: pytest-asyncio
+    starlite: python-multipart
+    starlite: requests
+    starlite: cryptography
+    starlite: pydantic<2.0.0
+    starlite: starlite
+    {py3.8,py3.9}-starlite: typing-extensions==4.5.0  # this is used by pydantic, which is used by starlite. When the problem is fixed in here or pydantic, this can be removed
+
+    # SQLAlchemy
+    sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
+    sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
+    sqlalchemy-v1.4: sqlalchemy>=1.4,<2.0
+    sqlalchemy-v2.0: sqlalchemy>=2.0,<2.1
+
+    # Tornado
+    tornado-v5: tornado>=5,<6
+    tornado-v6: tornado>=6.0a1
+
+    # Trytond
+    trytond-v5.4: trytond>=5.4,<5.5
+    trytond-v5.2: trytond>=5.2,<5.3
+    trytond-v5.0: trytond>=5.0,<5.1
+    trytond-v4.6: trytond>=4.6,<4.7
+
+    trytond-v{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
 
 setenv =
     PYTHONDONTWRITEBYTECODE=1
-    TESTPATH=tests
+    common: TESTPATH=tests
+    aiohttp: TESTPATH=tests/integrations/aiohttp
+    arq: TESTPATH=tests/integrations/arq
+    asgi: TESTPATH=tests/integrations/asgi
+    aws_lambda: TESTPATH=tests/integrations/aws_lambda
     beam: TESTPATH=tests/integrations/beam
-    django: TESTPATH=tests/integrations/django
-    flask: TESTPATH=tests/integrations/flask
-    quart: TESTPATH=tests/integrations/quart
+    boto3: TESTPATH=tests/integrations/boto3
     bottle: TESTPATH=tests/integrations/bottle
-    falcon: TESTPATH=tests/integrations/falcon
     celery: TESTPATH=tests/integrations/celery
-    requests: TESTPATH=tests/integrations/requests
-    aws_lambda: TESTPATH=tests/integrations/aws_lambda
+    chalice: TESTPATH=tests/integrations/chalice
+    cloud_resource_context: TESTPATH=tests/integrations/cloud_resource_context
+    django: TESTPATH=tests/integrations/django
+    falcon: TESTPATH=tests/integrations/falcon
+    fastapi:  TESTPATH=tests/integrations/fastapi
+    flask: TESTPATH=tests/integrations/flask
+    # run all tests with gevent
+    gevent: TESTPATH=tests
     gcp: TESTPATH=tests/integrations/gcp
-    sanic: TESTPATH=tests/integrations/sanic
+    httpx: TESTPATH=tests/integrations/httpx
+    huey: TESTPATH=tests/integrations/huey
+    loguru: TESTPATH=tests/integrations/loguru
+    opentelemetry: TESTPATH=tests/integrations/opentelemetry
+    pure_eval: TESTPATH=tests/integrations/pure_eval
+    pymongo: TESTPATH=tests/integrations/pymongo
     pyramid: TESTPATH=tests/integrations/pyramid
-    rq: TESTPATH=tests/integrations/rq
-    aiohttp: TESTPATH=tests/integrations/aiohttp
-    tornado: TESTPATH=tests/integrations/tornado
-    trytond: TESTPATH=tests/integrations/trytond
+    quart: TESTPATH=tests/integrations/quart
     redis: TESTPATH=tests/integrations/redis
     rediscluster: TESTPATH=tests/integrations/rediscluster
-    asgi: TESTPATH=tests/integrations/asgi
-    starlette:  TESTPATH=tests/integrations/starlette
-    fastapi:  TESTPATH=tests/integrations/fastapi
+    requests: TESTPATH=tests/integrations/requests
+    rq: TESTPATH=tests/integrations/rq
+    sanic: TESTPATH=tests/integrations/sanic
+    starlette: TESTPATH=tests/integrations/starlette
+    starlite: TESTPATH=tests/integrations/starlite
     sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
-    pure_eval: TESTPATH=tests/integrations/pure_eval
-    chalice: TESTPATH=tests/integrations/chalice
-    boto3: TESTPATH=tests/integrations/boto3
-    httpx: TESTPATH=tests/integrations/httpx
+    tornado: TESTPATH=tests/integrations/tornado
+    trytond: TESTPATH=tests/integrations/trytond
+    socket: TESTPATH=tests/integrations/socket
+    grpc: TESTPATH=tests/integrations/grpc
 
     COVERAGE_FILE=.coverage-{envname}
 passenv =
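
Most of the churn in the deps and setenv hunks above is mechanical: every factor condition gains the new v prefix (django-3.2 becomes django-v3.2) so that it keeps matching the renamed environments. A conditional line such as starlette-v0.20: starlette>=0.20.0,<0.21.0 applies only when all of its factors occur in the active environment's name; a toy approximation of that rule (real tox also supports !factor negation and {a,b} alternatives, omitted here):

    def condition_applies(env_name, condition):
        # Toy matcher: every dash-separated factor named in the condition
        # must be one of the environment's factors.
        factors = set(env_name.split("-"))
        return all(part in factors for part in condition.split("-"))

    condition_applies("py3.10-starlette-v0.20", "starlette-v0.20")  # True
    condition_applies("py3.10-starlette-v0.20", "starlette-v0.22")  # False
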
@@ -316,45 +495,50 @@ passenv =
     SENTRY_PYTHON_TEST_POSTGRES_USER
     SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
     SENTRY_PYTHON_TEST_POSTGRES_NAME
+    SENTRY_PYTHON_TEST_POSTGRES_HOST
 usedevelop = True
 extras =
-    flask: flask
     bottle: bottle
     falcon: falcon
-    quart: quart
+    flask: flask
+    pymongo: pymongo
 
 basepython =
     py2.7: python2.7
-    py3.4: python3.4
     py3.5: python3.5
     py3.6: python3.6
     py3.7: python3.7
     py3.8: python3.8
     py3.9: python3.9
     py3.10: python3.10
+    py3.11: python3.11
 
     # Python version is pinned here because flake8 actually behaves differently
     # depending on which version is used. You can patch this out to point to
     # some random Python 3 binary, but then you get guaranteed mismatches with
     # CI. Other tools such as mypy and black have options that pin the Python
     # version.
-    linters: python3.9
+    linters: python3.11
 
 commands =
-    ; https://github.com/pytest-dev/pytest/issues/5532
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12}: pip install pytest<5
-    {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2
+    {py3.7,py3.8}-boto3: pip install urllib3<2.0.0
 
+    ; https://github.com/pytest-dev/pytest/issues/5532
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
+    {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
     ; https://github.com/pallets/flask/issues/4455
-    {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
-
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
     ; https://github.com/more-itertools/more-itertools/issues/578
-    py3.5-flask-{0.11,0.12}: pip install more-itertools<8.11.0
+    py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
 
     ; use old pytest for old Python versions:
-    {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3
+    {py2.7,py3.5}: pip install pytest-forked==1.1.3
 
-    py.test --durations=5 {env:TESTPATH} {posargs}
+    ; Running `py.test` as an executable suffers from an import error
+    ; when loading tests in scenarios. In particular, django fails to
+    ; load the settings from the test module.
+    {py2.7}: python -m pytest --ignore-glob='*py3.py' -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}: python -m pytest -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
 
 [testenv:linters]
 commands =

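
The commands change near the end of the hunk is more than cosmetic: invoking pytest via python -m prepends the current working directory to sys.path, which is what lets packages such as tests.integrations.django resolve their settings module, while the bare py.test entry point does not. A small sketch of what the new command line amounts to (the test path is just an example):

    import subprocess
    import sys

    # "python -m pytest" puts the repo root on sys.path before collection,
    # unlike running the py.test console script directly.
    subprocess.run(
        [sys.executable, "-m", "pytest", "-rsx", "-s", "--durations=5", "-vvv",
         "tests/integrations/django"],
        check=True,
    )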