diff --git a/.github/workflows/gdb_versions.yml b/.github/workflows/gdb_versions.yml
new file mode 100644
index 0000000..3cb66a6
--- /dev/null
+++ b/.github/workflows/gdb_versions.yml
@@ -0,0 +1,215 @@
+name: CI-GDB-releases-Ubuntu
+
+on:
+ push:
+ branches:
+ - master
+ - 'release/v*'
+ # - feature/ci-test-several*
+ paths:
+ - 'scripts/**'
+ - 'tests/**'
+
+ pull_request:
+    branches: [ master, 'release/v*' ]
+ paths:
+ - 'scripts/**'
+ - 'tests/**'
+
+jobs:
+
+ test-gdb-release:
+ name: ${{ matrix.gdb }}-${{ matrix.python }}-${{ matrix.os }}
+ env:
+ FTP_URL: ftp://sourceware.org/pub/gdb/releases/
+ FILE_EXT: .tar.gz
+ PWD_PATH: .
+ CONFIG_MAKE_DIR_PATH: .
+ DOWNLOAD_PATH: gdb-downloads
+ GDB_BUILD_PATH: build
+ TARGET_GDB_PATH: /home/runner/work/built-gdb
+ GDB_EXE: /home/runner/work/built-gdb/bin/gdb
+ runs-on: ${{ matrix.os }}
+ strategy:
+      fail-fast: false # try all !
+ matrix:
+ python: [python2, python3]
+ os:
+ - ubuntu-latest
+ # - windows-latest
+ gdb:
+ # all 7.x versions until 7.12.1 fail to compile with recent GCC due to several new warnings (treated as errors)
+ # see this thread for instance http://sourceware-org.1504.n7.nabble.com/RFA-0-3-Fix-various-bugs-found-by-static-analysis-td415799.html#a417864
+ # - gdb-7.0.1a
+ # - gdb-7.0a
+ # - gdb-7.1a
+ # - gdb-7.2a
+ # - gdb-7.3.1
+ # - gdb-7.3a
+ # - gdb-7.4.1
+ # - gdb-7.4
+ # - gdb-7.5.1
+ # - gdb-7.5
+ # - gdb-7.6.1
+ # - gdb-7.6.2
+ # - gdb-7.6
+ # - gdb-7.7.1
+ # - gdb-7.7
+ # - gdb-7.8.1
+ # - gdb-7.8.2
+ # - gdb-7.8
+ # - gdb-7.9.1
+ # - gdb-7.9
+ # - gdb-7.10.1
+ # - gdb-7.10
+ # - gdb-7.11.1
+ # - gdb-7.11
+ # - gdb-7.12
+ - gdb-7.12.1
+ - gdb-8.0.1
+ - gdb-8.0
+ - gdb-8.1.1
+ - gdb-8.1
+ - gdb-8.2.1
+ - gdb-8.2
+ - gdb-8.3.1
+ - gdb-8.3
+ - gdb-9.1
+ steps:
+
+ - uses: actions/checkout@v2
+
+ - name: list pythons
+ run: ls /usr/bin/ | grep python
+
+ - name: list pythons
+ run: ls /usr/include -aRl | grep python
+
+    - name: Install relevant packets gnat, texinfo, gprbuild (hoping that gnat compiler might change anything to failures observed until now)
+      run: sudo apt-get install -y gnat texinfo gprbuild
+ if: matrix.os == 'ubuntu-latest'
+
+ # - name: Install relevant packets gnat-gpl which will hopefully provide gprbuild, gnat and gdb
+ # run: choco install gnat-gpl # -v -d
+ # if: matrix.os == 'windows-latest'
+
+ # - name: Add GNAT to PATH, chocolatey cannot do it by itself ?
+ # run: echo "::add-path::C:\GNAT\bin"
+ # if: matrix.os == 'windows-latest'
+
+ - uses: actions/cache@v1
+ id: cache-gdb-src
+ with:
+ path: ./${{ env.DOWNLOAD_PATH }}
+ key: ${{ env.DOWNLOAD_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}
+ restore-keys: |
+ ${{ env.DOWNLOAD_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}
+
+ - uses: actions/cache@v1
+ id: cache-gdb-build
+ with:
+ path: ./${{ env.GDB_BUILD_PATH }}
+ key: ${{ env.GDB_BUILD_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}-${{ matrix.os }}-${{ matrix.python }}__
+ restore-keys: |
+ ${{ env.GDB_BUILD_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}-${{ matrix.os }}-${{ matrix.python }}__
+
+ - uses: actions/cache@v1
+ id: cache-gdb-bin
+ with:
+ path: ${{ env.TARGET_GDB_PATH }}
+ key: ${{ env.TARGET_GDB_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}-${{ matrix.os }}-${{ matrix.python }}__
+ restore-keys: |
+ ${{ env.TARGET_GDB_PATH }}-${{matrix.gdb}}${{env.FILE_EXT}}-${{ matrix.os }}-${{ matrix.python }}__
+
+ - name: Create target dir ${{ env.TARGET_GDB_PATH }}
+ run: sudo mkdir -p ${{ env.TARGET_GDB_PATH }}
+
+ - name: Create downloads dir
+ run: pwd && ls -al && mkdir -p ${{ env.DOWNLOAD_PATH }}
+
+
+ - name: Download ${{env.FTP_URL}}${{matrix.gdb}}${{env.FILE_EXT}}
+ run: wget ${{env.FTP_URL}}${{matrix.gdb}}${{env.FILE_EXT}}
+ working-directory: ${{ env.DOWNLOAD_PATH }}
+ if: steps.cache-gdb-src.outputs.cache-hit != 'true'
+
+ - name: Decompress
+ run: pwd && ls -al && tar -xzf ./${{ env.DOWNLOAD_PATH }}/${{matrix.gdb}}${{env.FILE_EXT}} && ls -al
+ if: steps.cache-gdb-bin.outputs.cache-hit != 'true'
+
+ - name: get the effective tar root dir (the newly created dir at job root dir, it can be different from targz !)
+ id: get-untar-root-dir
+ #run: echo "::set-output name=dir::$(tar -tzf ./${{ env.DOWNLOAD_PATH }}/${{matrix.gdb}}${{env.FILE_EXT}} | head -1 | cut -f1 -d"/")"
+ run: echo "::set-output name=dir::$(echo gdb-[0-9].*)"
+ if: steps.cache-gdb-bin.outputs.cache-hit != 'true'
+
+ - name: Prepare a separate build directory
+ run: pwd && ls -al && mkdir -p ${{ env.GDB_BUILD_PATH }}
+ #if: steps.cache-gdb-build.outputs.cache-hit != 'true'
+ if: steps.cache-gdb-bin.outputs.cache-hit != 'true'
+
+ - name: Configure
+ run: ../${{ steps.get-untar-root-dir.outputs.dir }}/configure --prefix=${{ env.TARGET_GDB_PATH }} --with-python=${{ matrix.python }}
+ working-directory: ./${{ env.GDB_BUILD_PATH }}
+ if: steps.cache-gdb-bin.outputs.cache-hit != 'true'
+
+ - name: Make all install
+ run: ls -al && sudo make all install
+ working-directory: ./${{ env.GDB_BUILD_PATH }}
+ if: steps.cache-gdb-bin.outputs.cache-hit != 'true'
+
+
+ - name: Check GDB version
+ run: sudo ${{ env.GDB_EXE }} --version
+
+
+ - name: Check GDB supports python
+ run: sudo ${{ env.GDB_EXE }} --command=./tests/Test_GDB_python.gdb --batch
+
+
+
+
+ # - name: Build offsets_finder (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
+ # run: gprbuild -p -vm
+ # working-directory: ./tests/offsets_finder
+
+ # # Runs a set of commands using the runners shell
+ # - name: Run offsets_finder GDB session
+ # run: sudo ${{ env.GDB_EXE }} --se=./exe/main --command=find_offsets.gdb --batch
+ # working-directory: ./tests/offsets_finder
+ # if: matrix.os == 'ubuntu-latest'
+
+ # - name: Run offsets_finder GDB session
+ # run: sudo ${{ env.GDB_EXE }} --se=./exe/main.exe --command=find_offsets.gdb --batch
+ # working-directory: ./tests/offsets_finder
+ # if: matrix.os == 'windows-latest'
+
+
+ - name: Build cpp_test_project (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
+ run: gprbuild -p -vm
+ working-directory: ./tests/cpp_test_project
+
+
+ # - name: PERFORM GDB [TEST THE TEST SUITE]
+ # run: sudo ${{ env.GDB_EXE }} --se=./exe/main.exe --command=Test_Test_Suite.gdb --batch
+ # working-directory: ./tests/cpp_test_project
+ # if: matrix.os == 'windows-latest'
+
+
+ - name: PERFORM GDB [TEST THE TEST SUITE]
+ run: sudo ${{ env.GDB_EXE }} --se=./exe/main --command=Test_Test_Suite.gdb --batch
+ working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'ubuntu-latest'
+
+
+
+ # - name: PERFORM GDB [TEST SUITE]
+ # run: sudo ${{ env.GDB_EXE }} --se=./exe/main.exe --command=Test_Suite.gdb --batch
+ # working-directory: ./tests/cpp_test_project
+ # if: matrix.os == 'windows-latest'
+
+
+ - name: PERFORM GDB [TEST SUITE]
+ run: sudo ${{ env.GDB_EXE }} --se=./exe/main --command=Test_Suite.gdb --batch
+ working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'ubuntu-latest'
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 12e5c89..577b2d1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -1,103 +1,122 @@
-# This is a basic workflow to help you get started with Actions
+name: CI-OS-ubuntu-windows
-name: CI
-
-# Controls when the action will run. Triggers the workflow on push or pull request
-# events but only for the master branch
on:
push:
- branches: [ master ]
+ branches:
+ - master
+ - 'release/v*'
+ - 'feature/**'
+ paths:
+ - 'scripts/**'
+ - 'tests/**'
+
pull_request:
- branches: [ master ]
+    branches: [ master, 'release/v*' ]
+ paths:
+ - 'scripts/**'
+ - 'tests/**'
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
- # This workflow contains a single job called "build"
- build-and-execute-offsets-finder:
+
+ test-pretty-printers:
# The type of runner that the job will run on
- runs-on: ubuntu-latest
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os:
+ - ubuntu-latest
+ - windows-latest
+
+ # env:
+ # TEST_DIR_PATH: ./tests/offsets_finder
+ # PYTHON_TEST: ./tests/Test_GDB_python.gdb
+ # GDB_TEST_SCRIPT: find_offsets.gdb
+ # EXE_FILE: ./exe/main.exe
+ # GNAT_BIN_PATH: C:\GNAT\bin
- # Steps represent a sequence of tasks that will be executed as part of the job
steps:
- - name: Install relevant packets gnat, texinfo, gdb, gprbuild
- run: sudo apt-get install gnat texinfo gdb gprbuild
+ # - name: Chocolatey configuration
+ # run: choco config list
- - name: Locate GDB version
- run: which gdb
+ # - name: Store Chocolatey cache dir
+ # run: echo ::set-env name=CHOCOLATEY_CACHE_PATH::$(choco config get cacheLocation)
- - name: Check GDB version
- run: gdb --version
+ # - name: Get chocolatey cache dir
+ # id: choco-cache-dir-path
+ # run: echo "::set-output name=dir::$(choco config get cacheLocation)"
- - name: Output GDB configuration
- run: gdb --configuration
+ # - name: Cache the chocolatey download dir
+ # uses: actions/cache@v1
+ # id: choco-cache
+ # with:
+ # path: ${{ steps.choco-cache-dir-path.outputs.dir }}
+ # key: ${{ matrix.os }}-chocolatey-download-cache
+ # restore-keys: |
+ # ${{ matrix.os }}-chocolatey-download-cache
- - name: Prepare some GDB command tests
- run: echo -e "maintenance set internal-error quit yes\nhelp python\npython\nimport sys\nprint(\"Foo\")\nsys.exit(0)\nend\nq 25" > python_test.gdb
+ - name: Install relevant packets gnat-gpl which will hopefully provide gprbuild, gnat and gdb
+ run: choco install gnat-gpl # -v -d
+ if: matrix.os == 'windows-latest'
- - name: Check GDB supports python (exit code 25 means something went wrong in the python commands) otherwise exit code should be the 0 force un sys.exit(0)
- run: gdb --command=python_test.gdb --batch
+ - name: Add GNAT to PATH, chocolatey cannot do it by itself ?
+ run: echo "::add-path::C:\GNAT\bin"
+ if: matrix.os == 'windows-latest'
- # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v2
+ - name: Install relevant packets gnat, texinfo, gdb, gprbuild
+        run: sudo apt-get install -y gnat texinfo gdb gprbuild
+ if: matrix.os == 'ubuntu-latest'
- - name: Build the test project (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
- run: pwd && ls -al && gprbuild -p -vm
- working-directory: ./tests/offsets_finder
- - name: append GDB quit on error
- run: echo "maintenance set internal-error quit yes $(cat find_offsets.gdb)" > find_offsets.gdb && cat find_offsets.gdb
- working-directory: ./tests/offsets_finder
+ - name: GDB location, version and configuration
+ run: gdb --version && gdb --configuration
- # Runs a set of commands using the runners shell
- - name: Start a gdb session
- run: gdb --se=./exe/main --command=find_offsets.gdb --batch
- working-directory: ./tests/offsets_finder
+ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+ - uses: actions/checkout@v2
- build-and-execute-test-suite:
- runs-on: ubuntu-latest
- steps:
- - name: Install relevant packets gnat, texinfo, gdb, gprbuild
- run: sudo apt-get install gnat texinfo gdb gprbuild
+ - name: Check GDB supports python
+ run: gdb --command=./tests/Test_GDB_python.gdb --batch
- - name: Locate GDB version
- run: which gdb
+ - name: Build offsets_finder (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
+ run: gprbuild -p -vm
+ working-directory: ./tests/offsets_finder
- - name: Check GDB version
- run: gdb --version
+ # Runs a set of commands using the runners shell
+ - name: Run offsets_finder GDB session
+ run: gdb --se=./exe/main --command=find_offsets.gdb --batch
+ working-directory: ./tests/offsets_finder
+ if: matrix.os == 'ubuntu-latest'
- - name: Output GDB configuration
- run: gdb --configuration
+ - name: Run offsets_finder GDB session
+ run: gdb --se=./exe/main.exe --command=find_offsets.gdb --batch
+ working-directory: ./tests/offsets_finder
+ if: matrix.os == 'windows-latest'
- - name: Prepare some GDB command tests
- run: echo -e "maintenance set internal-error quit yes\nhelp python\npython\nimport sys\nprint(\"Foo\")\nsys.exit(0)\nend\nq 25" > python_test.gdb
- - name: Check GDB supports python (exit code 25 means something went wrong in the python commands) otherwise exit code should be the 0 force un sys.exit(0)
- run: gdb --command=python_test.gdb --batch
+ - name: Build cpp_test_project (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
+ run: gprbuild -p -vm
+ working-directory: ./tests/cpp_test_project
- # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- - uses: actions/checkout@v2
- - name: build the test project (gprbuild finds automagically the .gpr file, -p to force missing dirs creation)
- run: pwd && ls -al && gprbuild -p -vm
+ - name: PERFORM GDB [TEST THE TEST SUITE]
+ run: gdb --se=./exe/main.exe --command=Test_Test_Suite.gdb --batch
working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'windows-latest'
- - name: Prepend GDB quit on error (and output file for check)
- run: echo "maintenance set internal-error quit yes $(cat .gdbinit)" > .gdbinit && cat .gdbinit
- working-directory: ./tests/cpp_test_project
- - name: Prepare test test suite GDB script
- run: cat .gdbinit > test_test_suite.gdb && echo -e "source checker.py\npython test_the_test_suite()" >> test_test_suite.gdb && cat test_test_suite.gdb
+ - name: PERFORM GDB [TEST THE TEST SUITE]
+ run: gdb --se=./exe/main --command=Test_Test_Suite.gdb --batch
working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'ubuntu-latest'
- - name: Prepare test suite GDB script
- run: cat .gdbinit > test_suite.gdb && echo -e "source checker.py\npython test_suite()" >> test_suite.gdb && cat test_suite.gdb
- working-directory: ./tests/cpp_test_project
- # Runs a set of commands using the runners shell
- - name: PERFORM GDB [TEST THE TEST SUITE]
- run: gdb --se=./exe/main --command=test_test_suite.gdb --batch
+
+ - name: PERFORM GDB [TEST SUITE]
+ run: gdb --se=./exe/main.exe --command=Test_Suite.gdb --batch
working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'windows-latest'
+
- name: PERFORM GDB [TEST SUITE]
- run: gdb --se=./exe/main --command=test_suite.gdb --batch
+ run: gdb --se=./exe/main --command=Test_Suite.gdb --batch
working-directory: ./tests/cpp_test_project
+ if: matrix.os == 'ubuntu-latest'
diff --git a/.github/workflows/show_env.yml.bak b/.github/workflows/show_env.yml.bak
new file mode 100644
index 0000000..b2ded91
--- /dev/null
+++ b/.github/workflows/show_env.yml.bak
@@ -0,0 +1,45 @@
+name: Show env
+
+on:
+ push:
+ branches:
+ - feature/ci-test-several*
+
+jobs:
+ understand-envs:
+ env:
+ SOME_VAR: FOO
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os:
+ - ubuntu-latest
+ - windows-latest
+ steps:
+ - name: Is variable exported?
+ run: |
+ echo "${{ env.SOME_VAR }}"
+
+ - name: Change var to bAr
+ run: |
+ echo "::set-env name=SOME_VAR=bAr"
+
+ - name: Is variable updated ?
+ run: |
+ echo "${{ env.SOME_VAR }}"
+
+ - name: Change var to pwd
+ run: |
+ echo "::set-env name=SOME_VAR=$(pwd)"
+
+ - name: Is variable updated ?
+ run: |
+ echo "${{ env.SOME_VAR }}"
+
+ - name: Set output pwd
+ id: flex
+ run: |
+ echo "::set-output name=ze_pwd::$(pwd)"
+
+ - name: echo ze_pwd
+ run: echo ${{ steps.flex.outputs.ze_pwd }}
diff --git a/.gitignore b/.gitignore
index ef24d80..4391dce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,14 @@
**/**/proto.py
assets/captures/
**/**/prout.py
+
+tests/offsets_finder/before_run.txt
+
+tests/offsets_finder/types.txt
+.github/workflows/show_env.yml
+.github/workflows/gdb_versions_extractor.py
+
+.github/workflows/gdb-8.0.1.tar.gz
+
+.github/workflows/download_gdb_sources.yml.bak
+test_github_http_api.py
diff --git a/README.md b/README.md
index 86d908b..ca84e57 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,25 @@
# A simplistic GDB pretty printer for [nlohmann-json c++][3]
-![CI](https://github.com/LoneWanderer-GH/nlohmann-json-gdb/workflows/CI/badge.svg)
+![CI-OS-ubuntu-windows](https://github.com/LoneWanderer-GH/nlohmann-json-gdb/workflows/CI-OS-ubuntu-windows/badge.svg)
+![CI-GDB-releases-Ubuntu](https://github.com/LoneWanderer-GH/nlohmann-json-gdb/workflows/CI-GDB-releases-Ubuntu/badge.svg)
-Provides GDB script and python GDB script to pretty print a [nlohmann / json][3]
- - [x] compatible with a live inferior process and debug symbols
+Provides GDB script and python GDB pretty printer script that allows you to print a [nlohmann / json][3]
+ - [x] compatible with a live inferior process with debug symbols
- [x] compatible with core dump files with debug symbols
- - Tested on:
- - Win Intel x64
- - Raspbian arm x32
- - Ubuntu x64 (Github CI)
+
+This is also a playground for me to get used to git, Github, gitflow and GDB python scripting/pretty printing.
+
+## Release notes:
+
+### v0.0.1: first pretty printer release
+
+Features:
+ - improved overall GDB python pretty printer code
+ - now multiplatform (verified on some)
+ - created some sort of a CI process to check we did not mess up with features:
+ - checks that the pretty printer output matches the json.dump() output.
+ - checks various GDB releases + python versions on Ubuntu
+ - also checks on windows server (but only the gnat community GDB version obtained with chocolatey)
---
@@ -28,11 +39,27 @@ Provides GDB script and python GDB script to pretty print a [nlohmann / json][3
# 1. Prerequisites
- - *GDB 8.3* debugger installed, ready to use.
- Python support started with GDB 7, so it may work with versions starting GDB 7
- - an executable to debug **with debug symbols available to GDB** which uses the [JSON lib 3.7.3][3]
+ - *GDB* debugger installed, ready to use, and of one of the versions below
+ - Tested on Ubuntu x64, with both python 2.7 and python 3.6:
+ - GDB 7.12.1
+ - GDB 8.0
+ - GDB 8.0.1
+ - GDB 8.1
+ - GDB 8.1.1
+ - GDB 8.2
+ - GDB 8.2.1
+ - GDB 8.3
+ - GDB 8.3.1
+ - GDB 9.1
+ - Windows
+ - Server 2019 (win 10; x86_64) and Windows 10 Pro x86_64 GDB 8.3 with python 2.7.10 (from [GNAT CE 2019][2])
+ - Given the successful tests on Ubuntu x64 with various GDB and python versions, it is likely to work for the GDB + python versions above on Windows too.
+ - Tested on Raspbian arm 32, with python 2.7 and GDB 8.3.1
+   - Given the successful tests on Ubuntu x64 with various GDB and python versions, it is likely to work for the GDB versions above on Raspbian too.
+ - an executable to debug **with debug symbols available to GDB** which uses the [JSON lib _3.7.3_][3]. No other versions tested yet.
- or a core dump **with debug symbols available to GDB** (for linux users)
- - _Some [GDB commands knowledge][4] might be useful for your debug session to be successful ;)_
+
+ - _Some [GDB commands knowledge][4] might be useful for your debug session to be successful_
## Your GDB does not support python ?
@@ -51,7 +78,51 @@ Have a look [on this wiki page](https://github.com/LoneWanderer-GH/nlohmann-json
# 2. Installing
Just copy the GDB and/or python script you need in a folder near your executable to debug, and of course, load it into your GDB.
-See [Content](#Content) and [Usage](#Usage) sections below for more details.
+For linux users, you can do a wget on the file (or use the release package, decompress, and use the file you want)
+
+```
+# get the file
+$ wget https://raw.githubusercontent.com/LoneWanderer-GH/nlohmann-json-gdb/master/scripts/nlohmann_json.gdb
+# start GDB session
+$ gdb
+(gdb) file ... # load your exe
+(gdb) source nlohmann_json.gdb
+# print a JSON variable
+(gdb)pjson foo
+{
+ "flex" : 0.2,
+ "awesome_str": "bleh",
+ "nested": {
+ "bar": "barz"
+ }
+}
+```
+
+or
+
+```
+# get the file
+$ wget https://raw.githubusercontent.com/LoneWanderer-GH/nlohmann-json-gdb/master/scripts/nlohmann_json.py
+# start GDB session
+$ gdb
+(gdb) file ... # load your exe
+(gdb) source nlohmann_json.py
+# print a JSON variable
+(gdb)p foo
+$1 = {
+ "flex" : 0.2,
+ "awesome_str": "bleh",
+ "nested": {
+ "bar": "barz"
+ }
+}
+```
+
+For windows users, its basically the same except you may not be able to download the file in command line, links are provided in [Content](#Content) below.
+Also, your GDB might be embedded in some IDE, but its most likely a GDB console front-end.
+
+See also [Content](#Content) and [Usage](#Usage) sections below for more details of what you may find in this repo.
+
# 3. Content
@@ -137,15 +208,22 @@ see also [this GDB doc](https://doc.ecoscentric.com/gnutools/doc/gdb/Files.html#
_Coding technique for the pretty printer is quite naive, but it works.
Any seasoned advice and support appreciated. Aspects I would like to improve:_
- performance
- - walking in memory using as much GDB commands instead of hardcoding some offsets. This requires both knowledge of GDB and the JSON library symbols usage
+ - code style
+ - Release packaging
+ - Lib version checks
## Possible TODO list
- - [x] ~~the pythonGDBpretty printer core dump management is not (yet ?) done (i.e. core dump means no inferior process to call dump() in any way, and possibly less/no (debug) symbols to rely on)~~ Core dump with debug symbols tested and should be working.
+ - [ ] dont use this TODO list, but Github issues and Github project management
- [ ] printer can be customised further to print the 0x addresses, I chose not to since the whole point for me was NOT to explore them in GDB. You would have to add few python `print` here and there
- [ ] add the hexa value for floating point numbers, or for all numerical values
- - [ ] Improve method to get `std::string` `type` and `sizeof`. The current method assumes some known symbols names, that most probably depends on the compilation tools (C++11).
- Sadly, GDB command `whatis` and `ptype` cannot resolve directly and easily `std::string`
+ - [ ] reduce amount of copy/pasta between [offsets_finder.py](tests/offsets_finder/offsets_finder.py) and [nlohmann_json.py](scripts/nlohmann_json.py)
+
+ - [x] ~~the pythonGDBpretty printer core dump management is not (yet ?) done (i.e. core dump means no inferior process to call dump() in any way, and possibly less/no (debug) symbols to rely on)~~
+ Core dump with debug symbols tested and should be working.
+ - [x] ~~Improve method to get `std::string` `type` and `sizeof`. The current method assumes some known symbols names, that most probably depends on the compilation tools (C++11).
+ Sadly, GDB command `whatis` and `ptype` cannot resolve directly and easily `std::string`~~
+ Solved with the gdb type template argument type extraction feature
@@ -254,151 +332,11 @@ This part will tell if the method to find data offsets in the proposed python sc
1. build the project [simple_offsets_finder.gpr](tests/offsets_finder/simple_offsets_finder.gpr) with the command
`gprbuild -p -P debug_printer.gpr`
- 2. Start a GDB session, here is an console output example, using this console command to launch GDB:
+ 2. Start a GDB session, using this console command to launch GDB. It should tell you if the GDB code deduced offset values are consistent with the bruteforce approach.
```
gdb --se=exe/main.exe -command=find_offsets.gdb --batch
```
-
- ```
- #
- # Force gdb to use pretty print of structures managed by default (instead of a barely flat line)
- #
- # set print pretty
- #
- #
- # load the offset finder python gdb script
- #
- source offsets_finder.py
- #
- PLATFORM_BITS 64
-
- Search range will be:
- MIN: 2 - MAX: 512 - STEP: 2
-
-
- -------------The researched std::string type for this executable is-------------
- std::__cxx11::basic_string, std::allocator >-
- Using regex: ^std::__cxx.*::basic_string$
- --------------------------------------------------------------------------------
-
-
- --------The researched nlohmann::basic_json type for this executable is---------
- nlohmann::basic_json, std::allocator >, bool, long long, unsigned long long, double, std::allocator, nlohmann::adl_serializer>
- Using regex: ^nlohmann::basic_json<.*>$
- --------------------------------------------------------------------------------
-
- #
- # Auto setting break point before exe prints mixed_nested
- #
- Breakpoint 1 at 0x401753: file F:\DEV\Projets\nlohmann-json-gdb\offsets_finder\src\main.cpp, line 44.
- #
- # Running the exe
- THIS
- IS
- THE
- END
- (ABC)
- MY
-
- Breakpoint 1, main () at F:\DEV\Projets\nlohmann-json-gdb\offsets_finder\src\main.cpp:44
- 44 F:\DEV\Projets\nlohmann-json-gdb\offsets_finder\src\main.cpp: No such file or directory.
- $1 = (nlohmann::basic_json, std::allocator >, bool, long long, unsigned long long, double, std::allocator, nlohmann::adl_serializer>::object_t *) 0x1104b90
- #
- # ### Prints using python pretty printer offsetts finder ###
- #
- # Print simple_json (python pretty printer)
- $2 = Expected pair:
- Testing Node.Key offset 2
- Testing Node.Key offset 4
- Testing Node.Key offset 6
- Testing Node.Key offset 8
- Testing Node.Key offset 10
- Testing Node.Key offset 12
- Testing Node.Key offset 14
- Testing Node.Key offset 16
- Testing Node.Key offset 18
- Testing Node.Key offset 20
- Testing Node.Key offset 22
- Testing Node.Key offset 24
- Testing Node.Key offset 26
- Testing Node.Key offset 28
- Testing Node.Key offset 30
- Testing Node.Key offset 32
- Found the key '"first"'
- Testing Node.Value offset 2
- Testing Node.Value offset 4
- Testing Node.Value offset 6
- Testing Node.Value offset 8
- Testing Node.Value offset 10
- Testing Node.Value offset 12
- Testing Node.Value offset 14
- Testing Node.Value offset 16
- Testing Node.Value offset 18
- Testing Node.Value offset 20
- Testing Node.Value offset 22
- Testing Node.Value offset 24
- Testing Node.Value offset 26
- Testing Node.Value offset 28
- Testing Node.Value offset 30
- Testing Node.Value offset 32
- Found the value '"second"'
-
-
- Offsets for STD::MAP exploration from a given node are:
-
- MAGIC_OFFSET_STD_MAP_KEY = 32 = expected value from symbols 32
- MAGIC_OFFSET_STD_MAP_VAL = 32 = expected value from symbols 32
-
- ===> Offsets for STD::MAP : [ FOUND ] <===
-
-
-
- #
- # Print simple_array (python pretty printer)
- #
- $3 = Trying to search array element 996699FOO at index (2)
- Testing vector value offset 2
- value: Not a valid JSON type, continuing
- Testing vector value offset 4
- value: Not a valid JSON type, continuing
- Testing vector value offset 6
- value: null
- Testing vector value offset 8
- value: 25
- Testing vector value offset 10
- value: Not a valid JSON type, continuing
- Testing vector value offset 12
- value: Not a valid JSON type, continuing
- Testing vector value offset 14
- value: null
- Testing vector value offset 16
- value: "996699FOO"
-
-
- Offsets for STD::VECTOR exploration are:
-
- MAGIC_OFFSET_STD_VECTOR = 16
- OFFSET expected value = 16 (o["_M_impl"]["_M_start"], vector element size)
-
- ===> Offsets for STD::VECTOR : [ FOUND ] <===
-
-
-
-
-
- ############ FORCING A NORMAL EXIT (code 0) ############
-
- Errors in python should have triggered a different exit code earlier
-
- A debugging session is active.
-
- Inferior 1 [process 1320] will be killed.
-
- Quit anyway? (y or n) [answered Y; input not from terminal]
-
- ```
-
## Another approach I know of
_from a guru of my workplace_
diff --git a/scripts/gdb_helper_commands.py b/scripts/gdb_helper_commands.py
new file mode 100644
index 0000000..c1daf45
--- /dev/null
+++ b/scripts/gdb_helper_commands.py
@@ -0,0 +1,73 @@
+#
+# Licensed under the MIT License .
+# SPDX-License-Identifier: MIT
+# Copyright (c) 2020 LoneWanderer-GH https://github.com/LoneWanderer-GH
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+#
+import gdb
+
+
+# def get_fields_offset_from_type(str_type):
+# gdb_type = gdb.lookup_type(str_type)
+# s = gdb.execute("ptype /o {}".format(gdb_type), to_string=True)
+# lines = s.splitlines()
+# field_names = [f.name for f in gdb_type.fields()]
+# fields_offsets = dict()
+
+# # structure to read
+# # /* offset | size */ type = struct std::_Rb_tree_node_base {
+# # /* 0 | 4 */ std::_Rb_tree_color _M_color;
+# # /* XXX 4-byte hole */
+# # /* 8 | 8 */ _Base_ptr _M_parent;
+# # /* 16 | 8 */ _Base_ptr _M_left;
+# # /* 24 | 8 */ _Base_ptr _M_right;
+# # /**/
+# # /* total size (bytes): 32 */
+# # }
+# matcher = re.compile("\/\*\s+(\d+).*")
+# for l in lines:
+# for field in field_names:
+# if field in l:
+# match = matcher.match(l)# re.split("\|", l)[0].
+# field_offset = int(match.group(1))
+# fields_offsets[field] = field_offset
+# # print("Found offset {:02d} for {}".format(field_offset, field))
+# break # break the loop over fields names, go next line
+# else:
+# continue
+# return fields_offsets
+
+class List_Types_To_File(gdb.Command):
+ command_name_str = "list-types-to-file"
+ command_expected_args_nb = 1
+ def __init__(self):
+        super(List_Types_To_File, self).__init__(List_Types_To_File.command_name_str, gdb.COMMAND_DATA)
+
+ def invoke(self, arg, from_tty):
+
+ argv = gdb.string_to_argv(arg)
+ nb_args = len(argv)
+        if nb_args != List_Types_To_File.command_expected_args_nb:
+            raise gdb.GdbError("{} takes exactly {} argument".format(List_Types_To_File.command_name_str, List_Types_To_File.command_expected_args_nb))
+ with open(arg, "w") as f:
+ infos_ = gdb.execute("info types", to_string=True)
+ f.write(infos_)
+
+List_Types_To_File()
diff --git a/scripts/nlohmann_json.py b/scripts/nlohmann_json.py
index 5622991..962d6dd 100644
--- a/scripts/nlohmann_json.py
+++ b/scripts/nlohmann_json.py
@@ -56,17 +56,41 @@
# GDB black magic
""""""
NLOHMANN_JSON_TYPE_PREFIX = "nlohmann::basic_json"
+NLOHMANN_JSON_KIND_FIELD_NAME = "m_type"
+STD_RB_TREE_NODE_TYPE_NAME = "std::_Rb_tree_node_base"
-class NO_TYPE_ERROR(Exception):
+
+class NO_RB_TREE_TYPES_ERROR(Exception):
pass
-class NO_ENUM_TYPE_ERROR(Exception):
- pass
+# adapted from https://github.com/hugsy/gef/blob/dev/gef.py
+def show_last_exception():
+ """Display the last Python exception."""
+ print("")
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ print(" Exception raised ".center(80, HORIZONTAL_LINE))
+ print("{}: {}".format(exc_type.__name__, exc_value))
+ print(" Detailed stacktrace ".center(80, HORIZONTAL_LINE))
+ for (filename, lineno, method, code) in traceback.extract_tb(exc_traceback)[::-1]:
+ print("""{} File "{}", line {:d}, in {}()""".format(
+ DOWN_ARROW, filename, lineno, method))
+ print(" {} {}".format(RIGHT_ARROW, code))
+ print(" Last 10 GDB commands ".center(80, HORIZONTAL_LINE))
+ gdb.execute("show commands")
+ print(" Runtime environment ".center(80, HORIZONTAL_LINE))
+ print("* GDB: {}".format(gdb.VERSION))
+ print("* Python: {:d}.{:d}.{:d} - {:s}".format(sys.version_info.major, sys.version_info.minor,
+ sys.version_info.micro, sys.version_info.releaselevel))
+ print("* OS: {:s} - {:s} ({:s}) on {:s}".format(platform.system(), platform.release(),
+ platform.architecture()[0],
+ " ".join(platform.dist())))
+ print(HORIZONTAL_LINE * 80)
+ print("")
+ gdb.execute("q {}".format(ERROR_PARSING_ERROR))
+ sys.exit(ERROR_PARSING_ERROR)
-class NO_RB_TREE_TYPES_ERROR(Exception):
- pass
def find_platform_type(regex, helper_type_name):
# we suppose its a unique match, 4 lines output
@@ -75,26 +99,30 @@ def find_platform_type(regex, helper_type_name):
lines = info_types.splitlines()
# correct command should have given lines, the last one being the correct one
if len(lines) == 4:
- # split last line, after line number and spaces
- t = re.split("^\d+:\s+", lines[-1])
- # transform result
- t = "".join(t[1::]).split(";")[0]
+ for l in lines:
+ print("### Log info types output : {}".format(l))
+ l = lines[-1]
+ if l.startswith(helper_type_name):
+ # line format "type_name;"
+ t = l.split(";")[0]
+ else:
+ # example
+ # 14708: nlohmann::basic_json, std::allocator >, bool, long long, unsigned long long, double, std::allocator, nlohmann::adl_serializer>;
+ t = re.split("^\d+:\s+", lines[-1])
+ # transform result
+ t = "".join(t[1::]).split(";")[0]
print("")
- print("The researched {} type for this executable is".format(helper_type_name).center(80, "-"))
+ print("The researched {} type for this executable is".format(
+ helper_type_name).center(80, "-"))
print("{}".format(t).center(80, "-"))
- print("Using regex: {}".format(regex))
+ print("(Using regex: {})".format(regex))
print("".center(80, "-"))
print("")
return t
else:
- raise NO_TYPE_ERROR("Too many matching types found ...\n{}".format("\n\t".join(lines)))
-
-
-def find_platform_std_string_type():
- std_str_regex = "^std::__cxx.*::basic_string$" # platform/compilation dependant ?
- t = find_platform_type(std_str_regex, "std::string")
- return gdb.lookup_type(t)
+ raise ValueError(
+ "Too many matching types found for JSON ...\n{}".format("\n\t".join(lines)))
def find_platform_json_type(nlohmann_json_type_prefix):
@@ -108,81 +136,63 @@ def find_platform_json_type(nlohmann_json_type_prefix):
def find_lohmann_types():
- nlohmann_json_type_namespace = find_platform_json_type(NLOHMANN_JSON_TYPE_PREFIX)
- try:
- NLOHMANN_JSON_TYPE = gdb.lookup_type(nlohmann_json_type_namespace).pointer()
- except:
- raise NO_TYPE_ERROR("Type namespace found but could not obtain type data ... WEIRD !")
- try:
- enum_json_detail = gdb.lookup_type("nlohmann::detail::value_t").fields()
- except:
- raise NO_ENUM_TYPE_ERROR()
- return nlohmann_json_type_namespace, NLOHMANN_JSON_TYPE, enum_json_detail
-
-def get_fields_offset_from_type(str_type):
- gdb_type = gdb.lookup_type(str_type)
- s = gdb.execute("ptype /o {}".format(gdb_type), to_string=True)
- lines = s.splitlines()
- field_names = [f.name for f in gdb_type.fields()]
- fields_offsets = dict()
-
- # structure to read
- # /* offset | size */ type = struct std::_Rb_tree_node_base {
- # /* 0 | 4 */ std::_Rb_tree_color _M_color;
- # /* XXX 4-byte hole */
- # /* 8 | 8 */ _Base_ptr _M_parent;
- # /* 16 | 8 */ _Base_ptr _M_left;
- # /* 24 | 8 */ _Base_ptr _M_right;
- # /**/
- # /* total size (bytes): 32 */
- # }
- matcher = re.compile("\/\*\s+(\d+).*")
- for l in lines:
- for field in field_names:
- if field in l:
- match = matcher.match(l)# re.split("\|", l)[0].
- field_offset = int(match.group(1))
- fields_offsets[field] = field_offset
- # print("Found offset {:02d} for {}".format(field_offset, field))
- break # break the loop over fields names, go next line
- else:
- continue
- return fields_offsets
-
-def find_rb_tree_types():
+ """
+ Finds the essential types needed to debug nlohmann JSONs
+ """
+
+ nlohmann_json_type_namespace = find_platform_json_type(
+ NLOHMANN_JSON_TYPE_PREFIX)
+
+ # enum type that represents what exactly the current json object is
+ nlohmann_json_type = gdb.lookup_type(nlohmann_json_type_namespace)
+
+ # the real type behind "std::string"
+ # std::map is a C++ template, first template arg is the std::map key type
+ nlohmann_json_map_key_type = nlohmann_json_type.template_argument(0)
+
+ enum_json_detail_type = None
+ for field in nlohmann_json_type.fields():
+ if NLOHMANN_JSON_KIND_FIELD_NAME == field.name:
+ enum_json_detail_type = field.type
+ break
+
+ enum_json_details = enum_json_detail_type.fields()
+
+ return nlohmann_json_type_namespace, nlohmann_json_type.pointer(), enum_json_details, nlohmann_json_map_key_type
+
+
+def find_std_map_rb_tree_types():
try:
- std_rb_header_offsets = get_fields_offset_from_type("std::_Rb_tree_node_base")
- std_rb_tree_node_type = gdb.lookup_type("std::_Rb_tree_node_base::_Base_ptr").pointer()
- std_rb_tree_size_type = gdb.lookup_type("std::size_t").pointer()
- return std_rb_tree_node_type, std_rb_tree_size_type, std_rb_header_offsets
+ std_rb_tree_node_type = gdb.lookup_type(STD_RB_TREE_NODE_TYPE_NAME)
+ return std_rb_tree_node_type
except:
- raise NO_RB_TREE_TYPES_ERROR()
+ raise ValueError("Could not find the required RB tree types")
-## SET GLOBAL VARIABLES
+
+# SET GLOBAL VARIABLES
try:
- STD_STRING = find_platform_std_string_type()
- NLOHMANN_JSON_TYPE_NAMESPACE, NLOHMANN_JSON_TYPE, ENUM_JSON_DETAIL = find_lohmann_types()
- STD_RB_TREE_NODE_TYPE, STD_RB_TREE_SIZE_TYPE, STD_RB_HEADER_OFFSETS = find_rb_tree_types()
-except NO_TYPE_ERROR:
- print("FATAL ERROR {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
- print("FATAL ERROR {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
- print("FATAL ERROR {}: missing JSON type definition, could not find the JSON type starting with {}".format(NLOHMANN_JSON_TYPE_PREFIX))
- gdb.execute("q {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
-except NO_RB_TREE_TYPES_ERROR:
- print("FATAL ERROR {}".format(ERROR_NO_RB_TYPES_FOUND))
- print("FATAL ERROR {}".format(ERROR_NO_RB_TYPES_FOUND))
- print("FATAL ERROR {}: missing some STL RB tree types definition")
- gdb.execute("q {}".format(ERROR_NO_RB_TYPES_FOUND))
+ NLOHMANN_JSON_TYPE_NAMESPACE, NLOHMANN_JSON_TYPE_POINTER, ENUM_JSON_DETAIL, NLOHMANN_JSON_MAP_KEY_TYPE = find_lohmann_types()
+ STD_RB_TREE_NODE_TYPE = find_std_map_rb_tree_types()
+except:
+ show_last_exception()
+
+# convert the full namespace to only its literal value
+# useful to access the correct variant of JSON m_value
+ENUM_LITERAL_NAMESPACE_TO_LITERAL = dict(
+ [(f.name, f.name.split("::")[-1]) for f in ENUM_JSON_DETAIL])
-ENUM_LITERAL_NAMESPACE_TO_LITERAL = dict([ (f.name, f.name.split("::")[-1]) for f in ENUM_JSON_DETAIL])
-# ENUM_LITERALS_NAMESPACE = ENUM_LITERAL_NAMESPACE_TO_LITERAL.keys()
-def std_stl_item_to_int_address(node):
- return int(str(node), 0)
+def gdb_value_address_to_int(node):
+ val = None
+ if type(node) == gdb.Value:
+ # gives the int value of the address
+ # .address returns another gdb.Value that cannot be cast to int
+ val = int(str(node), 0)
+ return val
-def parse_std_str_from_hexa_address(hexa_str):
+def parse_std_string_from_hexa_address(hexa_str):
# https://stackoverflow.com/questions/6776961/how-to-inspect-stdstring-in-gdb-with-no-source-code
return '"{}"'.format(gdb.parse_and_eval("*(char**){}".format(hexa_str)).string())
@@ -220,13 +230,14 @@ def parse_as_object(self):
# traversing tree is a an adapted copy pasta from STL gdb parser
# (http://www.yolinux.com/TUTORIALS/src/dbinit_stl_views-1.03.txt and similar links)
- node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"]
+ node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"]
tree_size = o["_M_t"]["_M_impl"]["_M_node_count"]
- size_of_node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"].referenced_value().type.sizeof
+ # for safety
+ # assert(node.referenced_value().type == STD_RB_TREE_NODE_TYPE)
+ # assert(node.referenced_value().type.sizeof == STD_RB_TREE_NODE_TYPE.sizeof)
i = 0
-
if tree_size == 0:
return "{}"
else:
@@ -234,31 +245,37 @@ def parse_as_object(self):
self.indent_level += 1
while i < tree_size:
# when it is written "+1" in the STL GDB script, it performs an increment of 1 x size of object
- key_address = std_stl_item_to_int_address(node) + size_of_node
+ # key is right after
+ key_address = gdb_value_address_to_int(
+ node) + STD_RB_TREE_NODE_TYPE.sizeof
- k_str = parse_std_str_from_hexa_address(hex(key_address))
+ k_str = parse_std_string_from_hexa_address(key_address)
- value_address = key_address + STD_STRING.sizeof
- value_object = gdb.Value(value_address).cast(NLOHMANN_JSON_TYPE)
+ value_address = key_address + NLOHMANN_JSON_MAP_KEY_TYPE.sizeof
+ value_object = gdb.Value(value_address).cast(
+ NLOHMANN_JSON_TYPE_POINTER)
- v_str = LohmannJSONPrinter(value_object, self.indent_level + 1).to_string()
+ v_str = LohmannJSONPrinter(
+ value_object, self.indent_level + 1).to_string()
k_v_str = "{} : {}".format(k_str, v_str)
- end_of_line = "\n" if tree_size <= 1 or i == (tree_size - 1) else ",\n"
+ end_of_line = "\n" if tree_size <= 1 or i == (
+ tree_size - 1) else ",\n"
- s = s + (" " * (self.indent_level * INDENT)) + k_v_str + end_of_line
+ s = s + (" " * (self.indent_level * INDENT)) + \
+ k_v_str + end_of_line
- if std_stl_item_to_int_address(node["_M_right"]) != 0:
+ if gdb_value_address_to_int(node["_M_right"]) != 0:
node = node["_M_right"]
- while std_stl_item_to_int_address(node["_M_left"]) != 0:
+ while gdb_value_address_to_int(node["_M_left"]) != 0:
node = node["_M_left"]
else:
tmp_node = node["_M_parent"]
- while std_stl_item_to_int_address(node) == std_stl_item_to_int_address(tmp_node["_M_right"]):
+ while gdb_value_address_to_int(node) == gdb_value_address_to_int(tmp_node["_M_right"]):
node = tmp_node
tmp_node = tmp_node["_M_parent"]
- if std_stl_item_to_int_address(node["_M_right"]) != std_stl_item_to_int_address(tmp_node):
+ if gdb_value_address_to_int(node["_M_right"]) != gdb_value_address_to_int(tmp_node):
node = tmp_node
i += 1
self.indent_level -= 2
@@ -266,7 +283,7 @@ def parse_as_object(self):
return s
def parse_as_str(self):
- return parse_std_str_from_hexa_address(str(self.val["m_value"][self.field_type_short]))
+ return parse_std_string_from_hexa_address(str(self.val["m_value"][self.field_type_short]))
def parse_as_leaf(self):
s = "WTFBBQ !"
@@ -288,7 +305,7 @@ def parse_as_array(self):
i = 0
# when it is written "+1" in the STL GDB script, it performs an increment of 1 x size of object
element_size = start.referenced_value().type.sizeof
- start_address = std_stl_item_to_int_address(start)
+ start_address = gdb_value_address_to_int(start)
if size == 0:
s = "[]"
else:
@@ -297,10 +314,13 @@ def parse_as_array(self):
while i < size:
offset = i * element_size
i_address = start_address + offset
- value_object = gdb.Value(i_address).cast(NLOHMANN_JSON_TYPE)
- v_str = LohmannJSONPrinter(value_object, self.indent_level + 1).to_string()
- end_of_line = "\n" if size <= 1 or i == (size -1) else ",\n"
- s = s + (" " * (self.indent_level * INDENT)) + v_str + end_of_line
+ value_object = gdb.Value(i_address).cast(
+ NLOHMANN_JSON_TYPE_POINTER)
+ v_str = LohmannJSONPrinter(
+ value_object, self.indent_level + 1).to_string()
+ end_of_line = "\n" if size <= 1 or i == (size - 1) else ",\n"
+ s = s + (" " * (self.indent_level * INDENT)) + \
+ v_str + end_of_line
i += 1
self.indent_level -= 2
s = s + (" " * (self.indent_level * INDENT)) + "]"
@@ -327,7 +347,7 @@ def parse(self):
def to_string(self):
try:
- self.field_type_full_namespace = self.val["m_type"]
+ self.field_type_full_namespace = self.val[NLOHMANN_JSON_KIND_FIELD_NAME]
str_val = str(self.field_type_full_namespace)
if not str_val in ENUM_LITERAL_NAMESPACE_TO_LITERAL:
raise ValueError("Unkown litteral for data type enum. Found {}\nNot in:\n{}".format(str_val,
@@ -344,39 +364,13 @@ def display_hint(self):
return self.val.type
-# adapted from https://github.com/hugsy/gef/blob/dev/gef.py
-def show_last_exception():
- """Display the last Python exception."""
- print("")
- exc_type, exc_value, exc_traceback = sys.exc_info()
-
- print(" Exception raised ".center(80, HORIZONTAL_LINE))
- print("{}: {}".format(exc_type.__name__, exc_value))
- print(" Detailed stacktrace ".center(80, HORIZONTAL_LINE))
- for (filename, lineno, method, code) in traceback.extract_tb(exc_traceback)[::-1]:
- print("""{} File "{}", line {:d}, in {}()""".format(DOWN_ARROW, filename, lineno, method))
- print(" {} {}".format(RIGHT_ARROW, code))
- print(" Last 10 GDB commands ".center(80, HORIZONTAL_LINE))
- gdb.execute("show commands")
- print(" Runtime environment ".center(80, HORIZONTAL_LINE))
- print("* GDB: {}".format(gdb.VERSION))
- print("* Python: {:d}.{:d}.{:d} - {:s}".format(sys.version_info.major, sys.version_info.minor,
- sys.version_info.micro, sys.version_info.releaselevel))
- print("* OS: {:s} - {:s} ({:s}) on {:s}".format(platform.system(), platform.release(),
- platform.architecture()[0],
- " ".join(platform.dist())))
- print(HORIZONTAL_LINE * 80)
- print("")
- gdb.execute("q {}".format(ERROR_PARSING_ERROR))
- sys.exit(ERROR_PARSING_ERROR)
-
-
def build_pretty_printer():
pp = gdb.printing.RegexpCollectionPrettyPrinter("nlohmann_json")
- pp.add_printer(NLOHMANN_JSON_TYPE_NAMESPACE, "^{}$".format(NLOHMANN_JSON_TYPE), LohmannJSONPrinter)
+ pp.add_printer(NLOHMANN_JSON_TYPE_NAMESPACE, "^{}$".format(
+ NLOHMANN_JSON_TYPE_POINTER), LohmannJSONPrinter)
return pp
-# executed at autoload
-# TODO: avoid multiple loads ?
-gdb.printing.register_pretty_printer(gdb.current_objfile(), build_pretty_printer())
+# executed at script load
+gdb.printing.register_pretty_printer(
+ gdb.current_objfile(), build_pretty_printer())
diff --git a/test_github_http_api.py b/test_github_http_api.py
new file mode 100644
index 0000000..dc764b9
--- /dev/null
+++ b/test_github_http_api.py
@@ -0,0 +1,4 @@
+import requests
+
+
+r = requests.get("https://api.github.com/repos/LoneWanderer-GH/nlohmann-json-gdb/actions/jobs/CI-GDB-versions")
\ No newline at end of file
diff --git a/tests/Test_GDB_python.gdb b/tests/Test_GDB_python.gdb
new file mode 100644
index 0000000..23966c2
--- /dev/null
+++ b/tests/Test_GDB_python.gdb
@@ -0,0 +1,27 @@
+
+echo \n
+echo #############\n
+echo # Python Help\n
+echo #############\n
+help python
+
+echo \n
+echo #############################################################\n
+echo # Execute a basic python script (will force exit with code 0)\n
+echo #############################################################\n
+
+python
+import sys
+c = 0
+print("Python script seems to work ...")
+print("Exiting with code: {}".format(c))
+sys.exit(0)
+end
+
+
+echo \n
+echo \n
+echo #########################################################\n
+echo # Something went wrong in python script, for exit code 25\n
+echo #########################################################\n
+q 25
diff --git a/tests/cpp_test_project/Test_Suite.gdb b/tests/cpp_test_project/Test_Suite.gdb
new file mode 100644
index 0000000..8b70185
--- /dev/null
+++ b/tests/cpp_test_project/Test_Suite.gdb
@@ -0,0 +1,5 @@
+source .gdbinit
+
+source checker.py
+
+python test_suite()
\ No newline at end of file
diff --git a/tests/cpp_test_project/Test_Test_Suite.gdb b/tests/cpp_test_project/Test_Test_Suite.gdb
new file mode 100644
index 0000000..caddd75
--- /dev/null
+++ b/tests/cpp_test_project/Test_Test_Suite.gdb
@@ -0,0 +1,5 @@
+source .gdbinit
+
+source checker.py
+
+python test_the_test_suite()
\ No newline at end of file
diff --git a/tests/cpp_test_project/checker.py b/tests/cpp_test_project/checker.py
index 5e47602..857482d 100644
--- a/tests/cpp_test_project/checker.py
+++ b/tests/cpp_test_project/checker.py
@@ -115,7 +115,7 @@ def fancy_print(s):
def test_suite():
fancy_print("START TEST SUITE")
methods = ["p", "pjson"]
- variables = ["fooz", "arr", "one", "mixed_nested"]
+ variables = ["fooz", "arr", "one", "foo", "mixed_nested"]
results = perform(methods, variables)
diff --git a/tests/offsets_finder/offsets_finder.py b/tests/offsets_finder/offsets_finder.py
index 7b5c85c..72e210c 100644
--- a/tests/offsets_finder/offsets_finder.py
+++ b/tests/offsets_finder/offsets_finder.py
@@ -46,6 +46,7 @@
INDENT = 4
+# https://stackoverflow.com/questions/29285287/c-getting-size-in-bits-of-integer
PLATFORM_BITS = "64" if sys.maxsize > 2 ** 32 else "32"
print("PLATFORM_BITS {}".format(PLATFORM_BITS))
@@ -57,24 +58,47 @@
print("")
print("Search range will be:")
-print("MIN: {} - MAX: {} - STEP: {}".format(SEARCH_MIN,SEARCH_MAX, SEARCH_STEP))
+print("MIN: {} - MAX: {} - STEP: {}".format(SEARCH_MIN, SEARCH_MAX, SEARCH_STEP))
print("")
""""""
# GDB black magic
""""""
NLOHMANN_JSON_TYPE_PREFIX = "nlohmann::basic_json"
-
-class NO_TYPE_ERROR(Exception):
- pass
+NLOHMANN_JSON_KIND_FIELD_NAME = "m_type"
+STD_RB_TREE_NODE_TYPE_NAME = "std::_Rb_tree_node_base"
-class NO_ENUM_TYPE_ERROR(Exception):
+class NO_RB_TREE_TYPES_ERROR(Exception):
pass
-class NO_RB_TREE_TYPES_ERROR(Exception):
- pass
+# adapted from https://github.com/hugsy/gef/blob/dev/gef.py
+def show_last_exception():
+ """Display the last Python exception."""
+ print("")
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+
+ print(" Exception raised ".center(80, HORIZONTAL_LINE))
+ print("{}: {}".format(exc_type.__name__, exc_value))
+ print(" Detailed stacktrace ".center(80, HORIZONTAL_LINE))
+ for (filename, lineno, method, code) in traceback.extract_tb(exc_traceback)[::-1]:
+ print("""{} File "{}", line {:d}, in {}()""".format(
+ DOWN_ARROW, filename, lineno, method))
+ print(" {} {}".format(RIGHT_ARROW, code))
+ print(" Last 10 GDB commands ".center(80, HORIZONTAL_LINE))
+ gdb.execute("show commands")
+ print(" Runtime environment ".center(80, HORIZONTAL_LINE))
+ print("* GDB: {}".format(gdb.VERSION))
+ print("* Python: {:d}.{:d}.{:d} - {:s}".format(sys.version_info.major, sys.version_info.minor,
+ sys.version_info.micro, sys.version_info.releaselevel))
+ print("* OS: {:s} - {:s} ({:s}) on {:s}".format(platform.system(), platform.release(),
+ platform.architecture()[0],
+ " ".join(platform.dist())))
+ print(HORIZONTAL_LINE * 80)
+ print("")
+ gdb.execute("q {}".format(ERROR_PARSING_ERROR))
+ sys.exit(ERROR_PARSING_ERROR)
def find_platform_type(regex, helper_type_name):
@@ -84,44 +108,30 @@ def find_platform_type(regex, helper_type_name):
lines = info_types.splitlines()
# correct command should have given lines, the last one being the correct one
if len(lines) == 4:
- # split last line, after line number and spaces
- t = re.split("^\d+:\s+", lines[-1])
- # transform result
- t = "".join(t[1::]).split(";")[0]
+ for l in lines:
+ print("### Log info types output : {}".format(l))
+ l = lines[-1]
+ if l.startswith(helper_type_name):
+ # line format "type_name;"
+ t = l.split(";")[0]
+ else:
+ # example
+ # 14708: nlohmann::basic_json, std::allocator >, bool, long long, unsigned long long, double, std::allocator, nlohmann::adl_serializer>;
+ t = re.split("^\d+:\s+", lines[-1])
+ # transform result
+ t = "".join(t[1::]).split(";")[0]
print("")
- print("The researched {} type for this executable is".format(helper_type_name).center(80, "-"))
+ print("The researched {} type for this executable is".format(
+ helper_type_name).center(80, "-"))
print("{}".format(t).center(80, "-"))
- print("Using regex: {}".format(regex))
+ print("(Using regex: {})".format(regex))
print("".center(80, "-"))
print("")
return t
else:
- raise NO_TYPE_ERROR("Too many matching types found ...\n{}".format("\n\t".join(lines)))
-
-def find_platform_std_string_type():
- # follwoing method does not work until a live inferior process is available ... too bad !
- # previous_std_str = "std::string"
- # new_t_std_str = None
- # count = 1
- # while True:
- # print("Try {}".format(count))
- # if count > 10:
- # raise Exception("Could not find std::string type def in symbols after {} iterations of whatis. Currently known types {} and {}".format(i, t_std_str, new_t_std_str))
- # new_t_std_str = gdb.execute("whatis {}".format(previous_std_str), to_string=True)
- # print("got {}".format(new_t_std_str))
- # new_t_std_str = new_t_std_str.split("=")[-1].strip()
- # if new_t_std_str == previous_std_str:
- # break
- # else:
- # previous_std_str = new_t_std_str
- # count += 1
- # # what_is_what_is_std_string = gdb.execute("whatis {}".format(what_is_std_string))
-
- std_str_regex = "^std::__cxx.*::basic_string$" # platform/compilation dependant ?
- t = find_platform_type(std_str_regex, "std::string")
- return gdb.lookup_type(t)
- # return gdb.lookup_type(new_t_std_str)
+ raise ValueError(
+ "Too many matching types found for JSON ...\n{}".format("\n\t".join(lines)))
def find_platform_json_type(nlohmann_json_type_prefix):
@@ -135,81 +145,63 @@ def find_platform_json_type(nlohmann_json_type_prefix):
def find_lohmann_types():
- nlohmann_json_type_namespace = find_platform_json_type(NLOHMANN_JSON_TYPE_PREFIX)
- try:
- NLOHMANN_JSON_TYPE = gdb.lookup_type(nlohmann_json_type_namespace).pointer()
- except:
- raise NO_TYPE_ERROR("Type namespace found but could not obtain type data ... WEIRD !")
- try:
- enum_json_detail = gdb.lookup_type("nlohmann::detail::value_t").fields()
- except:
- raise NO_ENUM_TYPE_ERROR()
- return nlohmann_json_type_namespace, NLOHMANN_JSON_TYPE, enum_json_detail
-
-def get_fields_offset_from_type(str_type):
- gdb_type = gdb.lookup_type(str_type)
- s = gdb.execute("ptype /o {}".format(gdb_type), to_string=True)
- lines = s.splitlines()
- field_names = [f.name for f in gdb_type.fields()]
- fields_offsets = dict()
-
- # structure to read
- # /* offset | size */ type = struct std::_Rb_tree_node_base {
- # /* 0 | 4 */ std::_Rb_tree_color _M_color;
- # /* XXX 4-byte hole */
- # /* 8 | 8 */ _Base_ptr _M_parent;
- # /* 16 | 8 */ _Base_ptr _M_left;
- # /* 24 | 8 */ _Base_ptr _M_right;
- # /**/
- # /* total size (bytes): 32 */
- # }
- matcher = re.compile("\/\*\s+(\d+).*")
- for l in lines:
- for field in field_names:
- if field in l:
- match = matcher.match(l)# re.split("\|", l)[0].
- field_offset = int(match.group(1))
- fields_offsets[field] = field_offset
- # print("Found offset {:02d} for {}".format(field_offset, field))
- break # break the loop over fields names, go next line
- else:
- continue
- return fields_offsets
-
-def find_rb_tree_types():
+ """
+ Finds the essential types needed to debug nlohmann JSONs
+ """
+
+ nlohmann_json_type_namespace = find_platform_json_type(
+ NLOHMANN_JSON_TYPE_PREFIX)
+
+ # enum type that represents what exactly the current json object is
+ nlohmann_json_type = gdb.lookup_type(nlohmann_json_type_namespace)
+
+ # the real type behind "std::string"
+ # std::map is a C++ template, first template arg is the std::map key type
+ nlohmann_json_map_key_type = nlohmann_json_type.template_argument(0)
+
+ enum_json_detail_type = None
+ for field in nlohmann_json_type.fields():
+ if NLOHMANN_JSON_KIND_FIELD_NAME == field.name:
+ enum_json_detail_type = field.type
+ break
+
+ enum_json_details = enum_json_detail_type.fields()
+
+ return nlohmann_json_type_namespace, nlohmann_json_type.pointer(), enum_json_details, nlohmann_json_map_key_type
+
+
+def find_std_map_rb_tree_types():
try:
- std_rb_header_offsets = get_fields_offset_from_type("std::_Rb_tree_node_base")
- std_rb_tree_node_type = gdb.lookup_type("std::_Rb_tree_node_base::_Base_ptr").pointer()
- std_rb_tree_size_type = gdb.lookup_type("std::size_t").pointer()
- return std_rb_tree_node_type, std_rb_tree_size_type, std_rb_header_offsets
+ std_rb_tree_node_type = gdb.lookup_type(STD_RB_TREE_NODE_TYPE_NAME)
+ return std_rb_tree_node_type
except:
- raise NO_RB_TREE_TYPES_ERROR()
+ raise ValueError("Could not find the required RB tree types")
+
-## SET GLOBAL VARIABLES
+# SET GLOBAL VARIABLES
try:
- STD_STRING = find_platform_std_string_type()
- NLOHMANN_JSON_TYPE_NAMESPACE, NLOHMANN_JSON_TYPE, ENUM_JSON_DETAIL = find_lohmann_types()
- STD_RB_TREE_NODE_TYPE, STD_RB_TREE_SIZE_TYPE, STD_RB_HEADER_OFFSETS = find_rb_tree_types()
-except NO_TYPE_ERROR:
- print("FATAL ERROR {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
- print("FATAL ERROR {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
- print("FATAL ERROR {}: missing JSON type definition, could not find the JSON type starting with {}".format(NLOHMANN_JSON_TYPE_PREFIX))
- gdb.execute("q {}".format(ERROR_NO_CORRECT_JSON_TYPE_FOUND))
-except NO_RB_TREE_TYPES_ERROR:
- print("FATAL ERROR {}".format(ERROR_NO_RB_TYPES_FOUND))
- print("FATAL ERROR {}".format(ERROR_NO_RB_TYPES_FOUND))
- print("FATAL ERROR {}: missing some STL RB tree types definition")
- gdb.execute("q {}".format(ERROR_NO_RB_TYPES_FOUND))
+ NLOHMANN_JSON_TYPE_NAMESPACE, NLOHMANN_JSON_TYPE_POINTER, ENUM_JSON_DETAIL, NLOHMANN_JSON_MAP_KEY_TYPE = find_lohmann_types()
+ STD_RB_TREE_NODE_TYPE = find_std_map_rb_tree_types()
+except:
+ show_last_exception()
-ENUM_LITERAL_NAMESPACE_TO_LITERAL = dict([ (f.name, f.name.split("::")[-1]) for f in ENUM_JSON_DETAIL])
-# ENUM_LITERALS_NAMESPACE = ENUM_LITERAL_NAMESPACE_TO_LITERAL.keys()
+# convert the full namespace to only its literal value
+# useful to access the correct variant of JSON m_value
+ENUM_LITERAL_NAMESPACE_TO_LITERAL = dict(
+ [(f.name, f.name.split("::")[-1]) for f in ENUM_JSON_DETAIL])
-def std_stl_item_to_int_address(node):
- return int(str(node), 0)
+def gdb_value_address_to_int(node):
+ val = None
+ if type(node) == gdb.Value:
+ # gives the int value of the address
+ # .address returns another gdb.Value that cannot be cast to int
+ val = int(str(node), 0)
+ return val
-def parse_std_str_from_hexa_address(hexa_str):
+
+def parse_std_string_from_hexa_address(hexa_str):
# https://stackoverflow.com/questions/6776961/how-to-inspect-stdstring-in-gdb-with-no-source-code
return '"{}"'.format(gdb.parse_and_eval("*(char**){}".format(hexa_str)).string())
@@ -249,13 +241,14 @@ def parse_as_object(self):
# traversing tree is a an adapted copy pasta from STL gdb parser
# (http://www.yolinux.com/TUTORIALS/src/dbinit_stl_views-1.03.txt and similar links)
- node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"]
+ node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"]
tree_size = o["_M_t"]["_M_impl"]["_M_node_count"]
- size_of_node = o["_M_t"]["_M_impl"]["_M_header"]["_M_left"].referenced_value().type.sizeof
+ # for safety
+ # assert(node.referenced_value().type == STD_RB_TREE_NODE_TYPE)
+ # assert(node.referenced_value().type.sizeof == STD_RB_TREE_NODE_TYPE.sizeof)
i = 0
-
if tree_size == 0:
return "{}"
else:
@@ -263,8 +256,8 @@ def parse_as_object(self):
for offset_key in SEARCH_RANGE:
try:
print("Testing Node.Key offset {}".format(offset_key))
- key_address = std_stl_item_to_int_address(node) + offset_key # + 1
- k_str = parse_std_str_from_hexa_address(hex(key_address))
+ key_address = gdb_value_address_to_int(node) + offset_key # + 1
+ k_str = parse_std_string_from_hexa_address(hex(key_address))
if key in k_str:
key_found = True
print("Found the key '{}'".format(k_str))
@@ -277,7 +270,7 @@ def parse_as_object(self):
try:
print("Testing Node.Value offset {}".format(offset_val))
value_address = key_address + offset_val
- value_object = gdb.Value(value_address).cast(NLOHMANN_JSON_TYPE)
+ value_object = gdb.Value(value_address).cast(NLOHMANN_JSON_TYPE_POINTER)
v_str = LohmannJSONPrinter(value_object, self.indent_level + 1).to_string()
if value in v_str:
print("Found the value '{}'".format(v_str))
@@ -286,19 +279,19 @@ def parse_as_object(self):
except:
continue
if key_found and value_found:
- if offset_key == size_of_node and offset_val == STD_STRING.sizeof:
+ if offset_key == STD_RB_TREE_NODE_TYPE.sizeof and offset_val == NLOHMANN_JSON_MAP_KEY_TYPE.sizeof:
print("\n\nOffsets for STD::MAP exploration from a given node are:\n")
- print("MAGIC_OFFSET_STD_MAP_KEY = {} = expected value from symbols {}".format(offset_key, size_of_node))
- print("MAGIC_OFFSET_STD_MAP_VAL = {} = expected value from symbols {}".format(offset_val, STD_STRING.sizeof))
+ print("MAGIC_OFFSET_STD_MAP_KEY = {} = expected value from symbols {}".format(offset_key, STD_RB_TREE_NODE_TYPE.sizeof))
+ print("MAGIC_OFFSET_STD_MAP_VAL = {} = expected value from symbols {}".format(offset_val, NLOHMANN_JSON_MAP_KEY_TYPE.sizeof))
return "\n ===> Offsets for STD::MAP : [ FOUND ] <=== "
- print("MAGIC_OFFSET_STD_MAP_KEY should be {} (from symbols)".format(size_of_node))
+ print("MAGIC_OFFSET_STD_MAP_KEY should be {} (from symbols)".format(STD_RB_TREE_NODE_TYPE.sizeof))
print("MAGIC_OFFSET_STD_MAP_VAL should be {} (from symbols)".format(STD_STRING.sizeof))
print("\n ===> Offsets for STD::MAP : [ NOT FOUND ] <=== ")
gdb.execute("q 25")
def parse_as_str(self):
- return parse_std_str_from_hexa_address(str(self.val["m_value"][self.field_type_short]))
+ return parse_std_string_from_hexa_address(str(self.val["m_value"][self.field_type_short]))
def parse_as_leaf(self):
s = "WTFBBQ !"
@@ -325,7 +318,7 @@ def parse_as_array(self):
element_size = start.referenced_value().type.sizeof
# start at expected index directly
i = expected_index
- start_address = std_stl_item_to_int_address(start)
+ start_address = gdb_value_address_to_int(start)
if size == 0:
return "error with std::vector"
else:
@@ -334,7 +327,7 @@ def parse_as_array(self):
print("Testing vector value offset {}".format(offset))
o = (i * offset)
i_address = start_address + o
- value_object = gdb.Value(i_address).cast(NLOHMANN_JSON_TYPE)
+ value_object = gdb.Value(i_address).cast(NLOHMANN_JSON_TYPE_POINTER)
v_str = LohmannJSONPrinter(value_object, self.indent_level + 1).to_string()
print("value: {}".format(v_str))
if expected_value in v_str: # or "9966990055" in v_str:
@@ -369,7 +362,7 @@ def parse(self):
return s
def to_string(self):
- self.field_type_full_namespace = self.val["m_type"]
+ self.field_type_full_namespace = self.val[NLOHMANN_JSON_KIND_FIELD_NAME]
str_val = str(self.field_type_full_namespace)
if not str_val in ENUM_LITERAL_NAMESPACE_TO_LITERAL:
# gdb.execute("q 100")
@@ -411,9 +404,11 @@ def show_last_exception():
def build_pretty_printer():
pp = gdb.printing.RegexpCollectionPrettyPrinter("nlohmann_json")
- pp.add_printer(NLOHMANN_JSON_TYPE_NAMESPACE, "^{}$".format(NLOHMANN_JSON_TYPE), LohmannJSONPrinter)
+ pp.add_printer(NLOHMANN_JSON_TYPE_NAMESPACE, "^{}$".format(
+ NLOHMANN_JSON_TYPE_POINTER), LohmannJSONPrinter)
return pp
-# executed at script load by GDB
-gdb.printing.register_pretty_printer(gdb.current_objfile(), build_pretty_printer())
+# executed at script load
+gdb.printing.register_pretty_printer(
+ gdb.current_objfile(), build_pretty_printer())