diff --git a/.flake8 b/.flake8 index 9a8ded72afb..71cbdefe77b 100644 --- a/.flake8 +++ b/.flake8 @@ -29,7 +29,6 @@ per-file-ignores = gui/wxpython/animation/g.gui.animation.py: E501 gui/wxpython/tplot/g.gui.tplot.py: E501 gui/wxpython/iclass/g.gui.iclass.py: E501 - gui/wxpython/iclass/statistics.py: F841, F405, F403 gui/wxpython/location_wizard/wizard.py: E722 gui/wxpython/mapdisp/main.py: E722 gui/wxpython/mapdisp/test_mapdisp.py: E501 @@ -43,10 +42,8 @@ per-file-ignores = # TODO: Is this really needed? python/grass/jupyter/__init__.py: E501 python/grass/pygrass/vector/__init__.py: E402 - python/grass/temporal/datetime_math.py: E722 python/grass/temporal/spatial_topology_dataset_connector.py: E722 python/grass/temporal/temporal_algebra.py: E722 - python/grass/temporal/temporal_granularity.py: E722 # Current benchmarks/tests are changing sys.path before import. # Possibly, a different approach should be taken there anyway. python/grass/pygrass/tests/benchmark.py: F821 diff --git a/.github/actions/create-upload-suggestions/action.yml b/.github/actions/create-upload-suggestions/action.yml index 135aa41845c..fe6a111fcbe 100644 --- a/.github/actions/create-upload-suggestions/action.yml +++ b/.github/actions/create-upload-suggestions/action.yml @@ -177,7 +177,7 @@ runs: echo "diff-file-name=${INPUT_DIFF_FILE_NAME}" >> "${GITHUB_OUTPUT}" env: INPUT_DIFF_FILE_NAME: ${{ steps.tool-name-safe.outputs.diff-file-name }} - - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 id: upload-diff if: >- ${{ (steps.files_changed.outputs.files_changed == 'true') && @@ -200,7 +200,7 @@ runs: echo 'Suggestions can only be added near to lines changed in this PR.' echo 'If any fixes can be added as code suggestions, they will be added shortly from another workflow.' 
} >> "${GITHUB_STEP_SUMMARY}" - - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 id: upload-changes if: >- ${{ always() && diff --git a/.github/workflows/additional_checks.yml b/.github/workflows/additional_checks.yml index 76554f24db3..3ec9b1fd0ac 100644 --- a/.github/workflows/additional_checks.yml +++ b/.github/workflows/additional_checks.yml @@ -17,6 +17,8 @@ concurrency: group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }} cancel-in-progress: true +permissions: {} + jobs: additional-checks: name: Additional checks diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 77ed085ad96..90ead51caf7 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -31,6 +31,7 @@ jobs: language: - c-cpp - python + - actions concurrency: group: ${{ github.workflow }}-${{ @@ -56,7 +57,7 @@ jobs: if: ${{ matrix.language == 'c-cpp' }} - name: Initialize CodeQL - uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 + uses: github/codeql-action/init@b6a472f63d85b9c78a3ac5e89422239fc15e9b3c # v3.28.1 with: languages: ${{ matrix.language }} config-file: ./.github/codeql/codeql-config.yml @@ -81,6 +82,6 @@ jobs: run: .github/workflows/build_ubuntu-22.04.sh "${HOME}/install" - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 + uses: github/codeql-action/analyze@b6a472f63d85b9c78a3ac5e89422239fc15e9b3c # v3.28.1 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 68a0fa03f11..5c0b54fdb7f 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -22,8 +22,9 @@ on: release: types: [published] -jobs: +permissions: {} +jobs: # Run for push to configured branches and all published releases. # Take care of different os. 
# For main branch, created tags are: @@ -47,6 +48,10 @@ jobs: - ubuntu_wxgui fail-fast: false + permissions: + contents: read + packages: write + steps: - name: Checkout uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 diff --git a/.github/workflows/gcc.yml b/.github/workflows/gcc.yml index 6b6286ef3f0..9e508baf81f 100644 --- a/.github/workflows/gcc.yml +++ b/.github/workflows/gcc.yml @@ -8,6 +8,8 @@ on: - releasebranch_* pull_request: +permissions: {} + jobs: build: name: ${{ matrix.c }} & ${{ matrix.cpp }} diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 026a3675819..ed2132cc555 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -14,6 +14,9 @@ env: concurrency: group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }} cancel-in-progress: true + +permissions: {} + jobs: macos_build: name: macOS build @@ -50,7 +53,7 @@ jobs: # Year and week of year so cache key changes weekly run: echo "date=$(date +%Y-%U)" >> "${GITHUB_OUTPUT}" - name: Setup Mamba - uses: mamba-org/setup-micromamba@068f1ab4b37ed9b3d9f73da7db90a0cda0a48d29 # v2.0.3 + uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc # v2.0.4 with: init-shell: bash environment-file: .github/workflows/macos_dependencies.txt @@ -107,7 +110,7 @@ jobs: nc_spm_full_v2alpha2.tar.gz" - name: Make HTML test report available if: ${{ !cancelled() }} - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: testreport-macOS path: testreport diff --git a/.github/workflows/milestones.yml b/.github/workflows/milestones.yml index e2ade4eb091..4e9fa634a75 100644 --- a/.github/workflows/milestones.yml +++ b/.github/workflows/milestones.yml @@ -5,12 +5,17 @@ on: pull_request_target: types: [closed] +permissions: {} + jobs: assign-milestone: runs-on: ubuntu-latest if: github.event.pull_request.merged + permissions: + contents: read + pull-requests: write steps: - # Retreiving the current milestoone from API instead of github context, + # Retrieving the current milestone from API instead of github context, # so up-to-date information is used when running after being queued or for reruns # Otherwise, the information should be available using # ${{ github.event.pull_request.milestone.title }} diff --git a/.github/workflows/osgeo4w.yml b/.github/workflows/osgeo4w.yml index 6931fd1f089..5eb97761c03 100644 --- a/.github/workflows/osgeo4w.yml +++ b/.github/workflows/osgeo4w.yml @@ -8,6 +8,8 @@ on: - releasebranch_* pull_request: +permissions: {} + jobs: build: name: ${{ matrix.os }} build and tests @@ -138,7 +140,7 @@ jobs: - name: Make HTML test report available if: ${{ always() }} - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: testreport-${{ matrix.os }} path: testreport diff --git a/.github/workflows/periodic_update.yml b/.github/workflows/periodic_update.yml index 64887e51827..a60d8a8e159 100644 --- a/.github/workflows/periodic_update.yml +++ b/.github/workflows/periodic_update.yml @@ -10,12 +10,18 @@ on: # See https://crontab.guru/#32_10_*/100,1-7_*_WED - cron: "32 10 */100,1-7 * WED" +permissions: {} + # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: update-configure: # The type of runner that the job will run on runs-on: 
ubuntu-latest + permissions: + contents: write + pull-requests: write + # Steps represent a sequence of tasks that will be executed as part of the job steps: - name: Create URL to the run output diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index d646c2e544b..be266a744cf 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -8,6 +8,8 @@ on: - releasebranch_* pull_request: +permissions: {} + jobs: pytest: concurrency: @@ -123,7 +125,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} - name: Make python-only code coverage test report available if: ${{ !cancelled() }} - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: python-codecoverage-report-${{ matrix.os }}-${{ matrix.python-version }} path: coverage_html_report diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 3d1090d8452..627dbfc13f3 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -8,6 +8,8 @@ on: - releasebranch_* pull_request: +permissions: {} + jobs: python-checks: name: Python Code Quality Checks @@ -34,9 +36,9 @@ jobs: # renovate: datasource=pypi depName=pylint PYLINT_VERSION: "3.3.3" # renovate: datasource=pypi depName=bandit - BANDIT_VERSION: "1.8.0" + BANDIT_VERSION: "1.8.2" # renovate: datasource=pypi depName=ruff - RUFF_VERSION: "0.9.0" + RUFF_VERSION: "0.9.1" runs-on: ${{ matrix.os }} permissions: @@ -129,13 +131,13 @@ jobs: bandit -c pyproject.toml -iii -r . -f sarif -o bandit.sarif --exit-zero - name: Upload Bandit Scan Results - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: bandit.sarif path: bandit.sarif - name: Upload SARIF File into Security Tab - uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 + uses: github/codeql-action/upload-sarif@b6a472f63d85b9c78a3ac5e89422239fc15e9b3c # v3.28.1 with: sarif_file: bandit.sarif @@ -203,7 +205,7 @@ jobs: cp -rp dist.$ARCH/docs/html/libpython sphinx-grass - name: Make Sphinx documentation available - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: sphinx-grass path: sphinx-grass diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index 01a22f99edf..3138da70dda 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -12,6 +12,8 @@ concurrency: group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }} cancel-in-progress: true +permissions: {} + jobs: super-linter: name: GitHub Super Linter diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml index 5cb1c6a3122..5f130a00812 100644 --- a/.github/workflows/ubuntu.yml +++ b/.github/workflows/ubuntu.yml @@ -10,6 +10,8 @@ on: - releasebranch_* pull_request: +permissions: {} + jobs: ubuntu: concurrency: @@ -149,7 +151,7 @@ jobs: - name: Make HTML test report available if: ${{ always() }} - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: testreport-${{ matrix.os }}-${{ matrix.config }}-${{ matrix.extra-include }} 
path: testreport diff --git a/.github/workflows/verify-success.yml b/.github/workflows/verify-success.yml index 237c239f576..92fff2e1d12 100644 --- a/.github/workflows/verify-success.yml +++ b/.github/workflows/verify-success.yml @@ -45,34 +45,36 @@ on: type: string required: true # Can't escape the handlebars in the description - description: + description: >- In the calling job that defines all the needed jobs, send `toJson(needs)` inside `$` followed by `{{ }}` fail_if_failure: type: boolean default: true - description: + description: >- If true, this workflow will fail if any job from 'needs_context was failed fail_if_cancelled: type: boolean default: true - description: + description: >- If true, this workflow will fail if any job from 'needs_context' was cancelled fail_if_skipped: type: boolean default: false - description: + description: >- If true, this workflow will fail if any job from 'needs_context' was skipped require_success: type: boolean default: true - description: + description: >- If true, this workflow will fail if no job from 'needs_context' was successful +permissions: {} + jobs: verify-success: name: Success diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a2a56b7d4bf..211739a30c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ repos: ) - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.9.0 + rev: v0.9.1 hooks: # Run the linter. - id: ruff diff --git a/Makefile b/Makefile index 4ae6fae137e..a3f533c7d07 100644 --- a/Makefile +++ b/Makefile @@ -46,7 +46,6 @@ DIRS = \ visualization \ locale \ man \ - macosx \ mswindows SUBDIRS = $(DIRS) diff --git a/docker/alpine/Dockerfile b/docker/alpine/Dockerfile index 27afaf4d6db..a552a744a8a 100644 --- a/docker/alpine/Dockerfile +++ b/docker/alpine/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.21@sha256:b97e2a89d0b9e4011bb88c02ddf01c544b8c781acf1f4d559e7c8f12f1047ac3 as common +FROM alpine:3.21@sha256:56fa17d2a7e7f168a043a2712e63aed1f8543aeafdcee47c58dcffe38ed51099 as common # Based on: # https://github.com/mundialis/docker-grass-gis/blob/master/Dockerfile diff --git a/flake.lock b/flake.lock index edaef13fad9..94f4784f626 100644 --- a/flake.lock +++ b/flake.lock @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1736042175, - "narHash": "sha256-jdd5UWtLVrNEW8K6u5sy5upNAFmF3S4Y+OIeToqJ1X8=", + "lastModified": 1736693123, + "narHash": "sha256-9lIfXCaBPwUA7FnfDnoH4gxxdOvXG78k6UlUw0+ZDxc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "bf689c40d035239a489de5997a4da5352434632e", + "rev": "2fdec2c2e68b7b7845d1ea4e0894c63143e3261b", "type": "github" }, "original": { diff --git a/gui/wxpython/history/tree.py b/gui/wxpython/history/tree.py index 3701fefb001..b5488c23d0e 100644 --- a/gui/wxpython/history/tree.py +++ b/gui/wxpython/history/tree.py @@ -216,6 +216,10 @@ def _popupMenuCommand(self): """Create popup menu for commands""" menu = Menu() + copyItem = wx.MenuItem(menu, wx.ID_ANY, _("&Copy")) + menu.AppendItem(copyItem) + self.Bind(wx.EVT_MENU, self.OnCopyCmd, copyItem) + item = wx.MenuItem(menu, wx.ID_ANY, _("&Remove")) menu.AppendItem(item) self.Bind(wx.EVT_MENU, self.OnRemoveCmd, item) @@ -658,3 +662,25 @@ def OnDoubleClick(self, node): self.CollapseNode(node, recursive=False) else: self.ExpandNode(node, recursive=False) + + def OnCopyCmd(self, event): + """Copy selected cmd to clipboard""" + self.DefineItems(self.GetSelected()) + if not self.selected_command: + return + + selected_command = self.selected_command[0] + command = 
selected_command.data["name"] + + # Copy selected command to clipboard + try: + if wx.TheClipboard.Open(): + try: + wx.TheClipboard.SetData(wx.TextDataObject(command)) + self.showNotification.emit( + message=_("Command <{}> copied to clipboard").format(command) + ) + finally: + wx.TheClipboard.Close() + except wx.PyWidgetError: + self.showNotification.emit(message=_("Failed to copy command to clipboard")) diff --git a/gui/wxpython/iclass/statistics.py b/gui/wxpython/iclass/statistics.py index 059ce5e6b03..c1727bc1676 100644 --- a/gui/wxpython/iclass/statistics.py +++ b/gui/wxpython/iclass/statistics.py @@ -18,13 +18,28 @@ """ import os -from ctypes import * +import sys +from ctypes import byref, c_char_p, c_float, c_int import grass.script as gs try: - from grass.lib.imagery import * -except ImportError as e: + from grass.lib.imagery import ( + I_iclass_statistics_get_cat, + I_iclass_statistics_get_color, + I_iclass_statistics_get_histo, + I_iclass_statistics_get_max, + I_iclass_statistics_get_mean, + I_iclass_statistics_get_min, + I_iclass_statistics_get_name, + I_iclass_statistics_get_nbands, + I_iclass_statistics_get_ncells, + I_iclass_statistics_get_nstd, + I_iclass_statistics_get_range_max, + I_iclass_statistics_get_range_min, + I_iclass_statistics_get_stddev, + ) +except ImportError: sys.stderr.write(_("Loading imagery lib failed")) from grass.pydispatch.signal import Signal diff --git a/lib/Makefile b/lib/Makefile index 91c38f5c73b..96a23af2069 100644 --- a/lib/Makefile +++ b/lib/Makefile @@ -31,6 +31,7 @@ SUBDIRS = \ lidar \ raster3d \ raster3d/test \ + external/parson/test \ gpde \ dspf \ symbol \ diff --git a/lib/external/parson/Makefile b/lib/external/parson/Makefile index 43aca3bc351..fa6cda3efa1 100644 --- a/lib/external/parson/Makefile +++ b/lib/external/parson/Makefile @@ -7,7 +7,7 @@ include $(MODULE_TOPDIR)/include/Make/Lib.make default: headers $(MAKE) lib -headers: $(ARCH_INCDIR)/parson.h +headers: $(ARCH_INCDIR)/parson.h $(ARCH_INCDIR)/gjson.h $(ARCH_INCDIR)/%.h: %.h $(INSTALL_DATA) $< $@ diff --git a/lib/external/parson/gjson.c b/lib/external/parson/gjson.c new file mode 100644 index 00000000000..54740fa6df8 --- /dev/null +++ b/lib/external/parson/gjson.c @@ -0,0 +1,155 @@ +/***************************************************************************** + * + * MODULE: GRASS json output interface + * + * AUTHOR: Nishant Bansal (nishant.bansal.282003@gmail.com) + * + * PURPOSE: parson library function wrapper + * part of the gjson library + * + * COPYRIGHT: (C) 2024 by the GRASS Development Team + * + * This program is free software under the GNU General Public + * License (>=v2). Read the file COPYING that comes with GRASS + * for details. 
+ * + *****************************************************************************/ + +#include "gjson.h" + +/* *************************************************************** */ +/* ***** WRAPPER FOR PARSON FUNCTIONS USED IN GRASS ************** */ +/* *************************************************************** */ + +JSON_Value *G_json_value_init_object(void) +{ + return json_value_init_object(); +} + +JSON_Value *G_json_value_init_array(void) +{ + return json_value_init_array(); +} + +JSON_Object *G_json_value_get_object(const JSON_Value *value) +{ + return json_value_get_object(value); +} + +JSON_Object *G_json_object(const JSON_Value *value) +{ + return json_object(value); +} +JSON_Object *G_json_object_get_object(const JSON_Object *object, + const char *name) +{ + return json_object_get_object(object, name); +} +JSON_Array *G_json_object_get_array(const JSON_Object *object, const char *name) +{ + return json_object_get_array(object, name); +} +JSON_Value *G_json_object_get_value(const JSON_Object *object, const char *name) +{ + return json_object_get_value(object, name); +} +const char *G_json_object_get_string(const JSON_Object *object, + const char *name) +{ + return json_object_get_string(object, name); +} +double G_json_object_get_number(const JSON_Object *object, const char *name) +{ + return json_object_get_number(object, name); +} +int G_json_object_get_boolean(const JSON_Object *object, const char *name) +{ + return json_object_get_boolean(object, name); +} +JSON_Value *G_json_object_get_wrapping_value(const JSON_Object *object) +{ + return json_object_get_wrapping_value(object); +} +JSON_Status G_json_object_set_value(JSON_Object *object, const char *name, + JSON_Value *value) +{ + return json_object_set_value(object, name, value); +} +JSON_Status G_json_object_set_string(JSON_Object *object, const char *name, + const char *string) +{ + return json_object_set_string(object, name, string); +} +JSON_Status G_json_object_set_number(JSON_Object *object, const char *name, + double number) +{ + return json_object_set_number(object, name, number); +} +JSON_Status G_json_object_set_boolean(JSON_Object *object, const char *name, + int boolean) +{ + return json_object_set_boolean(object, name, boolean); +} +JSON_Status G_json_object_set_null(JSON_Object *object, const char *name) +{ + return json_object_set_null(object, name); +} +JSON_Array *G_json_array(const JSON_Value *value) +{ + return json_array(value); +} +JSON_Value *G_json_array_get_value(const JSON_Array *array, size_t index) +{ + return json_array_get_value(array, index); +} +const char *G_json_array_get_string(const JSON_Array *array, size_t index) +{ + return json_array_get_string(array, index); +} +double G_json_array_get_number(const JSON_Array *array, size_t index) +{ + return json_array_get_number(array, index); +} +int G_json_array_get_boolean(const JSON_Array *array, size_t index) +{ + return json_array_get_boolean(array, index); +} + +JSON_Status G_json_array_append_value(JSON_Array *array, JSON_Value *value) +{ + return json_array_append_value(array, value); +} + +JSON_Status G_json_array_append_string(JSON_Array *array, const char *string) +{ + return json_array_append_string(array, string); +} + +JSON_Status G_json_array_append_number(JSON_Array *array, double number) +{ + return json_array_append_number(array, number); +} + +JSON_Status G_json_array_append_boolean(JSON_Array *array, int boolean) +{ + return json_array_append_boolean(array, boolean); +} + +JSON_Status G_json_array_append_null(JSON_Array 
*array) +{ + return json_array_append_null(array); +} + +char *G_json_serialize_to_string_pretty(const JSON_Value *value) +{ + return json_serialize_to_string_pretty(value); +} + +void G_json_free_serialized_string(char *string) +{ + json_free_serialized_string(string); +} +void G_json_value_free(JSON_Value *value) +{ + json_value_free(value); +} diff --git a/lib/external/parson/gjson.h b/lib/external/parson/gjson.h new file mode 100644 index 00000000000..0e9cb345123 --- /dev/null +++ b/lib/external/parson/gjson.h @@ -0,0 +1,48 @@ +#ifndef GRASS_GJSON_H +#define GRASS_GJSON_H + +#include "parson.h" + +/* *************************************************************** */ +/* ***** WRAPPER FOR PARSON FUNCTIONS USED IN GRASS ************** */ +/* *************************************************************** */ + +extern JSON_Value *G_json_value_init_object(void); +extern JSON_Value *G_json_value_init_array(void); + +extern JSON_Object *G_json_value_get_object(const JSON_Value *); +extern JSON_Object *G_json_object(const JSON_Value *); +extern JSON_Object *G_json_object_get_object(const JSON_Object *, const char *); +extern JSON_Array *G_json_object_get_array(const JSON_Object *, const char *); +extern JSON_Value *G_json_object_get_value(const JSON_Object *, const char *); +extern const char *G_json_object_get_string(const JSON_Object *, const char *); +extern double G_json_object_get_number(const JSON_Object *, const char *); +extern int G_json_object_get_boolean(const JSON_Object *, const char *); +extern JSON_Value *G_json_object_get_wrapping_value(const JSON_Object *); + +extern JSON_Status G_json_object_set_value(JSON_Object *, const char *, + JSON_Value *); +extern JSON_Status G_json_object_set_string(JSON_Object *, const char *, + const char *); +extern JSON_Status G_json_object_set_number(JSON_Object *, const char *, + double); +extern JSON_Status G_json_object_set_boolean(JSON_Object *, const char *, int); +extern JSON_Status G_json_object_set_null(JSON_Object *, const char *); + +extern JSON_Array *G_json_array(const JSON_Value *); +extern JSON_Value *G_json_array_get_value(const JSON_Array *, size_t); +extern const char *G_json_array_get_string(const JSON_Array *, size_t); +extern double G_json_array_get_number(const JSON_Array *, size_t); +extern int G_json_array_get_boolean(const JSON_Array *, size_t); + +extern JSON_Status G_json_array_append_value(JSON_Array *, JSON_Value *); +extern JSON_Status G_json_array_append_string(JSON_Array *, const char *); +extern JSON_Status G_json_array_append_number(JSON_Array *, double); +extern JSON_Status G_json_array_append_boolean(JSON_Array *, int); +extern JSON_Status G_json_array_append_null(JSON_Array *); + +extern char *G_json_serialize_to_string_pretty(const JSON_Value *); +extern void G_json_free_serialized_string(char *); +extern void G_json_value_free(JSON_Value *); + +#endif /* GRASS_GJSON_H */ diff --git a/lib/external/parson/test/Makefile b/lib/external/parson/test/Makefile new file mode 100644 index 00000000000..893b3e59bc3 --- /dev/null +++ b/lib/external/parson/test/Makefile @@ -0,0 +1,10 @@ +MODULE_TOPDIR = ../../../.. + +PGM=test.gjson.lib + +LIBES = $(PARSONLIB) $(GISLIB) +DEPENDENCIES = $(PARSONDEP) $(GISDEP) + +include $(MODULE_TOPDIR)/include/Make/Module.make + +default: cmd diff --git a/lib/external/parson/test/test.gjson.lib.html b/lib/external/parson/test/test.gjson.lib.html new file mode 100644 index 00000000000..1d399f9eafa --- /dev/null +++ b/lib/external/parson/test/test.gjson.lib.html @@ -0,0 +1,9 @@ +
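
The patch above introduces a G_json_* wrapper API in lib/external/parson/gjson.{c,h} that simply forwards to the bundled parson functions, plus a small test module under lib/external/parson/test. Below is a minimal sketch of how a caller might use that wrapper, using only functions declared in the new gjson.h; it assumes the header ends up includable as <grass/gjson.h> (per the new `headers:` rule in lib/external/parson/Makefile) and that the caller links against $(PARSONLIB) as the new test Makefile does. The field names and numbers are purely illustrative.

```c
#include <stdio.h>
/* Assumed install location of the new header; adjust if ARCH_INCDIR differs. */
#include <grass/gjson.h>

int main(void)
{
    /* Root object that will become {"map": ..., "cells": ..., "range": [...]} */
    JSON_Value *root_value = G_json_value_init_object();
    JSON_Object *root = G_json_value_get_object(root_value);

    G_json_object_set_string(root, "map", "elevation");
    G_json_object_set_number(root, "cells", 2025000);

    /* Build a nested array separately, then attach it to the root object. */
    JSON_Value *range_value = G_json_value_init_array();
    JSON_Array *range = G_json_array(range_value);
    G_json_array_append_number(range, 55.58);
    G_json_array_append_number(range, 156.33);
    G_json_object_set_value(root, "range", range_value);

    /* Serialize, print, and free everything allocated above
     * (freeing the root value also releases the attached array). */
    char *serialized = G_json_serialize_to_string_pretty(root_value);
    if (serialized) {
        puts(serialized);
        G_json_free_serialized_string(serialized);
    }
    G_json_value_free(root_value);

    return 0;
}
```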