diff --git a/.env b/.env index 9fe6d52c..3a4bf73b 100644 --- a/.env +++ b/.env @@ -3,11 +3,16 @@ COMPOSE_PROJECT_NAME=search # unify separator with windows style COMPOSE_PATH_SEPARATOR=; # dev is default target +# in prod, you should use docker-compose.yml;docker/prod.yml;docker/monitor.yml COMPOSE_FILE=docker-compose.yml;docker/dev.yml # Version of Elastic products STACK_VERSION=8.3.3 +# Set TAG to sha- of the version you want to use +# if you want to use a docker image from our repository +# TAG=sha- + # Set the cluster name CLUSTER_NAME=docker-cluster @@ -32,12 +37,14 @@ ES_EXPOSE=127.0.0.1:9200 NGINX_BASIC_AUTH_USER_PASSWD= # by default on dev desktop, no restart +# set to always for production RESTART_POLICY=no # Increase or decrease based on the available host memory (in bytes) # 1GB works well, 2GB and above leads to lower latency MEM_LIMIT=4294967296 +# This is the name of a network possibly shared with other containers # on dev connect to the same network as off-server COMMON_NET_NAME=po_default @@ -51,4 +58,5 @@ LOG_LEVEL=DEBUG # This envvar is **required** CONFIG_PATH= + ALLOWED_ORIGINS='http://localhost,http://127.0.0.1,https://*.openfoodfacts.org,https://*.openfoodfacts.net' diff --git a/.github/labeler.yml b/.github/labeler.yml index 25689fc3..10a9f719 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -3,27 +3,19 @@ # Pull requests that update GitHub Actions code. If you navigate to the folder, you will have a README of what it does 📚 Documentation: - changed-files: - - any-glob-to-any-file: 'docs/how-to-install.md' - - any-glob-to-any-file: 'docs/sphinx/_static/.empty' - - any-glob-to-any-file: 'docs/sphinx/_templates/.empty' - - any-glob-to-any-file: 'docs/sphinx/api.rst' - - any-glob-to-any-file: 'docs/sphinx/cli.rst' - - any-glob-to-any-file: 'docs/sphinx/conf.py' - - any-glob-to-any-file: 'docs/sphinx/config.rst' - - any-glob-to-any-file: 'docs/sphinx/index.rst' - - any-glob-to-any-file: 'docs/sphinx/misc.rst' - - any-glob-to-any-file: 'docs/sphinx/searching.rst' - - any-glob-to-any-file: 'docs/sphinx/types.rst' - - any-glob-to-any-file: 'docs/users/ref-python.md' - + - any-glob-to-any-file: + - 'docs/**/*' + - 'docs/*' API: - changed-files: - - any-glob-to-any-file: 'docs/sphinx/api.rst' + - any-glob-to-any-file: + - 'docs/sphinx/api.rst' + - 'app/api.py' Build Scripts: - changed-files: - any-glob-to-any-file: 'scripts/Dockerfile.sphinx' - - any-glob-to-any-file: 'scripts/build_sphinx.sh' + - any-glob-to-any-file: 'scripts/build_*.sh' - any-glob-to-any-file: 'scripts/generate_doc.sh' - any-glob-to-any-file: 'scripts/sphinx/Makefile' diff --git a/.gitignore b/.gitignore index c74fef74..2b387f75 100644 --- a/.gitignore +++ b/.gitignore @@ -102,9 +102,6 @@ venv.bak/ # mkdocs documentation /gh_pages -# github pages -gh_pages/ - # mypy .mypy_cache/ .dmypy.json diff --git a/README.md b/README.md index d82a931b..6046e355 100644 --- a/README.md +++ b/README.md @@ -1,159 +1,23 @@ # ![Search-a-licious](./assets/RVB_HORIZONTAL_WHITE_BG_SEARCH-A-LICIOUS-50.png "Search-a-licious logo") +Search-a-licious unlocks the full potential of large data collections by transforming them into easily searchable content. Users can quickly and efficiently find exactly what they need. -**NOTE:** This is a prototype which is being heavily evolved to be more generic, more robust and have much more functionalities. +With powerful text queries, facet exploration, and intuitive visualizations, Search-a-licious empowers your users to dive deep into data effortlessly. 
-This API is currently in development. Read [Search-a-licious roadmap architecture notes](https://docs.google.com/document/d/1mibE8nACcmen6paSrqT9JQk5VbuvlFUXI1S93yHCK2I/edit) to understand where we are headed. +Developers can rapidly build and deploy new applications based on existing data collections in just hours. The platform offers reusable components that adapt to various contexts, all built on best-in-class open-source tools. -### Organization +Search-a-licious was originally developed to power the [Open Food Facts](https://world.openfoodfacts.org/) project, helping consumers make informed choices for their health and the planet. -There is a [Lit/JS Frontend](frontend/README.md) and a Python (FastAPI) Backend (current README) located on this repository. +Its versatile architecture makes it ideal for a wide range of applications: from exposing large data collections to the public, to building decision support systems and exploratory tools. Search-a-licious is the key to unlocking the value in your data. -### Backend +Ready to use it ? Jump to the [documentation](https://openfoodfacts.github.io/search-a-licious/) -The main file is `api.py`, and the schema is in `models/product.py`. +This is an Open Source project and [contributions are very welcome](https://openfoodfacts.github.io/search-a-licious/#contributing) ! -A CLI is available to perform common tasks. - -### Running the project on your machine - -Note: the Makefile will align the user id with your own uid for a smooth editing experience. - -Before running the services, you need to make sure that your [system mmap count is high enough for Elasticsearch to run](https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html). You can do this by running: - -```console -sudo sysctl -w vm.max_map_count=262144 -``` - -Then build the services with: - -``` -make build -``` - -Start docker: - -```console -docker compose up -d -``` - -> [!NOTE] -> You may encounter a permission error if your user is not part of the `docker` group, in which case you should either [add it](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user) or modify the Makefile to prefix `sudo` to all docker and docker compose commands. -> Update container crash because we are not connected to any Redis - -Docker spins up: -- Two elasticsearch nodes -- [Elasticvue](https://elasticvue.com/) -- The search service on port 8000 -- Redis on port 6379 - -You will then need to import from a JSONL dump (see instructions below). - -### Development - -#### Pre-requisites -##### Installing Docker -- First of all, you need to have Docker installed on your machine. You can download it [here](https://www.docker.com/products/docker-desktop). -- Be sure you can [run docker without sudo](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user) - -##### Installing Direnv -For Linux and macOS users, You can follow our tutorial to install [direnv](https://openfoodfacts.github.io/openfoodfacts-server/dev/how-to-use-direnv/).[^winEnvrc] - -Get your user id and group id by running `id -u` and `id -g` in your terminal. 
-Add a `.envrc` file at the root of the project with the following content: -```shell -export USER_GID= -export USER_UID= - -export CONFIG_PATH=data/config/openfoodfacts.yml -export OFF_API_URL=https://world.openfoodfacts.org -export ALLOWED_ORIGINS='http://localhost,http://127.0.0.1,https://*.openfoodfacts.org,https://*.openfoodfacts.net' -``` - -[^winEnvrc]: For Windows users, the .envrc is only taken into account by the `make` commands. - -##### Installing Pre-commit -You can follow the following [tutorial](https://pre-commit.com/#install) to install pre-commit on your machine. - -##### Installing mmap -Be sure that your [system mmap count is high enough for Elasticsearch to run](https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html). You can do this by running: -```shell -sudo sysctl -w vm.max_map_count=262144 -``` -To make the change permanent, you need to add a line `vm.max_map_count=262144` to the `/etc/sysctl.conf` file and run the command `sudo sysctl -p` to apply the changes. -This will ensure that the modified value of `vm.max_map_count` is retained even after a system reboot. Without this step, the value will be reset to its default value after a reboot. - -#### Running your local instance using Docker -Now you can run the project with Docker ```docker compose up ```. -After that run the following command on another shell to compile the project: ```make tsc_watch```. -Do this for next installation steps and to run the project. - -#### Exploring Elasticsearch data - -- Go to http://127.0.0.1:8080/welcome -- Click on "Add Elasticsearch cluster" -- change the cluster name to "docker-cluster" -- Click on "Connect" - -#### Importing data into your development environment -- Import Taxonomies: `make import-taxonomies` -- Import products : -```shell - # get some sample data - curl https://world.openfoodfacts.org/data/exports/products.random-modulo-10000.jsonl.gz --output data/products.random-modulo-10000.jsonl.gz - gzip -d data/products.random-modulo-10000.jsonl.gz - # we skip updates because we are not connected to any redis - make import-dataset filepath='products.random-modulo-10000.jsonl' args='--skip-updates' - -#### Pages -Now you can go to : -- http://localhost:8000 to have a simple search page without use lit components -or -- http://localhost:8000/static/off.html to access to lit components search page - -To look into the data, you may use elasticvue, going to http://127.0.0.1:8080/ and reaching http://127.0.0.1:9200 cluster: `docker-cluster` (unless you changed env variables). - -#### Pre-Commit - -This repo uses [pre-commit](https://pre-commit.com/) to enforce code styling, etc. To use it: -```console -pre-commit install -``` -To run tests without committing: - -```console -pre-commit run -``` - -#### Debugging the backend app -To debug the backend app: -* stop API instance: `docker compose stop api` -* add a pdb.set_trace() at the point you want, -* then launch `docker compose run --rm --use-aliases api uvicorn app.api:app --proxy-headers --host 0.0.0.0 --port 8000 --reload`[^use_aliases] - -### Running the full import (45-60 min) -To import data from the [JSONL export](https://world.openfoodfacts.org/data), download the dataset in the `data` folder, then run: - -`make import-dataset filepath='products.jsonl.gz'` - -If you get errors, try adding more RAM (12GB works well if you have that spare), or slow down the indexing process by setting `num_processes` to 1 in the command above. - -Typical import time is 45-60 minutes. 
- -If you want to skip updates (eg. because you don't have a Redis installed), -use `make import-dataset filepath='products.jsonl.gz' args="--skip-updates"` - -You should also import taxonomies: - -`make import-taxonomies` - -### Using sort script - -See [How to use scripts](./docs/users/how-to-use-scripts.md) ## Thank you to our sponsors ! This project has received financial support from the NGI Search (New Generation Internet) program, funded by the 🇪🇺 European Commission. Thank you for supporting Open-Souce, Open Data, and the Commons. -NGI-search logo +NGI-search logo European flag diff --git a/app/api.py b/app/api.py index 55ce739f..bb11eafe 100644 --- a/app/api.py +++ b/app/api.py @@ -106,6 +106,12 @@ def get_document( @app.post("/search") def search(search_parameters: Annotated[SearchParameters, Body()]): + """This is the main search endpoint. + + It uses POST request to ensure privacy. + + Under the hood, it calls the :py:func:`app.search.search` function + """ return app_search.search(search_parameters) @@ -138,6 +144,10 @@ def search_get( charts: GetSearchParamsTypes.charts = None, index_id: GetSearchParamsTypes.index_id = None, ) -> SearchResponse: + """This is the main search endpoint when using GET request + + Under the hood, it calls the :py:func:`app.search.search` function + """ # str to lists langs_list = langs.split(",") if langs else ["en"] fields_list = fields.split(",") if fields else None @@ -183,6 +193,7 @@ def taxonomy_autocomplete( ] = None, index_id: Annotated[str | None, INDEX_ID_QUERY_PARAM] = None, ): + """API endpoint for autocompletion using taxonomies""" check_config_is_defined() global_config = cast(config.Config, config.CONFIG) check_index_id_is_defined_or_400(index_id, global_config) @@ -216,6 +227,7 @@ def taxonomy_autocomplete( @app.get("/", response_class=HTMLResponse) def off_demo(): + """Redirects to the off.html page""" return RedirectResponse(url="/static/off.html", status_code=status.HTTP_302_FOUND) @@ -231,6 +243,7 @@ def html_search( # Display debug information in the HTML response display_debug: bool = False, ): + """A demo page to test the search endpoint directly""" if not q: return templates.TemplateResponse("search.html", {"request": request}) @@ -282,6 +295,10 @@ def robots_txt(): @app.get("/health") def healthcheck(): + """API endpoint to check the health of the application + + It uses :py:mod:`app.health`. + """ from app.health import health message, status, _ = health.run() diff --git a/app/health.py b/app/health.py index d8c0c572..d08feb67 100644 --- a/app/health.py +++ b/app/health.py @@ -1,3 +1,10 @@ +"""This module contains the health check functions for the application. + +It is based upon the `py-healthcheck`_ library. + +.. 
_py-healthcheck: https://github.com/klen/py-healthcheck
+"""
+
 from healthcheck import HealthCheck
 
 from app.utils import connection, get_logger
@@ -8,6 +15,7 @@
 
 
 def test_connect_redis():
+    """Test connection to REDIS."""
     logger.debug("health: testing redis connection")
     client = connection.get_redis_client(socket_connect_timeout=5)
     if client.ping():
@@ -16,6 +24,7 @@
 
 
 def test_connect_es():
+    """Test connection to ElasticSearch."""
     logger.debug("health: testing es connection")
     es = connection.get_es_client(timeout=5)
     if es.ping():
diff --git a/docs/.pages b/docs/.pages
index e9bec3e2..e9761171 100644
--- a/docs/.pages
+++ b/docs/.pages
@@ -1,3 +1,4 @@
 nav:
   - README.md
   - users
+  - devs
diff --git a/docs/README.md b/docs/README.md
index 9e137960..9c0a63c5 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -19,23 +19,29 @@ It provides a ready to use component to:
 * build powerful in app features thanks to a powerful API
 
 On a technical level, you can use:
-* web components to quickly build your UI using any javascript framework, or plain HTML
+* [web components](./users/tutorial.md#building-a-search-interface) to quickly build your UI using any JavaScript framework, or plain HTML
 * sensible defaults to provide a good search experience
-* an easy to setup, one stop, file configuration to describe your content
-* a ready to deploy docker compose file including all needed services
-* a one command initial data import from a jsonl data export
-* continuous update through a stream of events
+* an easy to setup, [one stop, file configuration](./users/tutorial.md#create-a-configuration-file) to describe your content
+* a [ready to deploy Docker Compose file](./users/how-to-install.md) including all needed services
+* a [one command initial data import](./users/tutorial.md#import-the-data) from a JSONL data export
+* [continuous update](./users/how-to-update-index.md) through a stream of events
 
-It leverage existing components:
-* [Elasticsearch](https://www.elastic.co/elasticsearch) for the search engine[^OpenSearchWanted]
+It leverages existing components:
+* [Elasticsearch](https://www.elastic.co/elasticsearch) for the search engine
 * [Web Components](https://developer.mozilla.org/en-US/docs/Web/API/Web_Components) (built thanks to [Lit framework](https://lit.dev/))
 * [Vega](https://vega.github.io/) for the charts
-* [Redis] for event stream[^AltRedisWanted]
+* [Redis] for event stream
 
-[^OpenSearchWanted]: [Open Search](https://opensearch.org/) is also a desirable target, contribution to verify compatibility and provide it as default would be appreciated.
+[Read our tutorial](./users/tutorial.md) to get started !
 
-[^AltRedisWanted]: an alternative to Redis for event stream would also be a desirable target.
+## Contributing
+This is an Open Source project and contributions are really welcome !
+
+See our [developer introduction](./devs/introduction.md) to get started.
+
+Every contribution, such as bug reports, documentation, or UX design, is also really welcome !
+See our [wiki page about Open Food Facts](https://wiki.openfoodfacts.org/Search-a-licious)
 
 ## documentation organization
@@ -47,4 +53,12 @@ Pages title should start with:
 * *tutorial on* - tutorials aimed at learning
 * *how to…* - how to guides to reach a specific goal
 * *explain…* - explanation to understand a topic
-* *reference…* - providing detailed information
\ No newline at end of file
+* *reference…* - providing detailed information
+
+
+## Thank you to our sponsors !
+ +This project has received financial support from the NGI Search (New Generation Internet) program, funded by the 🇪🇺 European Commission. Thank you for supporting Open-Source, Open Data, and the Commons. + +NGI-search logo +European flag diff --git a/docs/assets/architecture-diagram.drawio b/docs/assets/architecture-diagram.drawio new file mode 100644 index 00000000..445e16e0 --- /dev/null +++ b/docs/assets/architecture-diagram.drawio @@ -0,0 +1 @@ +7Vxbc5s4FP41nuk+bAYQYPyYa7e7aZpOOtP2aQeDMLQYUSHH9v76lUCYi2Qb21zstslMgg5Cls4539G5CI/A7Xz1Ftux/x65MBxpirsagbuRppkWoH8ZYZ0RdEXNCDMcuBmpRHgJ/oOcqHDqInBhUulIEApJEFeJDooi6JAKzcYYLavdPBRWPzW2Z1AgvDh2KFI/By7xM6qljQv6XzCY+fknq+YkuzO38858JYlvu2hZIoH7EbjFCJHsar66hSHjXc6X7LmHLXc3E8MwIk0eWH9cfrm+s9SnZOFPnkwLOfef/tT5bF/tcMFXzGdL1jkLMFpELmSjKCNws/QDAl9i22F3l1TmlOaTeUhbKr30gjC8RSHCtB2hiHa6SQhG32GNKM6eL+gVYgJXJRJfzVuI5pDgNe3C7wLOWK5ZY95cFmIyLE7zSyIycoHYXDVmm5EL7tELzsADmKkaAu+gS7WJNxEmPpqhyA7vC+pNlbtFn0eEYs7Tb5CQNYeGvSCoynHKQLz+wp9PG19Z48rIm3er8s27NW8lxMbkmkGkLCpKewjYstM+W6WUoAV24C5ecKzaeAbJrn5c3xijdgodw9AmwWsVljIRpo/SddnrUocYBRFJSiM/M0KhS5pZVSagG1Uw7elvKEpNfbIZFMq0WcoJ+iVg9c4mNnsqmsGEBCg6Dbt2GMwieh1Cj4rshiExoJbwmpOniBA0r2F8pAHXgJarC0CndyxtCkyzHbTrVYbL4L7ZNMpwV42u4A4Ecdy/wlRJXgiG9rw9Q0pZ6Xme5jgyJrvm1DRaYrJRY7IlMtmU8NjsjMXibvTLWlS9oUUF6llYVJVb0C0WtRsLqW+zkB4kjg9xq5Dsx+6p2n5MGr1iEvzGZM4L4zcm9+qLIWDyg+eFAZVEa47LWQATGAMD0xoWlwUUv1aQKMclXAUke0wzePNrPiS9Lp5ijfyh44E6bgrUtnF6kkTHAnSuI/bpbrapxXQOHsInOpp7vH7CFEFAmA0tT+qNmo4Fp14nLr9k49t0qXj8oCuENUiWwMjNdxsntJMkcKrcroomR4FaggAHUgcgAA1BoDcEwR5Dl9MO29OETQtMqmqg1wO6bN38qR0xu17TJ12pDZQxRhiotfB9PISBrjhAhUqWfaCaHdcqhlzdY8iPV8c89bM/R3ReVjmfd8kMJNDGjs+mGM1Sz2aANEw/GQJQy7rqEqs8kRgDvSubrE6GdntS1jbGS9p6hjigy4d4JIlStoP0BKw1tf1qU+PfE9bEJNsGa9fP7wYBWj/hBVAnV9VoTZrytJS8Vz9JT1VMsWQ+qRMGkO+bP6ErKkpD5o2acmF05o/mA19GyNdxwclsaOHaz8WcBihTABSxVyhC8wAmlP4GxSxBYod/tIMth3KW7TsCuuaB66YKMgjA1JqXrxqGgC9ZNbc7v8LavvN8DlymYwPJo59ym1WThy7au7Es6DM6ksdmCeXg2wzZPuGhNMxzNpwwfyzYqQrKB2Bo7LdMMmfs/9Pjc5p/pLyPceqJ8cHo5LLxsn6nCflw0bl24qej7/T3DpBjvbAtyVL2WtLLcyYlOT5Q9mVp6wtLAYOJeTWuOWnS8gyLqXtkseg0PyzSx+i6GFJyQ3Z5DK9Fn4MXqDXRHU7PwVHSDEYQ24Sy4QL5PDk3qyFWkf5++fDUZhR4po6Rud8v2lSi+nGMBgk6Os+QND1ekWvimYQPmhg+iOI5qiZR1CH2F+aOrc7vV5zjBdo4IGycXu6n3iGcdFNqLnXTegeoDWSYtYG21DsOPjtZ35R1Zfe8lNP693JqQRNrrzO+sf/0OX69QY6/11hcG+Row+GVs4MKZ3I7q+y0s8dbQ63pCQit9QLAlmqsWg2c9ElNexqXdc3J1XhS+qmNW8++dlzl1cRDApu80cvdP+2YjTNNGtXDhsGTRkBMGr0rjpxlCZ8pznM9IZpRhF9etFbPnQJVjBH6PSJqDmGvjzXE3dYlQNPA4rziCrCtzsfPnrmLeUz/vWFR+GNLpYnzK/tZtb1EYs+kwXd375yIeZAY4oThhMLCHRW5PYV9ENUWO/oeRLMLtGq6oVaYL831Tfq0amJIcHoFIgsmko28wmCKbZyW/jqoR3QXdrRdtzC0M4tBwO8YJGtUJC0oBNf2k3bMxqFKN29VCBGGMW4zVDkuPDk0KWPU8sb5qdmtU9tyyrbb17U0iUW9vI2qlmYDoqUCEkvV2Wvoulj+++QHbIuJMfrGviSB7VTzmHq6UZtl112v+9c2nha4Pt7PdVX2dkB3bBc9Zsp27GYLJWzq7bywMRzP2QG3mv3a2Jl9LnF3fBdd4naNSC+RhpbnjXbwtV8rIhaW7ldBQtJgQkExjIq9/GewJ7qq7BWApshPcnQnBFnI0aJu9/TOgNXIbHSl3v+Op9G7p3vzh+n52mdt9vHHzVLyXRqnxHLvH1lKBFMXU5JW5K+gwwgHjj9PD0nXu5zv4TNByhJd2B7EgWoI3+cxEqnUZf7m8VJn4dqcBhaOKNCYRdaRVNYeP+2mJDCkzpg0E/2L6EOfx7ek+iB6ypfOYqHmP7CdFZ3iUxDnLcgCM39jifD3zkFysANykuTqAY3MsW4poKHN4nvnsmC++PI+cP8/ \ No newline at end of file diff --git a/docs/assets/architecture-diagram.drawio.svg b/docs/assets/architecture-diagram.drawio.svg new file mode 100644 index 00000000..fb38e63b --- /dev/null +++ b/docs/assets/architecture-diagram.drawio.svg @@ -0,0 +1,4 @@ + + + +
Data ingestion
Data ingestion
Event Stream
Event Stre...
Data fetcher
Data fet...
Offline ingestion
Offline...
Any data platform
Any data platform
search engine
search engine
search API
search API
Any client
Any client
taxonomies
taxonomies
search Widget
search Widg...
NLP interpreter
NLP interpret...
Facets
Facets
Fulltext
Fulltext
Graph generator
Graph generat...
JSON API
JSON API
graph engine
graph engine
search SDK
search SDK
Ingestion
logic
Ingestio...
Any data dump
Any data dump
personalized search / ranking
personalized...
Text is not SVG - cannot display
\ No newline at end of file
diff --git a/docs/devs/.pages b/docs/devs/.pages
new file mode 100644
index 00000000..ed42b89b
--- /dev/null
+++ b/docs/devs/.pages
@@ -0,0 +1,6 @@
+nav:
+  - introduction.md
+  - ... | how-*.md
+  - ... | explain-*.md
+  - ... | ref-*.md
+  - ...
\ No newline at end of file
diff --git a/docs/devs/explain-architecture.md b/docs/devs/explain-architecture.md
new file mode 100644
index 00000000..806c77e5
--- /dev/null
+++ b/docs/devs/explain-architecture.md
@@ -0,0 +1,21 @@
+# Explain Architecture
+
+Refer to the [introduction](./introduction.md) for an overview of the main components.
+
+This image illustrates the main components and their interactions.
+
+![Architecture diagram](../assets/architecture-diagram.drawio.svg) ([source](../assets/architecture-diagram.drawio))
+
+The main idea is to have:
+* a very good stack with best in class open source components (see docker compose files)
+* a very powerful API that is both capable and easy to use.
+  This is achieved through the use of the Lucene Query Language and by abstracting some complex processing under the hood (synonyms, language support, aggregations, etc.)
+* a single, easy to use configuration file to declare your data structure and what you need to search
+* web components to quickly build UIs while still being able to customize them in many scenarios.
+
+## Archive
+
+The initial [Search-a-licious roadmap architecture notes](https://docs.google.com/document/d/1mibE8nACcmen6paSrqT9JQk5VbuvlFUXI1S93yHCK2I/edit) might be interesting to grasp the general idea of the project architecture.
+
+
+
diff --git a/docs/devs/explain-web-frontend.md b/docs/devs/explain-web-frontend.md
new file mode 100644
index 00000000..f678d228
--- /dev/null
+++ b/docs/devs/explain-web-frontend.md
@@ -0,0 +1,72 @@
+# Explain web frontend
+
+The search-a-licious web frontend supplies web components.
+These are built using [lit](https://lit.dev) and [typescript](https://www.typescriptlang.org/).
+
+You can find the documentation for each widget in the [Reference for Search-a-licious Web Components](../users/ref-web-components.md) file.
+
+
+## Explanation on code structure
+
+We use web components because they enable integration in a very wide variety of situations.
+
+The `search-ctl.ts` file contains the search controller, which is responsible for launching searches and dispatching results. In practice this is a mixin, used by the search bar component, which takes on this controller role. It is the contact point with the API.
+The controller has a specific name, and components that are linked to it refer to it through the search bar name (`search-name` property). The default is 'searchalicious'.
+
+Components communicate with the search controller thanks to events, see `events.ts`.
+There is an event to launch a search or change page, and one to dispatch search results.
+Events always contain the search name, so we could have more than one search on the same page.
+
+We tend to factor code using mixins when it makes sense;
+for example, as there are lots of components that need the search results, a mixin contains the logic to register to such events (see `search-results-ctl.ts`).
+
+## Writing documentation
+
+We render the reference on web components using the [`api-viewer`](https://api-viewer.open-wc.org/docs) web component.
+
+Please comply with JSDoc and [document every property / slot](https://api-viewer.open-wc.org/docs/guide/writing-jsdoc/) etc. on each web component.
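+
+For instance, a documented component could look like this (a minimal sketch; `searchalicious-demo` is a hypothetical element, not one of the project's components):
+
+```ts
+import {LitElement, html} from 'lit';
+import {customElement, property} from 'lit/decorators.js';
+
+/**
+ * A demo element showing the JSDoc markers picked up by the analyzer.
+ *
+ * @slot - default slot, rendered inside the element
+ */
+@customElement('searchalicious-demo')
+export class SearchaliciousDemo extends LitElement {
+  /** Name of the search bar this component is linked to */
+  @property({attribute: 'search-name'})
+  searchName = 'searchalicious';
+
+  override render() {
+    return html`<slot></slot>`;
+  }
+}
+```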
+
+## Tools
+
+Thanks to the Makefile in the root folder, you can run:
+
+* `make check_front` to run all frontend checks
+* `make lint_front` to lint the JS code
+
+While coding, you might want to run: `make tsc_watch` to have your code compile every time you save a `.ts` file.
+
+We generate a [custom-elements.json manifest](https://github.com/webcomponents/custom-elements-manifest) using the [custom elements manifest analyzer](https://custom-elements-manifest.open-wc.org/analyzer).
+Please use supported [JSDoc markers](https://custom-elements-manifest.open-wc.org/analyzer/getting-started/#supported-jsdoc) in your code to document components.
+
+The components documentation is rendered in `web-components.html`, using the [api-viewer component](https://api-viewer.open-wc.org/).
+
+## Tests
+
+`make test_front` runs the JS tests.
+
+Note that we use:
+* the [Open Web Component testing framework](https://open-wc.org/docs/testing/testing-package/),
+  which in turn uses:
+  * [Mocha](https://mochajs.org/) as the test runner,
+  * which runs tests using [playwright](https://playwright.dev/),
+  * and [Chai](https://www.chaijs.com/) for assertions
+
+## Translations
+
+In the frontend, we use [lit-localize](https://lit.dev/docs/localization/overview/), a library that leverages lit-element for managing translations of hardcoded text.
+The language is set to the browser's language if it is supported by the project, otherwise it is set to the default language (English).
+The translations are stored in `xliff` files in the `frontend/xliff` directory.
+
+To add a new translation you need to:
+- add `msg` in your code like this https://lit.dev/docs/localization/overview/#message-types
+- run `npm run translations:extract` to extract the new translations
+- add your translation with a 'target' tag in the `xliff/.xlf` files
+- run `npm run translations:build` to update the translations in the `src/generated/locales/.js` file
+
+To add a language, you have to add the language code to `targetLocales` in `lit-localize.json`
+
+
+### Translations in Crowdin
+
+We can use Crowdin to manage translations.
+All files in the xliff/ folder can be uploaded to Crowdin, as it supports the [xlf format](https://store.crowdin.com/xliff).
diff --git a/docs/devs/how-to-debug-backend.md b/docs/devs/how-to-debug-backend.md
new file mode 100644
index 00000000..6c00d79b
--- /dev/null
+++ b/docs/devs/how-to-debug-backend.md
@@ -0,0 +1,9 @@
+# How to debug the backend app
+
+By default, the API runs on uvicorn, which uses autoreloading and more than one thread, and it does not have a tty. So if you use `pdb.set_trace()` you won't be able to access the console.
+
+To debug the backend app:
+* stop the API instance: `docker compose stop api`
+* add a `pdb.set_trace()` at the point you want,
+* then launch `docker compose run --rm --use-aliases api uvicorn app.api:app --proxy-headers --host 0.0.0.0 --port 8000 --reload`[^use_aliases]
+* go to the url you want to test
\ No newline at end of file
diff --git a/docs/devs/how-to-install.md b/docs/devs/how-to-install.md
new file mode 100644
index 00000000..d2b28ec5
--- /dev/null
+++ b/docs/devs/how-to-install.md
@@ -0,0 +1,110 @@
+# How to install for local development
+
+## Pre-requisites
+
+First, follow the same [prerequisites as for a normal installation](../users/how-to-install.md#prerequisites):
+* configuring the mmap count
+* installing docker and docker compose
+
+
+## Installing Pre-commit
+
+We use pre-commit to check the code quality.
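+
+Once it is installed, typical usage looks like this (the same two commands the previous README documented):
+
+```console
+pre-commit install   # set up the git hooks once
+pre-commit run       # run the checks without committing
+```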
+
+To install the pre-commit tool itself, you can follow this
+[tutorial](https://pre-commit.com/#install).
+
+
+## Installing Direnv
+
+Direnv is a tool to automatically set environment variables depending on the current directory.
+This is handy to personalize the environment for each project, as environment variables have priority over the `.env` file.
+
+For Linux and macOS users, you can follow our tutorial to install [direnv](https://openfoodfacts.github.io/openfoodfacts-server/dev/how-to-use-direnv/).[^winEnvrc]
+
+## Setting up your environment
+
+You have several options to set up your environment:
+1. use direnv, and thus use the `.envrc` file to set up your environment
+2. add a `.envrc` that you source in your terminal
+3. modify the `.env` file directly, in which case you should be careful not to commit your changes
+
+The 1st and 2nd options are the recommended ones.
+The following steps are for those options; in case you edit the `.env` directly, just ignore the "export " keywords.
+
+Get your user id and group id by running `id -u` and `id -g` in your terminal.
+
+Add a `.envrc` file at the root of the project with the following content:
+
+```shell
+export USER_GID=
+export USER_UID=
+
+export CONFIG_PATH=data/config/openfoodfacts.yml
+export OFF_API_URL=https://world.openfoodfacts.org
+export ALLOWED_ORIGINS='http://localhost,http://127.0.0.1,https://*.openfoodfacts.org,https://*.openfoodfacts.net'
+```
+
+[^winEnvrc]: For Windows users, the .envrc is only taken into account by the `make` commands.
+
+
+## Building containers
+
+To build the containers, you can run the following command:
+```bash
+make build
+```
+
+Note: the Makefile will align the user id with your own uid for a smooth editing experience (having the same user id in container and host, so that you have permission to edit files).
+
+## Running
+
+Now you can run the project with Docker: `docker compose up`.
+
+After that, run the following command in another shell to compile the project: `make tsc_watch`.
+
+Do this for the next installation steps and to run the project.
+
+
+> [!NOTE]
+> * You may encounter a permission error if your user is not part of the `docker` group, in which case you should either [add it](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user) or modify the Makefile to prefix `sudo` to all docker and docker compose commands.
+> * The update container crashes because we are not connected to any Redis; this is not a problem
+
+Docker spins up:
+- Two elasticsearch nodes, one being exposed on port 9200 [^localhost_expose]
+  * test it by going to http://127.0.0.1:9200
+- [Elasticvue](https://elasticvue.com/) on port 8080
+  * test it by going to http://127.0.0.1:8080
+- The search service on port 8000
+  * test the API by going to http://search.localhost:8000/docs
+  * test the UI by going to http://search.localhost:8000/
+
+[^localhost_expose]: by default we only expose on the localhost interface.
+This is driven by the `*_EXPOSE` variables in `.env`.
+
+Congratulations, you have successfully installed the project !
+
+You will then need to import data from a JSONL dump (see instructions below).
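+
+You can also quickly check that the services are healthy through the API's `/health` endpoint (defined in `app/api.py`), for example:
+
+```console
+curl http://search.localhost:8000/health
+```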
+
+## Importing data into your development environment
+
+- Import Taxonomies: `make import-taxonomies`
+- Import products:
+  ```bash
+  # get some sample data
+  curl https://world.openfoodfacts.org/data/exports/products.random-modulo-10000.jsonl.gz --output data/products.random-modulo-10000.jsonl.gz
+  gzip -d data/products.random-modulo-10000.jsonl.gz
+  # we skip updates because we are not connected to any redis
+  make import-dataset filepath='products.random-modulo-10000.jsonl' args='--skip-updates'
+  ```
+
+Verify you have data by going to http://search.localhost:8000/
+
+## Exploring Elasticsearch data
+
+When you need to explore the elasticsearch data, you can use elasticvue:
+
+- Go to http://127.0.0.1:8080/welcome
+- Click on "Add Elasticsearch cluster"
+- Change the cluster name to "docker-cluster" and the URI to http://127.0.0.1:9200
+- Click on "Connect"
diff --git a/docs/devs/index.md b/docs/devs/index.md
deleted file mode 100644
index e69de29b..00000000
diff --git a/docs/devs/introduction.md b/docs/devs/introduction.md
new file mode 100644
index 00000000..ce2be8e7
--- /dev/null
+++ b/docs/devs/introduction.md
@@ -0,0 +1,31 @@
+# Developer introduction
+
+The Search-a-licious project is centered around a few main components:
+* The API, using FastAPI, that you find in the **`app` folder**. See its [ref documentation](./ref-python.md)
+* The web components to build your UI, using [Lit](https://lit.dev/), that you find in the **`frontend` folder**.
+  See [Explain frontend](./explain-web-frontend.md)
+* Docker compose for deployment, see **`docker-compose.yml` and the `docker/` folder**
+
+We use three main components:
+* [Elasticsearch](https://www.elastic.co/elasticsearch) for the search engine[^OpenSearchWanted]
+* [Redis] for the event stream[^AltRedisWanted]
+* [Vega](https://vega.github.io/) for the charts
+
+
+See [Explain Architecture](./explain-architecture.md) for more information.
+
+## Getting started
+
+See [Install the project locally for development](./how-to-install.md)
+
+
+## Development tips
+
+* [How to debug the backend](./how-to-debug-backend.md)
+
+
+[^OpenSearchWanted]: [Open Search](https://opensearch.org/) is also a desirable target; contributions to verify compatibility and provide it as a default would be appreciated.
+
+[^AltRedisWanted]: an alternative to Redis for the event stream would also be a desirable target.
+
+
diff --git a/docs/users/ref-python.md b/docs/devs/ref-python.md
similarity index 100%
rename from docs/users/ref-python.md
rename to docs/devs/ref-python.md
diff --git a/docs/sphinx/index.rst b/docs/sphinx/index.rst
index 61af4ac5..28fbe3e0 100644
--- a/docs/sphinx/index.rst
+++ b/docs/sphinx/index.rst
@@ -13,8 +13,12 @@ For `general documentation about search-a-licious
 Follow this link`__
 .. note: this only work on github pages
 .. __: /search-a-licious
 
+The backend is built using FastAPI_ and a lot of tiangolo_ packages
-Contents:
+.. _FastAPI: https://fastapi.tiangolo.com/
+.. _tiangolo: https://tiangolo.com/projects/
+
+Here is the documentation of the main modules, by topic:
 
 .. toctree::
    :maxdepth: 2
diff --git a/docs/users/how-to-install.md b/docs/users/how-to-install.md
index e9b6fe48..5c0f328f 100644
--- a/docs/users/how-to-install.md
+++ b/docs/users/how-to-install.md
@@ -1,11 +1,61 @@
 # How to install search-a-licious
 
-search-a-licious uses docker and docker compose to manage the services it needs to run. You will need to install both of these before you can use search-a-licious.
+
+## Prerequisites
+
+### Ensure mmap count is high enough
+
+If you are on Linux, before running the services, you need to make sure that your [system mmap count is high enough for Elasticsearch to run](https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html). You can do this by running:
+
+```console
+sudo sysctl -w vm.max_map_count=262144
+```
+
+To make the change permanent, you need to add a line `vm.max_map_count=262144` to the `/etc/sysctl.conf` file and run the command `sudo sysctl -p` to apply the changes.
+This will ensure that the modified value of `vm.max_map_count` is retained even after a system reboot. Without this step, the value will be reset to its default value after a reboot.
+
+### Install docker and docker compose
+
+search-a-licious uses docker and docker compose to manage the services it needs to run.
+You will need to install both of these before you can use search-a-licious.
+
 Once [docker](https://docs.docker.com/engine/install/) and [docker compose](https://docs.docker.com/compose/install/) are installed, clone the git repository locally.
+## Settings
+
 All configuration are passed through environment variables to services through the use of a `.env` file. A sample `.env` file is provided in the repository, you will need to edit this file to suit your needs.
 
-The only required change is to set the `CONFIG_PATH` variable to the path of your YAML configuration file. This file is used to configure the search-a-licious indexer and search services.
+The only required change is to set the `CONFIG_PATH` variable to the path of your YAML configuration file. This file is used to configure the search-a-licious indexer and search services. See [how to create your configuration file, in the tutorial](./tutorial.md#create-a-configuration-file)
+
+If you want to see more about application settings, see the [Reference for Settings](./ref-settings.md)
+
+Look closely at each variable in the `.env` file.
+You must at the very least:
+* change `RESTART_POLICY` to `always`
+* change `COMPOSE_FILE` to `docker-compose.yml;docker/prod.yml;docker/monitor.yml` (monitor is optional but recommended)
+* change `MEM_LIMIT` to set the elasticsearch memory limit
+* change `NGINX_BASIC_AUTH_USER_PASSWD`
+
+Then you can either:
+* rebuild the docker images by running `make build`
+* use images from our github repository. For this,
+  * edit the .env file and set `TAG` to the commit sha corresponding to the version you want to use
+
+Our [CI file for deployment](https://github.com/openfoodfacts/search-a-licious/blob/main/.github/workflows/container-deploy.yml) might serve as inspiration.
+
+## Launching
+
+You should now be able to start docker:
+
+```console
+docker compose up -d
+```
+
+> [!NOTE]
+> * You may encounter a permission error if your user is not part of the `docker` group, in which case you should either [add it](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user) or modify the Makefile to prefix `sudo` to all docker and docker compose commands.
+> * The update container might crash if you are not connected to any Redis; Search-a-licious will still run. You only need to connect to Redis if you want continuous updates. See [How to update the index](./how-to-update-index.md)
+
+
+## Using it
-If you want to see more about settings, see the [Reference for Settings](./ref-settings.md)
\ No newline at end of file
+To understand what you can then do, continue with the [tutorial](./tutorial.md).
\ No newline at end of file
diff --git a/docs/users/ref-web-components.md b/docs/users/ref-web-components.md
index da2f2257..00669f8a 100644
--- a/docs/users/ref-web-components.md
+++ b/docs/users/ref-web-components.md
@@ -4,8 +4,29 @@ This page documents [web Components](https://developer.mozilla.org/en-US/docs/We
 provided by Search-a-licious
 to quickly build your interfaces.
 
+## Customization
+
+### Styling
+
+We added a lot of `part` attributes to the components, to allow you to customize the look and feel of the components. See [::part() attribute documentation on MDN](https://developer.mozilla.org/en-US/docs/Web/CSS/::part)
+
+### Translations
+
+We only translated basic messages; most labels can generally be overridden using slots inside web components, where your own translation framework might be used (be it in JavaScript, or through your template engine or any technique).
+
+If you however need to override current translations, you might clone this project, change translations in xliff files and regenerate the bundle.
+
+
 ## Main components
+Those are the components you will certainly use to build your interface.
+Of course none are mandatory and you can pick and choose the components you need.
+
+Pay attention to the `search-name` attribute,
+which must correspond to the `name` attribute of the search bar.
+If you do not specify it, it will be the default one.
+You'll need it if you mix multiple search bars in the same page.
 
 ### searchalicious-bar
 
@@ -21,9 +42,15 @@
-### searchalicious-chart
+### searchalicious-button
+
+
+
+### searchalicious-count
+
+
-
+## Sorting
 
 ### searchalicious-sort
 
@@ -37,17 +64,40 @@
-### searchalicious-button
-
+## Charts components
-### searchalicious-count
+Charts components are based on [vega](https://vega.github.io/).
-
+### searchalicious-distribution-chart
+
+
+
+### searchalicious-scatter-chart
+
+
+
+## Layout components
+
+Layout widgets are used to lay out the page; they are not mandatory but can be useful.
+They must not create dependencies with other components.
+
+### searchalicious-panel-manager
+
+
+
+### searchalicious-layout-page
+
+
 ## Internal components
 
+Those are components that are not intended to be used directly by the user,
+but are used internally by the other components.
+
 ### searchalicious-facet-terms
 
diff --git a/docs/users/tutorial.md b/docs/users/tutorial.md
index 79ec25a9..e4f1c9f0 100644
--- a/docs/users/tutorial.md
+++ b/docs/users/tutorial.md
@@ -21,6 +21,7 @@ cd search-a-licious
 We need to create a configuration file to indicate which fields we care about in our index.
 For this we can create a conf/data/openfoodfacts-tutorial.yml file. It uses the [YAML format](https://yaml.org/).
 
+
 #### Default Index and Indices
 
 At the top we have:
@@ -169,6 +170,8 @@ indices:
 
 We have our configuration ready. That was a bit though, but this was the hardest part !
 
+Don't hesitate to read the [Reference for Configuration file](./ref-config.md) to learn more.
+
 ### Setup the project
 
 In the project you can modify the `.env` file and change variables you need to change,
@@ -182,6 +185,9 @@
 is the one that will point to our configuration file.
 CONFIG_PATH=`data/config/openfoodfacts.yml`
 ```
+See the [Reference for Settings](./ref-settings.md) to learn about other settings.
+
+
 ## Initial import
 
 ### Getting the data
@@ -224,6 +230,9 @@
 We also need our taxonomy, and there is a command `import-taxonomies` to get it.
docker compose run --rm api python3 -m app import-taxonomies ``` +You can read more about this process reading [How to update index documentation](./how-to-update-index.md#first-import). + + ### Inspecting Elasticsearch We can take a look at what just happened by using ElasticVue, a simple but handy tool to inspect Elasticsearch. @@ -315,6 +324,8 @@ If we wanted products having one or the other `labels_tags:("en:fair-trade" OR " We can also combine those filters with a search. Using `cocoa labels_tags:"en:fair-trade"` will help find some fair trade cocoa. +You can find more about the search query syntax in [Explain Query Language](./explain-query-language.md) + [^using-quotes]: Note that we have to use "" around value here, because the value contains a ":" inside.`labels_tags:en:fair-trade` would be interpreted as asking for a field named labels_tags.en having the value fair-trade. diff --git a/frontend/README.md b/frontend/README.md index 8e7e8dbe..dcd261bc 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,13 +1,16 @@ # Search-a-licious Web Frontend -The search-a-licious web frontend supplies web components. -These are built using [lit](https://lit.dev) and [typescript](https://www.typescriptlang.org/). +See [Explain frontend](../docs/devs/explain-web-frontend.md) for an introduction. + +## Credits + +This part of the project was bootstrapped using [lit-element-starter-ts](https://github.com/lit/lit-element-starter-ts/). -## Widgets -The project is currently composed of several widgets. +## Widgets - +**FIXME: all this docs should be moved to the JSDocs of the components** +which can be displayed in the [reference documentation for web components](https://openfoodfacts.github.io/search-a-licious/users/ref-web-components/) ### Main widgets @@ -83,78 +86,6 @@ This enables supporting multiple searches in the same page * it can be used to delete actions * **searchalicious-suggestion-entry** is a suggestion entry * it can be used to display a suggestion in searchalicious-bar -* searchalicious-chart-icon is a chart icon - * it can be used to display a chart icon in toggle-charts - - -## Explanation on code structure - -We use web-components for they will enable integration in a very wide variety of situations. - -The `search-ctl.ts` file contains the search controller, which is responsible for launching search and dispatching results. In practice this is a mixin, used by the search bar components which gets this role of controller. It is the contact point with the API. -The controller have a specific name, and components that are linked to it refer this the search bar name (`search-name` property). The default is 'searchalicious'. - -Components communicate with the search controller thanks to events, see `events.ts`. -There is an event to launch a search or change page, and one to dispatch search results. -Events always contains the search name, so we could have more than one search on the same page. - -We tend to factor code when it make sense using mixins, -for example as there are lots of component that needs the search results, there is a mixin than contains the logic to register to such events (see `search-results-ctl.ts`). - -## Writing documentation - -We render the reference on web components using [`api-viewer`](https://api-viewer.open-wc.org/docs) web component. - -Please comply with JSDoc and [document every property / slots](https://api-viewer.open-wc.org/docs/guide/writing-jsdoc/) etc. on each web components. 
- -## Tools -Thanks to Makefile in root folder, -* `make check_front` run all checks in front -* `make lint_front` lint js code - -While coding, you might want to run: `make tsc_watch` to have your code compile every time you save a `.ts` file. - -We generate a [custom-elements.json manifest](https://github.com/webcomponents/custom-elements-manifest) using [custom elements manifest analyzer](https://custom-elements-manifest.open-wc.org/analyzer). -Please use supported [JSDoc markers](https://custom-elements-manifest.open-wc.org/analyzer/getting-started/#supported-jsdoc) in your code to document components. - -The components documentation is rendered in `web-components.html`, using the [api-viewer component](https://api-viewer.open-wc.org/) - -## Tests - -`make test_front` run js tests. - -Note that we use: -* [Open Web Component testing framework](https://open-wc.org/docs/testing/testing-package/), - which in turn use: - * [Mocha](https://mochajs.org/) as the test runner - * which run tests using [playwright](https://playwright.dev/) - * and [Chai](https://www.chaijs.com/) for assertions - -## Translations -In the frontend, we utilize [lit-localize](https://lit.dev/docs/localization/overview/), a library that leverages lit-element for managing translations from hardcoded text. -The language is set to the browser's language if it is supported by the project, otherwise it is set to default language (English). -The translations are stored in `xliff` files in the `frontend/xliff` directory. - -To add a new translation you need to : -- add `msg` in your code like this https://lit.dev/docs/localization/overview/#message-types -- run `npm run translations:extract` to extract the new translations -- add your translation with 'target' tag in the `xliff/.xlf` files -- run `npm run translations:build` to update the translations in the `src/generated/locales/.js` file - -To add a language, you have to add the language code to `targetLocales` in `lit-localize.json` - - -### Personalizing translations as a search-a-licious user - -We only translated basic messages and most labels can generally be overridden using slots inside web component, where your own translation framework might be use (be it in javascript, or through your template engine or any technique). - -If you however needs to override current translations, you might clone this project, change translations in xliff files and regenerate the bundle. -### Translations in Crowdin -We can use Crowdin to manage translations. -All files in the xliff/ folder can be uploaded to Crowdin, as it supports the [xlf format](https://store.crowdin.com/xliff). - -## Credits -This part of the project was bootstrap using [lit-element-starter-ts](https://github.com/lit/lit-element-starter-ts/). 
diff --git a/frontend/src/layouts/search-layout-page.ts b/frontend/src/layouts/search-layout-page.ts index 7cf1ca1e..56dba32c 100644 --- a/frontend/src/layouts/search-layout-page.ts +++ b/frontend/src/layouts/search-layout-page.ts @@ -11,7 +11,10 @@ import {refreshCharts} from '../utils/charts'; /** * Component for the layout of the page - * Three columns layout with display flex + * + * It allows to handle sidebars: + * Three columns layout with display flex, + * one for facets, one for the results and one for the chart sidebar */ @customElement('searchalicious-layout-page') export class SearchLayoutPage extends SearchaliciousResultCtlMixin( diff --git a/frontend/src/layouts/search-panel-manager.ts b/frontend/src/layouts/search-panel-manager.ts index c8ff1c85..a8ac5c69 100644 --- a/frontend/src/layouts/search-panel-manager.ts +++ b/frontend/src/layouts/search-panel-manager.ts @@ -6,9 +6,14 @@ import {provide} from '@lit/context'; import {EventRegistrationMixin} from '../event-listener-setup'; /** - * This component enable sharing state about panels in a central place + * This component enables sharing state about panels in a central place. * - * It must be used if you want to use the searchalicious-layout-page component + * it should be high in the hierarchy (just below `body`) + * to wrap all other web-components + * + * It must be used if you want to use the `searchalicious-layout-page` component + * + * It allows to have a global variable to store with @lit/context */ @customElement('searchalicious-panel-manager') export class SearchaliciousPanelManager extends EventRegistrationMixin( diff --git a/frontend/src/search-chart.ts b/frontend/src/search-chart.ts index ae276c79..8b2b68c9 100644 --- a/frontend/src/search-chart.ts +++ b/frontend/src/search-chart.ts @@ -19,6 +19,9 @@ export type ChartSearchParam = ChartSearchParamPOST | string; // eslint-disable-next-line declare const vega: any; +/** + * Base class for chart elements + */ export class SearchaliciousChart extends SearchaliciousResultCtlMixin( LitElement ) { @@ -119,6 +122,11 @@ export class SearchaliciousChart extends SearchaliciousResultCtlMixin( } } +/** + * Distribution chart. + * + * This will draw bars for each value of the field, reflecting the number of elements. + */ @customElement('searchalicious-distribution-chart') export class SearchaliciousDistributionChart extends SearchaliciousChart { static override styles = [WHITE_PANEL_STYLE]; @@ -139,6 +147,13 @@ export class SearchaliciousDistributionChart extends SearchaliciousChart { }; } } + +/** + * Scatter plot chart. + * + * This will plot a point to each element at the corresponding coordinates, + * giving a sense of the distribution of the data. 
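+ *
+ * Like the distribution chart, the rendering is done through vega.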
+ */ @customElement('searchalicious-scatter-chart') export class SearchaliciousScatterChart extends SearchaliciousChart { static override styles = [WHITE_PANEL_STYLE]; diff --git a/scripts/build_sphinx.sh b/scripts/build_sphinx.sh index 8a385b68..f87cae26 100755 --- a/scripts/build_sphinx.sh +++ b/scripts/build_sphinx.sh @@ -23,6 +23,6 @@ docker run --rm --user user \ sphinx-builder make html # move to the right place and cleanup -rm -rf gh_pages/users/ref-python || true -mv gh_pages/sphinx/html gh_pages/users/ref-python +rm -rf gh_pages/devs/ref-python || true +mv gh_pages/sphinx/html gh_pages/devs/ref-python rm -rf gh_pages/sphinx/ diff --git a/scripts/generate_doc.sh b/scripts/generate_doc.sh index e41f9939..c3a26231 100755 --- a/scripts/generate_doc.sh +++ b/scripts/generate_doc.sh @@ -34,4 +34,8 @@ sudo chown $UID -R gh_pages/users/ref-web-components echo "Generate python code documentation using sphinx" scripts/build_sphinx.sh +# tell GitHub we don't want to use jekyll ! (otherwise it will remove _static) +# see https://github.blog/news-insights/the-library/bypassing-jekyll-on-github-pages/ +touch gh_pages/.nojekyll + echo "To see your doc locally, run: python3 -m http.server -d gh_pages 8001" \ No newline at end of file