diff --git a/.github/workflows/fast-model-mpi-explosion.yml b/.github/workflows/fast-model-mpi-explosion.yml new file mode 100644 index 000000000..b579ef7cf --- /dev/null +++ b/.github/workflows/fast-model-mpi-explosion.yml @@ -0,0 +1,44 @@ + +name: Fast model MPI tests + +on: [push, pull_request] + +jobs: + + pytest: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Pytest Unit Tests + uses: actions/setup-python@v2 + with: + python-version: '3.9' + - name: Install prerequisites + run: | + sudo apt-get update + sudo apt-get install python3-setuptools subversion git libopenmpi-dev g++ libjpeg8 libjpeg8-dev libfreetype6 libfreetype6-dev zlib1g-dev libpng++-dev libncurses5 libncurses5-dev libreadline-dev liblapack-dev libblas-dev gfortran libgsl0-dev openmpi-bin python-tk cmake + pip3 install pytest pytest-cov pytest-randomly coverage black + # TODO: Remove fixed numpy and pynn versions after the PyNN pull request + # https://github.com/NeuralEnsemble/PyNN/pull/762 is accepted + pip3 install numpy==1.23.5 scipy mpi4py matplotlib quantities lazyarray interval Pillow param==1.5.1 parameters neo cython pynn==0.10.0 psutil future requests elephant pytest-xdist pytest-timeout junitparser numba + - name: Download and install imagen + run: | + git clone https://github.com/antolikjan/imagen.git + cd imagen + python setup.py install + cd .. + - name: Install Nest + run: | + wget https://github.com/nest/nest-simulator/archive/v3.1.tar.gz + tar xvfz v3.1.tar.gz + cd nest-simulator-3.1 + cmake -Dwith-mpi=ON -Dwith-boost=ON -DCMAKE_INSTALL_PREFIX:PATH=$pythonLocation -Dwith-optimize='-O3' ./ + make -j8 + make -j8 install + cd .. 
+ python -c 'import nest' + - name: Install mozaik + run: python setup.py install + + - name: Test with pytest + run: pytest -m "mpi and not not_github and mpi_explosion" ./tests/full_model/test_models_mpi.py --cov=mozaik diff --git a/.github/workflows/fast-model-mpi.yml b/.github/workflows/fast-model-mpi.yml index b4602fc39..d389e59ef 100644 --- a/.github/workflows/fast-model-mpi.yml +++ b/.github/workflows/fast-model-mpi.yml @@ -41,4 +41,4 @@ jobs: run: python setup.py install - name: Test with pytest - run: pytest -v -s -m "mpi and not not_github" ./tests/full_model/test_models_mpi.py --cov=mozaik + run: pytest -m "mpi and not not_github and not mpi_explosion" ./tests/full_model/test_models_mpi.py --cov=mozaik diff --git a/.github/workflows/fast-model-stepcurrentmodule.yml b/.github/workflows/fast-model-stepcurrentmodule.yml index 4211af582..09428621b 100644 --- a/.github/workflows/fast-model-stepcurrentmodule.yml +++ b/.github/workflows/fast-model-stepcurrentmodule.yml @@ -30,7 +30,7 @@ jobs: - name: Download and install PyNN run: | - git clone https://github.com/RCagnol/PyNN.git + git clone https://github.com/CSNG-MFF/PyNN.git cd PyNN git checkout PyNNStepCurrentModule pip install . 
@@ -49,7 +49,7 @@ jobs: - name: Install stepcurrentmodule run: | - git clone https://github.com/RCagnol/nest-step-current-module.git + git clone https://github.com/CSNG-MFF/nest-step-current-module.git cd nest-step-current-module cmake -Dwith-mpi=ON -Dwith-boost=ON -Dwith-nest=$pythonLocation/bin/nest-config -Dwith-optimize='-O3' ./ make -j8 diff --git a/README.rst b/README.rst index f11a22556..34527f271 100644 --- a/README.rst +++ b/README.rst @@ -51,7 +51,6 @@ ____________ Now you can install all other dependencies in this protected environment:: - pip3 install numpy scipy mpi4py matplotlib quantities lazyarray interval Pillow param==1.5.1 parameters neo cython psutil future requests elephant pytest-xdist pytest-timeout junitparser numba pip3 install numpy==1.23.5 scipy mpi4py matplotlib quantities lazyarray interval Pillow param==1.5.1 parameters neo cython psutil future requests elephant pytest-xdist pytest-timeout junitparser numba Next we will manually install several packages. It is probably the best if you create a separate directory in an appropriate @@ -72,7 +71,7 @@ Then install the *PyNN* package from the PyNNStepCurrentModule branch:: Next install the *Nest* simulator (always in the virtual environment): - - download the latest version from their `website `_ + - download the latest version from their `website `_:: wget https://github.com/nest/nest-simulator/archive/refs/tags/v3.4.tar.gz @@ -109,7 +108,7 @@ Then install the *stepcurrentmodule* Nest module: git clone https://github.com/CSNG-MFF/nest-step-current-module.git cd nest-step-current-module - - then configure it relatively to your nest-config installation path (should reside in $HOME/virt_env/mozaik/bin/nest-config):: + - then, in the following command, replace NEST_CONFIG_PATH by your nest-config installation path (should reside in $HOME/virt_env/mozaik/bin/nest-config) and run it:: (mozaik)$ cmake -Dwith-mpi=ON -Dwith-boost=ON -Dwith-optimize='-O3' -Dwith-nest=NEST_CONFIG_PATH ./ diff --git 
a/mozaik/storage/datastore.py b/mozaik/storage/datastore.py index 0a834624c..371ce450a 100644 --- a/mozaik/storage/datastore.py +++ b/mozaik/storage/datastore.py @@ -185,6 +185,30 @@ def get_analysis_result(self, **kwargs): """ return filter_query(self.analysis_results,**kwargs) + def sort_analysis_results(self, key, reverse=False): + if key[0:3] == 'st_': + ads_id = [] + ads_id_nkey = [] + ads_nid = [] + for ads in self.analysis_results: + if ads.stimulus_id is None: + ads_nid.append(ads) + else: + if hasattr(MozaikParametrized.idd(ads.stimulus_id),key[3:]): + ads_id.append(ads) + else: + ads_id_nkey.append(ads) + self.analysis_results = ads_nid + ads_id_nkey + sorted(ads_id, key= lambda x:getattr(MozaikParametrized.idd(x.stimulus_id),key[3:]),reverse=reverse) + else: + ads_key = [] + ads_nkey = [] + for ads in self.analysis_results: + if hasattr(ads,key): + ads_key.append(ads) + else: + ads_nkey.append(ads) + self.analysis_results = ads_nkey + sorted(ads_key, key= lambda x:getattr(x,key),reverse=reverse) + def get_sensory_stimulus(self, stimuli=None): """ Return the raw sensory stimulus that has been presented to the model due to stimuli specified by the stimuli argument. diff --git a/pyproject.toml b/pyproject.toml index b55e399c9..7eef311b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,7 @@ markers = [ "LSV1M_tiny: tests that need to run a tiny version of the LSV1M model to execute. 
Should be ", "merge: tests for the datastore merging", "mpi: tests using mpi to run the model", + "mpi_explosion: tests using mpi to run the model and tests the explosion detection", "not_github: tests using mpi to run the model", "stepcurrentmodule: tests using the nest stepcurrent module to run the model", ] diff --git a/tests/full_model/test_models.py b/tests/full_model/test_models.py index fe706e9a0..04e294ab0 100644 --- a/tests/full_model/test_models.py +++ b/tests/full_model/test_models.py @@ -36,8 +36,6 @@ def setup_class(cls): if os.path.exists(cls.result_path): os.system("rm -r " + cls.result_path) os.system(cls.model_run_command) - # Hack - For some reason MPI tests sometimes can't find the result datastore without this - print(os.listdir("tests/full_model/models/LSV1M_tiny/")) # Load DataStore of recordings from the model that just ran cls.ds = cls.load_datastore(cls.result_path) # Load DataStore of reference recordings diff --git a/tests/full_model/test_models_mpi.py b/tests/full_model/test_models_mpi.py index b274c820b..4aa20a694 100644 --- a/tests/full_model/test_models_mpi.py +++ b/tests/full_model/test_models_mpi.py @@ -67,11 +67,13 @@ class TestModelExplosionMonitoringMPI(TestModel): @pytest.mark.model @pytest.mark.mpi + @pytest.mark.mpi_explosion def test_explosion(self): assert self.ds.block.annotations["simulation_log"]["explosion_detected"] @pytest.mark.model @pytest.mark.mpi + @pytest.mark.mpi_explosion def test_fr_above_threshold(self): sheet_monitored = eval(self.ds.get_model_parameters())["explosion_monitoring"][ "sheet_name" diff --git a/tests/storage/test_storage.py b/tests/storage/test_storage.py new file mode 100644 index 000000000..1c3134d5f --- /dev/null +++ b/tests/storage/test_storage.py @@ -0,0 +1,119 @@ +import matplotlib + +matplotlib.use("Agg") +from mozaik.analysis.analysis import * +from mozaik.storage.datastore import * +from mozaik.storage.queries import param_filter_query +import mozaik +import logging +import os + +import 
pytest + + +class TestDatastore: + ref_path = "tests/full_model/reference_data/LSV1M_tiny" + + @classmethod + def setup_class(cls): + """ + Runs the model and loads its result and a saved reference result + """ + cls.ds = cls.load_datastore(cls.ref_path) + TrialAveragedFiringRate( + param_filter_query(cls.ds, st_name="FullfieldDriftingSinusoidalGrating"), + ParameterSet({}), + ).analyse() + cls.ads = cls.ds.get_analysis_result() + + @staticmethod + def load_datastore(base_dir): + """ + Load PickledDataStore for reading. + + Parameters + ---------- + base_dir : base directory where DataStore files are saved + + Returns + ------- + PickledDataStore with the data from base_dir + """ + return PickledDataStore( + load=True, + parameters=ParameterSet( + {"root_directory": base_dir, "store_stimuli": False} + ), + replace=False, + ) + + def test_ADS_sorting_homogeneity(self): + dsv_sorted = param_filter_query( + self.ds, st_name="FullfieldDriftingSinusoidalGrating" + ) + dsv_sorted.sort_analysis_results("sheet_name") + adss_sorted = dsv_sorted.get_analysis_result() + count_incorrect = 0 + for ads in adss_sorted: + if ads not in self.ads: + count_incorrect += 1 + for ads in self.ads: + if ads not in adss_sorted: + count_incorrect += 1 + assert count_incorrect == 0 + + pass + + def test_ADS_sorting_st_homogeneity(self): + dsv_sorted = param_filter_query(self.ds) + dsv_sorted.sort_analysis_results("st_orientation") + adss_sorted = dsv_sorted.get_analysis_result() + count_incorrect = 0 + for ads in adss_sorted: + if ads not in self.ads: + count_incorrect += 1 + for ads in self.ads: + if ads not in adss_sorted: + count_incorrect += 1 + assert count_incorrect == 0 + + pass + + def test_ADS_sorting_order(self): + dsv_sorted = param_filter_query(self.ds) + dsv_sorted.sort_analysis_results("sheet_name") + adss_sorted = dsv_sorted.get_analysis_result() + count_correct = 0 + for ads, ads_next in zip(adss_sorted[:-1], adss_sorted[1:]): + if not hasattr(ads, "sheet_name"): + 
count_correct += 1
+            elif hasattr(ads_next, "sheet_name") and getattr(
+                ads, "sheet_name"
+            ) <= getattr(ads_next, "sheet_name"):
+                count_correct += 1
+        assert count_correct + 1 == len(adss_sorted)
+
+        pass
+
+    def test_ADS_sorting_st_order(self):
+        dsv_sorted = param_filter_query(self.ds)
+        dsv_sorted.sort_analysis_results("st_orientation")
+        adss_sorted = dsv_sorted.get_analysis_result()
+        count_correct = 0
+        for ads, ads_next in zip(adss_sorted[:-1], adss_sorted[1:]):
+            if ads.stimulus_id is None:
+                count_correct += 1
+            elif ads_next.stimulus_id and not hasattr(
+                MozaikParametrized.idd(ads.stimulus_id), "orientation"
+            ):
+                count_correct += 1
+            elif ads_next.stimulus_id and hasattr(
+                MozaikParametrized.idd(ads_next.stimulus_id), "orientation"
+            ):
+                if getattr(
+                    MozaikParametrized.idd(ads.stimulus_id), "orientation"
+                ) <= getattr(MozaikParametrized.idd(ads_next.stimulus_id), "orientation"):
+                    count_correct += 1
+        assert count_correct + 1 == len(adss_sorted)
+
+        pass