diff --git a/.flake8 b/.flake8
index 33cb491..5f48c5b 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,6 @@
 [flake8]
-ignore = E203, E266, E501, W503, F403, F401
-max-line-length = 120
+ignore = E203, E266, E501, W503, F403, F401, E704
+max-line-length = 100
 max-complexity = 18
 select = B,C,E,F,W,T4,B9
 exclude =
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d426ef2..22e0649 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,13 +6,33 @@ jobs:
   pre-commit:
     runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v3
+        with:
+          python-version: '3.10'
+      - uses: pre-commit/action@v3.0.1
+
+  pylint:
+    name: pylint
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+
     steps:
       - uses: actions/checkout@v2
-      - name: Set up Python 3.8
-        uses: actions/setup-python@v2
+
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v5.1.1
         with:
-          python-version: 3.8
-      - uses: pre-commit/action@v2.0.0
+          python-version: ${{ env.DEFAULT_PYTHON }}
+
+      - name: Install python dependencies
+        run: pip install -e .[cli,gui,dev,docs,sci]
+
+      - name: Run pylint checks
+        run: |
+          pre-commit run --hook-stage manual pylint-with-spelling --all-files
 
   tests:
     runs-on: ubuntu-latest
@@ -20,18 +40,18 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.7', '3.8', '3.9', '3.10']
+        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
         include:
-          - python-version: 3.8
+          - python-version: '3.10'
             rabbitmq: 3.6
-          - python-version: 3.8
+          - python-version: '3.10'
             rabbitmq: 3.8
 
     steps:
       - uses: actions/checkout@v2
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5.1.1
         with:
           python-version: ${{ matrix.python-version }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b0785cd..7b9c8a7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,31 +1,77 @@
+ci:
+  skip: [ pylint ]
+
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.3.0
-  hooks:
-  - id: check-added-large-files
-    args: ['--maxkb=5000']
-  - id: end-of-file-fixer
-  - id: check-case-conflict
-  - id: detect-private-key
-  - id: check-docstring-first
-- repo: https://github.com/psf/black
-  rev: 22.8.0
-  hooks:
-  - id: black
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.6.0
+    hooks:
+      - id: check-added-large-files
+        args: [ '--maxkb=5000' ]
+      - id: end-of-file-fixer
+      - id: check-case-conflict
+      - id: detect-private-key
+      - id: check-docstring-first
+      - id: fix-encoding-pragma
+        exclude: &exclude_files >
+          (?x)^(
+            docs/.*|
+          )$
+        args: [ --remove ]
+      - id: trailing-whitespace
+
+  - repo: https://github.com/ikamensh/flynt/
+    rev: '1.0.1'
+    hooks:
+      - id: flynt
+
+  - repo: https://github.com/psf/black
+    rev: 24.8.0
+    hooks:
+      - id: black
         exclude: (.*)/migrations
-- repo: https://github.com/pycqa/flake8
-  rev: 3.9.2
-  hooks:
-  - id: flake8
-- repo: https://github.com/PyCQA/bandit
-  rev: 1.7.4
-  hooks:
-  - id: bandit
-    args: [ "-c", ".bandit.yaml" ]
-- repo: https://github.com/commitizen-tools/commitizen
-  rev: v2.35.0
-  hooks:
-  - id: commitizen
-    stages: [commit-msg]
+
+  - repo: https://github.com/pycqa/flake8
+    rev: 7.1.1
+    hooks:
+      - id: flake8
+
+  - repo: https://github.com/pycqa/isort
+    rev: '5.13.2'
+    hooks:
+      - id: isort
+
+  - repo: https://github.com/PyCQA/bandit
+    rev: 1.7.9
+    hooks:
+      - id: bandit
+        args: [ "-c", ".bandit.yaml" ]
+
+  - repo: https://github.com/PyCQA/pylint
+    # Configuration help can be found here:
+    # https://pylint.pycqa.org/en/latest/user_guide/installation/pre-commit-integration.html
+    rev: v3.2.6
+    hooks:
+      - id: pylint
+        alias: pylint-with-spelling
+        stages: [ manual ]
+        language: system
+        types: [ python ]
+        require_serial: true
+        exclude: >
+          (?x)^(
+            docs/.*|
+            test/.*
+          )$
+
+  - repo: https://github.com/commitizen-tools/commitizen
+    rev: v3.29.0
+    hooks:
+      - id: commitizen
+        stages: [ commit-msg ]
+
+  - repo: https://github.com/srstevenson/nb-clean
+    rev: 3.3.0
+    hooks:
+      - id: nb-clean
diff --git a/.pylintrc b/.pylintrc
index d8f7f81..3797bbc 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -50,7 +50,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=missing-docstring, useless-object-inheritance, locally-disabled
+disable=missing-docstring, useless-object-inheritance, locally-disabled, consider-using-from-import, use-dict-literal
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
@@ -372,7 +372,7 @@ known-third-party=enchant
 [DESIGN]
 
 # Maximum number of arguments for function / method
-max-args=6
+max-args=10
 
 # Maximum number of attributes for a class (see R0902).
 max-attributes=12
@@ -422,4 +422,4 @@ valid-metaclass-classmethod-first-arg=mcs
 
 # Exceptions that will emit a warning when being caught. Defaults to
 # "Exception"
-overgeneral-exceptions=Exception
+overgeneral-exceptions=builtins.Exception
diff --git a/LICENSE b/LICENSE.txt
similarity index 100%
rename from LICENSE
rename to LICENSE.txt
diff --git a/black.toml b/black.toml
index 1a667de..48e2639 100644
--- a/black.toml
+++ b/black.toml
@@ -1,4 +1,4 @@
-line-length = 120
+line-length = 100
 target-version = ['py37', 'py38', 'py39', 'py310']
 include = '\.pyi?$'
 exclude = '''
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 692b0bf..a5b217f 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -13,6 +13,8 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
+from importlib.machinery import SourceFileLoader
+
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -20,15 +22,11 @@
 import os
 import sys
 
-from importlib.machinery import SourceFileLoader
-
 sys.path.insert(0, os.path.abspath(os.path.dirname("__file__")))
 
 module = SourceFileLoader(
     "version",
-    os.path.join(
-        os.path.dirname(os.path.abspath(__file__)), "..", "..", "mincepy", "version.py"
-    ),
+    os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "mincepy", "version.py"),
 ).load_module()
 
 autoclass_content = "both"
diff --git a/docs/source/examples/mapped-types.ipynb b/docs/source/examples/mapped-types.ipynb
index f585644..ecda264 100644
--- a/docs/source/examples/mapped-types.ipynb
+++ b/docs/source/examples/mapped-types.ipynb
@@ -2,12 +2,7 @@
  "cells": [
   {
    "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   },
+   "metadata": {},
    "source": [
     "Mapped Types\n",
     "============\n",
@@ -23,7 +18,8 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
+   "metadata": {},
    "outputs": [],
    "source": [
     "class Car:\n",
@@ -33,83 +29,40 @@
     "\n",
     "    def __str__(self):\n",
     "        return \"{} {}\".format(self.colour, self.make)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
   },
   {
    "cell_type": "markdown",
+   "metadata": {},
    "source": [
     "So far, mincePy can't do anything with this:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
-   "outputs": [
-    {
-     "ename": "TypeError",
-     "evalue": "Type is incompatible with the historian: Car",
-     "output_type": "error",
-     "traceback": [
-      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
-      "\u001B[0;31mTypeError\u001B[0m Traceback (most recent call last)",
-      "\u001B[0;32m\u001B[0m in \u001B[0;36m\u001B[0;34m\u001B[0m\n\u001B[1;32m 3\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 4\u001B[0m \u001B[0mferrari\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mCar\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m----> 5\u001B[0;31m \u001B[0mhistorian\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0msave\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mferrari\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m 6\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
-      "\u001B[0;32m~/src/mincepy/mincepy/historians.py\u001B[0m in \u001B[0;36msave\u001B[0;34m(self, *objs)\u001B[0m\n\u001B[1;32m 187\u001B[0m \u001B[0;32mwith\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0min_transaction\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 188\u001B[0m \u001B[0;32mfor\u001B[0m \u001B[0mentry\u001B[0m \u001B[0;32min\u001B[0m \u001B[0mto_save\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m--> 189\u001B[0;31m \u001B[0mids\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mappend\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0msave_one\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0mentry\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m 190\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 191\u001B[0m \u001B[0;32mif\u001B[0m \u001B[0mlen\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mobjs\u001B[0m\u001B[0;34m)\u001B[0m \u001B[0;34m==\u001B[0m \u001B[0;36m1\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
-      "\u001B[0;32m~/src/mincepy/mincepy/historians.py\u001B[0m in \u001B[0;36msave_one\u001B[0;34m(self, obj, meta)\u001B[0m\n\u001B[1;32m 211\u001B[0m \u001B[0;31m# Save the object and metadata\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 212\u001B[0m \u001B[0;32mwith\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0min_transaction\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m--> 213\u001B[0;31m \u001B[0mrecord\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0m_save_object\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mobj\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0m_live_depositor\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m 214\u001B[0m \u001B[0;32mif\u001B[0m \u001B[0mmeta\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 215\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mmeta\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mupdate\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mrecord\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mobj_id\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mmeta\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
-      "\u001B[0;32m~/src/mincepy/mincepy/historians.py\u001B[0m in \u001B[0;36m_save_object\u001B[0;34m(self, obj, depositor)\u001B[0m\n\u001B[1;32m 783\u001B[0m \u001B[0mhelper\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0m_ensure_compatible\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mtype\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mobj\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m 784\u001B[0m \u001B[0;32mexcept\u001B[0m \u001B[0mTypeError\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m--> 785\u001B[0;31m raise TypeError(\n\u001B[0m\u001B[1;32m 786\u001B[0m \"Type is incompatible with the historian: {}\".format(type(obj).__name__)) from None\n\u001B[1;32m 787\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
-      "\u001B[0;31mTypeError\u001B[0m: Type is incompatible with the historian: Car"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
    "source": [
     "import mincepy\n",
     "historian = mincepy.connect('mongodb://127.0.0.1/mince-mapped-types', use_globally=True)\n",
     "\n",
     "ferrari = Car()\n",
     "historian.save(ferrari)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
  {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "To tell mincePy about `Car`s we need to define subclass of [TypeHelper](../apidoc.rst#mincepy.TypeHelper) which helps mincePy to understand your type...understandably..."
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 3,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "5f75cf4dc5e3bf28a7a85d9c\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "import uuid\n",
    "\n",
@@ -122,188 +75,103 @@
    "historian.register_type(CarHelper)\n",
    "ferrari_id = historian.save(ferrari)\n",
    "print(ferrari_id)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "...and that's it! MincePy can now work with `Car`s.\n",
    "You'll notice that, unlike many ORMs, we haven't specified the types of `make` and `colour`, nor any validation options like the maximum length of the strings or whether they can be missing or not.\n",
    "This is deliberate.\n",
    "MincePy leaves validation up to your code (so you do whatever you would have done if there was no database involved) and concerns itself with getting your object in and out of the database.\n",
    "Speaking of which, let's see some of that in action."
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 4,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "red ferrari\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "del ferrari\n",
    "loaded_ferrari = historian.load(ferrari_id)\n",
    "print(loaded_ferrari)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Cool, so how does that work?\n",
    "Well mincePy has created a [DataRecord](../apidoc.rst#mincepy.DataRecord) of our `Car` in the database that stores a bunch of things, including the state which can be used to recreate it.\n",
    "Let's have a look:"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 5,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "obj_id 5f75cf4dc5e3bf28a7a85d9c\n",
-      "type_id 21605412-30e5-4f48-9f56-f0fa8014e746\n",
-      "creation_time 2020-10-01 14:45:01.673000\n",
-      "version 0\n",
-      "state {'make': 'ferrari', 'colour': 'red'}\n",
-      "state_types [[[], UUID('21605412-30e5-4f48-9f56-f0fa8014e746')]]\n",
-      "snapshot_hash 17480f325c8a48d9a5ea1163fcda3ff3cf0940deff21e7df6c7a72b5b626bf69\n",
-      "snapshot_time 2020-10-01 14:45:01.673000\n",
-      "extras {'_user': 'martin', '_hostname': 'deca'}\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "print(historian.records.get(ferrari_id))\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "In addition to the state we see the creation and snapshots times, the version number and other information mincePy needs to store and track the object.\n",
    "\n",
    "Let's create some more `Car`s and perform some queries."
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 6,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "10"
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "for make in 'skoda', 'honda', 'bmw':\n",
    "    for colour in 'red', 'green', 'violet':\n",
    "        historian.save(Car(make=make, colour=colour))\n",
    "\n",
    "historian.find().count()"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "We can, for example, find all the red ones using:"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 7,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "red ferrari\n",
-      "red skoda\n",
-      "red honda\n",
-      "red bmw\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "results = historian.find(CarHelper.colour == 'red')\n",
    "for entry in results:\n",
    "    print(entry)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "References\n",
    "----------\n",
    "\n",
    "The next thing we may want to introduce is references.\n",
    "What if we have an object like this:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": null,
+   "metadata": {},
   "outputs": [],
   "source": [
    "class Person:\n",
@@ -315,29 +183,21 @@
    "        return self.name if self.car is None else self.name + \"({})\".format(self.car)\n",
    "\n",
    "matt = Person('matt', loaded_ferrari)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Here we want `Person` objects to be able to store a reference (a foreign key in ORM language) to the `Car` that they own.\n",
    "No problem, let's define a new helper:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "class PersonHelper(mincepy.TypeHelper):\n",
    "    TYPE = Person\n",
@@ -347,124 +207,58 @@
    "\n",
    "historian.register_type(PersonHelper)\n",
    "matt_id = historian.save(matt)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 9,
-   "outputs": []
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "By using setting `ref=True` we tell mincePy that we want to the `car` field to be stored by reference rather than keeping a copy of the car in the record.\n",
    "Let's have a look:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 10,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "obj_id 5f75cf51c5e3bf28a7a85da6\n",
-      "type_id 80c7bedb-9e51-48cd-afa9-04ec97b20569\n",
-      "creation_time 2020-10-01 14:45:05.364000\n",
-      "version 0\n",
-      "state {'name': 'matt', 'car': {'obj_id': ObjectId('5f75cf4dc5e3bf28a7a85d9c'), 'version': 0}}\n",
-      "state_types [[[], UUID('80c7bedb-9e51-48cd-afa9-04ec97b20569')], [['car'], UUID('633c7035-64fe-4d87-a91e-3b7abd8a6a28')]]\n",
-      "snapshot_hash 963c248f43a2cc8ff187c18e23b815f1f40df5a89ca2858346150cb6d0226a0a\n",
-      "snapshot_time 2020-10-01 14:45:05.364000\n",
-      "extras {'_user': 'martin', '_hostname': 'deca'}\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "print(historian.records.get(matt_id))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "We see that the `car` field in the `state` dictionary is in fact a reference pointing to the object id of the Ferrari.\n",
    "What does this all mean in practice?\n",
    "Well let's see what happens when we load the `matt` object from the database:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 11,
-   "outputs": [
-    {
-     "data": {
-      "text/plain": "True"
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "del matt\n",
    "loaded_matt = historian.load(matt_id)\n",
    "\n",
    "loaded_matt.car is loaded_ferrari"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "If we add another `Person` referring to the Ferrari we see that they share a reference to the same instance, as expected."
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 12,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "True\n",
-      "yellow ferrari\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "rob = Person('rob', loaded_ferrari)\n",
    "rob_id = historian.save(rob)\n",
@@ -479,27 +273,16 @@
    "\n",
    "print(matt.car is rob.car)\n",
    "print(matt.car)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "So, that gets you up to speed on the basics of using mapped types in mincePy.\n",
    "Have a look at the [API reference](../apidoc.rst) and post an issue [here](https://github.com/muhrin/mincepy/issues>) if there is anything else you would like to see documented.\n",
    "\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  }
 ],
 "metadata": {
@@ -517,8 +300,7 @@
    "mimetype": "text/x-python",
    "name": "python",
    "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
+   "pygments_lexer": "ipython2"
  }
 },
 "nbformat": 4,
diff --git a/docs/source/examples/quick-start.ipynb b/docs/source/examples/quick-start.ipynb
index 40e28f5..27f10a7 100644
--- a/docs/source/examples/quick-start.ipynb
+++ b/docs/source/examples/quick-start.ipynb
@@ -2,12 +2,7 @@
  "cells": [
  {
   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   },
+   "metadata": {},
   "source": [
    "Quick Start\n",
    "===========\n",
@@ -21,6 +16,9 @@
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "import mincepy\n",
    "import uuid\n",
@@ -30,80 +28,51 @@
    "    TYPE_ID = uuid.UUID('26798d9e-8c78-430a-ab2c-b17d612ef5fe')\n",
    "    name = mincepy.field()\n",
    "    age = mincepy.field()"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 1,
-   "outputs": []
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Here, we've defined a simple object and told `mincepy`:\n",
    "\n",
    "1. that the attributes `name` and `age` should be stored when saving `Person` objects, and,\n",
    "2. that this type can be identified by the ID given in `TYPE_ID`"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Storing objects\n",
    "---------------\n",
    "\n",
    "Now, let's save some people! First we need to connect to our MongoDB database:\n"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "historian = mincepy.connect('mongodb://127.0.0.1/mince-quick-start', use_globally=True)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 2,
-   "outputs": []
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "The `historian` is our connection the database and provides methods to perform database related actions on our objects.\n",
    "\n",
    "Now we can instantiate and save some people!"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 3,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "5f75cf7b44703dcf435c8500 5f75cf7b44703dcf435c8501\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "martin = Person(name='Martin', age=34)\n",
    "martin_id = historian.save(martin)\n",
@@ -117,16 +86,11 @@
    "gavin_id = gavin.save()\n",
    "\n",
    "print(martin_id, sonia_id)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Here we see some of the IDs assigned to our objects. These serve to uniquely identify them and can be used to load\n",
    "them from the database.\n",
    "\n",
@@ -135,92 +99,55 @@
    "---------------\n",
    "\n",
    "Loading objects is as simple as:"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 4,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Martin, 34\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "del martin\n",
    "\n",
    "martin, sonia = historian.load(martin_id, sonia_id)\n",
    "print(\"{}, {}\".format(martin.name, martin.age))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Finding objects\n",
    "---------------\n",
    "\n",
    "Now, let's do a search"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "for person in historian.find(Person.age==34):\n",
    "    print('{}, {}'.format(person.name, person.age))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 5,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Martin, 34\n",
-      "Gavin, 34\n"
-     ]
-    }
   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Modifying objects\n",
    "-----------------\n",
    "\n",
    "Simple, just mutate our object and save!"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "sonia.age = 31\n",
    "sonia.save()\n",
@@ -229,51 +156,23 @@
    "del sonia\n",
    "sonia = historian.load(sonia_id)\n",
    "print(sonia.age)"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 6,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "31\n"
-     ]
-    }
  ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Annotating objects\n",
    "------------------\n",
    "\n",
    "Objects can be annotated by setting a metadata dictionary."
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
-   "execution_count": 7,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "{'city': 'Glasgow'}\n"
-     ]
-    }
-   ],
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "historian.meta.set(sonia, dict(city='Copenhagen'))\n",
    "# Can also do it like this:\n",
@@ -281,16 +180,11 @@
    "gavin.set_meta(dict(city='Glasgow'))\n",
    "\n",
    "print(historian.meta.get(gavin))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "*Ok, that's cool so now what?*\n",
    " \n",
@@ -298,36 +192,21 @@
    "\n",
    "Searching metadata\n",
    "------------------"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "for person in historian.find(Person.age==34, meta=dict(city='Glasgow')):\n",
    "    print(\"{}, {}\".format(person.name, person.age))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 8,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Gavin, 34\n"
-     ]
-    }
  ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "*Nice*.\n",
    "\n",
@@ -341,97 +220,54 @@
    "\n",
    "Version control\n",
    "---------------"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "records = list(historian.snapshots.records.find(obj_id=sonia_id))\n",
    "for record in records:\n",
    "    print(\"{}, {}\".format(record.version, record.state))\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 9,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "0, {'name': 'Sonia', 'age': 30}\n",
-      "1, {'name': 'Sonia', 'age': 31}\n"
-     ]
-    }
  ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "Here we see two records that were fetched from the archive for Sonia. One with the original age value and the other\n",
    "with the current. MincePy will, by default, keep a record of any modifications you make to objects, think of as being\n",
    "like `git`, but for objects. We can use a historical record to load the object as it was then:"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  },
 {
   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
   "source": [
    "past_sonia = records[0].load()\n",
    "print(\"{}, {}\".format(past_sonia.name, past_sonia.age))"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%%\n"
-    }
-   },
-   "execution_count": 10,
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Sonia, 30\n"
-     ]
-    }
  ]
  },
 {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "So, that gets you up to speed on the basics of using mincePy.\n",
    "Have a look at the [API reference](../apidoc.rst) and post an issue [here](https://github.com/muhrin/mincepy/issues>) if there is anything else you would like to see documented.\n",
    "\n",
    "\n"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "pycharm": {
-     "name": "#%% md\n"
-    }
-   }
+   ]
  }
 ],
 "metadata": {
  "kernelspec": {
-   "name": "pycharm-d5059434",
+   "display_name": "PyCharm (mince)",
   "language": "python",
-   "display_name": "PyCharm (mince)"
+   "name": "pycharm-d5059434"
  },
 "language_info": {
   "codemirror_mode": {
@@ -442,8 +278,7 @@
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
+   "pygments_lexer": "ipython2"
  }
 },
 "nbformat": 4,
diff --git a/mincepy/__init__.py b/mincepy/__init__.py
index 7950901..e25b308 100644
--- a/mincepy/__init__.py
+++ b/mincepy/__init__.py
@@ -1,17 +1,38 @@
-# -*- coding: utf-8 -*-
+"""mincePy: move the database to one side and let your objects take centre stage."""
+
+from . import (
+    archive_factory,
+    archives,
+    base_savable,
+    builtins,
+    common_helpers,
+    depositors,
+    exceptions,
+    expr,
+    fields,
+    helpers,
+    hist,
+    historians,
+    history,
+    migrations,
+    mongo,
+    operations,
+    process,
+)
 from . import qops as q
+from . import records, refs, tracking, types, utils, version
+from .archive_factory import *
 from .archives import *
-from .builtins import *
 from .base_savable import *
-from .archive_factory import *
+from .builtins import *
 from .comparators import *
 from .depositors import *
 from .exceptions import *  # pylint: disable=redefined-builtin
 from .expr import *
 from .fields import *
 from .helpers import *
-from .historians import *
 from .hist import *
+from .historians import *
 from .history import *
 from .migrations import *
 from .process import *
@@ -19,32 +40,7 @@
 from .refs import *
 from .tracking import *
 from .types import *
-from .version import *
-
-from . import archives
-from . import archive_factory
-from . import base_savable
-from . import builtins
-from . import common_helpers
-from . import depositors
-from . import expr
-from . import exceptions
-from . import fields
-from . import helpers
-from . import hist
-from . import history
-from . import historians
-from . import migrations
-from . import mongo  # pylint: disable=cyclic-import
-from . import operations
-from . import process
-from . import refs
-from . import records
-from . import testing
-from . import tracking
-from . import types
-from . import utils
-from . import version
+from .version import __author__, __version__
 
 _ADDITIONAL = (
     "mongo",
@@ -53,7 +49,6 @@
     "utils",
     "q",
     "operations",
-    "testing",
 )
 
 __all__ = (
@@ -64,7 +59,6 @@
     + process.__all__
     + types.__all__
     + helpers.__all__
-    + version.__all__
     + history.__all__
     + archive_factory.__all__
     + refs.__all__
diff --git a/mincepy/archive_factory.py b/mincepy/archive_factory.py
index e51ce8e..90a8a4e 100644
--- a/mincepy/archive_factory.py
+++ b/mincepy/archive_factory.py
@@ -1,8 +1,6 @@
-# -*- coding: utf-8 -*-
 import logging
 
-from . import historians
-from . import plugins
+from . import historians, plugins
 
 __all__ = "create_archive", "create_historian"
 
@@ -32,9 +30,7 @@ def create_historian(
     :param apply_plugins: register the plugin types with the new historian
     :param connect_timeout: a connection timeout (in milliseconds)
     """
-    historian = historians.Historian(
-        create_archive(archive_uri, connect_timeout=connect_timeout)
-    )
+    historian = historians.Historian(create_archive(archive_uri, connect_timeout=connect_timeout))
 
     if apply_plugins:
         historian.register_types(plugins.get_types())
diff --git a/mincepy/archives.py b/mincepy/archives.py
index 5b2a55f..1717ff3 100644
--- a/mincepy/archives.py
+++ b/mincepy/archives.py
@@ -1,29 +1,28 @@
-# -*- coding: utf-8 -*-
 import abc
 from typing import (
+    Any,
+    Callable,
+    Dict,
     Generic,
-    TypeVar,
-    NamedTuple,
-    Sequence,
-    Union,
-    Mapping,
     Iterable,
-    Dict,
     Iterator,
-    Any,
-    Type,
-    Optional,
-    Callable,
     List,
+    Mapping,
+    NamedTuple,
+    Optional,
+    Sequence,
     Tuple,
+    Type,
+    TypeVar,
+    Union,
 )
 
 import networkx
 
+from . import operations
 from . import qops as q
 from . import records
 from .records import DataRecord
-from . import operations
 
 __all__ = (
     "Archive",
@@ -170,9 +169,7 @@ def meta_distinct(
         """
 
     @abc.abstractmethod
-    def meta_create_index(
-        self, keys: Union[str, List[Tuple]], unique=False, where_exist=False
-    ):
+    def meta_create_index(self, keys: Union[str, List[Tuple]], unique=False, where_exist=False):
         """Create an index on the metadata.  Takes either a single key or list of (key, direction)
         pairs
 
@@ -212,7 +209,7 @@ def find(
         extras: dict = None,
         limit=0,
         sort=None,
-        skip=0,
+        skip: int = 0,
     ) -> Iterator[DataRecord]:
         """Find records matching the given criteria
 
@@ -231,13 +228,13 @@ def find(
             2. an iterable of object ids in which is treated as {'$in': list(obj_ids)}
             3. a general query filter to be applied to the object ids
         :param sort: sort the results by the given criteria
-        :param skip: skip the this many entries
+        :param skip: skip this many entries
         """
 
     @abc.abstractmethod
     def distinct(
-        self, key: str, filter: dict = None
-    ) -> Iterator:  # pylint: disable=redefined-builtin
+        self, key: str, filter: dict = None  # pylint: disable=redefined-builtin
+    ) -> Iterator:
        """Get distinct values of the given record key
 
        :param key: the key to find distinct values for, see DataRecord for possible keys
@@ -288,8 +285,9 @@ def remove_archive_listener(self, listener: "ArchiveListener"):
 
 
 class BaseArchive(Archive[IdT]):
-    # This is _still_ an abstract class, pylint is just silly in not recognising that a class only becomes concrete
-    # once _all_ abstract methods are implemented. See: https://github.com/PyCQA/pylint/issues/179
+    # This is _still_ an abstract class, pylint is just silly in not recognising that a class only
+    # becomes concrete once _all_ abstract methods are implemented.
+    # See: https://github.com/PyCQA/pylint/issues/179
     # pylint:disable=abstract-method
 
     ID_TYPE = None  # type: Type[IdT]
@@ -347,27 +345,26 @@ def remove_archive_listener(self, listener: "ArchiveListener"):
         self._listeners.remove(listener)
 
     def _fire_event(self, evt: Callable, *args, **kwargs):
-        """Inform all listeners of an event. The event should be a method from the ArchiveListener interface"""
+        """
+        Inform all listeners of an event. The event should be a method from the ArchiveListener
+        interface
+        """
         for listener in self._listeners:
             getattr(listener, evt.__name__)(self, *args, **kwargs)
 
 
-def scalar_query_spec(
-    specifier: Union[Mapping, Iterable[Any], Any]
-) -> Union[Any, Dict]:
+def scalar_query_spec(specifier: Union[Mapping, Iterable[Any], Any]) -> Union[Any, Dict]:
     """Convenience function to create a query specifier for a given item.  There are three
     possibilities:
 
     1. The item is a mapping in which case it is returned as is.
     2. The item is an iterable (but not a mapping) in which case it is interpreted to mean:
        {'$in': list(iterable)}
-    3. it is a raw item item in which case it is matched directly
+    3. it is a raw item, in which case it is matched directly
     """
     if isinstance(specifier, dict):
         # This has to be first as dict is iterable
         return specifier
 
-    if isinstance(
-        specifier, Iterable
-    ):  # pylint: disable=isinstance-second-argument-not-valid-type
+    if isinstance(specifier, Iterable):  # pylint: disable=isinstance-second-argument-not-valid-type
         return q.in_(*specifier)
 
     return specifier
@@ -448,9 +445,7 @@ def distinct(
         """Find all records matching the given criteria"""
 
     @abc.abstractmethod
-    def count(
-        self, filter: dict, *, meta: dict = None  # pylint: disable=redefined-builtin
-    ) -> int:
+    def count(self, filter: dict, *, meta: dict = None) -> int:  # pylint: disable=redefined-builtin
         """Get the number of entries that match the search criteria"""
 
 
class ArchiveListener:
    """Archive listener interface"""

    def on_bulk_write(self, archive: Archive, ops: Sequence[operations.Operation]):
-        """Called when an archive is about to perform a sequence of write operations but has not performed them yet.
-        The listener must not assume that the operations will be completed as there are a number of reasons why this
-        process could be interrupted.
+        """
+        Called when an archive is about to perform a sequence of write operations but has not
+        performed them yet. The listener must not assume that the operations will be completed as
+        there are a number of reasons why this process could be interrupted.
         """
 
-    def on_bulk_write_complete(
-        self, archive: Archive, ops: Sequence[operations.Operation]
-    ):
+    def on_bulk_write_complete(self, archive: Archive, ops: Sequence[operations.Operation]):
         """Called when an archive is has successfully performed a sequence of write operations"""
diff --git a/mincepy/base_savable.py b/mincepy/base_savable.py
index 35cce91..c099b7b 100644
--- a/mincepy/base_savable.py
+++ b/mincepy/base_savable.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
 import collections
-from typing import Optional
 import typing
+from typing import Optional, cast
 
-from . import depositors
-from . import refs
-from . import types
+import mincepy.history
+
+from . import depositors, refs, types
 
 __all__ = (
     "BaseSavableObject",
@@ -59,15 +58,12 @@
             return False
 
         return all(
-            getattr(self, attr.name) == getattr(other, attr.name)
-            for attr in self.__get_attrs()
+            getattr(self, attr.name) == getattr(other, attr.name) for attr in self.__get_attrs()
         )
 
     def yield_hashables(self, hasher):
         yield from super().yield_hashables(hasher)
-        yield from hasher.yield_hashables(
-            [getattr(self, attr.name) for attr in self.__get_attrs()]
-        )
+        yield from hasher.yield_hashables([getattr(self, attr.name) for attr in self.__get_attrs()])
 
     def save_instance_state(self, saver) -> dict:
         saved_state = super().save_instance_state(saver)
@@ -127,9 +123,7 @@ def set_meta(self, meta: Optional[dict]):
     def update_meta(self, meta: dict):
         """Update the metadata dictionary for this object"""
         if self._historian is None:
-            raise RuntimeError(
-                "Object must be saved before the metadata can be updated"
-            )
+            raise RuntimeError("Object must be saved before the metadata can be updated")
 
         self._historian.meta.update(self, meta)
 
@@ -144,11 +138,10 @@ def save(self, meta: dict = None):
         """Save the object"""
         if self._historian is None:
             # We don't have a historian yet (we haven't been saved), so use the current global one
-            from . import history  # pylint: disable=cyclic-import
-
-            historian = history.get_historian()
+            historian = mincepy.history.get_historian()
         else:
             historian = self._historian
+
         return historian.save_one(self, meta=meta)
 
     def sync(self):
@@ -158,11 +151,11 @@
     def save_instance_state(self, saver: depositors.Saver):
         self._on_save(saver)
-        return super().save_instance_state(saver)
+        return cast(types.Savable, super()).save_instance_state(saver)
 
     def load_instance_state(self, saved_state, loader: "depositors.Loader"):
         """Take the given object and load the instance state into it"""
-        super().load_instance_state(saved_state, loader)
+        cast(types.Savable, super()).load_instance_state(saved_state, loader)
         self._on_load(loader)
 
     def _on_save(self, saver: depositors.Saver):
diff --git a/mincepy/builtins.py b/mincepy/builtins.py
index 48ddbfe..e4b6d7b 100644
--- a/mincepy/builtins.py
+++ b/mincepy/builtins.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Module for all the built-in container and other types that are supported by default"""
 
 from abc import ABCMeta
@@ -9,14 +8,9 @@
 import typing
 import uuid
 
-from . import base_savable
-from .files import File, BaseFile
-from . import helpers
-from . import records
-from . import refs
+from . import base_savable, helpers, records, refs, type_ids, types
+from .files import BaseFile, File
 from .utils import sync
-from . import types
-from . import type_ids
 
 __all__ = (
     "List",
@@ -35,13 +29,13 @@
 class _UserType(base_savable.SimpleSavable, metaclass=ABCMeta):
     """Mixin for helping user types to be compatible with the historian.
-    These typically have a .data member that stores the actual data (list, dict, str, etc)"""
+    These typically have a .data member that stores the actual data (list, dict, str, etc.)"""
 
     ATTRS = ("data",)
     data = None  # placeholder
     # This is an optional type for the data member. See save_instance_state type for information
     # on when it might be useful
-    DATA_TYPE = None  # type: type
+    DATA_TYPE: type = None
 
     def save_instance_state(self, saver):
         # This is a convenient way of storing primitive data types directly as the state
@@ -50,9 +44,7 @@ def save_instance_state(self, saver):
         if self.DATA_TYPE is not None and issubclass(
             self.DATA_TYPE, saver.get_historian().primitives
         ):
-            self._on_save(
-                saver
-            )  # Call this here are we aren't going to call up the hierarchy
+            self._on_save(saver)  # Call this here as we aren't going to call up the hierarchy
             return self.data
 
         return super().save_instance_state(saver)
@@ -62,9 +54,7 @@
         if self.DATA_TYPE is not None and issubclass(
             self.DATA_TYPE, loader.get_historian().primitives
         ):
-            self._on_load(
-                loader
-            )  # Call this here are we aren't going to call up the hierarchy
+            self._on_load(loader)  # Call this here as we aren't going to call up the hierarchy
             self.data = saved_state
         else:
             super().load_instance_state(saved_state, loader)
@@ -256,9 +246,7 @@ class RefDict(collections.abc.MutableMapping, Reffer, _UserType):
     def __init__(self, *args, **kwargs):
         super().__init__()
         initial = dict(*args, **kwargs)
-        self.data = self.DATA_TYPE(
-            {key: self._ref(value) for key, value in initial.items()}
-        )
+        self.data = self.DATA_TYPE({key: self._ref(value) for key, value in initial.items()})
 
     def __str__(self):
         return str(self.data)
@@ -289,9 +277,7 @@ class LiveDict(collections.abc.MutableMapping, _UserType):
     def __init__(self, *args, **kwargs):
         super().__init__()
         initial = dict(*args, **kwargs)
-        self.data = RefDict(
-            {key: self._create_proxy(value) for key, value in initial.items()}
-        )
+        self.data = RefDict({key: self._create_proxy(value) for key, value in initial.items()})
 
     @sync()
     def __getitem__(self, item):
@@ -340,9 +326,7 @@
     def __init__(self, *args, **kwargs):
         super().__init__()
         initial = dict(*args, **kwargs)
 
-        self.data = RefDict(
-            {key: self._create_proxy(value) for key, value in initial.items()}
-        )
+        self.data = RefDict({key: self._create_proxy(value) for key, value in initial.items()})
 
     @sync()
     def __getitem__(self, item):
@@ -370,8 +354,10 @@ def _create_proxy(self, value):
 
 
class OrderedDictHelper(helpers.BaseHelper):
-    """Enable saving of OrderedDicts. In the database, these will be stored as a list of (key, value) pairs and hence
-    preserve the order."""
+    """
+    Enable saving of OrderedDicts. In the database, these will be stored as a list of (key, value)
+    pairs and hence preserve the order.
+ """ TYPE = collections.OrderedDict TYPE_ID = uuid.UUID("9e7714f8-8ecf-466f-a0e1-6c9fc1d92f51") @@ -386,7 +372,7 @@ def save_instance_state( return list(obj.items()) def load_instance_state(self, obj, saved_state: typing.List[typing.Tuple], _loader): - obj.__init__(saved_state) + obj.__init__(saved_state) # pylint: disable=unnecessary-dunder-call # endregion @@ -405,7 +391,7 @@ def save_instance_state(self, obj: set, _saver) -> typing.List: return list(obj) def load_instance_state(self, obj: set, saved_state: List, _loader): - return obj.__init__(saved_state) + return obj.__init__(saved_state) # pylint: disable=unnecessary-dunder-call class SnapshotIdHelper(helpers.TypeHelper): @@ -415,10 +401,7 @@ class SnapshotIdHelper(helpers.TypeHelper): TYPE_ID = type_ids.SNAPSHOT_ID_TYPE_ID def eq(self, one, other): # pylint: disable=invalid-name - if not ( - isinstance(one, records.SnapshotId) - and isinstance(other, records.SnapshotId) - ): + if not (isinstance(one, records.SnapshotId) and isinstance(other, records.SnapshotId)): return False return one.obj_id == other.obj_id and one.version == other.version diff --git a/mincepy/cli/__init__.py b/mincepy/cli/__init__.py index c7c2b0e..ddde4d0 100644 --- a/mincepy/cli/__init__.py +++ b/mincepy/cli/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from . import main from .dev import * from .main import * diff --git a/mincepy/cli/dev.py b/mincepy/cli/dev.py index 14c5131..54b7eb5 100644 --- a/mincepy/cli/dev.py +++ b/mincepy/cli/dev.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import logging import sys diff --git a/mincepy/cli/main.py b/mincepy/cli/main.py index a80b9a4..a38c08e 100644 --- a/mincepy/cli/main.py +++ b/mincepy/cli/main.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import logging import sys import uuid @@ -37,9 +36,7 @@ def gui(uri): try: import mincepy_gui except ImportError: - click.echo( - "mincepy-gui not found, please install (e.g. via pip install mincepy-gui)" - ) + click.echo("mincepy-gui not found, please install (e.g. 
via pip install mincepy-gui)") sys.exit(1) else: mincepy_gui.start(uri) diff --git a/mincepy/cli/query.py b/mincepy/cli/query.py index 696a8d1..6ff497c 100644 --- a/mincepy/cli/query.py +++ b/mincepy/cli/query.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from collections import OrderedDict import typing @@ -8,6 +7,7 @@ import mincepy import mincepy.records +import mincepy.testing @click.command() @@ -17,7 +17,7 @@ def query(obj_type, filter, limit): # pylint: disable=redefined-builtin historian = mincepy.get_historian() - results = historian.find_recods(obj_type, state=filter, limit=limit, version=-1) + results = historian.records.find(obj_type, state=filter, limit=limit, version=-1) historian.register_types(mincepy.testing.HISTORIAN_TYPES) @@ -57,9 +57,7 @@ def print_records(records: typing.Sequence[mincepy.records.DataRecord], historia for column_name in columns.keys(): if column_name != REF: - columns[column_name] = [ - get_value(column_name, record.state) for record in records - ] + columns[column_name] = [get_value(column_name, record.state) for record in records] rows = [] for row in range(len(records)): @@ -69,8 +67,7 @@ def print_records(records: typing.Sequence[mincepy.records.DataRecord], historia tabulate( rows, headers=[ - ".".join(path) if isinstance(path, tuple) else path - for path in columns.keys() + ".".join(path) if isinstance(path, tuple) else path for path in columns.keys() ], ) ) diff --git a/mincepy/common_helpers.py b/mincepy/common_helpers.py index b53cf26..8a4eadb 100644 --- a/mincepy/common_helpers.py +++ b/mincepy/common_helpers.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- # This module will be removed in 0.17.0 -from .builtins import PathHelper, NamespaceHelper, TupleHelper +from .builtins import NamespaceHelper, PathHelper, TupleHelper __all__ = "PathHelper", "TupleHelper", "NamespaceHelper" diff --git a/mincepy/comparators.py b/mincepy/comparators.py index 952f1d3..4a7fdf5 100644 --- a/mincepy/comparators.py +++ b/mincepy/comparators.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- +import collections.abc import datetime import numbers -import collections.abc from operator import itemgetter import uuid @@ -25,7 +24,7 @@ def load_instance_state(self, obj, saved_state, loader): class BytesEquator(SimpleHelper): - TYPE = collections.abc.ByteString + TYPE = bytes, bytearray def yield_hashables(self, obj, hasher): yield obj @@ -57,7 +56,7 @@ def yield_hashables(self, obj: collections.abc.Set, hasher): class MappingEquator(SimpleHelper): TYPE = collections.abc.Mapping - def yield_hashables(self, obj, hasher): + def yield_hashables(self, obj: collections.abc.Mapping, hasher): def hashed_key_mapping(mapping): for key, value in mapping.items(): yield tuple(hasher.yield_hashables(key)), value @@ -72,7 +71,7 @@ def hashed_key_mapping(mapping): class OrderedDictEquator(SimpleHelper): TYPE = collections.OrderedDict - def yield_hashables(self, obj, hasher): + def yield_hashables(self, obj: collections.OrderedDict, hasher): for key, val in sorted(obj, key=itemgetter(0)): yield from hasher.yield_hashables(key) yield from hasher.yield_hashables(val) diff --git a/mincepy/defaults.py b/mincepy/defaults.py index 8f5c912..469c4e3 100644 --- a/mincepy/defaults.py +++ b/mincepy/defaults.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from . 
import comparators diff --git a/mincepy/depositors.py b/mincepy/depositors.py index d523d64..dbcb576 100644 --- a/mincepy/depositors.py +++ b/mincepy/depositors.py @@ -1,21 +1,18 @@ -# -*- coding: utf-8 -*- """This module contains various strategies for loading, saving and migrating objects in the archive """ from abc import ABCMeta, abstractmethod import contextlib import logging -from typing import Optional, Dict, Any, Iterable, Sequence +from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Sequence import deprecation from pytray import tree -import mincepy -from . import archives -from . import exceptions -from . import operations -from . import records -from . import staging +if TYPE_CHECKING: + import mincepy + +from . import archives, exceptions, operations, records, staging from . import transactions # pylint: disable=unused-import from . import version as version_mod @@ -125,44 +122,41 @@ def decode( try: entry = schema[path] except KeyError: - # There is no schema entry so this is a primitive type and only containers need to (potentially) - # decoded further + # There is no schema entry so this is a primitive type and only containers need to + # (potentially) decoded further if isinstance(encoded, CONTAINERS): return self._recursive_unpack(encoded, schema, path, created_callback) # Fully decoded return encoded - else: - saved_state = encoded - helper = self.get_historian().get_helper(entry.type_id) - if helper.IMMUTABLE: - saved_state = self._recursive_unpack( - encoded, schema, path, created_callback - ) - new_obj = helper.new(saved_state) - if new_obj is None: - raise RuntimeError( - f"Helper '{helper.__class__}' failed to create a class given state '{saved_state}'" - ) + saved_state = encoded + helper = self.get_historian().get_helper(entry.type_id) + if helper.IMMUTABLE: + saved_state = self._recursive_unpack(encoded, schema, path, created_callback) - if created_callback is not None: - created_callback(path, new_obj) + new_obj = helper.new(saved_state) + if new_obj is None: + raise RuntimeError( + f"Helper '{helper.__class__}' failed to create a class given state " + f"'{saved_state}'" + ) - if not helper.IMMUTABLE: - saved_state = self._recursive_unpack( - encoded, schema, path, created_callback, updates - ) + if created_callback is not None: + created_callback(path, new_obj) + + if not helper.IMMUTABLE: + saved_state = self._recursive_unpack(encoded, schema, path, created_callback, updates) - updated = helper.ensure_up_to_date(saved_state, entry.version, self) - if updated is not None: - # Use the current version of the record - saved_state = updated - if updates is not None: - updates[path] = updated + updated = helper.ensure_up_to_date(saved_state, entry.version, self) + if updated is not None: + # Use the current version of the record + saved_state = updated + if updates is not None: + updates[path] = updated - helper.load_instance_state(new_obj, saved_state, self) - return new_obj + helper.load_instance_state(new_obj, saved_state, self) + return new_obj def _recursive_unpack( self, @@ -202,15 +196,11 @@ def get_snapshot_id(self, obj) -> Optional[records.SnapshotId]: return self._get_current_snapshot_id(obj) except exceptions.NotFound: # Then we have to save it and get the resulting reference - return self._save_object( - obj - ).snapshot_id # pylint: disable=protected-access + return self._save_object(obj).snapshot_id # pylint: disable=protected-access def _get_current_snapshot_id(self, obj) -> records.SnapshotId: """Get the current snapshot id of an 
object""" - return self._historian.current_transaction().get_snapshot_id_for_live_object( - obj - ) + return self._historian.current_transaction().get_snapshot_id_for_live_object(obj) def load(self, snapshot_id: records.SnapshotId): try: @@ -248,7 +238,7 @@ def created(path, new_obj): def _load_object(self, obj_id) -> object: """Load an object form the database. This method is deliberately private as it should - only be used by the the depositor and the historian""" + only be used by the depositor and the historian""" historian = self.get_historian() archive = self.get_archive() with historian.in_transaction() as trans: @@ -263,24 +253,22 @@ def _load_object(self, obj_id) -> object: ) try: - obj = historian._live_objects.get_object( - obj_id - ) # pylint: disable=protected-access + obj = historian._live_objects.get_object(obj_id) # pylint: disable=protected-access except exceptions.NotFound: logger.debug("Loading object from record: %s", record.snapshot_id) # Ok, just use the one from the archive return self.load_from_record(record) - else: - # Compare with the current, live, version - live_record = historian._live_objects.get_record( - obj - ) # pylint: disable=protected-access - if record.version != live_record.version: - # The one in the archive is newer, so use that - logger.debug("Updating object from record: %s", record.snapshot_id) - self.update_from_record(obj, record) - return obj + # Compare with the current, live, version + live_record = historian._live_objects.get_record( + obj + ) # pylint: disable=protected-access + if record.version != live_record.version: + # The one in the archive is newer, so use that + logger.debug("Updating object from record: %s", record.snapshot_id) + self.update_from_record(obj, record) + + return obj def update_from_record(self, obj: object, record: records.DataRecord) -> bool: """Do an in-place update of an object from a record""" @@ -290,9 +278,7 @@ def update_from_record(self, obj: object, record: records.DataRecord) -> bool: # Make sure the record is in the transaction with the object trans.insert_live_object(obj, record) - saved_state = self._recursive_unpack( - record.state, record.get_state_schema() - ) + saved_state = self._recursive_unpack(record.state, record.get_state_schema()) helper.load_instance_state(obj, saved_state, self) return True @@ -331,31 +317,27 @@ def _save_object(self, obj: object) -> records.DataRecord: # Apply the sticky meta historian.meta.update(record.obj_id, historian.meta.sticky) return record - else: - if helper.IMMUTABLE: - logger.info( - "Tried to save immutable object with id '%s' again", - record.obj_id, - ) - return record - - # Check if our record is up-to-date - with historian.transaction() as nested: - loaded_obj = SnapshotLoader(historian).load_from_record(record) - - if current_hash == record.snapshot_hash and historian.eq( - obj, loaded_obj - ): - # Objects identical - nested.rollback() - else: - builder = records.make_child_builder( - record, snapshot_hash=current_hash - ) - record = self._save_from_builder(obj, builder) + if helper.IMMUTABLE: + logger.info( + "Tried to save immutable object with id '%s' again", + record.obj_id, + ) return record + # Check if our record is up-to-date + with historian.transaction() as nested: + loaded_obj = SnapshotLoader(historian).load_from_record(record) + + if current_hash == record.snapshot_hash and historian.eq(obj, loaded_obj): + # Objects identical + nested.rollback() + else: + builder = records.make_child_builder(record, snapshot_hash=current_hash) + record = 
+
+        return record
+
     def _save_from_builder(self, obj, builder: records.DataRecordBuilder):
         """Save a live object"""
         assert (
@@ -368,9 +350,7 @@ def _save_from_builder(self, obj, builder: records.DataRecordBuilder):
             sid = records.SnapshotId(builder.obj_id, builder.version)
             with trans.prepare_for_saving(sid, obj):
                 # Inject the extras
-                builder.extras.update(
-                    self._get_extras(obj, builder.obj_id, builder.version)
-                )
+                builder.extras.update(self._get_extras(obj, builder.obj_id, builder.version))
 
                 # Now ask the object to save itself and create the record
                 builder.update(self.save_state(obj))
@@ -424,7 +404,7 @@ def _get_extras(self, obj, obj_id, version: int) -> dict:
         return extras
 
     def _create_builder(self, helper, **additional) -> records.DataRecordBuilder:
-        """Create a record builder for a new object object"""
+        """Create a record builder for a new object"""
         additional = additional or {}
 
         builder = records.DataRecord.new_builder(
@@ -438,8 +418,8 @@ def _create_builder(self, helper, **additional) -> records.DataRecordBuilder:
     @contextlib.contextmanager
     def _cycle_protection(self, obj: object):
         """This context manager is used as a means of circular-reference identification.
-        Naturally, such cyclic saving should never happen however if there is a bug, at least this method
-        allows us to catch it early and see the source.
+        Naturally, such cyclic saving should never happen; however, if there is a bug, at least
+        this method allows us to catch it early and see the source.
         """
         obj_id = id(obj)
         if obj_id in self._saving_set:
@@ -524,14 +504,14 @@ def get_snapshot_id(self, obj) -> records.SnapshotId:
     def migrate_records(
         self, to_migrate: Iterable[records.DataRecord]
     ) -> Sequence[records.DataRecord]:
-        """Migrate multiple records. This call will return an iterable of those that were migrated"""
+        """
+        Migrate multiple records. This call will return an iterable of those that were migrated
+        """
         migrated = []
         with self._historian.in_transaction() as trans:  # type: transactions.Transaction
             for record in to_migrate:
                 updates = {}
-                obj = self.decode(
-                    record.state, record.get_state_schema(), updates=updates
-                )
+                obj = self.decode(record.state, record.get_state_schema(), updates=updates)
                 if updates:
                     self._migrate_record(record, obj, trans)
                     migrated.append(record)
@@ -551,6 +531,4 @@ def _migrate_record(self, record, new_obj, trans):
                 )
             )
 
-        logger.info(
-            "Snapshot %s has been migrated to the latest version", record.snapshot_id
-        )
+        logger.info("Snapshot %s has been migrated to the latest version", record.snapshot_id)
diff --git a/mincepy/exceptions.py b/mincepy/exceptions.py
index d9e90e6..62ff1be 100644
--- a/mincepy/exceptions.py
+++ b/mincepy/exceptions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 __all__ = (
     "NotFound",
     "ModificationError",
diff --git a/mincepy/expr.py b/mincepy/expr.py
index ae0ed89..0a09af5 100644
--- a/mincepy/expr.py
+++ b/mincepy/expr.py
@@ -1,8 +1,8 @@
-# -*- coding: utf-8 -*-
 """Query expressions"""
+
 import abc
 import copy
-from typing import Union, List, Iterable
+from typing import Iterable, List, Union
 
 __all__ = (
     "Expr",
@@ -36,8 +36,6 @@
 
 
 class FilterLike(metaclass=abc.ABCMeta):
-    """An abstract base class for objects representing a pyos path, e.g.
-    pyos.pathlib.PurePath."""
 
     # pylint: disable=too-few-public-methods
 
     @abc.abstractmethod
@@ -79,9 +77,7 @@ def __init__(self, operand: List[Expr]):
             raise TypeError(f"Expected a list, got {type(operand).__name__}")
         for entry in operand:
             if not isinstance(entry, Expr):
-                raise TypeError(
-                    f"Expected a list of Expr, found {type(entry).__name__}"
-                )
+                raise TypeError(f"Expected a list of Expr, found {type(entry).__name__}")
         self.operand = operand
 
     def __query_expr__(self) -> dict:
@@ -161,9 +157,7 @@ class Nin(SimpleOperator):
     oper = "$nin"
 
 
-COMPARISON_OPERATORS = {
-    oper_type.oper: oper_type for oper_type in SimpleOperator.__subclasses__()
-}
+COMPARISON_OPERATORS = {oper_type.oper: oper_type for oper_type in SimpleOperator.__subclasses__()}
 
 
 class Comparison(Expr):
@@ -177,9 +171,7 @@ def __init__(self, field, expr: Operator):
         if field is None:
             raise ValueError("field cannot be None")
         if not isinstance(expr, Operator):
-            raise TypeError(
-                f"Expected an operator expression, got '{type(expr).__name__}'"
-            )
+            raise TypeError(f"Expected an operator expression, got '{type(expr).__name__}'")
         self.field = field
         self.expr = expr
 
@@ -389,15 +381,14 @@ def query_expr(filter: FilterLike) -> dict:  # pylint: disable=redefined-builtin
     try:
         query_repr = filter.__query_expr__()
     except AttributeError:
-        raise TypeError(
-            "expected dict or object with __query_expr__, not " + str(filter)
-        ) from None
+        raise TypeError("expected dict or object with __query_expr__, not " + str(filter)) from None
 
     if isinstance(query_repr, dict):
         return query_repr
 
     raise TypeError(
-        f"expected {type(filter).__name__}.__query_expr__() to return dict, not {type(query_repr).__name__}"
+        f"expected {type(filter).__name__}.__query_expr__() to return dict, not "
+        f"{type(query_repr).__name__}"
     )
 
@@ -408,9 +399,7 @@ def field_name(field) -> str:
     try:
         name = field.__field_name__()
     except AttributeError:
-        raise TypeError(
-            f"expected str or object with __field__name__, not {field}"
-        ) from None
+        raise TypeError(f"expected str or object with __field_name__, not {field}") from None
 
     if isinstance(name, str):
         return name
 
@@ -486,9 +475,7 @@ def build_expr(item) -> Expr:  # noqa: C901
 class Query:
     __slots__ = "_filter_expressions", "limit", "sort", "skip"
 
-    def __init__(
-        self, *expr: Expr, limit: int = None, sort: dict = None, skip: int = None
-    ):
+    def __init__(self, *expr: Expr, limit: int = None, sort: dict = None, skip: int = None):
         self._filter_expressions = []  # type: List[Expr]
         self.extend(expr)
         self.limit = limit
@@ -508,9 +495,7 @@ def copy(self) -> "Query":
 
     @property
     def __dict__(self) -> dict:
-        return dict(
-            filter=self.get_filter(), sort=self.sort, limit=self.limit, skip=self.skip
-        )
+        return dict(filter=self.get_filter(), sort=self.sort, limit=self.limit, skip=self.skip)
 
     def append(self, expr: Expr):
         self._filter_expressions.append(build_expr(expr))
diff --git a/mincepy/fields.py b/mincepy/fields.py
index 28ff755..afa1135 100644
--- a/mincepy/fields.py
+++ b/mincepy/fields.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Module that contains methods and classes for dealing with database storable attributes of
 objects"""
 
@@ -103,14 +102,12 @@ def __getattribute__(self, item: str):
         ):
             properties = get_field_properties(self._properties.field_type)
             try:
-                child_field = type(self)(
-                    properties[item], path_prefix=self.get_path()
-                )
+                child_field = type(self)(properties[item], path_prefix=self.get_path())
             except KeyError:
                 raise exc from None
-            else:
-                child_field.set_query_context(self._query_context)
-                return child_field
+
+        child_field.set_query_context(self._query_context)
+        return child_field
 
         if self._properties.dynamic:
             # Creating a dynamic child
@@ -126,9 +123,7 @@ def __getattribute__(self, item: str):
     def __field_name__(self) -> str:
         return self._properties.store_as
 
-    def __call__(
-        self, fget=None, fset=None, fdel=None, doc=None, prop_kwargs=None
-    ) -> property:
+    def __call__(self, fget=None, fset=None, fdel=None, doc=None, prop_kwargs=None) -> property:
         """This method allows the field to become a property"""
         self.getter(fget)
         self.setter(fset)
@@ -150,9 +145,7 @@ def __set__(self, obj, value):
 
     def __delete__(self, obj):
         if self._deleter is None:
-            raise AttributeError(
-                f"can't delete attribute '{self._properties.attr_name}'"
-            )
+            raise AttributeError(f"can't delete attribute '{self._properties.attr_name}'")
         self._deleter(obj)
 
     def getter(self, fget):
@@ -172,9 +165,7 @@ def _getter(self, obj):
         try:
             return obj.__dict__[self._properties.attr_name]
         except KeyError:
-            raise AttributeError(
-                f"unreadable attribute '{self._properties.attr_name}'"
-            ) from None
+            raise AttributeError(f"unreadable attribute '{self._properties.attr_name}'") from None
 
     def _setter(self, obj, value):
         """Default setter"""
@@ -236,10 +227,8 @@ class WithFields(metaclass=WithFieldMeta):
     """Base class for types that describe how to save objects in the database using db fields"""
 
     @classmethod
-    def init_field(cls, obj_field, attr_name: str):
-        obj_field._properties.class_created(
-            cls, attr_name
-        )  # pylint: disable=protected-access
+    def init_field(cls, obj_field: Field, attr_name: str):
+        obj_field._properties.class_created(cls, attr_name)  # pylint: disable=protected-access
 
     def __init__(self, **kwargs):
         for name, field_properties in get_field_properties(type(self)).items():
@@ -279,8 +268,6 @@ def get_field_properties(db_type: Type[WithFields]) -> Dict[str, FieldProperties
             continue
         for name, class_attr in entry.__dict__.items():
             if isinstance(class_attr, Field):
-                db_attrs[
-                    name
-                ] = class_attr._properties  # pylint: disable=protected-access
+                db_attrs[name] = class_attr._properties  # pylint: disable=protected-access
 
     return db_attrs
diff --git a/mincepy/files.py b/mincepy/files.py
index 2dc6ad4..698ff2e 100644
--- a/mincepy/files.py
+++ b/mincepy/files.py
@@ -1,18 +1,18 @@
-# -*- coding: utf-8 -*-
 import pathlib
 import shutil
 import tempfile
-from typing import Optional, BinaryIO, TextIO, Union
+from typing import BinaryIO, Optional, TextIO, Union
 
-from . import type_ids
-from . import base_savable
-from . import fields
+from . import base_savable, fields, type_ids
 
 __all__ = "File", "BaseFile"
 
 
 class File(base_savable.SimpleSavable):
-    """A mincePy file object. These should not be instantiated directly but using Historian.create_file()"""
+    """
+    A mincePy file object. These should not be instantiated directly; use
+    `Historian.create_file()` instead.
+    """
 
     TYPE_ID = type_ids.FILE_TYPE_ID
     READ_SIZE = 256  # The number of bytes to read at a time
@@ -42,9 +42,7 @@ def open(self, mode="r", **kwargs) -> Union[BinaryIO, TextIO]:
         self._ensure_buffer()
         if "b" not in mode:
             kwargs.setdefault("encoding", self.encoding)
-        return open(
-            self._buffer_file, mode, **kwargs
-        )  # pylint: disable=unspecified-encoding
+        return open(self._buffer_file, mode, **kwargs)  # pylint: disable=unspecified-encoding
 
     def from_disk(self, path):
         """Copy the contents of a disk file to this file"""
@@ -114,9 +112,9 @@ def __eq__(self, other) -> bool:
 
         If both files do not exist they are considered equal.
""" - if ( + if ( # pylint: disable=comparison-with-callable not isinstance(other, File) or self.filename != other.filename - ): # pylint: disable=comparison-with-callable + ): return False try: diff --git a/mincepy/frontend.py b/mincepy/frontend.py index 83fbe42..9c892e3 100644 --- a/mincepy/frontend.py +++ b/mincepy/frontend.py @@ -1,18 +1,26 @@ -# -*- coding: utf-8 -*- """ This module collects all the frontend database entities such as collections and results that a user interacts with (through the historian) """ + import functools import inspect -from typing import TypeVar, Generic, Iterable, Callable, Any, Iterator, Optional, Union - -from . import archives -from . import exceptions -from . import expr -from . import fields -from . import records -from . import types +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Generic, + Iterable, + Iterator, + Optional, + TypeVar, + Union, +) + +from . import exceptions, expr, fields, records, types + +if TYPE_CHECKING: + import mincepy T = TypeVar("T") # The type stored by the collection pylint: disable=invalid-name @@ -38,7 +46,7 @@ class ResultSet(Generic[T]): def __init__( self, historian, - archive_collection: archives.Collection, + archive_collection: "mincepy.archives.Collection", query: expr.Query, kwargs: dict = None, entry_factory: Callable[[Any], T] = None, @@ -50,9 +58,7 @@ def __init__( self._entry_factory = entry_factory or (lambda x: x) def __iter__(self) -> Iterable[T]: - for entry in self._archive_collection.find( - **self._query.__dict__, **self._kwargs - ): + for entry in self._archive_collection.find(**self._query.__dict__, **self._kwargs): yield self._entry_factory(entry) def __len__(self) -> int: @@ -64,7 +70,7 @@ def historian(self): return self._historian @property - def archive_collection(self) -> archives.Collection: + def archive_collection(self) -> "mincepy.archives.Collection": """Access the archive these results are from""" return self._archive_collection @@ -76,9 +82,7 @@ def query(self) -> expr.Query: def distinct(self, key: Union[str, fields.Field]) -> Iterator: if isinstance(key, fields.Field): key = key.get_path() - yield from self._archive_collection.distinct( - key, filter=self._query.get_filter() - ) + yield from self._archive_collection.distinct(key, filter=self._query.get_filter()) def any(self) -> Optional[T]: """ @@ -108,9 +112,7 @@ def one(self) -> Optional[T]: return None if len(results) > 1: - raise exceptions.NotOneError( - "one() used with more than one result available" - ) + raise exceptions.NotOneError("one() used with more than one result available") return self._entry_factory(results[0]) @@ -128,13 +130,14 @@ def _project(self, *field: str) -> Iterator: class EntriesCollection(Generic[T]): - """A collection of archive entries. This is the base class, but it can be specialised to provide specific - functionality for a given collection""" + """ + A collection of archive entries. 
This is the base class, but it can be specialised to provide
+    specific functionality for a given collection"""
 
     def __init__(
         self,
         historian,
-        archive_collection: archives.Collection,
+        archive_collection: "mincepy.archives.Collection",
         entry_factory: Callable[[dict], T],
     ):
         self._historian = historian
@@ -185,7 +188,9 @@ def distinct(  # pylint: disable=redefined-builtin
         state=None,
         extras: dict = None,
     ) -> Iterator:
-        """Get the distinct values for the given key, optionally restricting to a subset of results"""
+        """
+        Get the distinct values for the given key, optionally restricting to a subset of results
+        """
         yield from self.find(
             *filter,
             obj_type=obj_type,
@@ -209,7 +214,8 @@ def _prepare_query(
     ) -> expr.Query:
         """Prepare a query filter expression from the passed filter criteria"""
         for entry in expression:
-            # Automatically register any types passed in during find (unless that type id is already is use)
+            # Automatically register any types passed in during find
+            # (unless that type id is already in use)
             if inspect.isclass(entry) and issubclass(entry, types.SavableObject):
                 self._historian.type_registry.register_type(entry)
 
@@ -277,9 +283,9 @@ class ObjectCollection(EntriesCollection[object]):
     def __init__(
         self,
         historian,
-        archive_collection: archives.Collection,
-        record_factory: Callable[[dict], records.DataRecord],
-        obj_loader: Callable[[records.DataRecord], object],
+        archive_collection: "mincepy.archives.Collection",
+        record_factory: Callable[[dict], "mincepy.DataRecord"],
+        obj_loader: Callable[["mincepy.DataRecord"], object],
     ):
         super().__init__(
             historian,
@@ -288,15 +294,13 @@ def __init__(
         )
         self._record_factory = record_factory
         self._obj_loader = obj_loader
-        self._records = EntriesCollection(
-            self._historian, archive_collection, record_factory
-        )
+        self._records = EntriesCollection(self._historian, archive_collection, record_factory)
 
     def get(self, entry_id) -> object:
         return self._create_object(self._archive_collection.get(entry_id))
 
     @property
-    def records(self) -> EntriesCollection[records.DataRecord]:
+    def records(self) -> EntriesCollection["mincepy.DataRecord"]:
         """Access the records directly"""
         return self._records
 
diff --git a/mincepy/helpers.py b/mincepy/helpers.py
index 3b240b0..672a42c 100644
--- a/mincepy/helpers.py
+++ b/mincepy/helpers.py
@@ -1,18 +1,13 @@
-# -*- coding: utf-8 -*-
 from abc import ABCMeta
 import logging
-from typing import Type, Optional, Sequence
+from typing import TYPE_CHECKING, Optional, Sequence, Tuple, Type, Union
 
 import pytray.pretty
 
-from . import depositors
-from . import exceptions
-from . import expr
-from . import fields
-from . import migrations
-from . import saving
-from . import tracking
-from . import types
+from . import depositors, exceptions, expr, fields, migrations, saving, tracking
+
+if TYPE_CHECKING:
+    import mincepy
 
 __all__ = "TypeHelper", "WrapperHelper", "BaseHelper"
 
@@ -44,30 +39,45 @@ class TypeHelper(fields.WithFields):
     the historian."""
 
     #: The type this helper corresponds to
-    TYPE: Type = None
+    TYPE: Union[type, Tuple[type, ...]] = None
     TYPE_ID = None  # The unique id for this type of object
     IMMUTABLE = False  # If set to true then the object is decoded straight away
     INJECT_CREATION_TRACKING = False
     # The latest migration, if there is one
-    LATEST_MIGRATION = None  # type: migrations.ObjectMigration
+    LATEST_MIGRATION: "mincepy.ObjectMigration" = None
 
     @classmethod
-    def init_field(cls, field: fields.Field, attr_name: str):
-        super().init_field(field, attr_name)
-        field.set_query_context(expr.Comparison("type_id", expr.Eq(cls.TYPE_ID)))
-        field.path_prefix = "state"
-
-    def __init__(self):
-        assert (
-            self.TYPE is not None
-        ), "Must set the TYPE to a type of or a tuple of types"
+    def init_field(cls, obj_field: fields.Field, attr_name: str):
+        super().init_field(obj_field, attr_name)
+        obj_field.set_query_context(expr.Comparison("type_id", expr.Eq(cls.TYPE_ID)))
+        obj_field.path_prefix = "state"
+
+    def __init__(self):  # pylint: disable=super-init-not-called
+        if isinstance(self.TYPE, tuple):
+            for entry in self.TYPE:  # pylint: disable=not-an-iterable
+                if not isinstance(entry, type):
+                    raise RuntimeError(
+                        f"All entries of the TYPE must be `types`, got '{self.TYPE}'"
+                    )
+        elif not isinstance(self.TYPE, type):
+            raise RuntimeError(
+                f"Must set the TYPE to a type or a tuple of types, got '{self.TYPE}'"
+            )
+
         if self.INJECT_CREATION_TRACKING:
             inject_creation_tracking(self.TYPE)
 
+    def __repr__(self) -> str:
+        return f"{type(self).__name__}({repr(self.TYPE)})"
+
     def new(self, encoded_saved_state):  # pylint: disable=unused-argument
         """Create a new blank object of this type"""
-        cls = self.TYPE
-        return cls.__new__(cls)
+        if isinstance(self.TYPE, tuple):
+            obj_type = self.TYPE[0]  # pylint: disable=unsubscriptable-object
+        else:
+            obj_type = self.TYPE
+
+        return obj_type.__new__(obj_type)
 
     def yield_hashables(self, obj: object, hasher):
         """Yield values from this object that should be included in its hash"""
@@ -75,14 +85,16 @@
 
     def eq(self, one, other) -> bool:  # pylint: disable=invalid-name
         """Determine if two objects are equal"""
-        if not isinstance(one, self.TYPE) or not isinstance(
+        if not isinstance(  # pylint: disable=isinstance-second-argument-not-valid-type
+            one, self.TYPE
+        ) or not isinstance(  # pylint: disable=isinstance-second-argument-not-valid-type
             other, self.TYPE
-        ):  # pylint: disable=isinstance-second-argument-not-valid-type
+        ):
             return False
 
-        return saving.save_instance_state(
-            one, type(self)
-        ) == saving.save_instance_state(other, type(self))
+        return saving.save_instance_state(one, type(self)) == saving.save_instance_state(
+            other, type(self)
+        )
 
     def save_instance_state(self, obj, saver):  # pylint: disable=unused-argument
         """Save the instance state of an object, should return a saved instance"""
@@ -95,26 +107,23 @@ def load_instance_state(
         saving.load_instance_state(obj, saved_state, type(self))
 
     def get_version(self) -> Optional[int]:
-        """Gets the version of the latest migration, returns None if there is not migration"""
+        """Gets the version of the latest migration, returns `None` if there is no migration"""
        if self.LATEST_MIGRATION is None:
             return None
 
         version = self.LATEST_MIGRATION.VERSION
         if version is None:
raise RuntimeError( - f"Object '{self.TYPE}' has a migration ({self.LATEST_MIGRATION}) which has no version number" + f"Object '{self.TYPE}' has a migration ({self.LATEST_MIGRATION}) which has no " + f"version number" ) return version - def ensure_up_to_date( - self, saved_state, version: Optional[int], loader: depositors.Loader - ): + def ensure_up_to_date(self, saved_state, version: Optional[int], loader: depositors.Loader): """Apply any migrations that are necessary to this saved state. If no migrations are necessary then None is returned""" - latest_version = ( - None if self.LATEST_MIGRATION is None else self.LATEST_MIGRATION.VERSION - ) + latest_version = None if self.LATEST_MIGRATION is None else self.LATEST_MIGRATION.VERSION if latest_version == version: return None @@ -152,9 +161,7 @@ def ensure_up_to_date( return saved_state - def _get_migrations( - self, version: Optional[int] - ) -> Sequence[migrations.ObjectMigration]: + def _get_migrations(self, version: Optional[int]) -> Sequence[migrations.ObjectMigration]: """Get the sequence of migrations that needs to be applied to a given version""" if self.LATEST_MIGRATION is None: return [] # No migrations we can apply @@ -188,20 +195,23 @@ class WrapperHelper(TypeHelper): # pylint: disable=invalid-name - def __init__(self, obj_type: Type[types.SavableObject]): + def __init__(self, obj_type: Type["mincepy.SavableObject"]): self.TYPE = obj_type self.TYPE_ID = obj_type.TYPE_ID self.LATEST_MIGRATION = obj_type.LATEST_MIGRATION super().__init__() + def __repr__(self) -> str: + return f"WrapperHelper({repr(self.TYPE)})" + def yield_hashables(self, obj, hasher): yield from self.TYPE.yield_hashables(obj, hasher) def eq(self, one, other) -> bool: return self.TYPE.__eq__(one, other) # pylint: disable=unnecessary-dunder-call - def save_instance_state(self, obj: types.Savable, saver): + def save_instance_state(self, obj: "mincepy.Savable", saver): return self.TYPE.save_instance_state(obj, saver) - def load_instance_state(self, obj, saved_state: types.Savable, loader): + def load_instance_state(self, obj, saved_state: "mincepy.Savable", loader): self.TYPE.load_instance_state(obj, saved_state, loader) diff --git a/mincepy/hist/__init__.py b/mincepy/hist/__init__.py index 831dc6c..c5b1d32 100644 --- a/mincepy/hist/__init__.py +++ b/mincepy/hist/__init__.py @@ -1,7 +1,7 @@ -# -*- coding: utf-8 -*- -from .live_objects import LiveObjectsCollection +from . 
import live_objects +from .live_objects import * from .metas import Meta from .references import References from .snapshots import SnapshotsCollection -__all__ = "Meta", "References", "SnapshotsCollection", "LiveObjectsCollection" +__all__ = live_objects.__all__ + ("Meta", "References", "SnapshotsCollection") diff --git a/mincepy/hist/live_objects.py b/mincepy/hist/live_objects.py index 7205fcb..a9817f5 100644 --- a/mincepy/hist/live_objects.py +++ b/mincepy/hist/live_objects.py @@ -1,15 +1,18 @@ -# -*- coding: utf-8 -*- -from typing import Callable, Optional +from typing import TYPE_CHECKING, Callable, Optional -from mincepy import archives -from mincepy import frontend -import mincepy.records as recordsm +import mincepy.frontend as frontend +import mincepy.records as records_ + +if TYPE_CHECKING: + import mincepy __all__ = ("LiveObjectsCollection",) class LiveObjectsCollection(frontend.ObjectCollection): - def __init__(self, historian, archive_collection: archives.Collection): + def __init__( + self, historian: "mincepy.Historian", archive_collection: "mincepy.archives.Collection" + ): super().__init__( historian, archive_collection, @@ -23,16 +26,16 @@ def __init__(self, historian, archive_collection: archives.Collection): ) -class LoadableRecord(recordsm.DataRecord): +class LoadableRecord(records_.DataRecord): __slots__ = () - _obj_loader: Optional[Callable[[recordsm.DataRecord], object]] = None - _snapshot_loader: Optional[Callable[[recordsm.DataRecord], object]] = None + _obj_loader: Optional[Callable[["mincepy.DataRecord"], object]] = None + _snapshot_loader: Optional[Callable[["mincepy.DataRecord"], object]] = None def __new__( cls, record_dict: dict, - snapshot_loader: Callable[[recordsm.DataRecord], object], - obj_loader: Callable[[recordsm.DataRecord], object], + snapshot_loader: Callable[["mincepy.DataRecord"], object], + obj_loader: Callable[["mincepy.DataRecord"], object], ): loadable = super().__new__(cls, **record_dict) loadable._obj_loader = obj_loader diff --git a/mincepy/hist/metas.py b/mincepy/hist/metas.py index 28c8de2..4415b5f 100644 --- a/mincepy/hist/metas.py +++ b/mincepy/hist/metas.py @@ -1,10 +1,10 @@ -# -*- coding: utf-8 -*- -from typing import Any, Optional, Mapping, Dict, Iterator +from typing import TYPE_CHECKING, Any, Dict, Iterator, Mapping, Optional -from mincepy import archives -from mincepy import historians # pylint: disable=unused-import, cyclic-import from mincepy import exceptions +if TYPE_CHECKING: + import mincepy + __all__ = ("Meta",) @@ -13,8 +13,8 @@ class Meta: # Meta is a 'friend' of Historian and so can access privates pylint: disable=protected-access - def __init__(self, historian, archive): - self._hist = historian # type: historians.Historian + def __init__(self, historian: "mincepy.Historian", archive: "mincepy.Archive") -> None: + self._hist: mincepy.Historian = historian self._archive = archive self._sticky = {} @@ -72,9 +72,7 @@ def set(self, obj_or_identifier, meta: Optional[Mapping]): return self._archive.meta_set(obj_id, meta) def set_many(self, metas: Mapping[Any, Optional[dict]]): - mapped = { - self._hist._ensure_obj_id(ident): meta for ident, meta in metas.items() - } + mapped = {self._hist._ensure_obj_id(ident): meta for ident, meta in metas.items()} trans = self._hist.current_transaction() if trans: for entry in mapped.items(): @@ -105,9 +103,7 @@ def update(self, obj_or_identifier, meta: Mapping): self._archive.meta_update(obj_id, meta) def update_many(self, metas: Mapping[Any, Optional[dict]]): - mapped = { - 
self._hist._ensure_obj_id(ident): meta for ident, meta in metas.items() - } + mapped = {self._hist._ensure_obj_id(ident): meta for ident, meta in metas.items()} trans = self._hist.current_transaction() if trans: for entry in mapped.items(): @@ -117,7 +113,7 @@ def update_many(self, metas: Mapping[Any, Optional[dict]]): def find( self, filter, obj_id=None - ) -> Iterator[archives.Archive.MetaEntry]: # pylint: disable=redefined-builtin + ) -> Iterator["mincepy.Archive.MetaEntry"]: # pylint: disable=redefined-builtin """Find metadata matching the given criteria. Each returned result is a tuple containing the corresponding object id and the metadata dictionary itself""" return self._archive.meta_find(filter=filter, obj_id=obj_id) diff --git a/mincepy/hist/references.py b/mincepy/hist/references.py index b1e572a..27812b2 100644 --- a/mincepy/hist/references.py +++ b/mincepy/hist/references.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- -from typing import Generic, TypeVar, Set, overload +from typing import Generic, Set, TypeVar, overload import networkx from networkx.algorithms import dag -from mincepy import archives -from mincepy import records -from mincepy import operations -from mincepy import transactions +from mincepy import archives, operations, records, transactions __all__ = ("References",) @@ -31,12 +27,10 @@ def __init__(self, historian): SnapshotId = records.SnapshotId[IdT] @overload - def references(self, identifier: IdT) -> Set[IdT]: - ... + def references(self, identifier: IdT) -> Set[IdT]: ... @overload - def references(self, identifier: "SnapshotId") -> "Set[SnapshotId]": - ... + def references(self, identifier: "SnapshotId") -> "Set[SnapshotId]": ... def references(self, identifier): """Get the ids of the objects referred to by the passed identifier.""" @@ -50,33 +44,24 @@ def references(self, identifier): return set(edge[1] for edge in graph.edges) @overload - def referenced_by(self, identifier: IdT) -> "Set[IdT]": - ... + def referenced_by(self, identifier: IdT) -> "Set[IdT]": ... @overload - def referenced_by(self, identifier: "SnapshotId") -> "Set[SnapshotId]": - ... + def referenced_by(self, identifier: "SnapshotId") -> "Set[SnapshotId]": ... 
def referenced_by(self, identifier): """Get the ids of the objects that refer to the passed object""" if isinstance(identifier, records.SnapshotId): - graph = self.get_snapshot_ref_graph( - identifier, direction=archives.INCOMING, max_dist=1 - ) + graph = self.get_snapshot_ref_graph(identifier, direction=archives.INCOMING, max_dist=1) elif isinstance(identifier, self._archive.get_id_type()): - graph = self.get_obj_ref_graph( - identifier, direction=archives.INCOMING, max_dist=1 - ) + graph = self.get_obj_ref_graph(identifier, direction=archives.INCOMING, max_dist=1) else: raise TypeError(identifier) return set(edge[0] for edge in graph.edges) def get_snapshot_ref_graph( - self, - *snapshot_ids: SnapshotId, - direction=archives.OUTGOING, - max_dist: int = None + self, *snapshot_ids: SnapshotId, direction=archives.OUTGOING, max_dist: int = None ) -> networkx.DiGraph: return self._archive.get_snapshot_ref_graph( @@ -87,9 +72,7 @@ def get_obj_ref_graph( self, *obj_ids: IdT, direction=archives.OUTGOING, max_dist: int = None ) -> networkx.DiGraph: obj_ids = set(obj_ids) - graph = self._archive.get_obj_ref_graph( - *obj_ids, direction=direction, max_dist=max_dist - ) + graph = self._archive.get_obj_ref_graph(*obj_ids, direction=direction, max_dist=max_dist) # If there is a transaction then we should fix up the graph to contain information from that # too @@ -114,9 +97,7 @@ def get_obj_ref_graph( return graph -def _update_from_transaction( - graph: networkx.DiGraph, transaction: transactions.Transaction -): +def _update_from_transaction(graph: networkx.DiGraph, transaction: transactions.Transaction): """Given a transaction update the reference graph to reflect the insertion of any new records""" for op in transaction.staged: # pylint: disable=invalid-name if isinstance(op, operations.Insert): diff --git a/mincepy/hist/snapshots.py b/mincepy/hist/snapshots.py index ed0af2a..242e90b 100644 --- a/mincepy/hist/snapshots.py +++ b/mincepy/hist/snapshots.py @@ -1,12 +1,8 @@ -# -*- coding: utf-8 -*- import logging from typing import Callable -from mincepy import archives -from mincepy import frontend -from mincepy import operations +from mincepy import archives, frontend, operations, result_types import mincepy.records as recordsm -from mincepy import result_types __all__ = ("SnapshotsCollection",) @@ -31,9 +27,9 @@ def purge(self, deleted=True, dry_run=True) -> result_types.PurgeResult: if deleted: # First find all the object ids of those that have been deleted # pylint: disable=protected-access - res = self.records.find( - recordsm.DataRecord.state == recordsm.DELETED - )._project(recordsm.OBJ_ID) + res = self.records.find(recordsm.DataRecord.state == recordsm.DELETED)._project( + recordsm.OBJ_ID + ) obj_ids = [entry[recordsm.OBJ_ID] for entry in res] # DB HIT logging.debug("Found %i objects that have been deleted", len(obj_ids)) @@ -53,9 +49,7 @@ def purge(self, deleted=True, dry_run=True) -> result_types.PurgeResult: if snapshot_ids and not dry_run: # Commit the changes - self._historian.archive.bulk_write( - [operations.Delete(sid) for sid in snapshot_ids] - ) + self._historian.archive.bulk_write([operations.Delete(sid) for sid in snapshot_ids]) logging.info("Deleted %i snapshots", len(snapshot_ids)) return result_types.PurgeResult(set(snapshot_ids)) @@ -64,9 +58,7 @@ def purge(self, deleted=True, dry_run=True) -> result_types.PurgeResult: class SnapshotLoadableRecord(recordsm.DataRecord): __slots__ = () - def __new__( - cls, record_dict: dict, snapshot_loader: Callable[[recordsm.DataRecord], object] 
- ): + def __new__(cls, record_dict: dict, snapshot_loader: Callable[[recordsm.DataRecord], object]): loadable = super().__new__(cls, **record_dict) loadable._snapshot_loader = snapshot_loader return loadable diff --git a/mincepy/historians.py b/mincepy/historians.py index d3992dc..ac15ed6 100644 --- a/mincepy/historians.py +++ b/mincepy/historians.py @@ -1,55 +1,50 @@ -# -*- coding: utf-8 -*- # pylint: disable=too-many-lines import collections import contextlib - -try: - from contextlib import nullcontext -except ImportError: - from contextlib2 import nullcontext import getpass import logging import socket from typing import ( - MutableMapping, + TYPE_CHECKING, Any, - Optional, + Callable, + Dict, Iterable, - Union, Iterator, - Type, - Dict, - Callable, + Literal, + MutableMapping, + Optional, Sequence, + Type, + Union, ) import weakref import deprecation import networkx -from . import archives -from . import builtins -from . import frontend -from . import defaults -from . import depositors -from . import refs -from . import exceptions -from . import expr -from . import files -from . import helpers -from . import hist -from . import migrate -from . import operations -from . import qops -from . import records as recordsm # The records module -from . import result_types -from . import staging -from . import tracking -from . import types -from . import type_registry -from . import utils -from . import version as version_mod -from .transactions import RollbackTransaction, Transaction, LiveObjects +from . import ( + archives, + builtins, + defaults, + depositors, + exceptions, + expr, + files, + frontend, + helpers, + hist, + migrate, + operations, + qops, +) +from . import records as records_ # The records module +from . import refs, result_types, staging, tracking, type_registry, types, utils +from . import version as version_ +from .transactions import LiveObjects, RollbackTransaction, Transaction + +if TYPE_CHECKING: + import mincepy __all__ = "Historian", "ObjectEntry" @@ -68,7 +63,7 @@ class Historian: # pylint: disable=too-many-public-methods, too-many-instance-a @deprecation.deprecated( deprecated_in="0.14.5", removed_in="0.16.0", - current_version=version_mod.__version__, + current_version=version_.__version__, details="Use mincepy.copy() instead", ) def copy(self, obj): @@ -79,10 +74,10 @@ def copy(self, obj): @deprecation.deprecated( deprecated_in="0.15.10", removed_in="0.17.0", - current_version=version_mod.__version__, + current_version=version_.__version__, details="Use mincepy.records.find() instead", ) - def find_records(self, *args, **kwargs) -> Iterator[recordsm.DataRecord]: + def find_records(self, *args, **kwargs) -> Iterator["mincepy.DataRecord"]: """Find records Has same signature as py:meth:`mincepy.Records.find`. 
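
# An illustrative sketch of the migration these deprecation notices point to (the
# connection URI and the example type below are assumptions for illustration, not
# part of this patch):
#
#     import mincepy
#     from mincepy import testing
#
#     historian = mincepy.connect("mongodb://localhost/mince-demo", use_globally=True)
#     # Deprecated since 0.15.10, to be removed in 0.17.0:
#     #   records = list(historian.find_records(obj_type=testing.Car))
#     # Preferred replacement, via the records collection:
#     records = list(historian.records.find(obj_type=testing.Car))
#     colours = list(historian.records.distinct(testing.Car.colour))
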
@@ -92,7 +87,7 @@ def find_records(self, *args, **kwargs) -> Iterator[recordsm.DataRecord]:
     @deprecation.deprecated(
         deprecated_in="0.15.10",
         removed_in="0.17.0",
-        current_version=version_mod.__version__,
+        current_version=version_.__version__,
         details="Use mincepy.records.distinct() instead",
     )
     def find_distinct(self, *args, **kwargs):
@@ -102,7 +97,7 @@ def find_distinct(self, *args, **kwargs):
         """
         yield from self.records.distinct(*args, **kwargs)
 
-    def __init__(self, archive: archives.Archive, equators=()):
+    def __init__(self, archive: "mincepy.Archive", equators=()):
         self._archive = archive
         self._equator = types.Equator(defaults.get_default_equators() + equators)
         # Register default types
@@ -113,9 +108,9 @@ def __init__(self, archive: archives.Archive, equators=()):
         self.register_types(archive.get_types())
 
         # Snapshot objects -> reference. Objects that were loaded from historical snapshots
-        self._snapshots_objects = (
+        self._snapshots_objects: MutableMapping[Any, "mincepy.SnapshotId"] = (
             utils.WeakObjectIdDict()
-        )  # type: MutableMapping[Any, recordsm.SnapshotId]
+        )
         self._live_objects = LiveObjects()
 
         self._transactions = None
@@ -136,7 +131,7 @@ def archive(self):
         return self._archive
 
     @property
-    def meta(self) -> hist.Meta:
+    def meta(self) -> "mincepy.Meta":
         """Access to functions that operate on the metadata"""
         return self._meta
 
@@ -146,33 +141,31 @@ def primitives(self) -> tuple:
         return types.PRIMITIVE_TYPES + (self._archive.get_id_type(),)
 
     @property
-    def migrations(self) -> migrate.Migrations:
+    def migrations(self) -> "mincepy.migrate.Migrations":
         """Access the migration possibilities"""
         return self._migrate
 
     @property
-    def records(self) -> frontend.EntriesCollection[recordsm.DataRecord]:
+    def records(self) -> "mincepy.frontend.EntriesCollection[mincepy.DataRecord]":
         """Access methods and properties that act on and return data records"""
         return self._objects.records
 
     @property
-    def objects(self) -> hist.LiveObjectsCollection:
-        """Access the snapshots"""
+    def objects(self) -> "mincepy.LiveObjectsCollection":
+        """Access the live objects"""
         return self._objects
 
     @property
-    def references(self) -> hist.References:
+    def references(self) -> "mincepy.References":
         """Access the references collection"""
         return self._references
 
     @property
-    def snapshots(self) -> hist.SnapshotsCollection:
+    def snapshots(self) -> "mincepy.SnapshotsCollection":
         """Access the snapshots"""
         return self._snapshots
 
-    def create_file(
-        self, filename: str = None, encoding: str = None
-    ) -> builtins.BaseFile:
+    def create_file(self, filename: str = None, encoding: str = None) -> builtins.BaseFile:
         """Create a new file.
The historian will supply file type compatible with the archive in use.""" return files.File(self._archive.file_store, filename, encoding) @@ -224,9 +217,7 @@ def save_one(self, obj: object, meta: dict = None): # Save the object and metadata with self.in_transaction(): - record = self._live_depositor._save_object( - obj - ) # pylint: disable=protected-access + record = self._live_depositor._save_object(obj) # pylint: disable=protected-access if meta: self.meta.update(record.obj_id, meta) @@ -260,10 +251,10 @@ def replace(self, old: object, new: object): # Make sure creators is correct as well staging.replace(old, new) - def load_snapshot(self, snapshot_id: recordsm.SnapshotId) -> object: + def load_snapshot(self, snapshot_id: "mincepy.SnapshotId") -> object: return self._new_snapshot_depositor().load(snapshot_id) - def load_snapshot_from_record(self, record: recordsm.DataRecord) -> object: + def load_snapshot_from_record(self, record: "mincepy.DataRecord") -> object: return self._new_snapshot_depositor().load_from_record(record) def load(self, *obj_id_or_snapshot_id): @@ -279,7 +270,7 @@ def load(self, *obj_id_or_snapshot_id): def load_one(self, obj_id_or_snapshot_id) -> object: """Load one object or snapshot from the database""" - if isinstance(obj_id_or_snapshot_id, recordsm.SnapshotId): + if isinstance(obj_id_or_snapshot_id, records_.SnapshotId): return self.load_snapshot(obj_id_or_snapshot_id) # OK, assume we're dealing with an object id @@ -290,9 +281,7 @@ def load_one(self, obj_id_or_snapshot_id) -> object: return self.get_obj(obj_id) except exceptions.NotFound: # Going to have to load from the database - return self._live_depositor._load_object( - obj_id - ) # pylint: disable=protected-access + return self._live_depositor._load_object(obj_id) # pylint: disable=protected-access def get(self, obj_id) -> object: """Get a live object using the object id""" @@ -313,9 +302,7 @@ def sync(self, obj: object) -> bool: record = self._objects.records.get(obj_id) if record.is_deleted_record(): - raise exceptions.ObjectDeleted( - f"Object with id '{obj_id}' has been deleted" - ) + raise exceptions.ObjectDeleted(f"Object with id '{obj_id}' has been deleted") if record.version == self.get_snapshot_id(obj).version: # Nothing has changed @@ -327,9 +314,10 @@ def sync(self, obj: object) -> bool: def delete(self, *obj_or_identifier, imperative=True) -> result_types.DeleteResult: """Delete objects. - :param imperative: if True, this means that the caller explicitly expects this call to delete the passed - objects, and it should therefore raise if an object cannot be found or has been deleted already. If False, - the function will ignore these cases and continue. + :param imperative: if True, this means that the caller explicitly expects this call to + delete the passed objects, and it should therefore raise if an object cannot be found + or has been deleted already. If False, the function will ignore these cases and + continue. :raises mincepy.NotFound: if the object cannot be found (potentially because it was already deleted) """ @@ -337,7 +325,7 @@ def delete(self, *obj_or_identifier, imperative=True) -> result_types.DeleteResu obj_ids = list(map(self._ensure_obj_id, obj_or_identifier)) # Find the current records (i.e. 
from our cache) - records = {} # type: Dict[Any, recordsm.DataRecord] + records: Dict[Any, "mincepy.DataRecord"] = {} left_to_find = set() for obj_id in obj_ids: try: @@ -352,9 +340,7 @@ def delete(self, *obj_or_identifier, imperative=True) -> result_types.DeleteResu # Those that we don't have cached records for and need to look up if left_to_find: # Have a look in the archive - for record in self._objects.records.find( - recordsm.DataRecord.obj_id.in_(*left_to_find) - ): + for record in self._objects.records.find(records_.DataRecord.obj_id.in_(*left_to_find)): records[record.obj_id] = record left_to_find.remove(record.obj_id) @@ -364,14 +350,15 @@ def delete(self, *obj_or_identifier, imperative=True) -> result_types.DeleteResu deleted = [] with self.in_transaction() as trans: - # Mark each object as deleted in the transaction and stage the 'delete record' for insertion - # in the order that they were passed to us, in case this makes a difference to the caller + # Mark each object as deleted in the transaction and stage the 'delete record' for + # insertion in the order that they were passed to us, in case this makes a difference + # to the caller for obj_id in obj_ids: record = records.get(obj_id, None) if record is None: continue - builder = recordsm.make_deleted_builder(record) + builder = records_.make_deleted_builder(record) deleted_record = self._record_builder_created(builder).build() trans.delete(record.obj_id) trans.stage(operations.Insert(deleted_record)) @@ -381,7 +368,7 @@ def delete(self, *obj_or_identifier, imperative=True) -> result_types.DeleteResu def history( self, obj_or_obj_id, idx_or_slice="*", as_objects=True - ) -> [Sequence[ObjectEntry], Sequence[recordsm.DataRecord]]: + ) -> [Sequence[ObjectEntry], Sequence["mincepy.DataRecord"]]: """Get a sequence of object ids and instances from the history of the given object. :param obj_or_obj_id: The instance or id of the object to get the history for @@ -413,7 +400,7 @@ def history( return [self._archive.load(ref) for ref in to_get] - def get_current_record(self, obj: object) -> recordsm.DataRecord: + def get_current_record(self, obj: object) -> "mincepy.DataRecord": """Get the current record that the historian has cached for the passed object""" trans = self.current_transaction() # Try the transaction first @@ -481,12 +468,12 @@ def to_obj_id(self, obj_or_identifier): 3. Passed a type that can be understood by the archive as an object id e.g. a string of version, in which case the archive will attempt to convert it - Returns None if neither of these cases were True. + Returns `None` if none of these cases were true. """ if self.is_obj_id(obj_or_identifier): return obj_or_identifier - if isinstance(obj_or_identifier, recordsm.SnapshotId): + if isinstance(obj_or_identifier, records_.SnapshotId): return obj_or_identifier.obj_id try: @@ -499,7 +486,7 @@ def to_obj_id(self, obj_or_identifier): return self.get_obj_id(obj_or_identifier) - def get_snapshot_id(self, obj: object) -> recordsm.SnapshotId: + def get_snapshot_id(self, obj: object) -> "mincepy.SnapshotId": """Get the current snapshot id for a live object. 
Will return the id or raise :class:`mincepy.NotFound` exception"""
         trans = self.current_transaction()
@@ -536,7 +523,7 @@ def is_trackable(cls, obj):
     def type_registry(self) -> type_registry.TypeRegistry:
         return self._type_registry
 
-    def is_primitive(self, obj) -> bool:
+    def is_primitive(self, obj: Any) -> bool:
         """Check if the object is one of the primitives and should be saved by value in the
         archive"""
         return obj.__class__ in self.primitives
 
@@ -553,16 +540,14 @@ def register_type(
         return helper
 
-    def register_types(
-        self, obj_clases_or_helpers: Iterable[HistorianType], replace=True
-    ):
+    def register_types(self, obj_clases_or_helpers: Iterable[HistorianType], replace=True):
         for item in obj_clases_or_helpers:
             self.register_type(item, replace=replace)
 
     def get_obj_type_id(self, obj_type):
         return self._type_registry.get_type_id(obj_type)
 
-    def get_obj_type(self, type_id):
+    def get_obj_type(self, type_id) -> type:
         return self.get_helper(type_id).TYPE
 
     def get_helper(self, type_id_or_type, auto_register=False) -> helpers.TypeHelper:
@@ -598,23 +583,24 @@ def find(
         apply filters on the stored state of the object and metadata respectively. To understand
         how the state is stored in the database (and therefore how to apply filters to it) it may
         be necessary to look at the details of the `save_instance_state()` method for that type.
-        Metadata is always a dictionary containing primitives (strings, dicts, lists, etc).
+        Metadata is always a dictionary containing primitives (strings, dicts, lists, etc.).
 
         For the most part, the filter syntax of `mincePy` conforms to that of `MongoDB`_ with
-        convenience functions locate in :py:mod:`mincepy.qops` that can make it easier to
-        to build a query.
+        convenience functions located in :py:mod:`mincepy.qops` that can make it easier to
+        build a query.
 
         Examples:
 
         Find all :py:class:`~mincepy.testing.Car`s that are brown or red:
 
         >>> import mincepy as mpy
+        >>> from mincepy import testing
         >>> historian = mpy.get_historian()
-        >>> historian.find(mpy.testing.Car.colour.in_('brown', 'red'))
+        >>> historian.find(testing.Car.colour.in_('brown', 'red'))
 
         Find all people that are older than 34 and live in Edinburgh:
 
-        >>> historian.find(mpy.testing.Person.age > 34, meta=dict(city='Edinburgh'))
+        >>> historian.find(testing.Person.age > 34, meta=dict(city='Edinburgh'))
 
         :param obj_type: the object type to look for
         :param obj_id: an object or multiple object ids to look for
@@ -643,7 +629,7 @@ def get_creator(self, obj_or_identifier) -> object:
         if not self.is_obj_id(obj_or_identifier):
             # Object instance, try the staging area
             info = staging.get_info(obj_or_identifier, create=False) or {}
-            created_by = info.get(recordsm.ExtraKeys.CREATED_BY, None)
+            created_by = info.get(records_.ExtraKeys.CREATED_BY, None)
             if created_by is not None:
                 return created_by
 
@@ -669,25 +655,26 @@ def get_user_info(self) -> dict:
         """Get information about the current user and host"""
         user_info = {}
         if self._user:
-            user_info[recordsm.ExtraKeys.USER] = self._user
+            user_info[records_.ExtraKeys.USER] = self._user
         if self._hostname:
-            user_info[recordsm.ExtraKeys.HOSTNAME] = self._hostname
+            user_info[records_.ExtraKeys.HOSTNAME] = self._hostname
         return user_info
 
     def merge(
         self,
         result_set: frontend.ResultSet[object],
         *,
-        meta=None,  # pylint: disable=unused-argument
-        batch_size=1024,
+        meta: Optional[Literal["update", "overwrite"]] = None,
+        # pylint: disable=unused-argument
+        batch_size: int = 1024,
         progress_callback: Callable[
             [utils.Progress, Optional[result_types.MergeResult]], None
         ] = None,
     ) -> result_types.MergeResult:
         """Merge a set of objects into this database.
- Given a set of results from another archive this will attempt to merge the corresponding records - into this historian's archive. + Given a set of results from another archive this will attempt to merge the corresponding + records into this historian's archive. :param result_set: the set of records to merge from the source historian :param meta: option for merging metadata, allowed values: @@ -699,9 +686,9 @@ def merge( remote = result_set.historian # type: Historian # Get information about the records that we've been asked to merge # pylint: disable=protected-access - remote_partial_records = result_set._project(recordsm.OBJ_ID, recordsm.VERSION) + remote_partial_records = result_set._project(records_.OBJ_ID, records_.VERSION) remote_snapshot_ids = set( - map(recordsm.SnapshotId.from_dict, remote_partial_records) + map(records_.SnapshotId.from_dict, remote_partial_records) ) # DB HIT progress = utils.Progress(len(remote_snapshot_ids)) @@ -734,9 +721,7 @@ def merge( return result - def purge( - self, deleted=True, unreferenced=True, dry_run=True - ) -> result_types.PurgeResult: + def purge(self, deleted=True, unreferenced=True, dry_run=True) -> result_types.PurgeResult: """Purge the archive of unused snapshots""" snapshot_purge = self.snapshots.purge(deleted=deleted, dry_run=dry_run) @@ -745,11 +730,9 @@ def purge( # Let's get snapshot ids for all live object live_snapshot_ids = list( map( - recordsm.SnapshotId.from_dict, + records_.SnapshotId.from_dict, # pylint: disable=protected-access - self.objects.records.find()._project( - recordsm.OBJ_ID, recordsm.VERSION - ), + self.objects.records.find()._project(records_.OBJ_ID, records_.VERSION), ) ) # Now, find all the snapshots that they refer to, these will be the ones we DON'T delete @@ -764,20 +747,16 @@ def purge( unreferenced_deleted = set( map( - recordsm.SnapshotId.from_dict, + records_.SnapshotId.from_dict, # pylint: disable=protected-access - res._project(recordsm.OBJ_ID, recordsm.VERSION), + res._project(records_.OBJ_ID, records_.VERSION), ) ) if unreferenced_deleted and not dry_run: - self._archive.bulk_write( - list(map(operations.Delete, unreferenced_deleted)) - ) + self._archive.bulk_write(list(map(operations.Delete, unreferenced_deleted))) - return result_types.PurgeResult( - snapshot_purge.deleted_purged, unreferenced_deleted - ) + return result_types.PurgeResult(snapshot_purge.deleted_purged, unreferenced_deleted) def _merge_batch( self, remote: "Historian", remote_ref_graph: networkx.DiGraph @@ -790,12 +769,12 @@ def _merge_batch( for entry in remote.archive.snapshots.find( {"_id": qops.in_(*sid_strings)}, projection={ - recordsm.OBJ_ID: 1, - recordsm.VERSION: 1, - recordsm.SNAPSHOT_HASH: 1, + records_.OBJ_ID: 1, + records_.VERSION: 1, + records_.SNAPSHOT_HASH: 1, }, ): # DB HIT - remote_partial_records[recordsm.SnapshotId.from_dict(entry)] = entry + remote_partial_records[records_.SnapshotId.from_dict(entry)] = entry # LOCAL # Find the local snapshots along with their hashes @@ -803,21 +782,18 @@ def _merge_batch( for entry in self.archive.snapshots.find( {"_id": qops.in_(*sid_strings)}, projection={ - recordsm.OBJ_ID: 1, - recordsm.VERSION: 1, - recordsm.SNAPSHOT_HASH: 1, + records_.OBJ_ID: 1, + records_.VERSION: 1, + records_.SNAPSHOT_HASH: 1, }, ): # DB HIT - local_partial_records[recordsm.SnapshotId.from_dict(entry)] = entry + local_partial_records[records_.SnapshotId.from_dict(entry)] = entry # Remove all those that match and log any that have conflicting hashes conflicting = [] for sid, local_partial in 
local_partial_records.items():
             remote_record = remote_partial_records.pop(sid)
-            if (
-                remote_record[recordsm.SNAPSHOT_HASH]
-                != local_partial[recordsm.SNAPSHOT_HASH]
-            ):
+            if remote_record[records_.SNAPSHOT_HASH] != local_partial[records_.SNAPSHOT_HASH]:
                 conflicting.append(sid)
 
         if conflicting:
@@ -831,7 +807,7 @@ def _merge_batch(
         for remote_record in remote.archive.snapshots.find(
             {"_id": qops.in_(*map(str, remote_partial_records.keys()))}
         ):  # DB HIT
-            record = recordsm.DataRecord(**remote_record)
+            record = records_.DataRecord(**remote_record)
             ops.append(operations.Merge(record))
             files_in_record = record.get_files()
             if files_in_record:
@@ -840,20 +816,16 @@
 
         # and write the new records into our archive
         if ops:
-            # Copy the files first. This way if the user cancels prematurely the files are there but no the objects
-            # that refer to them. The other way around would result in the objects being there but failing when
-            # someone tries to load the files
+            # Copy the files first. This way if the user cancels prematurely the files are there
+            # but not the objects that refer to them. The other way around would result in the
+            # objects being there but failing when someone tries to load the files
             file_store = self.archive.file_store
             for file_dict in files_to_transfer:
                 file_id = file_dict[expr.field_name(files.File.file_id)]
                 filename = file_dict[expr.field_name(files.File.filename)] or ""
-                with remote.archive.file_store.open_download_stream(
-                    file_id
-                ) as down_stream:
-                    file_store.upload_from_stream_with_id(
-                        file_id, filename, down_stream
-                    )
+                with remote.archive.file_store.open_download_stream(file_id) as down_stream:
+                    file_store.upload_from_stream_with_id(file_id, filename, down_stream)
 
             self._archive.bulk_write(ops)  # DB HIT
 
@@ -870,7 +842,7 @@ def in_transaction(self) -> Iterator[Transaction]:
         if current is None:
             ctx = self.transaction()
         else:
-            ctx = nullcontext(current)
+            ctx = contextlib.nullcontext(current)
 
         with ctx as trans:
             yield trans
@@ -884,7 +856,9 @@ def transaction(self) -> Iterator[Transaction]:
         self._transactions.append(nested)
         try:
             yield nested
-        except Exception:  # Need this so we can have 'else' pylint: disable=try-except-raise
+        except (  # Need this so we can have 'else' pylint: disable=try-except-raise
+            Exception
+        ):
             raise
         else:
             self._closing_transaction(nested)
@@ -924,9 +898,7 @@ def _closing_transaction(self, trans: Transaction):
             )
 
         obj_ids = set(operation.obj_id for operation in del_ops)
-        ref_graph = self.references.get_obj_ref_graph(
-            *obj_ids, direction=archives.INCOMING
-        )
+        ref_graph = self.references.get_obj_ref_graph(*obj_ids, direction=archives.INCOMING)
         for obj_id in obj_ids:
             for edge in ref_graph.in_edges(obj_id):
                 conflicting.add(edge[1])
@@ -961,10 +933,10 @@ def _commit_transaction(self, trans: Transaction):
         if trans.metas:
             self._archive.meta_set_many(trans.metas)
 
-    def _load_object_from_record(self, record: recordsm.DataRecord):
+    def _load_object_from_record(self, record: "mincepy.DataRecord"):
         depositor = self._live_depositor
 
-        # Try getting the object from the our dict of up to date ones
+        # Try getting the object from our dict of up-to-date ones
         obj_id = record.obj_id
         try:
             return self.get_obj(obj_id)
@@ -985,14 +957,12 @@ def _load_object_from_record(self, record: recordsm.DataRecord):
 
     def _ensure_obj_id(self, obj_or_identifier):
         """
-        This call will try and get an object id from the passed parameter. Uses .to_obj_id() and raises NotFound if it
-        is not possible to get the object id.
+ This call will try and get an object id from the passed parameter. Uses `.to_obj_id()` and + raises `NotFound` if it is not possible to get the object id. """ obj_id = self.to_obj_id(obj_or_identifier) if obj_id is None: - raise exceptions.NotFound( - f"Could not get an object id from '{obj_or_identifier}'" - ) + raise exceptions.NotFound(f"Could not get an object id from '{obj_or_identifier}'") return obj_id @@ -1028,8 +998,8 @@ def _prepare_type_id(self, obj_type): return list(map(self.get_obj_type_id, obj_type)) def _record_builder_created( - self, builder: recordsm.DataRecordBuilder - ) -> recordsm.DataRecordBuilder: + self, builder: "mincepy.DataRecordBuilder" + ) -> "mincepy.DataRecordBuilder": """Update a data record builder with standard information.""" builder.extras.update(self.get_user_info()) return builder diff --git a/mincepy/history.py b/mincepy/history.py index cc6e7ab..9e18e14 100644 --- a/mincepy/history.py +++ b/mincepy/history.py @@ -1,18 +1,17 @@ -# -*- coding: utf-8 -*- """ This module exposes some global functionality for connecting to and interacting with the current historian """ + import os -from typing import Optional +from typing import TYPE_CHECKING, Optional import deprecation -from . import archive_factory -from . import helpers -from . import historians -from . import plugins -from . import version +from . import archive_factory, helpers, plugins, version + +if TYPE_CHECKING: + import mincepy __all__ = ( "connect", @@ -61,7 +60,7 @@ def create_default_historian(): return None -def connect(uri: str = "", use_globally=False, timeout=30000) -> historians.Historian: +def connect(uri: str = "", use_globally=False, timeout=30000) -> "mincepy.Historian": """Connect to an archive and return a corresponding historian :param uri: the URI of the archive to connect to @@ -69,9 +68,7 @@ def connect(uri: str = "", use_globally=False, timeout=30000) -> historians.Hist :param timeout: a connection timeout (in milliseconds) """ uri = uri or default_archive_uri() - hist = archive_factory.create_historian( - uri, apply_plugins=True, connect_timeout=timeout - ) + hist = archive_factory.create_historian(uri, apply_plugins=True, connect_timeout=timeout) if use_globally: set_historian(hist, apply_plugins=False) return hist @@ -86,7 +83,7 @@ def default_archive_uri() -> Optional[str]: # region Globals -def get_historian(create=True) -> Optional[historians.Historian]: +def get_historian(create=True) -> Optional["mincepy.Historian"]: """Get the currently set global historian. If one doesn't exist and create is True then this call will attempt to create a new default historian using connect()""" global CURRENT_HISTORIAN # pylint: disable=global-statement, global-variable-not-assigned @@ -98,7 +95,7 @@ def get_historian(create=True) -> Optional[historians.Historian]: return CURRENT_HISTORIAN -def set_historian(new_historian: Optional[historians.Historian], apply_plugins=True): +def set_historian(new_historian: Optional["mincepy.Historian"], apply_plugins=True): """Set the current global historian. Optionally load all plugins. To reset the historian pass None. """ diff --git a/mincepy/migrate.py b/mincepy/migrate.py index 5500dac..5696ab9 100644 --- a/mincepy/migrate.py +++ b/mincepy/migrate.py @@ -1,11 +1,10 @@ -# -*- coding: utf-8 -*- -from typing import Iterator, Sequence, Iterable +from typing import TYPE_CHECKING, Iterable, Iterator, Sequence -import mincepy -from . import depositors -from . import helpers -from .qops import elem_match_, or_, lt_ -from . import records +from . 
import depositors, helpers, records +from .qops import elem_match_, lt_, or_ + +if TYPE_CHECKING: + import mincepy __all__ = ("Migrations",) @@ -42,7 +41,7 @@ def find_migratable_records(self) -> Iterator[records.DataRecord]: ] if not have_migrations: - return [] + return iter([]) # Now, let's look for those records that would need migrating archive = self._historian.archive diff --git a/mincepy/migrations.py b/mincepy/migrations.py index 19cdf03..e2b942b 100644 --- a/mincepy/migrations.py +++ b/mincepy/migrations.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from typing import Any, Optional import pytray.pretty @@ -24,8 +23,8 @@ def __init__(cls, name, bases, dct): if cls.PREVIOUS is not None and cls.VERSION <= cls.PREVIOUS.VERSION: raise RuntimeError( f"A migration must have a version number higher than the previous migration. " - f"{pytray.pretty.type_string(cls.PREVIOUS)}.VERSION is {cls.PREVIOUS.VERSION} while " - f"{pytray.pretty.type_string(cls)}.VERSION is {cls.VERSION}" + f"{pytray.pretty.type_string(cls.PREVIOUS)}.VERSION is {cls.PREVIOUS.VERSION} " + f"while {pytray.pretty.type_string(cls)}.VERSION is {cls.VERSION}" ) if cls.NAME is None: diff --git a/mincepy/mongo/__init__.py b/mincepy/mongo/__init__.py index a16cb77..0425d44 100644 --- a/mincepy/mongo/__init__.py +++ b/mincepy/mongo/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from . import mongo_archive from .mongo_archive import * diff --git a/mincepy/mongo/aggregation.py b/mincepy/mongo/aggregation.py index a977598..4a43453 100644 --- a/mincepy/mongo/aggregation.py +++ b/mincepy/mongo/aggregation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Module that contains aggregation operations""" diff --git a/mincepy/mongo/bulk.py b/mincepy/mongo/bulk.py index 829c300..4c59fe9 100644 --- a/mincepy/mongo/bulk.py +++ b/mincepy/mongo/bulk.py @@ -1,10 +1,9 @@ -# -*- coding: utf-8 -*- import functools import pymongo -from mincepy import operations -from mincepy import q +import mincepy.operations as operations +import mincepy.qops as q from . 
import db diff --git a/mincepy/mongo/db.py b/mincepy/mongo/db.py index c34b783..8bf68cb 100644 --- a/mincepy/mongo/db.py +++ b/mincepy/mongo/db.py @@ -1,16 +1,16 @@ -# -*- coding: utf-8 -*- """This module contains the names of the keys in the various collections used by the mongo archive and methods to convert mincepy types to mongo collection entries and back""" + import functools from typing import Optional -import pymongo.collection -import pymongo.errors from bidict import bidict import bson +import pymongo.collection +import pymongo.errors +import mincepy.qops as q import mincepy.records -from mincepy import q SETTINGS_COLLECTION = "settings" GLOBAL_SETTINGS = "global" @@ -47,7 +47,7 @@ # endregion -def to_record(entry) -> mincepy.DataRecord: +def to_record(entry) -> "mincepy.DataRecord": """Convert a MongoDB data collection entry to a DataRecord""" record_dict = mincepy.DataRecord.defaults() @@ -57,11 +57,7 @@ def to_record(entry) -> mincepy.DataRecord: # Invert our mapping of keys back to the data record property names and update over any # defaults record_dict.update( - { - recordkey: entry[dbkey] - for recordkey, dbkey in KEY_MAP.items() - if dbkey in entry - } + {recordkey: entry[dbkey] for recordkey, dbkey in KEY_MAP.items() if dbkey in entry} ) return mincepy.DataRecord(**record_dict) @@ -74,8 +70,8 @@ def to_document(record, exclude_defaults=False) -> dict: raise TypeError(record.__class__) -@to_document.register(mincepy.DataRecord) -def _(record: mincepy.DataRecord, exclude_defaults=False) -> dict: +@to_document.register(mincepy.records.DataRecord) +def _(record: mincepy.records.DataRecord, exclude_defaults=False) -> dict: """Convert a DataRecord to a MongoDB document with our keys""" defaults = mincepy.DataRecord.defaults() entry = {} @@ -130,7 +126,7 @@ def remap_key(key: str) -> str: return ".".join(split_key) -def to_id_dict(sid: mincepy.SnapshotId) -> dict: +def to_id_dict(sid: mincepy.records.SnapshotId) -> dict: return {OBJ_ID: sid.obj_id, VERSION: sid.version} @@ -144,8 +140,11 @@ def sid_from_str(sid_str: str): def safe_bulk_delete(collection: pymongo.collection.Collection, ids, id_key="_id"): - """Sometimes when you want to delete a bunch of documents using an identifier the 'delete document' itself exceeds - the 16MB Mongo limit. This function will catch such cases and break up the command into suitably batches""" + """ + Sometimes when you want to delete a bunch of documents using an identifier the 'delete document' + itself exceeds the 16MB Mongo limit. 
This function will catch such cases and break up the + command into suitable batches + """ ids = list(set(ids)) # No needs to repeat ourselves try: collection.delete_many({id_key: q.in_(*ids)}) diff --git a/mincepy/mongo/migrate.py b/mincepy/mongo/migrate.py index 6cf9cde..994c549 100644 --- a/mincepy/mongo/migrate.py +++ b/mincepy/mongo/migrate.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- import abc import contextlib import random import string -from typing import List, Type, Optional +from typing import List, Optional, Type import pymongo.database @@ -105,8 +104,8 @@ def ensure_up_to_date(database: pymongo.database.Database, latest: Type[Migratio current_version = current.get(VERSION, None) if current_version and current_version > latest.VERSION: raise MigrationError( - f"The current database version ({current_version}) is higher than the code version ({latest.VERSION}) you " - f"may need to update your version of the code" + f"The current database version ({current_version}) is higher than the code version " + f"({latest.VERSION}); you may need to update your version of the code" ) migrator = MigrationManager(latest) @@ -120,9 +119,7 @@ def get_version(database) -> Optional[int]: @contextlib.contextmanager def temporary_collection(database: pymongo.database.Database, coll_name=None): - coll_name = coll_name or "".join( - random.choices(string.ascii_letters, k=10) # nosec - ) + coll_name = coll_name or "".join(random.choices(string.ascii_letters, k=10)) # nosec coll = database[coll_name] yield coll database.drop_collection(coll_name) diff --git a/mincepy/mongo/migrations.py b/mincepy/mongo/migrations.py index 1d9161d..5c7d8e4 100644 --- a/mincepy/mongo/migrations.py +++ b/mincepy/mongo/migrations.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- import pymongo.database import tqdm from . import migrate -from .aggregation import eq_, and_ +from .aggregation import and_, eq_ class Initial(migrate.Migration): diff --git a/mincepy/mongo/mongo_archive.py b/mincepy/mongo/mongo_archive.py index faf36ef..aa95635 100644 --- a/mincepy/mongo/mongo_archive.py +++ b/mincepy/mongo/mongo_archive.py @@ -1,32 +1,26 @@ -# -*- coding: utf-8 -*- -from typing import Optional, Sequence, Union, Iterable, Mapping, Iterator, Dict, Tuple -import weakref +from typing import Dict, Iterable, Iterator, Mapping, Optional, Sequence, Tuple, Union from urllib import parse import uuid +import weakref import bson import gridfs import networkx import pymongo -import pymongo.uri_parser import pymongo.database import pymongo.errors +import pymongo.uri_parser # MincePy imports -from mincepy import archives -from mincepy import helpers -from mincepy import operations -from mincepy import q -from mincepy import records -from mincepy import exceptions +import mincepy.archives as archives +import mincepy.exceptions as exceptions +import mincepy.helpers as helpers +import mincepy.operations as operations +import mincepy.qops as q +import mincepy.records as records # Local imports -from . import bulk -from . import migrate -from . import migrations -from . import db -from . import references -from . import queries +from .
import bulk, db, migrate, migrations, queries, references __all__ = ("MongoArchive", "connect") @@ -204,9 +198,7 @@ def meta_get(self, obj_id: bson.ObjectId): found.pop("_id") return found.get(db.META, None) - def meta_get_many( - self, obj_ids: Iterable[bson.ObjectId] - ) -> Dict[bson.ObjectId, dict]: + def meta_get_many(self, obj_ids: Iterable[bson.ObjectId]) -> Dict[bson.ObjectId, dict]: # Find multiple for obj_id in obj_ids: if not isinstance(obj_id, bson.ObjectId): @@ -226,9 +218,9 @@ def meta_set(self, obj_id, meta): ) except pymongo.errors.DuplicateKeyError as exc: raise exceptions.DuplicateKeyError(str(exc)) - else: - if found.modified_count == 0: - raise exceptions.NotFound(f"No record with object id '{obj_id}' found") + + if found.modified_count == 0: + raise exceptions.NotFound(f"No record with object id '{obj_id}' found") def meta_set_many(self, metas: Mapping[bson.ObjectId, Optional[dict]]): ops = [] @@ -255,14 +247,12 @@ def meta_set_many(self, metas: Mapping[bson.ObjectId, Optional[dict]]): def meta_update(self, obj_id, meta: Mapping): try: to_set = queries.expand_filter(db.META, meta) - res = self._data_collection.update_one( - {"_id": obj_id}, {"$set": to_set}, upsert=False - ) + res = self._data_collection.update_one({"_id": obj_id}, {"$set": to_set}, upsert=False) except pymongo.errors.DuplicateKeyError as exc: raise exceptions.DuplicateKeyError(str(exc)) - else: - if res.matched_count == 0: - raise exceptions.NotFound(f"No record with object id '{obj_id}' found") + + if res.matched_count == 0: + raise exceptions.NotFound(f"No record with object id '{obj_id}' found") def meta_find( self, @@ -302,9 +292,7 @@ def meta_create_index(self, keys, unique=True, where_exist=False): return for entry in keys: if not isinstance(entry, tuple): - raise TypeError( - f"Keys must be list of tuples, got {entry.__class__.__name__}" - ) + raise TypeError(f"Keys must be list of tuples, got {entry.__class__.__name__}") # Transform the keys keys = [(f"{db.META}.{name}", direction) for name, direction in keys] @@ -443,9 +431,7 @@ def get_snapshot_ref_graph( def get_obj_ref_graph( self, *obj_ids: bson.ObjectId, direction=archives.OUTGOING, max_dist: int = None ) -> Iterator[networkx.DiGraph]: - return self._refman.get_obj_ref_graphs( - obj_ids, direction=direction, max_dist=max_dist - ) + return self._refman.get_obj_ref_graphs(obj_ids, direction=direction, max_dist=max_dist) @staticmethod def _get_pipeline( @@ -597,9 +583,7 @@ def get(self, entry_id: bson.ObjectId) -> dict: raise exceptions.NotFound(entry_id) return db.remap_back(doc) - def count( - self, filter: dict, *, meta: dict = None # pylint: disable=redefined-builtin - ) -> int: + def count(self, filter: dict, *, meta: dict = None) -> int: # pylint: disable=redefined-builtin """Get the number of entries that match the search criteria""" # Create the pipeline pipeline = [] @@ -616,8 +600,8 @@ def count( result = next(self._collection.aggregate(pipeline)) except StopIteration: return 0 - else: - return result["total"] + + return result["total"] MOCKED = weakref.WeakValueDictionary() @@ -655,9 +639,7 @@ def pymongo_connect(uri, database: str = None, timeout=30000): raise ValueError(f"Failed to supply database on MongoDB uri: {uri}") try: - client = pymongo.MongoClient( - uri, connect=True, serverSelectionTimeoutMS=timeout - ) + client = pymongo.MongoClient(uri, connect=True, serverSelectionTimeoutMS=timeout) database = client.get_default_database() return MongoArchive(database) except pymongo.errors.ServerSelectionTimeoutError as exc: diff 
--git a/mincepy/mongo/queries.py b/mincepy/mongo/queries.py index 9252910..435b3c2 100644 --- a/mincepy/mongo/queries.py +++ b/mincepy/mongo/queries.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- import functools from typing import Mapping -from .aggregation import and_, eq_ from . import db +from .aggregation import and_, eq_ def pipeline_latest_version(data_collection: str) -> list: diff --git a/mincepy/mongo/references.py b/mincepy/mongo/references.py index c93f3e2..70ce438 100644 --- a/mincepy/mongo/references.py +++ b/mincepy/mongo/references.py @@ -1,18 +1,18 @@ -# -*- coding: utf-8 -*- import itertools import logging -import operator -from typing import Sequence, Union, Callable, Iterator, Iterable, List +from typing import TYPE_CHECKING, Callable, Iterable, Iterator, List, Sequence, Union import bson import networkx import pymongo.collection -import mincepy -from mincepy import OUTGOING -from . import aggregation -from . import db -from . import types +import mincepy.archives as archives +import mincepy.records as records + +from . import aggregation, db, types + +if TYPE_CHECKING: + import mincepy logger = logging.getLogger(__name__) # pylint: disable=invalid-name @@ -36,14 +36,14 @@ def __init__( self._history_collection = history_collection def get_obj_ref_graphs( - self, obj_ids: Sequence[bson.ObjectId], direction=OUTGOING, max_dist: int = None + self, obj_ids: Sequence[bson.ObjectId], direction=archives.OUTGOING, max_dist: int = None ) -> networkx.DiGraph: return self._get_graph(obj_ids, direction=direction, max_dist=max_dist) def get_snapshot_ref_graph( self, - ids: Sequence[mincepy.SnapshotId], - direction=OUTGOING, + ids: Sequence["mincepy.SnapshotId"], + direction=archives.OUTGOING, max_dist: int = None, ) -> Iterator[networkx.DiGraph]: """Get the reference graph for a sequence of ids""" @@ -63,7 +63,7 @@ def invalidate( def _get_graph( self, ids: Sequence[Union[bson.ObjectId, types.SnapshotId]], - direction=OUTGOING, + direction=archives.OUTGOING, max_dist: int = None, node_factory: Callable = None, ) -> networkx.DiGraph: @@ -81,13 +81,11 @@ def _get_graph( node_factory = node_factory or (lambda x: x) search_max_dist = max_dist - if max_dist is not None and direction == OUTGOING: + if max_dist is not None and direction == archives.OUTGOING: search_max_dist = max(max_dist - 1, 0) search_ids = self._prepare_for_ref_search(ids) - pipeline = self._get_ref_pipeline( - search_ids, direction=direction, max_dist=search_max_dist - ) + pipeline = self._get_ref_pipeline(search_ids, direction=direction, max_dist=search_max_dist) # Need to allow disk use as the graph can get huge ref_results = { result["_id"]: result @@ -119,17 +117,19 @@ def _get_graph( for neighbour_id in entry["refs"]: neighbour = node_factory(neighbour_id) - if direction == OUTGOING or neighbour in graph.nodes: + if direction == archives.OUTGOING or neighbour in graph.nodes: graph.add_edge(this, neighbour) return graph def _get_ref_pipeline( - self, ids: Sequence, direction=OUTGOING, max_dist: int = None + self, ids: Sequence, direction=archives.OUTGOING, max_dist: int = None ) -> list: - """Get the reference lookup pipeline. Given a sequence of ids, a direction and maximum distance this will - return a pipeline that can be used in an aggregation operation on the relevant collection to get the reference - graph.""" + """ + Get the reference lookup pipeline. 
Given a sequence of ids, a direction and maximum + distance this will return a pipeline that can be used in an aggregation operation on the + relevant collection to get the reference graph. + """ if max_dist is not None and max_dist < 0: raise ValueError(f"max_dist must be positive, got '{max_dist}'") @@ -142,17 +142,13 @@ def _get_ref_pipeline( "as": "references", "depthField": "depth", } - if direction == OUTGOING: + if direction == archives.OUTGOING: lookup_params.update( - dict( - startWith="$refs", connectFromField="refs", connectToField="_id" - ) + dict(startWith="$refs", connectFromField="refs", connectToField="_id") ) else: lookup_params.update( - dict( - startWith="$_id", connectFromField="_id", connectToField="refs" - ) + dict(startWith="$_id", connectFromField="_id", connectToField="refs") ) if max_dist is not None: @@ -163,15 +159,16 @@ def _get_ref_pipeline( return pipeline - def _prepare_for_ref_search( - self, ids: Sequence[Union[bson.ObjectId, mincepy.SnapshotId]] - ): - """Make sure that the references collections are up to date in preparation for a reference graph search""" + def _prepare_for_ref_search(self, ids: Sequence[Union[bson.ObjectId, "mincepy.SnapshotId"]]): + """ + Make sure that the references collections are up-to-date in preparation for a reference + graph search + """ hist_updated = False data_updated = False converted = [] for entry in ids: - if isinstance(entry, mincepy.SnapshotId): + if isinstance(entry, records.SnapshotId): if not hist_updated: self._ensure_current("history") hist_updated = True @@ -202,9 +199,7 @@ def _ensure_current(self, collection_name: str, ids=None): else: raise ValueError(f"Unsupported collection: {collection_name}") - logger.debug( - "Checking for missing reference in '%s' collection", collection_name - ) + logger.debug("Checking for missing reference in '%s' collection", collection_name) to_insert = [] for data_entry in self._get_missing_entries(collection, ids): diff --git a/mincepy/mongo/settings.py b/mincepy/mongo/settings.py index 31eb1a4..743996f 100644 --- a/mincepy/mongo/settings.py +++ b/mincepy/mongo/settings.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from typing import Optional import pymongo.database diff --git a/mincepy/mongo/types.py b/mincepy/mongo/types.py index 6a3b030..23f8b6a 100644 --- a/mincepy/mongo/types.py +++ b/mincepy/mongo/types.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import bson from mincepy import archives diff --git a/mincepy/operations.py b/mincepy/operations.py index a789c81..7209388 100644 --- a/mincepy/operations.py +++ b/mincepy/operations.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- """Module containing record operations that can be performed sent to the archive to perform""" + import abc from . import records @@ -89,8 +89,10 @@ def snapshot_id(self) -> records.SnapshotId: class Merge(Operation): """Merge a record into the archive. This could be: * An entirely new snapshot, i.e. the object id doesn't exist in the archive at all - * A new version of a record, i.e. the object id does exist but this version is newer than any other - * An old version of a record, i.e. the object id does exist but this version is older than the latest + * A new version of a record, i.e. the object id does exist but this version is newer than + any other + * An old version of a record, i.e. the object id does exist but this version is older than + the latest In any case the snapshot id should not exist in the database already. 
""" diff --git a/mincepy/plugins.py b/mincepy/plugins.py index b4baf14..4bd7b20 100644 --- a/mincepy/plugins.py +++ b/mincepy/plugins.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import logging from typing import List @@ -8,9 +7,7 @@ def load_failed(_manager, entrypoint, exception): - logger.warning( - "Error loading mincepy plugin from entrypoing '%s':\n%s", entrypoint, exception - ) + logger.warning("Error loading mincepy plugin from entrypoing '%s':\n%s", entrypoint, exception) def get_types() -> List: diff --git a/mincepy/process.py b/mincepy/process.py index cb55b7c..bd22716 100644 --- a/mincepy/process.py +++ b/mincepy/process.py @@ -1,13 +1,10 @@ -# -*- coding: utf-8 -*- import contextlib import functools import uuid import deprecation -from . import base_savable -from . import tracking -from . import version +from . import base_savable, tracking, version __all__ = ("Process",) diff --git a/mincepy/provides.py b/mincepy/provides.py index 30bfe10..b84b9b6 100644 --- a/mincepy/provides.py +++ b/mincepy/provides.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from . import testing diff --git a/mincepy/qops.py b/mincepy/qops.py index ff4a8ce..97c03e1 100644 --- a/mincepy/qops.py +++ b/mincepy/qops.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Module containing functions to generate query operations. To prevent clashes with python builtins we append underscores to the function names. This also makes it safer to import this module as a wildcard import. diff --git a/mincepy/records.py b/mincepy/records.py index c32c978..b1b0241 100644 --- a/mincepy/records.py +++ b/mincepy/records.py @@ -1,32 +1,33 @@ -# -*- coding: utf-8 -*- """This module defines the data record and other objects and functions related to storing things in an archive.""" -import copy import collections +import copy import datetime import operator from typing import ( - Optional, + TYPE_CHECKING, + Any, + Generic, Iterable, + List, + Mapping, + Optional, Sequence, - Union, Tuple, - Any, - Mapping, TypeVar, - Generic, - List, + Union, ) import deprecation import pytray.tree -from . import type_ids -from . import fields -from . import utils +from . import fields, type_ids, utils from . import version as version_mod +if TYPE_CHECKING: + import mincepy + __all__ = ( "OBJ_ID", "TYPE_ID", @@ -76,9 +77,7 @@ class ExtraKeys: # pylint: disable=too-few-public-methods CREATED_BY = "_created_by" # The ID of the process the data was created in - COPIED_FROM = ( - "_copied_from" # The reference to the snapshot that this object was copied from - ) + COPIED_FROM = "_copied_from" # The reference to the snapshot that this object was copied from USER = "_user" # The user that saved this snapshot HOSTNAME = "_hostname" # The hostname of the computer this snapshot was saved on @@ -104,8 +103,11 @@ class SnapshotId(Generic[IdT]): @classmethod def from_dict(cls, sid_dict: dict) -> "SnapshotId": - """Build a snapshot ID from a dictionary. Uses OBJ_ID and VERSION keys but ignores any additional keys making - it useful when passing **sid_dict to the constructor would fail because of the presence of unexpected keys.""" + """ + Build a snapshot ID from a dictionary. Uses OBJ_ID and VERSION keys but ignores any + additional keys making it useful when passing **sid_dict to the constructor would fail + because of the presence of unexpected keys. 
+ """ return cls(sid_dict[OBJ_ID], sid_dict[VERSION]) def __init__(self, obj_id, version: int): @@ -146,7 +148,7 @@ def to_dict(self) -> dict: SnapshotRef = SnapshotId -def readonly_field(field_name: str, **kwargs) -> fields.Field: +def readonly_field(field_name: str, **kwargs) -> "mincepy.fields.Field": properties = dict( fget=operator.itemgetter(DATA_RECORD_FIELDS.index(field_name)), doc=field_name ) @@ -391,6 +393,4 @@ def make_child_builder(record: DataRecord, **kwargs) -> "DataRecordBuilder": def make_deleted_builder(record: DataRecord) -> DataRecordBuilder: """Get a record that represents the deletion of this object""" - return make_child_builder( - record, state=DELETED, state_types=None, snapshot_hash=None - ) + return make_child_builder(record, state=DELETED, state_types=None, snapshot_hash=None) diff --git a/mincepy/refs.py b/mincepy/refs.py index 6f6a49b..6bb9ef2 100644 --- a/mincepy/refs.py +++ b/mincepy/refs.py @@ -1,11 +1,8 @@ -# -*- coding: utf-8 -*- """References module""" + from typing import Optional -from . import exceptions -from . import records -from . import types -from . import type_ids +from . import exceptions, records, type_ids, types __all__ = ("ObjRef", "ref") diff --git a/mincepy/result_types.py b/mincepy/result_types.py index 098027f..82ee279 100644 --- a/mincepy/result_types.py +++ b/mincepy/result_types.py @@ -1,5 +1,4 @@ -# -*- coding: utf-8 -*- -from typing import List, Iterable, Tuple, Set +from typing import Iterable, List, Set, Tuple from . import records as recordsm @@ -8,9 +7,10 @@ class MergeResult: """Information about the results from a merge operation. - `all` contains all of the IDs that were considered in the merge which means not only those that were passed but also - all those that they reference. - `merged` contains the ids of all the records that were actually merged (the rest were already present) + `all` contains all the IDs that were considered in the merge which means not only those that + were passed but also all those that they reference. + `merged` contains the ids of all the records that were actually merged (the rest were already + present) """ __slots__ = "all", "merged" diff --git a/mincepy/saving.py b/mincepy/saving.py index a69e4a5..1b5be27 100644 --- a/mincepy/saving.py +++ b/mincepy/saving.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Module for methods related to saving and loading objects to/from records""" from typing import Type, Union @@ -59,14 +58,13 @@ def load_instance_state( if ignore_missing: value = None else: - raise ValueError( - f"Saved state missing '{properties.store_as}'" - ) from None + raise ValueError(f"Saved state missing '{properties.store_as}'") from None if properties.ref and value is not None: - assert isinstance( - value, refs.ObjRef - ), f"Expected to see a reference in the saved state for key '{properties.store_as}' but got '{value}'" + assert isinstance(value, refs.ObjRef), ( + f"Expected to see a reference in the saved state for key " + f"'{properties.store_as}' but got '{value}'" + ) value = value() # Dereference it to_set[properties.attr_name] = value diff --git a/mincepy/staging.py b/mincepy/staging.py index fedeeac..6cde2a1 100644 --- a/mincepy/staging.py +++ b/mincepy/staging.py @@ -1,5 +1,4 @@ -# -*- coding: utf-8 -*- -from typing import MutableMapping, Any, Optional +from typing import Any, MutableMapping, Optional from . 
import utils diff --git a/mincepy/testing.py b/mincepy/testing.py index a59476a..e0b2d19 100644 --- a/mincepy/testing.py +++ b/mincepy/testing.py @@ -1,13 +1,12 @@ -# -*- coding: utf-8 -*- """Classes and function useful for trying out mincepy functionality""" -# pylint: disable=cyclic-import + import contextlib import gc import logging import os -import string import random -from typing import Iterator, Callable +import string +from typing import Callable, Iterator import uuid import weakref @@ -28,8 +27,9 @@ def get_base_uri() -> str: - """Get a base URI for an archive that can be used for testing. This will not contain the database name as multiple - databases can be used during a test session.""" + """ + Get a base URI for an archive that can be used for testing. This will not contain the database + name as multiple databases can be used during a test session.""" return os.environ.get(ENV_ARCHIVE_BASE_URI, DEFAULT_ARCHIVE_BASE_URI) @@ -46,8 +46,10 @@ def create_archive_uri(base_uri="", db_name=""): @contextlib.contextmanager # @mongomock.patch(servers=(('localhost', 27017),)) -def temporary_archive(archive_uri: str) -> Iterator[mincepy.Archive]: - """Create a temporary archive. The associated database will be dropped on exiting the context""" +def temporary_archive(archive_uri: str) -> Iterator["mincepy.Archive"]: + """ + Create a temporary archive. The associated database will be dropped on exiting the context + """ archive = mincepy.mongo.connect(archive_uri) db = archive.database client = db.client @@ -58,16 +60,17 @@ def temporary_archive(archive_uri: str) -> Iterator[mincepy.Archive]: @contextlib.contextmanager -def temporary_historian(archive_uri: str = "") -> Iterator[mincepy.Archive]: - """Create a temporary historian. The associated database will be dropped on exiting the context.""" +def temporary_historian(archive_uri: str = "") -> Iterator["mincepy.Archive"]: + """ + Create a temporary historian. The associated database will be dropped on exiting the context. + """ with temporary_archive(archive_uri) as archive: yield mincepy.Historian(archive) try: - import pytest - # Optional pytest fixtures + import pytest @pytest.fixture def archive_uri() -> str: @@ -204,18 +207,17 @@ def populate(historian=None): historian.save(people) -def do_round_trip( - historian: mincepy.Historian, factory: Callable, *args, **kwargs -) -> object: - """Given a historian, this function will: +def do_round_trip(historian: mincepy.Historian, factory: Callable, *args, **kwargs) -> object: + """ + Given a historian, this function will: 1. create the object using factory(*args, **kwargs) 2. save the object and ask for it to be deleted, 3. reload the object using the object id 4. check that the python id of the loaded object is different from the original 5. return the loaded object - This is useful to check that saving and loading of an object work correctly and makes it easy to subsequently check - that the state of the loaded object is as expected. + This is useful to check that saving and loading of an object work correctly and makes it easy to + subsequently check that the state of the loaded object is as expected. """ obj_id, obj_type = _do_create_and_save(historian, factory, *args, **kwargs) diff --git a/mincepy/tracking.py b/mincepy/tracking.py index c03d433..a225df2 100644 --- a/mincepy/tracking.py +++ b/mincepy/tracking.py @@ -1,10 +1,8 @@ -# -*- coding: utf-8 -*- import copy as python_copy import functools from typing import Callable -from . import records -from . import staging +from . 
import records, staging __all__ = "track", "copy", "deepcopy", "mark_as_copy" @@ -61,9 +59,7 @@ def track(obj_or_fn): be the creator. Or it can be used as a context in which case the creator should be passed as the argument. """ - if isinstance( - obj_or_fn, Callable - ): # pylint: disable=isinstance-second-argument-not-valid-type + if isinstance(obj_or_fn, Callable): # pylint: disable=isinstance-second-argument-not-valid-type # We're acting as a decorator @functools.wraps(obj_or_fn) def wrapper(self, *args, **kwargs): diff --git a/mincepy/transactions.py b/mincepy/transactions.py index af73704..19c2628 100644 --- a/mincepy/transactions.py +++ b/mincepy/transactions.py @@ -1,26 +1,11 @@ -# -*- coding: utf-8 -*- import contextlib import copy -from typing import ( - MutableMapping, - Any, - List, - Sequence, - Optional, - Dict, - Set, - Union, - overload, -) +from typing import Any, Dict, List, MutableMapping, Optional, Sequence, Set, Union, overload import weakref import deprecation -from . import archives -from . import exceptions -from . import operations -from . import records -from . import utils +from . import archives, exceptions, operations, records, utils from . import version as version_mod @@ -33,9 +18,7 @@ def __init__(self): utils.WeakObjectIdDict() ) # type: MutableMapping[object, archives.DataRecord] # Obj id -> (weak) object - self._objects = ( - weakref.WeakValueDictionary() - ) # type: MutableMapping[Any, object] + self._objects = weakref.WeakValueDictionary() # type: MutableMapping[Any, object] def __str__(self): return f"{len(self._objects)} live" @@ -72,12 +55,10 @@ def get_record(self, obj: object) -> records.DataRecord: raise exceptions.NotFound(f"No live object found '{obj}'") from None @overload - def get_object(self, identifier: records.SnapshotId): - ... + def get_object(self, identifier: records.SnapshotId): ... @overload - def get_object(self, identifier: Any): - ... + def get_object(self, identifier: Any): ... def get_object(self, identifier: Union[records.SnapshotId, Any]): """Get an object from the collection either by snapshot id or object id @@ -94,9 +75,7 @@ def get_object(self, identifier: Union[records.SnapshotId, Any]): try: return self._objects[identifier] except KeyError: - raise exceptions.NotFound( - f"No live object with id '{identifier}'" - ) from None + raise exceptions.NotFound(f"No live object with id '{identifier}'") from None def get_snapshot_id(self, obj) -> records.SnapshotId: """Given an object, get the snapshot id""" @@ -192,9 +171,7 @@ def metas(self) -> dict: def insert_live_object(self, obj, record: records.DataRecord): """Insert a live object along with an up-to-date record into the transaction""" if self.is_deleted(record.obj_id): - raise ValueError( - f"Object with id '{record.obj_id}' has already been deleted!" - ) + raise ValueError(f"Object with id '{record.obj_id}' has already been deleted!") sid = record.snapshot_id if sid in self._in_progress_cache: @@ -220,12 +197,10 @@ def prepare_for_saving(self, snapshot_id: records.SnapshotId, obj): del self._in_progress_cache[snapshot_id] @overload - def get_live_object(self, identifier: records.SnapshotId) -> object: - ... + def get_live_object(self, identifier: records.SnapshotId) -> object: ... @overload - def get_live_object(self, identifier: Any) -> object: - ... + def get_live_object(self, identifier: Any) -> object: ... 
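The one-line `...` bodies above are `typing.overload` stubs: only the final, undecorated definition exists at runtime, while the stubs tell type checkers which argument combinations are valid. A small self-contained illustration of the stub-plus-implementation pattern, using hypothetical stand-in classes rather than mincepy's real ones:

from typing import Any, Dict, Union, overload

class SnapshotId:
    """Simplified stand-in for records.SnapshotId."""
    def __init__(self, obj_id: Any, version: int) -> None:
        self.obj_id = obj_id
        self.version = version

class LiveCollection:
    def __init__(self) -> None:
        self._by_snapshot: Dict[SnapshotId, object] = {}
        self._by_obj_id: Dict[Any, object] = {}

    @overload
    def get_object(self, identifier: SnapshotId) -> object: ...

    @overload
    def get_object(self, identifier: Any) -> object: ...

    def get_object(self, identifier: Union[SnapshotId, Any]) -> object:
        # Single runtime implementation; it dispatches on the identifier type
        if isinstance(identifier, SnapshotId):
            return self._by_snapshot[identifier]
        return self._by_obj_id[identifier]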
def get_live_object(self, identifier: Union[records.SnapshotId, Any]) -> object: if isinstance(identifier, records.SnapshotId): @@ -293,9 +268,7 @@ def get_snapshot(self, snapshot_id): try: return self._snapshots[snapshot_id] except KeyError: - raise exceptions.NotFound( - f"No snapshot with id '{snapshot_id}' found" - ) from None + raise exceptions.NotFound(f"No snapshot with id '{snapshot_id}' found") from None def stage(self, op: operations.Operation): # pylint: disable=invalid-name """Stage an operation to be carried out on completion of this transaction""" diff --git a/mincepy/type_ids.py b/mincepy/type_ids.py index 2da6acf..6a110dc 100644 --- a/mincepy/type_ids.py +++ b/mincepy/type_ids.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- """Module for storing type ids of common mincePy types""" + import uuid FILE_TYPE_ID = uuid.UUID("3bf3c24e-f6c8-4f70-956f-bdecd7aed091") diff --git a/mincepy/type_registry.py b/mincepy/type_registry.py index 809cc72..17c18b0 100644 --- a/mincepy/type_registry.py +++ b/mincepy/type_registry.py @@ -1,14 +1,10 @@ -# -*- coding: utf-8 -*- import collections -from typing import Type, MutableMapping, Any, Union +from typing import Any, MutableMapping, Type, Union -from . import helpers -from . import types +from . import helpers, types SavableObjectType = Type[types.SavableObject] -RegisterableType = Union[ - helpers.TypeHelper, Type[helpers.TypeHelper], SavableObjectType -] +RegisterableType = Union[helpers.TypeHelper, Type[helpers.TypeHelper], SavableObjectType] class TypeRegistry: @@ -16,10 +12,8 @@ class TypeRegistry: to store and track objects in the archive""" def __init__(self): - self._helpers = ( - {} - ) # type: MutableMapping[SavableObjectType, helpers.TypeHelper] - self._type_ids = {} # type: MutableMapping[Any, SavableObjectType] + self._helpers: MutableMapping[SavableObjectType, helpers.TypeHelper] = {} + self._type_ids: MutableMapping[Any, SavableObjectType] = {} def __contains__(self, item: SavableObjectType) -> bool: return item in self._helpers @@ -37,8 +31,8 @@ def register_type( """Register a type new type :param obj_class_or_helper: the type helper of savable object to register - :param replace: if True, will silently replace an entry that has the same type id, otherwise raises a - ValueError the id is already registered + :param replace: if True, will silently replace an entry that has the same type id, otherwise + raises a `ValueError` if the id is already registered """ helper = self._register(obj_class_or_helper, replace) @@ -49,10 +43,12 @@ def register_type( return helper def unregister_type(self, item: Union[helpers.TypeHelper, SavableObjectType, Any]): """ + Un-register a type helper. If the type is not registered, this method will return with no + effect. - :param item: either a `TypeHelper` (for mapped type), a `SavableObjectType` or a type id. The checks will be - performed in this order. + :param item: either a `TypeHelper` (for mapped type), a `SavableObjectType` or a type id. + The checks will be performed in this order.
""" try: self._remove_using_type_id(item.TYPE_ID) @@ -89,9 +85,7 @@ def get_helper_from_type_id(self, type_id) -> helpers.TypeHelper: except KeyError: raise TypeError(f"Type id '{type_id}' not known") from None - def get_helper_from_obj_type( - self, obj_type: SavableObjectType - ) -> helpers.TypeHelper: + def get_helper_from_obj_type(self, obj_type: SavableObjectType) -> helpers.TypeHelper: try: # Try the direct lookup return self._helpers[obj_type] @@ -166,9 +160,9 @@ def _insert_helper(self, helper: helpers.TypeHelper, replace=False): and self._type_ids[type_id] is not obj_type ): raise ValueError( - f"Helper for type id '{helper.TYPE_ID}' already exists for type '{self._type_ids[type_id]}' but " - f"it is attempting to be replace by '{obj_type.__name__}'. " - f"Call with replace=True if this is intentional." + f"Helper for type id '{helper.TYPE_ID}' already exists for type " + f"'{self._type_ids[type_id]}' but it is attempting to be replace by " + f"'{obj_type.__name__}'. Call with replace=True if this is intentional." ) self._helpers[obj_type] = helper diff --git a/mincepy/types.py b/mincepy/types.py index 268b059..4c2c587 100644 --- a/mincepy/types.py +++ b/mincepy/types.py @@ -1,20 +1,13 @@ -# -*- coding: utf-8 -*- from abc import ABCMeta, abstractmethod import datetime -from typing import Type, List +from hashlib import blake2b +from typing import TYPE_CHECKING, List, Optional, Sequence, Type import uuid -try: # Python3 - from hashlib import blake2b -except ImportError: # Python < 3.6 - from pyblake2 import blake2b +from . import depositors, expr, fields, saving, tracking -from . import depositors -from . import expr -from . import fields -from . import migrations # pylint: disable=unused-import -from . import saving -from . import tracking +if TYPE_CHECKING: + import mincepy __all__ = "Savable", "Comparable", "Object", "SavableObject", "PRIMITIVE_TYPES" @@ -41,12 +34,10 @@ class Savable(fields.WithFields, expr.FilterLike): """Interface for an object that can save and load its instance state""" TYPE_ID = None - LATEST_MIGRATION: "migrations.ObjectMigration" = None + LATEST_MIGRATION: Optional["mincepy.ObjectMigration"] = None def __init__(self, *args, **kwargs): - assert ( - self.TYPE_ID is not None - ), "Must set the TYPE_ID for an object to be savable" + assert self.TYPE_ID is not None, "Must set the TYPE_ID for an object to be savable" super().__init__(*args, **kwargs) @classmethod @@ -55,13 +46,11 @@ def __expr__(cls): return expr.Comparison("type_id", expr.Eq(cls.TYPE_ID)) @classmethod - def __query_expr__(cls) -> dict: + def __query_expr__(cls) -> dict: # pylint: disable=arguments-differ """This method gives savables the ability to be used in query filter expressions""" return cls.__expr__().__query_expr__() - def save_instance_state( - self, saver: depositors.Saver - ): # pylint: disable=unused-argument + def save_instance_state(self, saver: depositors.Saver): # pylint: disable=unused-argument """Save the instance state of an object, should return a saved instance""" return saving.save_instance_state(self) @@ -94,10 +83,10 @@ class SavableObject(Object, Savable, metaclass=ABCMeta): _historian = None @classmethod - def init_field(cls, field: fields.Field, attr_name: str): - super().init_field(field, attr_name) - field.set_query_context(expr.Comparison("type_id", expr.Eq(cls.TYPE_ID))) - field.path_prefix = "state" + def init_field(cls, obj_field: fields.Field, attr_name: str): + super().init_field(obj_field, attr_name) + obj_field.set_query_context(expr.Comparison("type_id", 
expr.Eq(cls.TYPE_ID))) + obj_field.path_prefix = "state" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -116,8 +105,8 @@ def yield_hashables(self, hasher): class Equator: - def __init__(self, equators=tuple()): - self._equators = list(equators) + def __init__(self, equators: Sequence["mincepy.TypeHelper"] = tuple()): + self._equators: List["mincepy.TypeHelper"] = [] def do_hash(*args): hasher = blake2b(digest_size=32) @@ -128,10 +117,14 @@ def do_hash(*args): self._hasher = do_hash - def add_equator(self, equator): + # Initialise all the equators + for equator in equators: + self.add_equator(equator) + + def add_equator(self, equator: "mincepy.TypeHelper"): self._equators.append(equator) - def remove_equator(self, equator): + def remove_equator(self, equator: "mincepy.TypeHelper"): self._equators.reverse() try: self._equators.remove(equator) @@ -143,11 +136,14 @@ def remove_equator(self, equator): def get_equator(self, obj): # Iterate in reversed order i.e. the latest added should be used preferentially for equator in reversed(self._equators): - if isinstance(obj, equator.TYPE): - return equator - raise TypeError( - f"Don't know how to compare '{type(obj)}' types, no type equator set" - ) + try: + if isinstance(obj, equator.TYPE): + return equator + except TypeError as exc: + raise RuntimeError( + f"There is a problem with equator '{type(equator).__name__}'" + ) from exc + raise TypeError(f"Don't know how to compare '{type(obj)}' types, no type equator set") def yield_hashables(self, obj): try: @@ -158,7 +154,8 @@ def yield_hashables(self, obj): yield from obj.yield_hashables(self) except AttributeError: raise TypeError( - f"No helper registered and no yield_hashables method on '{type(obj).__name__}'" + f"No helper registered and no `yield_hashables()` method on " + f"'{type(obj).__name__}'" ) from None else: yield from equator.yield_hashables(obj, self) @@ -167,7 +164,7 @@ def hash(self, obj): return self._hasher(*self.yield_hashables(obj)) def eq(self, obj1, obj2) -> bool: # pylint: disable=invalid-name - if not type(obj1) == type(obj2): # pylint: disable=unidiomatic-typecheck + if not type(obj1) == type(obj2): # pylint: disable=unidiomatic-typecheck # noqa: E721 return False try: @@ -175,8 +172,8 @@ def eq(self, obj1, obj2) -> bool: # pylint: disable=invalid-name except TypeError: # Fallback to python eq return obj1 == obj2 - else: - return equator.eq(obj1, obj2) + + return equator.eq(obj1, obj2) def float_to_str(self, value, sig=14): """ diff --git a/mincepy/utils.py b/mincepy/utils.py index f763edd..d3d4dca 100644 --- a/mincepy/utils.py +++ b/mincepy/utils.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- import collections.abc import functools -from typing import TypeVar, Generic, Any, Type +from typing import Any, Generic, Type, TypeVar import weakref try: @@ -17,9 +16,7 @@ class WeakObjectIdDict(collections.abc.MutableMapping): """ def __init__(self, seq=None, **kwargs): - self._refs = ( - {} - ) # type: collections.abc.MutableMapping[int, weakref.ReferenceType] + self._refs = {} # type: collections.abc.MutableMapping[int, weakref.ReferenceType] self._values = {} # type: collections.abc.MutableMapping[int, Any] if seq: if isinstance(seq, collections.abc.Mapping): @@ -87,9 +84,7 @@ def __init__(self, tuple_type: Type[T], defaults=None): defaults = defaults or {} diff = set(defaults.keys()) - set(tuple_type._fields) if diff: - raise RuntimeError( - f"Can't supply defaults that are not in the namedtuple: '{diff}'" - ) + raise RuntimeError(f"Can't supply defaults 
that are not in the namedtuple: '{diff}'") super().__setattr__("_tuple_type", tuple_type) super().__setattr__("_values", defaults) @@ -163,9 +158,7 @@ def inner(obj_method): @functools.wraps(obj_method) def wrapper(self, *args, **kwargs): # pylint: disable=protected-access - ctx = ( - nullcontext if self._historian is None else self._historian.transaction - ) + ctx = nullcontext if self._historian is None else self._historian.transaction with ctx(): try: self.__sync += 1 diff --git a/mincepy/version.py b/mincepy/version.py index 53f8338..28d57ed 100644 --- a/mincepy/version.py +++ b/mincepy/version.py @@ -1,8 +1,2 @@ -# -*- coding: utf-8 -*- -author_info = (("Martin Uhrin", "martin.uhrin.10@ucl.ac.uk"),) -version_info = (0, 16, 5) - -__author__ = ", ".join(f"{info[0]} <{info[1]}>" for info in author_info) -__version__ = ".".join(map(str, version_info)) - -__all__ = ("__version__",) +__author__ = "Martin Uhrin <martin.uhrin.10@ucl.ac.uk>" +__version__ = "0.16.5" diff --git a/notebooks/development.ipynb b/notebooks/development.ipynb index 43792f2..8477e1a 100644 --- a/notebooks/development.ipynb +++ b/notebooks/development.ipynb @@ -2,13 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": true, - "pycharm": { - "is_executing": false - } - }, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "import pymongo\n", @@ -21,39 +16,28 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "hist = mincepy.create_historian(\"mongodb://localhost/test\")\n", "mincepy.set_historian(hist)" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "hist.get_archive()._data_collection.drop()\n", "hist.get_archive()._meta_collection.drop()" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "def populate_database(hist):\n", @@ -82,18 +66,12 @@ " car.colour = colour\n", " hist.save(car)\n", " " - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ " \n", @@ -103,97 +81,47 @@ "car = Car('ferrari')\n", "car_id = hist.save(car)\n", "\n" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "garage = Garage(car)\n", "garage_id = hist.save(garage)" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 11, - "outputs": [ - { - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mdel\u001b[0m \u001b[0mgarage\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m
\u001b[0mcar\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolour\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'yellow'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mhist\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcar\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'garage' is not defined" - ], - "ename": "NameError", - "evalue": "name 'garage' is not defined", - "output_type": "error" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "del garage\n" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", - "execution_count": 14, - "outputs": [ - { - "name": "stdout", - "text": [ - "yellow\n" - ], - "output_type": "stream" - } - ], + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "car.colour = 'yellow'\n", "hist.save(car)\n", "garage = hist.load(garage_id)\n", "print(garage.car.colour)" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n", - "is_executing": false - } - } + ] }, { "cell_type": "code", "execution_count": null, + "metadata": {}, "outputs": [], "source": [ "\n" - ], - "metadata": { - "collapsed": false, - "pycharm": { - "name": "#%%\n" - } - } + ] } ], "metadata": { @@ -211,16 +139,15 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.6" + "pygments_lexer": "ipython2" }, "pycharm": { "stem_cell": { "cell_type": "raw", - "source": [], "metadata": { "collapsed": false - } + }, + "source": [] } } }, diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..e512cf6 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,166 @@ +[build-system] +requires = ['flit_core >=3.9,<4'] +build-backend = 'flit_core.buildapi' + +[project] +name = 'mincepy' +dynamic = ["version", "description"] +authors = [ + { name = 'Martin Uhrin', email = 'martin.uhrin.10@ucl.ac.uk' }, +] +readme = 'README.rst' +license = { file = 'LICENSE.txt' } +classifiers = [ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', +] +keywords = ["database", "schemaless", "nosql", "orm", "object-store", "concurrent", "optimistic-locking"] +requires-python = '>=3.8' +dependencies = [ + "deprecation", + "dnspython", # Needed to be able to connect using domain name rather than IP + "pymongo<4.0", + "litemongo", + "importlib-metadata<5.0", # see: https://stackoverflow.com/questions/73929564/entrypoints-object-has-no-attribute-get-digital-ocean + "mongomock", + "bidict", + "networkx", # For reference graphs + "pytray>=0.2.1", + "stevedore", + "click", + "tabulate", + "tqdm", +] + +[project.urls] +Home = 'https://mincepy.readthedocs.io/en/latest/index.html' +Source = 'https://github.com/muhrin/mincepy.git' + +[project.optional-dependencies] +docs = [ + "nbsphinx", + "sphinx", + "sphinx-autobuild", +] +dev = [ + "black", + "flit", + "ipython", + "mongomock", + "pip", + "pylint", + "pytest>4", + "pytest-benchmark", + "pytest-cov", + "pre-commit", + 
"yapf", +] +cli = ["click", "tabulate"] +gui = ["mincepy-gui"] +sci = ["mincepy-sci"] + +[project.scripts] +mince = "mincepy.cli:main" + +[project.entry-points."mincepy.plugins.types"] +native = "mincepy.provides:get_types" + + +[tool.flit.module] +name = 'mincepy' + +[tool.flit.sdist] +exclude = [ + '.github/', + 'docs/', + 'examples/', + 'test/', +] + +[tool.flynt] +line-length = 100 +fail-on-change = true + +[tool.isort] +profile = "black" +force_sort_within_sections = true +include_trailing_comma = true +line_length = 100 +multi_line_output = 3 + +[tool.pylint.format] +max-line-length = 100 + +[tool.black] +line-length = 100 + +[tool.pylint.messages_control] +disable = [ + # Unfortunately jaxtyping decorator creates a function that seems to mistakenly be identified as + # not returning anything, so we have to disable the error below for now + 'assignment-from-no-return', + 'duplicate-code', + 'import-outside-toplevel', + 'missing-docstring', + 'locally-disabled', + 'too-few-public-methods', + 'too-many-arguments', + 'too-many-instance-attributes', + 'use-dict-literal', +] + +[tool.pylint.design] +max-locals = 20 + +[pytest] +log_cli = "True" +log_cli_level = "DEBUG" + +[tool.pytest.ini_options] +minversion = '6.0' +testpaths = [ + 'test', +] +filterwarnings = [ + 'ignore::DeprecationWarning:frozendict:', +] + +[tool.yapf] +align_closing_bracket_with_visual_indent = true +based_on_style = 'google' +coalesce_brackets = true +column_limit = 100 +dedent_closing_brackets = true +indent_dictionary_value = false +split_arguments_when_comma_terminated = true + +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = py311 + +[testenv] +usedevelop = true + +[testenv:py{39,310,311,312}] +description = Run the unit tests +extras = + dev +commands = pytest {posargs} + +[testenv:pre-commit] +description = Run the style checks and formatting +extras = + dev +commands = pre-commit run {posargs} + +[pytest] +filterwarnings = + ignore::DeprecationWarning:distutils: +""" diff --git a/release.sh b/release.sh index dd64dc1..bbd5d16 100755 --- a/release.sh +++ b/release.sh @@ -1,7 +1,7 @@ PACKAGE="mincepy" REMOTE="muhrin" -VERSION_FILE=${PACKAGE}/version.py +VERSION_FILE=${PACKAGE}/__init__.py version=$1 while true; do @@ -15,8 +15,7 @@ done set -x -ver_info=`python -c "print(tuple(int(entry) for entry in '$version'.split('.')))"` -sed -i "/^version_info/c version_info = ${ver_info}" $VERSION_FILE +sed -i "/^__version__/c __version__ = ${version}" $VERSION_FILE current_branch=`git rev-parse --abbrev-ref HEAD` diff --git a/setup.py b/setup.py deleted file mode 100644 index d7d37f4..0000000 --- a/setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -from setuptools import setup - -__author__ = "Martin Uhrin" -__license__ = "LGPLv3" - -about = {} -with open("mincepy/version.py") as f: - exec(f.read(), about) # nosec - -setup( - name="mincepy", - version=about["__version__"], - description="Python object storage with versioning made simple", - long_description=open("README.rst").read(), - url="https://github.com/muhrin/mincepy.git", - author="Martin Uhrin", - author_email="martin.uhrin.10@ucl.ac.uk", - license=__license__, - classifiers=[ - "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - ], - keywords="database schemaless nosql orm object-store concurrent optimistic-locking", - 
install_requires=[ - 'contextlib2; python_version<"3.7"', - "deprecation", - "dnspython", # Needed to be able to connect using domain name rather than IP - "pymongo<4.0", - "litemongo", - "importlib-metadata<5.0", # see: https://stackoverflow.com/questions/73929564/entrypoints-object-has-no-attribute-get-digital-ocean - "mongomock", - "bidict", - "networkx", # For reference graphs - 'pyblake2; python_version<"3.6"', - "pytray>=0.2.1", - "stevedore", - "click", - "tabulate", - "tqdm", - ], - python_requires=">=3.7", - extras_require={ - "cli": ["click", "tabulate"], - "gui": ["mincepy-gui"], - "dev": [ - "ipython", - "mongomock", - "pip", - "pytest>4", - "pytest-benchmark", - "pytest-cov", - "pre-commit", - # 'prospector', - # 'pylint', - "twine", - "yapf", - ], - "docs": [ - "nbsphinx", - "sphinx", - "sphinx-autobuild", - ], - "sci": ["mincepy-sci"], - }, - packages=["mincepy", "mincepy.cli", "mincepy.mongo", "mincepy.hist"], - include_package_data=True, - test_suite="test", - provides=["mincepy.plugins"], - entry_points={ - "console_scripts": ["mince = mincepy.cli.main:mince"], - "mincepy.plugins.types": ["native_types = mincepy.provides:get_types"], - }, -) diff --git a/test/archive/test_reference_graph.py b/test/archive/test_reference_graph.py index 09ba87b..705a371 100644 --- a/test/archive/test_reference_graph.py +++ b/test/archive/test_reference_graph.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- import mincepy -from mincepy.testing import Car, Garage, Cycle +from mincepy.testing import Car, Cycle, Garage def test_get_snapshot_graph_simple(historian: mincepy.Historian): @@ -115,9 +114,7 @@ def test_get_obj_referencing_simple(historian: mincepy.Historian): garage = Garage(mincepy.ObjRef(car)) gid = garage.save() - car_graph = historian.archive.get_obj_ref_graph( - car.obj_id, direction=mincepy.INCOMING - ) + car_graph = historian.archive.get_obj_ref_graph(car.obj_id, direction=mincepy.INCOMING) assert len(car_graph.edges) == 1 assert len(car_graph.nodes) == 2 assert (gid, car.obj_id) in car_graph.edges @@ -127,9 +124,7 @@ def test_get_obj_referencing_simple(historian: mincepy.Historian): g2id = garage2.save() # Check that the reference graph is correct - car_graph = historian.archive.get_obj_ref_graph( - car.obj_id, direction=mincepy.INCOMING - ) + car_graph = historian.archive.get_obj_ref_graph(car.obj_id, direction=mincepy.INCOMING) assert len(car_graph.nodes) == 3 assert len(car_graph.edges) == 2 assert (gid, car.obj_id) in car_graph.edges @@ -195,23 +190,17 @@ def test_obj_referencing_max_depth(historian: mincepy.Historian): assert (three_id, two_id) in graph.edges assert (two_id, one_id) in graph.edges - graph = historian.archive.get_obj_ref_graph( - zero_id, direction=mincepy.INCOMING, max_dist=2 - ) + graph = historian.archive.get_obj_ref_graph(zero_id, direction=mincepy.INCOMING, max_dist=2) assert len(graph.edges) == 2 assert len(graph.nodes) == 3 assert (one_id, zero_id) in graph.edges assert (two_id, one_id) in graph.edges - graph = historian.archive.get_obj_ref_graph( - one_id, direction=mincepy.INCOMING, max_dist=1 - ) + graph = historian.archive.get_obj_ref_graph(one_id, direction=mincepy.INCOMING, max_dist=1) assert len(graph.edges) == 1 assert len(graph.nodes) == 2 assert (two_id, one_id) in graph.edges - graph = historian.archive.get_obj_ref_graph( - two_id, direction=mincepy.INCOMING, max_dist=0 - ) + graph = historian.archive.get_obj_ref_graph(two_id, direction=mincepy.INCOMING, max_dist=0) assert len(graph.edges) == 0 assert len(graph.nodes) == 1 diff --git 
a/test/cli/test_migrate.py b/test/cli/test_migrate.py index 27b144d..aa8e9cf 100644 --- a/test/cli/test_migrate.py +++ b/test/cli/test_migrate.py @@ -1,9 +1,9 @@ -# -*- coding: utf-8 -*- from click.testing import CliRunner import mincepy import mincepy.cli.main -from ..common import CarV1, CarV2, StoreByValue, StoreByRef + +from ..common import CarV1, CarV2, StoreByRef, StoreByValue def test_simple_migrate(historian: mincepy.Historian, archive_uri): diff --git a/test/common.py b/test/common.py index 5fbb94a..1bc006b 100644 --- a/test/common.py +++ b/test/common.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import uuid import mincepy diff --git a/test/conftest.py b/test/conftest.py index cd79baa..493993c 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # pylint: disable=unused-import, redefined-outer-name import random @@ -6,7 +5,8 @@ import mincepy from mincepy import testing -from mincepy.testing import archive_uri, mongodb_archive, historian, archive_base_uri +from mincepy.testing import archive_base_uri, archive_uri, historian, mongodb_archive + from . import utils diff --git a/test/historian/test_delete.py b/test/historian/test_delete.py index bc5aee9..2d40d10 100644 --- a/test/historian/test_delete.py +++ b/test/historian/test_delete.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import pytest import mincepy @@ -80,10 +79,7 @@ def test_delete_find(historian: mincepy.Historian): # Now check the archive assert historian.snapshots.records.find(obj_id=car_id).count() == 2 - assert ( - historian.snapshots.records.find(obj_id=car_id, state=mincepy.DELETED).count() - == 1 - ) + assert historian.snapshots.records.find(obj_id=car_id, state=mincepy.DELETED).count() == 1 def test_delete_multiple_versions(historian: mincepy.Historian): diff --git a/test/historian/test_meta.py b/test/historian/test_meta.py index 448eb3e..ff33684 100644 --- a/test/historian/test_meta.py +++ b/test/historian/test_meta.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import uuid import pytest diff --git a/test/historian/test_references.py b/test/historian/test_references.py index 40d1c40..9752c56 100644 --- a/test/historian/test_references.py +++ b/test/historian/test_references.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- import pytest import mincepy -from mincepy.testing import Person, Car, Garage +from mincepy.testing import Car, Garage, Person # pylint: disable=invalid-name @@ -109,9 +108,7 @@ def test_snapshot_references(historian: mincepy.Historian): refs = historian.references.references(sid) assert len(refs) == len(address_book) - assert not set(historian.get_snapshot_id(person) for person in address_book) - set( - refs - ) + assert not set(historian.get_snapshot_id(person) for person in address_book) - set(refs) def test_snapshot_referenced_by(historian: mincepy.Historian): @@ -121,8 +118,6 @@ def test_snapshot_referenced_by(historian: mincepy.Historian): address_book.save() sid = historian.get_snapshot_id(address_book) - refs = historian.references.referenced_by( - historian.get_snapshot_id(address_book[0]) - ) + refs = historian.references.referenced_by(historian.get_snapshot_id(address_book[0])) assert len(refs) == 1 assert sid in refs diff --git a/test/historian/test_type_registry.py b/test/historian/test_type_registry.py index b48b2ca..b5a6f12 100644 --- a/test/historian/test_type_registry.py +++ b/test/historian/test_type_registry.py @@ -1,10 +1,9 @@ -# -*- coding: utf-8 -*- +from test import common # pylint: disable=wrong-import-order + import pytest from mincepy 
-from test import common  # pylint: disable=wrong-import-order
-
 # pylint: disable=invalid-name
diff --git a/test/mongo/test_mongo_archive.py b/test/mongo/test_mongo_archive.py
index 3e101a0..0612af6 100644
--- a/test/mongo/test_mongo_archive.py
+++ b/test/mongo/test_mongo_archive.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Specific tests for the MongoDB archive"""

 import bson
@@ -35,6 +34,4 @@ def test_distinct(historian: mincepy.Historian):
     testing.Car(colour="red").save()

     assert set(archive.distinct("state.colour")) == {"red", "blue"}
-    assert set(archive.distinct("state.colour", {"state": {"colour": "red"}})) == {
-        "red"
-    }
+    assert set(archive.distinct("state.colour", {"state": {"colour": "red"}})) == {"red"}
diff --git a/test/test_archive.py b/test/test_archive.py
index aa77aa7..c326e79 100644
--- a/test/test_archive.py
+++ b/test/test_archive.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 from typing import Sequence

 import bson
@@ -53,9 +52,7 @@ def test_meta_set_update_many(historian: mincepy.Historian):
     results = archive.meta_get_many((car1id, car2id))
     assert results == {car1id: {"reg": "car1"}, car2id: {"reg": "car2"}}

-    archive.meta_update_many(
-        {car1id: {"colour": "red"}, car2id: {"reg": "car2updated"}}
-    )
+    archive.meta_update_many({car1id: {"colour": "red"}, car2id: {"reg": "car2updated"}})

     metas = archive.meta_get_many((car1id, car2id))
     assert metas == {
@@ -153,12 +150,8 @@ def test_find_using_iterator(mongodb_archive: mincepy.Archive):
     """Test that passing an iterable to find types that support it, works."""
     record_details = dict(state=None, state_types=None, snapshot_hash=None)

-    record1 = mincepy.DataRecord.new_builder(
-        obj_id=123, type_id=1, **record_details
-    ).build()
-    record2 = mincepy.DataRecord.new_builder(
-        obj_id=456, type_id=2, **record_details
-    ).build()
+    record1 = mincepy.DataRecord.new_builder(obj_id=123, type_id=1, **record_details).build()
+    record2 = mincepy.DataRecord.new_builder(obj_id=456, type_id=2, **record_details).build()

     mongodb_archive.save_many([record1, record2])
diff --git a/test/test_base_savable.py b/test/test_base_savable.py
index e1e77a2..71df7ea 100644
--- a/test/test_base_savable.py
+++ b/test/test_base_savable.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import uuid

 import mincepy
diff --git a/test/test_benchmarks.py b/test/test_benchmarks.py
index fb144be..a2986b7 100644
--- a/test/test_benchmarks.py
+++ b/test/test_benchmarks.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 try:
     from contextlib import nullcontext
 except ImportError:
@@ -7,8 +6,9 @@
 import pytest

 import mincepy
-from mincepy.testing import Car
 import mincepy.testing
+from mincepy.testing import Car
+
 from . import utils

 # pylint: disable=invalid-name
diff --git a/test/test_builtins.py b/test/test_builtins.py
index 9ed4983..9b0edb4 100644
--- a/test/test_builtins.py
+++ b/test/test_builtins.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import argparse
 import collections
 import pathlib
@@ -8,9 +7,7 @@
 import pytest

 import mincepy
-from mincepy import testing
-from mincepy import builtins
-
+from mincepy import builtins, testing

 # region lists
@@ -34,9 +31,7 @@ def test_ref_list(historian: mincepy.Historian):

     # Now delete everything, reload, and make sure the condition is still satisfied
     list1_id, list2_id = historian.save(list1, list2)
-    assert (
-        car.is_saved()
-    ), "The container should automatically save all it's entries saved"
+    assert car.is_saved(), "The container should automatically save all of its entries"
     del list1, list2, car

     list1_loaded = historian.load(list1_id)
@@ -47,9 +42,7 @@ def test_ref_list(historian: mincepy.Historian):
     assert list1_loaded[0] is list2_loaded[0]


-@pytest.mark.parametrize(
-    "list_type", (builtins.List, builtins.LiveList, builtins.LiveRefList)
-)
+@pytest.mark.parametrize("list_type", (builtins.List, builtins.LiveList, builtins.LiveRefList))
 def test_list_primitives(list_type, historian: mincepy.Historian):
     """Test that we can store primitives in a ref list also"""
     reflist = list_type()
@@ -153,9 +146,7 @@ def test_ref_dict(historian: mincepy.Historian):
     assert dict1_loaded["car"] is dict2_loaded["car"]


-@pytest.mark.parametrize(
-    "dict_type", (builtins.RefDict, builtins.LiveDict, builtins.LiveRefDict)
-)
+@pytest.mark.parametrize("dict_type", (builtins.RefDict, builtins.LiveDict, builtins.LiveRefDict))
 def test_primitives(dict_type, historian: mincepy.Historian):
     """Test that we can store primitives in a ref list also"""
     refdict = dict_type()
@@ -172,9 +163,7 @@ def test_primitives(dict_type, historian: mincepy.Historian):
     assert loaded["2"] == 10.8


-@pytest.mark.parametrize(
-    "dict_type", (builtins.RefDict, builtins.LiveDict, builtins.LiveRefDict)
-)
+@pytest.mark.parametrize("dict_type", (builtins.RefDict, builtins.LiveDict, builtins.LiveRefDict))
 def test_ref_dicts_iterate(dict_type: MutableMapping):
     to_store = {"car": testing.Car(), "msg": "hello", "number": 5}
diff --git a/test/test_convenience.py b/test/test_convenience.py
index 95e17a6..29a25e9 100644
--- a/test/test_convenience.py
+++ b/test/test_convenience.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import pytest

 import mincepy
diff --git a/test/test_data.py b/test/test_data.py
index 51c6a76..d834fd3 100644
--- a/test/test_data.py
+++ b/test/test_data.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import gc
 import uuid

@@ -6,7 +5,7 @@
 import mincepy
 import mincepy.builtins
-from mincepy.testing import Car, Garage, Cycle
+from mincepy.testing import Car, Cycle, Garage

 # pylint: disable=invalid-name
@@ -187,10 +186,7 @@ def test_user_info(historian: mincepy.Historian):

     record = historian.get_current_record(car)
     assert record.extras[mincepy.ExtraKeys.USER] == user_info[mincepy.ExtraKeys.USER]
-    assert (
-        record.extras[mincepy.ExtraKeys.HOSTNAME]
-        == user_info[mincepy.ExtraKeys.HOSTNAME]
-    )
+    assert record.extras[mincepy.ExtraKeys.HOSTNAME] == user_info[mincepy.ExtraKeys.HOSTNAME]


 def test_save_as_ref(historian: mincepy.Historian):
diff --git a/test/test_expr.py b/test/test_expr.py
index b3e405e..640f48c 100644
--- a/test/test_expr.py
+++ b/test/test_expr.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import pytest

 from mincepy import expr
@@ -83,9 +82,7 @@ def test_query_overlapping_filter_keys():
     compound1 = gt_24 & lt_38
     compound2 = gt_24 & lt_38
     query_filter = expr.Query(compound1, compound2).get_filter()
-    assert query_filter == {
-        "$and": [expr.query_expr(compound1), expr.query_expr(compound2)]
-    }
+    assert query_filter == {"$and": [expr.query_expr(compound1), expr.query_expr(compound2)]}


 def test_queryable():
@@ -147,9 +144,7 @@ def get_path(self) -> str:
     queryable.regex_(True)

     # Test starts_with
-    assert expr.query_expr(queryable.starts_with_(value)) == {
-        field_name: {"$regex": f"^{value}"}
-    }
+    assert expr.query_expr(queryable.starts_with_(value)) == {field_name: {"$regex": f"^{value}"}}


 def test_query_expr():
diff --git a/test/test_fields_saving.py b/test/test_fields_saving.py
index ea7f703..53e71a1 100644
--- a/test/test_fields_saving.py
+++ b/test/test_fields_saving.py
@@ -1,13 +1,10 @@
-# -*- coding: utf-8 -*-
 import argparse
 import datetime

 import pytest

 import mincepy
-from mincepy import expr
-from mincepy import fields
-from mincepy import saving
+from mincepy import expr, fields, saving

 # pylint: disable=too-few-public-methods, invalid-name, pointless-statement, protected-access
@@ -122,9 +119,7 @@ def creation_time(self):
     saving.load_instance_state(ts, saved_state)

     # Check that it's been restored correctly
-    assert (
-        ts._creation_time == ctime
-    ) is True  # pylint: disable=comparison-with-callable
+    assert (ts._creation_time == ctime) is True  # pylint: disable=comparison-with-callable
     assert ts.creation_time == ctime
@@ -154,9 +149,7 @@ def test_fields():

     # Now check what it does if we miss a value
     with pytest.raises(ValueError):
-        saving.load_instance_state(
-            img, dict(width=512, height=512), ignore_missing=False
-        )
+        saving.load_instance_state(img, dict(width=512, height=512), ignore_missing=False)

     # Check that it hasn't destroyed the state
     assert img.width == 1024
diff --git a/test/test_file.py b/test/test_file.py
index bfcaf91..be0cc2b 100644
--- a/test/test_file.py
+++ b/test/test_file.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import io
 import shutil

@@ -24,9 +23,7 @@ def test_file_basics(historian: mincepy.Historian):
     assert buffer.getvalue() == INITIAL_DATA


-def test_file_changing(
-    tmp_path, historian: mincepy.Historian
-):  # pylint: disable=unused-argument
+def test_file_changing(tmp_path, historian: mincepy.Historian):  # pylint: disable=unused-argument
     encoding = "utf-8"
     INITIAL_DATA = "Initial string"
     mince_file = historian.create_file(encoding=encoding)
diff --git a/test/test_find.py b/test/test_find.py
index 517b97c..ecf1cda 100644
--- a/test/test_find.py
+++ b/test/test_find.py
@@ -1,10 +1,9 @@
-# -*- coding: utf-8 -*-
 """Tests for the various historian find methods"""

 import pytest

-import mincepy.records
 from mincepy import testing
+import mincepy.records


 def test_find_state(historian: mincepy.Historian):
@@ -61,9 +60,7 @@ def test_simple_sort(historian: mincepy.Historian):
         cars.append(testing.Car(idx))
     historian.save(cars)

-    results = list(
-        historian.records.find(testing.Car, sort=mincepy.records.CREATION_TIME)
-    )
+    results = list(historian.records.find(testing.Car, sort=mincepy.records.CREATION_TIME))
     for idx, result in enumerate(results[1:]):
         # No need to subtract 1 from idx as we're already one behind because of the slicing
         assert result.creation_time >= results[idx].creation_time
@@ -173,9 +170,7 @@ def test_distinct(historian):
     car4.save()

     assert set(
-        historian.snapshots.records.distinct(
-            "version", obj_type=testing.Car, obj_id=id4
-        )
+        historian.snapshots.records.distinct("version", obj_type=testing.Car, obj_id=id4)
     ) == {0, 1}

     colours = set(historian.records.distinct("state.colour"))
@@ -222,11 +217,12 @@ class User(mincepy.SimpleSavable):
     # This call should not raise as `User` should be automatically registered
     assert isinstance(historian.find(User).one(), User)

-    # Now try creating a second user type with the same TYPE_ID, here automatic registration should fail because
-    # otherwise we would clobber the existing helper in the registry
+    # Now try creating a second user type with the same TYPE_ID; here automatic registration
+    # should fail because otherwise we would clobber the existing helper in the registry
     class User2(mincepy.SimpleSavable):
         TYPE_ID = "User"

     with pytest.raises(ValueError):
-        # Should raise, because now we have the same type id and we would clobber the one already reigstered
+        # Should raise, because now we have the same type id and we would clobber the one already
+        # registered
         assert isinstance(historian.find(User2).one(), User)
diff --git a/test/test_frontend.py b/test/test_frontend.py
index 868bc00..af3da31 100644
--- a/test/test_frontend.py
+++ b/test/test_frontend.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
 import mincepy
-from mincepy import frontend
-from mincepy import testing
+from mincepy import frontend, testing

 # pylint: disable=invalid-name
@@ -10,9 +8,7 @@ def test_collection(historian):
     def identity(x):
         return x

-    coll = frontend.EntriesCollection(
-        historian, historian.archive.objects, entry_factory=identity
-    )
+    coll = frontend.EntriesCollection(historian, historian.archive.objects, entry_factory=identity)
     p1 = testing.Person("martin", 35)
     p1.save()
     p2 = testing.Person("john", 5)
diff --git a/test/test_global.py b/test/test_global.py
index 6fc1bca..f2cf137 100644
--- a/test/test_global.py
+++ b/test/test_global.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
 """Test global functions in mincepy"""
+
 import mincepy
 from mincepy import testing
diff --git a/test/test_helpers.py b/test/test_helpers.py
index 2568bee..6914579 100644
--- a/test/test_helpers.py
+++ b/test/test_helpers.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import uuid

 import pytest
@@ -110,9 +109,7 @@ class Powerboat(Boat):
     TYPE_ID = uuid.UUID("924ef5b2-ce20-40b0-8c98-4da470f6c2c3")
     horsepower = mincepy.field()

-    def __init__(
-        self, make: str, length: float, horsepower: float, owner: testing.Person = None
-    ):
+    def __init__(self, make: str, length: float, horsepower: float, owner: testing.Person = None):
         super().__init__(make, length, owner)
         self.horsepower = horsepower
diff --git a/test/test_historian.py b/test/test_historian.py
index 5e713a6..da7b251 100644
--- a/test/test_historian.py
+++ b/test/test_historian.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import uuid

 import pytest
@@ -212,16 +211,8 @@ def test_find_arg_types(historian: mincepy.Historian):

     # Test different possibilities for object ids being passed
     list(historian.find(obj_id=red_ferrari_id))
-    list(
-        historian.find(
-            obj_id=[red_ferrari_id, green_ferrari_id, martin_id, red_honda_id]
-        )
-    )
-    list(
-        historian.find(
-            obj_id=(red_ferrari_id, green_ferrari_id, martin_id, red_honda_id)
-        )
-    )
+    list(historian.find(obj_id=[red_ferrari_id, green_ferrari_id, martin_id, red_honda_id]))
+    list(historian.find(obj_id=(red_ferrari_id, green_ferrari_id, martin_id, red_honda_id)))
     list(historian.find(obj_id=str(red_ferrari_id)))

     # Test object types
@@ -313,9 +304,7 @@ def test_snapshots_collection(historian: mincepy.Historian):
     assert set(car.colour for car in snapshots) == {"red", "brown"}

     assert (
-        historian.snapshots.records.find(Car.colour == "brown", obj_id=ferrari_id)
-        .one()
-        .version
+        historian.snapshots.records.find(Car.colour == "brown", obj_id=ferrari_id).one().version
         == 1
     )
@@ -342,10 +331,7 @@ def test_objects_collection(historian: mincepy.Historian):
     assert set(car.colour for car in objects) == {"brown"}

     assert (
-        historian.objects.records.find(Car.colour == "brown", obj_id=ferrari_id)
-        .one()
-        .version
-        == 1
+        historian.objects.records.find(Car.colour == "brown", obj_id=ferrari_id).one().version == 1
     )
@@ -452,9 +438,7 @@ def test_merge_file(historian: mincepy.Historian):
     # Now check that files contained within objects are correctly merged
     file_list = mincepy.List((file,))
     file_list.save()  # pylint: disable=no-member
-    result = remote.merge(
-        local.find(obj_id=file_list.obj_id)
-    )  # pylint: disable=no-member
+    result = remote.merge(local.find(obj_id=file_list.obj_id))  # pylint: disable=no-member

     assert len(result.merged) == 1
     assert historian.get_snapshot_id(file_list) in result.merged
@@ -463,9 +447,11 @@ def test_primitive_subtypes(historian: mincepy.Historian):
-    """This test catches the case where someone creates a subclass of a primitive. This should not be
-    treated as a primitive by the historian as we need to reload the correct type when retrieving from
-    the database."""
+    """
+    This test catches the case where someone creates a subclass of a primitive. This should not be
+    treated as a primitive by the historian as we need to reload the correct type when retrieving
+    from the database.
+    """

     class DictSubclass(dict, mincepy.BaseSavableObject):
         TYPE_ID = uuid.UUID("67a939ee-4be6-4006-ac77-fd1dbf3b0642")
diff --git a/test/test_history.py b/test/test_history.py
index 8472307..fa1c6dd 100644
--- a/test/test_history.py
+++ b/test/test_history.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import mincepy
 from mincepy import testing
diff --git a/test/test_migrate.py b/test/test_migrate.py
index 414230f..12c530c 100644
--- a/test/test_migrate.py
+++ b/test/test_migrate.py
@@ -1,10 +1,11 @@
-# -*- coding: utf-8 -*-
 """"Tests of migration"""
+
 import gc

 import mincepy
 from mincepy import testing
-from .common import CarV1, CarV2, StoreByValue, StoreByRef
+
+from .common import CarV1, CarV2, StoreByRef, StoreByValue


 def test_find_migratable(historian: mincepy.Historian):
diff --git a/test/test_migrations.py b/test/test_migrations.py
index 8400e8e..8fbb52d 100644
--- a/test/test_migrations.py
+++ b/test/test_migrations.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
 """"Tests of migrations"""
+
 import gc
 import logging
 import uuid
@@ -8,7 +8,8 @@
 import mincepy
 from mincepy import testing
-from .common import CarV0, CarV1, CarV2, StoreByValue, StoreByRef
+
+from .common import CarV0, CarV1, CarV2, StoreByRef, StoreByValue

 # pylint: disable=invalid-name
diff --git a/test/test_process.py b/test/test_process.py
index 6f3d40f..1b64777 100644
--- a/test/test_process.py
+++ b/test/test_process.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import pytest

 import mincepy
diff --git a/test/test_qops.py b/test/test_qops.py
index 425b093..2a58b4d 100644
--- a/test/test_qops.py
+++ b/test/test_qops.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 from mincepy import qops

diff --git a/test/test_refs.py b/test/test_refs.py
index 5c045df..3833547 100644
--- a/test/test_refs.py
+++ b/test/test_refs.py
@@ -1,11 +1,10 @@
-# -*- coding: utf-8 -*-
 """Module for testing object references"""

 from argparse import Namespace

 import mincepy
-import mincepy.records
 from mincepy import testing
+import mincepy.records

 # pylint: disable=invalid-name
diff --git a/test/test_savables.py b/test/test_savables.py
index aa54493..68f9be7 100644
--- a/test/test_savables.py
+++ b/test/test_savables.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import gc
 import uuid

@@ -6,7 +5,7 @@
 import mincepy
 import mincepy.builtins
-from mincepy.testing import Car, Garage, Cycle
+from mincepy.testing import Car, Cycle, Garage

 # pylint: disable=invalid-name
@@ -213,10 +212,7 @@ def test_user_info(historian: mincepy.Historian):

     record = historian.get_current_record(car)
     assert record.extras[mincepy.ExtraKeys.USER] == user_info[mincepy.ExtraKeys.USER]
-    assert (
-        record.extras[mincepy.ExtraKeys.HOSTNAME]
-        == user_info[mincepy.ExtraKeys.HOSTNAME]
-    )
+    assert record.extras[mincepy.ExtraKeys.HOSTNAME] == user_info[mincepy.ExtraKeys.HOSTNAME]


 def test_save_as_ref(historian: mincepy.Historian):
diff --git a/test/test_snapshots.py b/test/test_snapshots.py
index 9b9949c..d2b8b10 100644
--- a/test/test_snapshots.py
+++ b/test/test_snapshots.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Module for testing saved snapshots"""

 import time
diff --git a/test/test_staging.py b/test/test_staging.py
index 5b8d663..58bebf8 100644
--- a/test/test_staging.py
+++ b/test/test_staging.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import pytest

 from mincepy import staging
diff --git a/test/test_transactions.py b/test/test_transactions.py
index 21840b3..c4d3b94 100644
--- a/test/test_transactions.py
+++ b/test/test_transactions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """Module for testing saved snapshots"""

 import mincepy
diff --git a/test/test_type_registry.py b/test/test_type_registry.py
index e4f973d..92f16b6 100644
--- a/test/test_type_registry.py
+++ b/test/test_type_registry.py
@@ -1,6 +1,5 @@
-# -*- coding: utf-8 -*-
-import mincepy.type_registry
 import mincepy.testing
+import mincepy.type_registry


 def test_basics():
diff --git a/test/test_types.py b/test/test_types.py
index d0cb5b6..fd954ef 100644
--- a/test/test_types.py
+++ b/test/test_types.py
@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 import mincepy
-
 from mincepy.testing import Car
diff --git a/test/utils.py b/test/utils.py
index 648eb9e..d1ce327 100644
--- a/test/utils.py
+++ b/test/utils.py
@@ -1,8 +1,7 @@
-# -*- coding: utf-8 -*-
 import random
 import string


 def random_str(length=10):
     letters = string.ascii_lowercase
-    return "".join(random.choice(letters) for i in range(length))
+    return "".join(random.choice(letters) for _ in range(length))