diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml new file mode 100644 index 0000000..793d8e0 --- /dev/null +++ b/.github/workflows/docker-image.yml @@ -0,0 +1,18 @@ +name: Docker Image CI + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +jobs: + + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Build the Docker image + run: docker build . --file Dockerfile --tag my-image-name:$(date +%s) diff --git a/.gitignore b/.gitignore index 64799da..180de2b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ __pycache__/ .py[cod] +instance/ diff --git a/.mailmap b/.mailmap new file mode 100644 index 0000000..bac9567 --- /dev/null +++ b/.mailmap @@ -0,0 +1 @@ +Mia Herkt diff --git a/0x0-prune.service b/0x0-prune.service new file mode 100644 index 0000000..b28fb2d --- /dev/null +++ b/0x0-prune.service @@ -0,0 +1,22 @@ +[Unit] +Description=Prune 0x0 files +After=remote-fs.target + +[Service] +Type=oneshot +User=nullptr +WorkingDirectory=/path/to/0x0 +BindPaths=/path/to/0x0 + +Environment=FLASK_APP=fhost +ExecStart=/usr/bin/flask prune +ProtectProc=noaccess +ProtectSystem=strict +ProtectHome=tmpfs +PrivateTmp=true +PrivateUsers=true +ProtectKernelLogs=true +LockPersonality=true + +[Install] +WantedBy=multi-user.target diff --git a/0x0-prune.timer b/0x0-prune.timer new file mode 100644 index 0000000..df6a594 --- /dev/null +++ b/0x0-prune.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Prune 0x0 files + +[Timer] +OnCalendar=hourly +Persistent=true + +[Install] +WantedBy=timers.target diff --git a/0x0-vscan.service b/0x0-vscan.service new file mode 100644 index 0000000..6a48b1c --- /dev/null +++ b/0x0-vscan.service @@ -0,0 +1,22 @@ +[Unit] +Description=Scan 0x0 files with ClamAV +After=remote-fs.target clamd.service + +[Service] +Type=oneshot +User=nullptr +WorkingDirectory=/path/to/0x0 +BindPaths=/path/to/0x0 + +Environment=FLASK_APP=fhost +ExecStart=/usr/bin/flask vscan 
+ProtectProc=noaccess +ProtectSystem=strict +ProtectHome=tmpfs +PrivateTmp=true +PrivateUsers=true +ProtectKernelLogs=true +LockPersonality=true + +[Install] +WantedBy=multi-user.target diff --git a/0x0-vscan.timer b/0x0-vscan.timer new file mode 100644 index 0000000..d2c6486 --- /dev/null +++ b/0x0-vscan.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Scan 0x0 files with ClamAV + +[Timer] +OnCalendar=hourly +Persistent=true + +[Install] +WantedBy=timers.target diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e2e8936 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,37 @@ +FROM tiangolo/uwsgi-nginx:python3.11 + +LABEL maintainer="Sebastian Ramirez " + +# Install requirements +COPY requirements.txt /tmp/requirements.txt +RUN pip install --no-cache-dir -r /tmp/requirements.txt + +# URL under which static (not modified by Python) files will be requested +# They will be served by Nginx directly, without being handled by uWSGI +ENV STATIC_URL /static +# Absolute path in where the static files wil be +ENV STATIC_PATH /app/static + +# If STATIC_INDEX is 1, serve / with /static/index.html directly (or the static URL configured) +# ENV STATIC_INDEX 1 +ENV STATIC_INDEX 0 + +# Add demo app +COPY . /app +WORKDIR /app + +# Make /app/* available to be imported by Python globally to better support several use cases like Alembic migrations. +ENV PYTHONPATH=/app + +# Move the base entrypoint to reuse it +RUN mv /entrypoint.sh /uwsgi-nginx-entrypoint.sh && mkdir -p mnt/up && FLASK_APP=fhost flask db upgrade +# Copy the entrypoint that will generate Nginx additional configs +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +# Run the start script provided by the parent image tiangolo/uwsgi-nginx. +# It will check for an /app/prestart.sh script (e.g. 
for migrations) +# And then will start Supervisor, which in turn will start Nginx and uWSGI +CMD ["/start.sh"] diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 0000000..10792fa --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,8 @@ +FROM python:latest +#RUN apt update -y && apt install python3 python3-pip sqlite3 libmagic-dev -y +#COPY . /data +RUN git clone https://github.com/mia-0/0x0.git app && cd app && pip3 install --no-cache-dir -r requirements.txt +WORKDIR /app +RUN cp instance/config.example.py instance/config.py && sed -i "s#/path/to#/app/mnt#;s#FHOST_USE_X_ACCEL_REDIRECT = True#FHOST_USE_X_ACCEL_REDIRECT = False#;s#USE_X_SENDFILE = False#USE_X_SENDFILE =True#" instance/config.py && touch app.db && FLASK_APP=fhost flask db upgrade +EXPOSE 5000 +CMD ["flask","--app","fhost.py","run","-h","0.0.0.0"] diff --git a/LICENSE b/LICENSE index 7f1b81d..4153cd3 100644 --- a/LICENSE +++ b/LICENSE @@ -1,13 +1,287 @@ -Copyright © 2016, Martin Herkt - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + EUROPEAN UNION PUBLIC LICENCE v. 1.2 + EUPL © the European Union 2007, 2016 + +This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined +below) which is provided under the terms of this Licence. 
Any use of the Work, +other than as authorised under this Licence is prohibited (to the extent such +use is covered by a right of the copyright holder of the Work). + +The Work is provided under the terms of this Licence when the Licensor (as +defined below) has placed the following notice immediately following the +copyright notice for the Work: + + Licensed under the EUPL + +or has expressed by any other means his willingness to license under the EUPL. + +1. Definitions + +In this Licence, the following terms have the following meaning: + +- ‘The Licence’: this Licence. + +- ‘The Original Work’: the work or software distributed or communicated by the + Licensor under this Licence, available as Source Code and also as Executable + Code as the case may be. + +- ‘Derivative Works’: the works or software that could be created by the + Licensee, based upon the Original Work or modifications thereof. This Licence + does not define the extent of modification or dependence on the Original Work + required in order to classify a work as a Derivative Work; this extent is + determined by copyright law applicable in the country mentioned in Article 15. + +- ‘The Work’: the Original Work or its Derivative Works. + +- ‘The Source Code’: the human-readable form of the Work which is the most + convenient for people to study and modify. + +- ‘The Executable Code’: any code which has generally been compiled and which is + meant to be interpreted by a computer as a program. + +- ‘The Licensor’: the natural or legal person that distributes or communicates + the Work under the Licence. + +- ‘Contributor(s)’: any natural or legal person who modifies the Work under the + Licence, or otherwise contributes to the creation of a Derivative Work. + +- ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of + the Work under the terms of the Licence. 
+ +- ‘Distribution’ or ‘Communication’: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, online or offline, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + +2. Scope of the rights granted by the Licence + +The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, +sublicensable licence to do the following, for the duration of copyright vested +in the Original Work: + +- use the Work in any circumstance and for all usage, +- reproduce the Work, +- modify the Work, and make Derivative Works based upon the Work, +- communicate to the public, including the right to make available or display + the Work or copies thereof to the public and perform publicly, as the case may + be, the Work, +- distribute the Work or copies thereof, +- lend and rent the Work or copies thereof, +- sublicense rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make effective +the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to +any patents held by the Licensor, to the extent necessary to make use of the +rights granted on the Work under this Licence. + +3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as +Executable Code. 
If the Work is provided as Executable Code, the Licensor +provides in addition a machine-readable copy of the Source Code of the Work +along with each copy of the Work that the Licensor distributes or indicates, in +a notice following the copyright notice attached to the Work, a repository where +the Source Code is easily and freely accessible for as long as the Licensor +continues to distribute or communicate the Work. + +4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits from +any exception or limitation to the exclusive rights of the rights owners in the +Work, of the exhaustion of those rights or of other applicable limitations +thereto. + +5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: The Licensee shall keep intact all copyright, patent or +trademarks notices and all notices that refer to the Licence and to the +disclaimer of warranties. The Licensee must include a copy of such notices and a +copy of the Licence with every copy of the Work he/she distributes or +communicates. The Licensee must cause any Derivative Work to carry prominent +notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes or communicates copies of the +Original Works or Derivative Works, this Distribution or Communication will be +done under the terms of this Licence or of a later version of this Licence +unless the Original Work is expressly distributed only under this version of the +Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee +(becoming Licensor) cannot offer or impose any additional terms or conditions on +the Work or Derivative Work that alter or restrict the terms of the Licence. 
+ +Compatibility clause: If the Licensee Distributes or Communicates Derivative +Works or copies thereof based upon both the Work and another work licensed under +a Compatible Licence, this Distribution or Communication can be done under the +terms of this Compatible Licence. For the sake of this clause, ‘Compatible +Licence’ refers to the licences listed in the appendix attached to this Licence. +Should the Licensee's obligations under the Compatible Licence conflict with +his/her obligations under this Licence, the obligations of the Compatible +Licence shall prevail. + +Provision of Source Code: When distributing or communicating copies of the Work, +the Licensee will provide a machine-readable copy of the Source Code or indicate +a repository where this Source will be easily and freely available for as long +as the Licensee continues to distribute or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade names, +trademarks, service marks, or names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + +6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted +hereunder is owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she brings +to the Work are owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under the +terms of this Licence. + +7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +Contributors. 
It is not a finished work and may therefore contain defects or +‘bugs’ inherent to this type of development. + +For the above reason, the Work is provided under the Licence on an ‘as is’ basis +and without warranties of any kind concerning the Work, including without +limitation merchantability, fitness for a particular purpose, absence of defects +or errors, accuracy, non-infringement of intellectual property rights other than +copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a condition +for the grant of any rights to the Work. + +8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural +persons, the Licensor will in no event be liable for any direct or indirect, +material or moral, damages of any kind, arising out of the Licence or of the use +of the Work, including without limitation, damages for loss of goodwill, work +stoppage, computer failure or malfunction, loss of data or any commercial +damage, even if the Licensor has been advised of the possibility of such damage. +However, the Licensor will be liable under statutory product liability laws as +far such laws apply to the Work. + +9. Additional agreements + +While distributing the Work, You may choose to conclude an additional agreement, +defining obligations or services consistent with this Licence. However, if +accepting obligations, You may act only on your own behalf and on your sole +responsibility, not on behalf of the original Licensor or any other Contributor, +and only if You agree to indemnify, defend, and hold each Contributor harmless +for any liability incurred by, or claims asserted against such Contributor by +the fact You have accepted any warranty or additional liability. + +10. 
Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon ‘I agree’ +placed under the bottom of a window displaying the text of this Licence or by +affirming consent in any other similar way, in accordance with the rules of +applicable law. Clicking on that icon indicates your clear and irrevocable +acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and +conditions by exercising any rights granted to You by Article 2 of this Licence, +such as the use of the Work, the creation by You of a Derivative Work or the +Distribution or Communication by You of the Work or copies thereof. + +11. Information to the public + +In case of any Distribution or Communication of the Work by means of electronic +communication by You (for example, by offering to download the Work from a +remote location) the distribution channel or media (for example, a website) must +at least provide to the public the information requested by the applicable law +regarding the Licensor, the Licence and the way it may be accessible, concluded, +stored and reproduced by the Licensee. + +12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon +any breach by the Licensee of the terms of the Licence. + +Such a termination will not terminate the licences of any person who has +received the Work from the Licensee under the Licence, provided such persons +remain in full compliance with the Licence. + +13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. Such provision will be construed or reformed so as necessary to make it +valid and enforceable. 
+ +The European Commission may publish other linguistic versions or new versions of +this Licence or updated versions of the Appendix, so far this is required and +reasonable, without reducing the scope of the rights granted by the Licence. New +versions of the Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version of +their choice. + +14. Jurisdiction + +Without prejudice to specific agreement between parties, + +- any litigation resulting from the interpretation of this License, arising + between the European Union institutions, bodies, offices or agencies, as a + Licensor, and any Licensee, will be subject to the jurisdiction of the Court + of Justice of the European Union, as laid down in article 272 of the Treaty on + the Functioning of the European Union, + +- any litigation arising between other parties and resulting from the + interpretation of this License, will be subject to the exclusive jurisdiction + of the competent court where the Licensor resides or conducts its primary + business. + +15. Applicable Law + +Without prejudice to specific agreement between parties, + +- this Licence shall be governed by the law of the European Union Member State + where the Licensor has his seat, resides or has his registered office, + +- this licence shall be governed by Belgian law if the Licensor has no seat, + residence or registered office inside a European Union Member State. + +Appendix + +‘Compatible Licences’ according to Article 5 EUPL are: + +- GNU General Public License (GPL) v. 2, v. 3 +- GNU Affero General Public License (AGPL) v. 3 +- Open Software License (OSL) v. 2.1, v. 3.0 +- Eclipse Public License (EPL) v. 1.0 +- CeCILL v. 2.0, v. 2.1 +- Mozilla Public Licence (MPL) v. 2 +- GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3 +- Creative Commons Attribution-ShareAlike v. 
3.0 Unported (CC BY-SA 3.0) for + works other than software +- European Union Public Licence (EUPL) v. 1.1, v. 1.2 +- Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong + Reciprocity (LiLiQ-R+). + +The European Commission may update this Appendix to later versions of the above +licences without producing a new version of the EUPL, as long as they provide +the rights granted in Article 2 of this Licence and protect the covered Source +Code from exclusive appropriation. + +All other changes or additions to this Appendix require the production of a new +EUPL version. diff --git a/README.rst b/README.rst index ca5ba46..ba8748a 100644 --- a/README.rst +++ b/README.rst @@ -4,6 +4,21 @@ The Null Pointer This is a no-bullshit file hosting and URL shortening service that also runs `0x0.st `_. Use with uWSGI. +Configuration +------------- + +To configure 0x0, copy ``instance/config.example.py`` to ``instance/config.py``, then edit +it. Reasonable defaults are set, but there are a couple of options you'll need to change +before running 0x0 for the first time. + +By default, the configuration is stored in the Flask instance directory. +Normally, this is in `./instance`, but it might be different for your system. +For details, see +`the Flask documentation `_. + +To customize the home and error pages, simply create a ``templates`` directory +in your instance directory and copy any templates you want to modify there. + If you are running nginx, you should use the ``X-Accel-Redirect`` header. To make it work, include this in your nginx config’s ``server`` block:: @@ -11,44 +26,109 @@ To make it work, include this in your nginx config’s ``server`` block:: internal; } -where ``/up`` is whatever you’ve configured as ``FHOST_STORAGE_PATH`` -in ``fhost.py``. +where ``/up`` is whatever you’ve configured as ``FHOST_STORAGE_PATH``. For all other servers, set ``FHOST_USE_X_ACCEL_REDIRECT`` to ``False`` and ``USE_X_SENDFILE`` to ``True``, assuming your server supports this. 
-Otherwise, Flask will serve the file with chunked encoding, which sucks and -should be avoided at all costs. - -To make files expire, simply create a cronjob that runs ``cleanup.py`` every -now and then. - -Before running the service for the first time, run ``./fhost.py db upgrade``. - - -FAQ ---- - -Q: - Will you ever add a web interface with HTML forms? -A: - No. This would without a doubt make it very popular and quickly exceed - my hosting budget unless I started crippling it. - -Q: - What about file management? Will I be able to register an account at some - point? -A: - No. - -Q: - Why are you storing IP addresses with each uploaded file? -A: - This is done to make dealing with legal claims and accidental uploads - easier, e.g. when a user requests removal of all text files uploaded from - a certain address within a given time frame (it happens). - -Q: - Do you accept donations? -A: - Only if you insist. I’ve spent very little time and effort on this service - and I don’t feel like I should be taking money for it. +Otherwise, Flask will serve the file with chunked encoding, which has several +downsides, one of them being that range requests will not work. This is a +problem for example when streaming media files: It won’t be possible to seek, +and some ISOBMFF (MP4) files will not play at all. + +To make files expire, simply run ``FLASK_APP=fhost flask prune`` every +now and then. You can use the provided systemd unit files for this:: + + 0x0-prune.service + 0x0-prune.timer + +Make sure to edit them to match your system configuration. In particular, +set the user and paths in ``0x0-prune.service``. + +Before running the service for the first time and every time you update it +from this git repository, run ``FLASK_APP=fhost flask db upgrade``. + + +Moderation UI +------------- + +.. image:: modui.webp + :height: 300 + +0x0 features a TUI program for file moderation. With it, you can view a list +of uploaded files, as well as extended information on them. 
It allows you to +take actions like removing files temporarily or permanently, as well as +blocking IP addresses and associated files. + +If a sufficiently recent version of python-mpv with libmpv is present and +your terminal supports it, you also get graphical file previews, including +video playback. Upstream mpv currently supports sixels and the +`kitty graphics protocol `_. +For this to work, set the ``MOD_PREVIEW_PROTO`` option in ``instance/config.py``. + +Requirements: + +* `Textual `_ + +Optional: + +* `python-mpv `_ + (graphical previews) +* `PyAV `_ + (information on multimedia files) +* `PyMuPDF `_ + (previews and file information for PDF, XPS, EPUB, MOBI and FB2) +* `libarchive-c `_ + (archive content listing) + +.. note:: + `Mosh `_ currently does not support sixels or kitty graphics. + +.. hint:: + You may need to set the ``COLORTERM`` environment variable to + ``truecolor``. + +.. tip:: + Using compression with SSH (``-C`` option) can significantly + reduce the bandwidth requirements for graphics. + + +NSFW Detection +-------------- + +0x0 supports classification of NSFW content via Yahoo’s open_nsfw Caffe +neural network model. This works for images and video files and requires +the following: + +* Caffe Python module (built for Python 3) +* `PyAV `_ + + +Virus Scanning +-------------- + +0x0 can scan its files with ClamAV’s daemon. As this can take a long time +for larger files, this does not happen immediately but instead every time +you run the ``vscan`` command. It is recommended to configure a systemd +timer or cronjob to do this periodically. Examples are included:: + + 0x0-vscan.service + 0x0-vscan.timer + +Remember to adjust your size limits in clamd.conf, including +``StreamMaxLength``! + +This feature requires the `clamd module `_. + + +Network Security Considerations +------------------------------- + +Keep in mind that 0x0 can fetch files from URLs. This includes your local +network! 
You should take precautions so that this feature cannot be abused. +0x0 does not (yet) have a way to filter remote URLs, but on Linux, you can +use firewall rules and/or namespaces. This is less error-prone anyway. + +For instance, if you are using the excellent `FireHOL `_, +it’s very easy to create a group on your system and use it as a condition +in your firewall rules. You would then run the application server under that +group. diff --git a/cleanup.py b/cleanup.py index 401140a..14fbc61 100755 --- a/cleanup.py +++ b/cleanup.py @@ -1,23 +1,8 @@ #!/usr/bin/env python3 -import os, sys, time, datetime -from fhost import app - -os.chdir(os.path.dirname(sys.argv[0])) -os.chdir(app.config["FHOST_STORAGE_PATH"]) - -files = [f for f in os.listdir(".")] - -maxs = app.config["MAX_CONTENT_LENGTH"] -mind = 30 -maxd = 365 - -for f in files: - stat = os.stat(f) - systime = time.time() - age = datetime.timedelta(seconds = systime - stat.st_mtime).days - - maxage = mind + (-maxd + mind) * (stat.st_size / maxs - 1) ** 3 - - if age >= maxage: - os.remove(f) +print("This script has been replaced!!") +print("Instead, please run") +print("") +print(" $ FLASK_APP=fhost flask prune") +print("") +exit(1); diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..0eb7fcb --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,47 @@ +#! 
/usr/bin/env sh +set -e + +/uwsgi-nginx-entrypoint.sh + +# Get the URL for static files from the environment variable +USE_STATIC_URL=${STATIC_URL:-'/static'} +# Get the absolute path of the static files from the environment variable +USE_STATIC_PATH=${STATIC_PATH:-'/app/static'} +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server" root /app;\n" + content_server=$content_server' location / {\n' + content_server=$content_server' try_files $uri @app;\n' + content_server=$content_server' }\n' + content_server=$content_server' location @app {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server" location $USE_STATIC_URL {\n" + content_server=$content_server" alias $USE_STATIC_PATH;\n" + content_server=$content_server' }\n' + # If STATIC_INDEX is 1, serve / with /static/index.html directly (or the static URL configured) + if [ "$STATIC_INDEX" = 1 ] ; then + content_server=$content_server' location = / {\n' + content_server=$content_server" index $USE_STATIC_URL/index.html;\n" + content_server=$content_server' }\n' + fi + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf +fi + +# For Alpine: +# Explicitly add installed Python packages and uWSGI Python packages to PYTHONPATH +# Otherwise uWSGI can't import Flask +if [ -n "$ALPINEPYTHON" ] ; then + export PYTHONPATH=$PYTHONPATH:/usr/local/lib/$ALPINEPYTHON/site-packages:/usr/lib/$ALPINEPYTHON/site-packages +fi + +exec "$@" diff --git a/fhost.py b/fhost.py index 5e35104..8201954 100755 --- a/fhost.py +++ b/fhost.py @@ -1,48 +1,99 @@ 
#!/usr/bin/env python3 # -*- coding: utf-8 -*- -from flask import Flask, abort, escape, make_response, redirect, request, send_from_directory, url_for, Response +""" + Copyright © 2020 Mia Herkt + Licensed under the EUPL, Version 1.2 or - as soon as approved + by the European Commission - subsequent versions of the EUPL + (the "License"); + You may not use this work except in compliance with the License. + You may obtain a copy of the license at: + + https://joinup.ec.europa.eu/software/page/eupl + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + either express or implied. + See the License for the specific language governing permissions + and limitations under the License. +""" + +from flask import Flask, abort, make_response, redirect, request, send_from_directory, url_for, Response, render_template from flask_sqlalchemy import SQLAlchemy -from flask_script import Manager -from flask_migrate import Migrate, MigrateCommand +from flask_migrate import Migrate +from sqlalchemy import and_, or_ +from jinja2.exceptions import * +from jinja2 import ChoiceLoader, FileSystemLoader from hashlib import sha256 -from humanize import naturalsize from magic import Magic from mimetypes import guess_extension -import os, sys +import click +import os +import sys +import time +import datetime +import typing import requests -from short_url import UrlEncoder +import secrets from validators import url as url_valid - -app = Flask(__name__) -app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False - -app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db.sqlite" # "postgresql://0x0@/0x0" -app.config["PREFERRED_URL_SCHEME"] = "https" # nginx users: make sure to have 'uwsgi_param UWSGI_SCHEME $scheme;' in your config -app.config["MAX_CONTENT_LENGTH"] = 256 * 1024 * 1024 -app.config["MAX_URL_LENGTH"] = 4096 -app.config["FHOST_STORAGE_PATH"] = "up" 
-app.config["FHOST_USE_X_ACCEL_REDIRECT"] = True # expect nginx by default -app.config["USE_X_SENDFILE"] = False -app.config["FHOST_EXT_OVERRIDE"] = { - "image/gif" : ".gif", - "image/jpeg" : ".jpg", - "image/png" : ".png", - "image/svg+xml" : ".svg", - "video/webm" : ".webm", - "video/x-matroska" : ".mkv", - "application/octet-stream" : ".bin", - "text/plain" : ".txt" -} - -# default blacklist to avoid AV mafia extortion -app.config["FHOST_MIME_BLACKLIST"] = [ - "application/x-dosexec", - "application/java-archive", - "application/java-vm" -] - -app.config["FHOST_UPLOAD_BLACKLIST"] = "tornodes.txt" +from pathlib import Path + +app = Flask(__name__, instance_relative_config=True) +app.config.update( + SQLALCHEMY_TRACK_MODIFICATIONS = False, + PREFERRED_URL_SCHEME = "https", # nginx users: make sure to have 'uwsgi_param UWSGI_SCHEME $scheme;' in your config + MAX_CONTENT_LENGTH = 256 * 1024 * 1024, + MAX_URL_LENGTH = 4096, + USE_X_SENDFILE = False, + FHOST_USE_X_ACCEL_REDIRECT = True, # expect nginx by default + FHOST_STORAGE_PATH = "up", + FHOST_MAX_EXT_LENGTH = 9, + FHOST_SECRET_BYTES = 16, + FHOST_EXT_OVERRIDE = { + "audio/flac" : ".flac", + "image/gif" : ".gif", + "image/jpeg" : ".jpg", + "image/png" : ".png", + "image/svg+xml" : ".svg", + "video/webm" : ".webm", + "video/x-matroska" : ".mkv", + "application/octet-stream" : ".bin", + "text/plain" : ".log", + "text/plain" : ".txt", + "text/x-diff" : ".diff", + }, + FHOST_MIME_BLACKLIST = [ + "application/x-dosexec", + "application/java-archive", + "application/java-vm" + ], + FHOST_UPLOAD_BLACKLIST = None, + NSFW_DETECT = False, + NSFW_THRESHOLD = 0.608, + VSCAN_SOCKET = None, + VSCAN_QUARANTINE_PATH = "quarantine", + VSCAN_IGNORE = [ + "Eicar-Test-Signature", + "PUA.Win.Packer.XmMusicFile", + ], + VSCAN_INTERVAL = datetime.timedelta(days=7), + URL_ALPHABET = "DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMXy6Vx-", +) + +if not app.config["TESTING"]: + app.config.from_pyfile("config.py") + 
app.jinja_loader = ChoiceLoader([ + FileSystemLoader(str(Path(app.instance_path) / "templates")), + app.jinja_loader + ]) + + if app.config["DEBUG"]: + app.config["FHOST_USE_X_ACCEL_REDIRECT"] = False + +if app.config["NSFW_DETECT"]: + from nsfw_detect import NSFWDetector + nsfw = NSFWDetector() try: mimedetect = Magic(mime=True, mime_encoding=False) @@ -51,18 +102,11 @@ Please install python-magic.""") sys.exit(1) -if not os.path.exists(app.config["FHOST_STORAGE_PATH"]): - os.mkdir(app.config["FHOST_STORAGE_PATH"]) - db = SQLAlchemy(app) migrate = Migrate(app, db) -manager = Manager(app) -manager.add_command("db", MigrateCommand) - -su = UrlEncoder(alphabet='DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMXy6Vx-', block_size=16) - class URL(db.Model): + __tablename__ = "URL" id = db.Column(db.Integer, primary_key = True) url = db.Column(db.UnicodeText, unique = True) @@ -70,7 +114,20 @@ def __init__(self, url): self.url = url def getname(self): - return su.enbase(self.id, 1) + return su.enbase(self.id) + + def geturl(self): + return url_for("get", path=self.getname(), _external=True) + "\n" + + def get(url): + u = URL.query.filter_by(url=url).first() + + if not u: + u = URL(url) + db.session.add(u) + db.session.commit() + + return u class File(db.Model): id = db.Column(db.Integer, primary_key = True) @@ -78,23 +135,201 @@ class File(db.Model): ext = db.Column(db.UnicodeText) mime = db.Column(db.UnicodeText) addr = db.Column(db.UnicodeText) + ua = db.Column(db.UnicodeText) removed = db.Column(db.Boolean, default=False) - - def __init__(self, sha256, ext, mime, addr): + nsfw_score = db.Column(db.Float) + expiration = db.Column(db.BigInteger) + mgmt_token = db.Column(db.String) + secret = db.Column(db.String) + last_vscan = db.Column(db.DateTime) + size = db.Column(db.BigInteger) + + def __init__(self, sha256, ext, mime, addr, ua, expiration, mgmt_token): self.sha256 = sha256 self.ext = ext self.mime = mime self.addr = addr + self.ua = ua + self.expiration 
= expiration
+        self.mgmt_token = mgmt_token
+
+    @property
+    def is_nsfw(self) -> bool:
+        return self.nsfw_score and self.nsfw_score > app.config["NSFW_THRESHOLD"]
 
     def getname(self):
-        return u"{0}{1}".format(su.enbase(self.id, 1), self.ext)
+        return u"{0}{1}".format(su.enbase(self.id), self.ext)
 
+    def geturl(self):
+        n = self.getname()
 
-def getpath(fn):
-    return os.path.join(app.config["FHOST_STORAGE_PATH"], fn)
+        if self.is_nsfw:
+            return url_for("get", path=n, secret=self.secret, _external=True, _anchor="nsfw") + "\n"
+        else:
+            return url_for("get", path=n, secret=self.secret, _external=True) + "\n"
+
+    def getpath(self) -> Path:
+        return Path(app.config["FHOST_STORAGE_PATH"]) / self.sha256
+
+    def delete(self, permanent=False):
+        self.expiration = None
+        self.mgmt_token = None
+        self.removed = permanent
+        self.getpath().unlink(missing_ok=True)
+
+    # Returns the epoch millisecond that a file should expire
+    #
+    # Uses the expiration time provided by the user (requested_expiration)
+    # upper-bounded by an algorithm that computes a maximum lifespan based on
+    # the size of the file.
+    #
+    # That is, all files are assigned a computed expiration, which can be
+    # voluntarily shortened by the user either by providing a timestamp in
+    # epoch millis or a duration in hours.
+    def get_expiration(requested_expiration, size) -> int:
+        current_epoch_millis = time.time() * 1000;
+
+        # Maximum lifetime of the file in milliseconds
+        this_files_max_lifespan = get_max_lifespan(size);
+
+        # The latest allowed expiration date for this file, in epoch millis
+        this_files_max_expiration = this_files_max_lifespan + 1000 * time.time();
+
+        if requested_expiration is None:
+            return this_files_max_expiration
+        elif requested_expiration < 1650460320000:
+            # Treat the requested expiration time as a duration in hours
+            requested_expiration_ms = requested_expiration * 60 * 60 * 1000
+            return min(this_files_max_expiration, current_epoch_millis + requested_expiration_ms)
+        else:
+            # Treat the requested expiration time as a timestamp in epoch millis
+            return min(this_files_max_expiration, requested_expiration)
+
+    """
+    requested_expiration can be:
+      - None, to use the longest allowed file lifespan
+      - a duration (in hours) that the file should live for
+      - a timestamp in epoch millis that the file should expire at
+
+    Any value greater than the longest allowed file lifespan will be rounded down
+    to that value.
+ """ + def store(file_, requested_expiration: typing.Optional[int], addr, ua, secret: bool): + data = file_.read() + digest = sha256(data).hexdigest() + + def get_mime(): + guess = mimedetect.from_buffer(data) + app.logger.debug(f"MIME - specified: '{file_.content_type}' - detected: '{guess}'") + + if not file_.content_type or not "/" in file_.content_type or file_.content_type == "application/octet-stream": + mime = guess + else: + mime = file_.content_type + + if mime in app.config["FHOST_MIME_BLACKLIST"] or guess in app.config["FHOST_MIME_BLACKLIST"]: + abort(415) + + if len(mime) > 128: + abort(400) -def geturl(p): - return url_for("get", path=p, _external=True) + "\n" + if mime.startswith("text/") and not "charset" in mime: + mime += "; charset=utf-8" + + return mime + + def get_ext(mime): + ext = "".join(Path(file_.filename).suffixes[-2:]) + if len(ext) > app.config["FHOST_MAX_EXT_LENGTH"]: + ext = Path(file_.filename).suffixes[-1] + gmime = mime.split(";")[0] + guess = guess_extension(gmime) + + app.logger.debug(f"extension - specified: '{ext}' - detected: '{guess}'") + + if not ext: + if gmime in app.config["FHOST_EXT_OVERRIDE"]: + ext = app.config["FHOST_EXT_OVERRIDE"][gmime] + elif guess: + ext = guess + else: + ext = "" + + return ext[:app.config["FHOST_MAX_EXT_LENGTH"]] or ".bin" + + expiration = File.get_expiration(requested_expiration, len(data)) + isnew = True + + f = File.query.filter_by(sha256=digest).first() + if f: + # If the file already exists + if f.removed: + # The file was removed by moderation, so don't accept it back + abort(451) + if f.expiration is None: + # The file has expired, so give it a new expiration date + f.expiration = expiration + + # Also generate a new management token + f.mgmt_token = secrets.token_urlsafe() + else: + # The file already exists, update the expiration if needed + f.expiration = max(f.expiration, expiration) + isnew = False + else: + mime = get_mime() + ext = get_ext(mime) + mgmt_token = 
secrets.token_urlsafe() + f = File(digest, ext, mime, addr, ua, expiration, mgmt_token) + + f.addr = addr + f.ua = ua + + if isnew: + f.secret = None + if secret: + f.secret = secrets.token_urlsafe(app.config["FHOST_SECRET_BYTES"]) + + storage = Path(app.config["FHOST_STORAGE_PATH"]) + storage.mkdir(parents=True, exist_ok=True) + p = storage / digest + + if not p.is_file(): + with open(p, "wb") as of: + of.write(data) + + f.size = len(data) + + if not f.nsfw_score and app.config["NSFW_DETECT"]: + f.nsfw_score = nsfw.detect(str(p)) + + db.session.add(f) + db.session.commit() + return f, isnew + + +class UrlEncoder(object): + def __init__(self,alphabet, min_length): + self.alphabet = alphabet + self.min_length = min_length + + def enbase(self, x): + n = len(self.alphabet) + str = "" + while x > 0: + str = (self.alphabet[int(x % n)]) + str + x = int(x // n) + padding = self.alphabet[0] * (self.min_length - len(str)) + return '%s%s' % (padding, str) + + def debase(self, x): + n = len(self.alphabet) + result = 0 + for i, c in enumerate(reversed(x)): + result += self.alphabet.index(c) * (n ** i) + return result + +su = UrlEncoder(alphabet=app.config["URL_ALPHABET"], min_length=1) def fhost_url(scheme=None): if not scheme: @@ -112,20 +347,13 @@ def shorten(url): if not url_valid(url) or is_fhost_url(url) or "\n" in url: abort(400) - existing = URL.query.filter_by(url=url).first() + u = URL.get(url) - if existing: - return geturl(existing.getname()) - else: - u = URL(url) - db.session.add(u) - db.session.commit() - - return geturl(u.getname()) + return u.geturl() def in_upload_bl(addr): - if os.path.isfile(app.config["FHOST_UPLOAD_BLACKLIST"]): - with open(app.config["FHOST_UPLOAD_BLACKLIST"], "r") as bl: + if app.config["FHOST_UPLOAD_BLACKLIST"]: + with app.open_instance_resource(app.config["FHOST_UPLOAD_BLACKLIST"], "r") as bl: check = addr.lstrip("::ffff:") for l in bl.readlines(): if not l.startswith("#"): @@ -134,72 +362,35 @@ def in_upload_bl(addr): return False -def 
store_file(f, addr): +""" +requested_expiration can be: + - None, to use the longest allowed file lifespan + - a duration (in hours) that the file should live for + - a timestamp in epoch millis that the file should expire at + +Any value greater that the longest allowed file lifespan will be rounded down to that +value. +""" +def store_file(f, requested_expiration: typing.Optional[int], addr, ua, secret: bool): if in_upload_bl(addr): return "Your host is blocked from uploading files.\n", 451 - data = f.stream.read() - digest = sha256(data).hexdigest() - existing = File.query.filter_by(sha256=digest).first() - - if existing: - if existing.removed: - return legal() + sf, isnew = File.store(f, requested_expiration, addr, ua, secret) - epath = getpath(existing.sha256) + response = make_response(sf.geturl()) + response.headers["X-Expires"] = sf.expiration - if not os.path.exists(epath): - with open(epath, "wb") as of: - of.write(data) + if isnew: + response.headers["X-Token"] = sf.mgmt_token - os.utime(epath, None) - existing.addr = addr - db.session.commit() + return response - return geturl(existing.getname()) - else: - guessmime = mimedetect.from_buffer(data) - - if not f.content_type or not "/" in f.content_type or f.content_type == "application/octet-stream": - mime = guessmime - else: - mime = f.content_type - - if mime in app.config["FHOST_MIME_BLACKLIST"] or guessmime in app.config["FHOST_MIME_BLACKLIST"]: - abort(415) - - if mime.startswith("text/") and not "charset" in mime: - mime += "; charset=utf-8" - - ext = os.path.splitext(f.filename)[1] - - if not ext: - gmime = mime.split(";")[0] - - if not gmime in app.config["FHOST_EXT_OVERRIDE"]: - ext = guess_extension(gmime) - else: - ext = app.config["FHOST_EXT_OVERRIDE"][gmime] - else: - ext = ext[:8] - - if not ext: - ext = ".bin" - - with open(getpath(digest), "wb") as of: - of.write(data) - - sf = File(digest, ext, mime, addr) - db.session.add(sf) - db.session.commit() - - return geturl(sf.getname()) - -def 
store_url(url, addr): +def store_url(url, addr, ua, secret: bool): if is_fhost_url(url): - return segfault(508) + abort(400) - r = requests.get(url, stream=True, verify=False) + h = { "Accept-Encoding" : "identity" } + r = requests.get(url, stream=True, verify=False, headers=h) try: r.raise_for_status() @@ -209,49 +400,87 @@ def store_url(url, addr): if "content-length" in r.headers: l = int(r.headers["content-length"]) - if l < app.config["MAX_CONTENT_LENGTH"]: + if l <= app.config["MAX_CONTENT_LENGTH"]: def urlfile(**kwargs): return type('',(),kwargs)() - f = urlfile(stream=r.raw, content_type=r.headers["content-type"], filename="") + f = urlfile(read=r.raw.read, content_type=r.headers["content-type"], filename="") - return store_file(f, addr) + return store_file(f, None, addr, ua, secret) else: - hl = naturalsize(l, binary = True) - hml = naturalsize(app.config["MAX_CONTENT_LENGTH"], binary=True) - - return "Remote file too large ({0} > {1}).\n".format(hl, hml), 413 + abort(413) else: - return "Could not determine remote file size (no Content-Length in response header; shoot admin).\n", 411 + abort(411) + +def manage_file(f): + try: + assert(request.form["token"] == f.mgmt_token) + except: + abort(401) + + if "delete" in request.form: + f.delete() + db.session.commit() + return "" + if "expires" in request.form: + try: + requested_expiration = int(request.form["expires"]) + except ValueError: + abort(400) + + f.expiration = File.get_expiration(requested_expiration, f.size) + db.session.commit() + return "", 202 + + abort(400) + +@app.route("/", methods=["GET", "POST"]) +@app.route("/s//", methods=["GET", "POST"]) +def get(path, secret=None): + p = Path(path.split("/", 1)[0]) + sufs = "".join(p.suffixes[-2:]) + name = p.name[:-len(sufs) or None] + + if "." 
in name: + abort(404) -@app.route("/") -def get(path): - p = os.path.splitext(path) - id = su.debase(p[0]) + id = su.debase(name) - if p[1]: + if sufs: f = File.query.get(id) - if f and f.ext == p[1]: + if f and f.ext == sufs: + if f.secret != secret: + abort(404) + if f.removed: - return legal() + abort(451) - fpath = getpath(f.sha256) + fpath = f.getpath() - if not os.path.exists(fpath): + if not fpath.is_file(): abort(404) - fsize = os.path.getsize(fpath) + if request.method == "POST": + return manage_file(f) if app.config["FHOST_USE_X_ACCEL_REDIRECT"]: response = make_response() response.headers["Content-Type"] = f.mime - response.headers["Content-Length"] = fsize - response.headers["X-Accel-Redirect"] = "/" + fpath - return response + response.headers["Content-Length"] = f.size + response.headers["X-Accel-Redirect"] = "/" + str(fpath) else: - return send_from_directory(app.config["FHOST_STORAGE_PATH"], f.sha256, mimetype = f.mime) + response = send_from_directory(app.config["FHOST_STORAGE_PATH"], f.sha256, mimetype = f.mime) + + response.headers["X-Expires"] = f.expiration + return response else: + if request.method == "POST": + abort(405) + + if "/" in path: + abort(404) + u = URL.query.get(id) if u: @@ -259,108 +488,47 @@ def get(path): abort(404) -@app.route("/dump_urls/") -@app.route("/dump_urls/") -def dump_urls(start=0): - meta = "#FORMAT: BEACON\n#PREFIX: {}/\n\n".format(fhost_url("https")) - - def gen(): - yield meta - - for url in URL.query.order_by(URL.id.asc()).offset(start): - if url.url.startswith("http") or url.url.startswith("https"): - bar = "|" - else: - bar = "||" - - yield url.getname() + bar + url.url + "\n" - - return Response(gen(), mimetype="text/plain") - @app.route("/", methods=["GET", "POST"]) def fhost(): if request.method == "POST": sf = None + secret = "secret" in request.form if "file" in request.files: - return store_file(request.files["file"], request.remote_addr) + try: + # Store the file with the requested expiration date + 
return store_file( + request.files["file"], + int(request.form["expires"]), + request.remote_addr, + request.user_agent.string, + secret + ) + except ValueError: + # The requested expiration date wasn't properly formed + abort(400) + except KeyError: + # No expiration date was requested, store with the max lifespan + return store_file( + request.files["file"], + None, + request.remote_addr, + request.user_agent.string, + secret + ) elif "url" in request.form: - return store_url(request.form["url"], request.remote_addr) + return store_url( + request.form["url"], + request.remote_addr, + request.user_agent.string, + secret + ) elif "shorten" in request.form: return shorten(request.form["shorten"]) abort(400) else: - fmts = list(app.config["FHOST_EXT_OVERRIDE"]) - fmts.sort() - maxsize = naturalsize(app.config["MAX_CONTENT_LENGTH"], binary=True) - maxsizenum, maxsizeunit = maxsize.split(" ") - maxsizenum = float(maxsizenum) - maxsizehalf = maxsizenum / 2 - - if maxsizenum.is_integer(): - maxsizenum = int(maxsizenum) - if maxsizehalf.is_integer(): - maxsizehalf = int(maxsizehalf) - - return """
-THE NULL POINTER
-================
-
-HTTP POST files here:
-    curl -F'file=@yourfile.png' {0}
-You can also POST remote URLs:
-    curl -F'url=http://example.com/image.jpg' {0}
-Or you can shorten URLs:
-    curl -F'shorten=http://example.com/some/long/url' {0}
-
-File URLs are valid for at least 30 days and up to a year (see below).
-Shortened URLs do not expire.
-
-Maximum file size: {1}
-Not allowed: {5}
-
-
-FILE RETENTION PERIOD
----------------------
-
-retention = min_age + (-max_age + min_age) * pow((file_size / max_size - 1), 3)
-
-   days
-    365 |  \\
-        |   \\
-        |    \\
-        |     \\
-        |      \\
-        |       \\
-        |        ..
-        |          \\
-  197.5 | ----------..-------------------------------------------
-        |             ..
-        |               \\
-        |                ..
-        |                  ...
-        |                     ..
-        |                       ...
-        |                          ....
-        |                              ......
-     30 |                                    ....................
-          0{2}{3}
-           {4}
-
-
-ABUSE
------
-
-If you would like to request permanent deletion, please contact lachs0r via
-IRC on Freenode, or send an email to lachs0r@(this domain).
-
-Please allow up to 24 hours for a response.
-
-""".format(fhost_url(), - maxsize, str(maxsizehalf).rjust(27), str(maxsizenum).rjust(27), - maxsizeunit.rjust(54), - ", ".join(app.config["FHOST_MIME_BLACKLIST"])) + return render_template("index.html") @app.route("/robots.txt") def robots(): @@ -368,75 +536,139 @@ def robots(): Disallow: / """ -def legal(): - return "451 Unavailable For Legal Reasons\n", 451 - @app.errorhandler(400) +@app.errorhandler(401) @app.errorhandler(404) +@app.errorhandler(411) +@app.errorhandler(413) @app.errorhandler(414) @app.errorhandler(415) -def segfault(e): - return "Segmentation fault\n", e.code - -@app.errorhandler(404) -def notfound(e): - return u"""
Process {0} stopped
-* thread #1: tid = {0}, {1:#018x}, name = '{2}'
-    frame #0:
-Process {0} stopped
-* thread #8: tid = {0}, {3:#018x} fhost`get(path='{4}') + 27 at fhost.c:139, name = 'fhost/responder', stop reason = invalid address (fault address: 0x30)
-    frame #0: {3:#018x} fhost`get(path='{4}') + 27 at fhost.c:139
-   136   get(SrvContext *ctx, const char *path)
-   137   {{
-   138       StoredObj *obj = ctx->store->query(shurl_debase(path));
--> 139       switch (obj->type) {{
-   140           case ObjTypeFile:
-   141               ctx->serve_file_id(obj->id);
-   142               break;
-(lldb) q
-""".format(os.getpid(), id(app), "fhost", id(get), escape(request.path)), e.code - -@manager.command -def debug(): - app.config["FHOST_USE_X_ACCEL_REDIRECT"] = False - app.run(debug=True, port=4562,host="0.0.0.0") - -@manager.command -def permadelete(name): - id = su.debase(name) - f = File.query.get(id) - - if f: - if os.path.exists(getpath(f.sha256)): - os.remove(getpath(f.sha256)) - f.removed = True - db.session.commit() - -@manager.command -def query(name): - id = su.debase(name) - f = File.query.get(id) - - if f: - print("url: {}".format(f.getname())) - vals = vars(f) +@app.errorhandler(451) +def ehandler(e): + try: + return render_template(f"{e.code}.html", id=id, request=request), e.code + except TemplateNotFound: + return "Segmentation fault\n", e.code + +@app.cli.command("prune") +def prune(): + """ + Clean up expired files + + Deletes any files from the filesystem which have hit their expiration time. This + doesn't remove them from the database, only from the filesystem. It's recommended + that server owners run this command regularly, or set it up on a timer. + """ + current_time = time.time() * 1000; + + # The path to where uploaded files are stored + storage = Path(app.config["FHOST_STORAGE_PATH"]) + + # A list of all files who've passed their expiration times + expired_files = File.query\ + .where( + and_( + File.expiration.is_not(None), + File.expiration < current_time + ) + ) + + files_removed = 0; + + # For every expired file... 
+ for file in expired_files: + # Log the file we're about to remove + file_name = file.getname() + file_hash = file.sha256 + file_path = storage / file_hash + print(f"Removing expired file {file_name} [{file_hash}]") + + # Remove it from the file system + try: + os.remove(file_path) + files_removed += 1; + except FileNotFoundError: + pass # If the file was already gone, we're good + except OSError as e: + print(e) + print( + "\n------------------------------------" + "Encountered an error while trying to remove file {file_path}. Double" + "check to make sure the server is configured correctly, permissions are" + "okay, and everything is ship shape, then try again.") + return; + + # Finally, mark that the file was removed + file.expiration = None; + db.session.commit() + + print(f"\nDone! {files_removed} file(s) removed") + +""" For a file of a given size, determine the largest allowed lifespan of that file + +Based on the current app's configuration: Specifically, the MAX_CONTENT_LENGTH, as well +as FHOST_{MIN,MAX}_EXPIRATION. + +This lifespan may be shortened by a user's request, but no files should be allowed to +expire at a point after this number. + +Value returned is a duration in milliseconds. 
+""" +def get_max_lifespan(filesize: int) -> int: + min_exp = app.config.get("FHOST_MIN_EXPIRATION", 30 * 24 * 60 * 60 * 1000) + max_exp = app.config.get("FHOST_MAX_EXPIRATION", 365 * 24 * 60 * 60 * 1000) + max_size = app.config.get("MAX_CONTENT_LENGTH", 256 * 1024 * 1024) + return min_exp + int((-max_exp + min_exp) * (filesize / max_size - 1) ** 3) + +def do_vscan(f): + if f["path"].is_file(): + with open(f["path"], "rb") as scanf: + try: + f["result"] = list(app.config["VSCAN_SOCKET"].instream(scanf).values())[0] + except: + f["result"] = ("SCAN FAILED", None) + else: + f["result"] = ("FILE NOT FOUND", None) + + return f + +@app.cli.command("vscan") +def vscan(): + if not app.config["VSCAN_SOCKET"]: + print("""Error: Virus scanning enabled but no connection method specified. +Please set VSCAN_SOCKET.""") + sys.exit(1) + + qp = Path(app.config["VSCAN_QUARANTINE_PATH"]) + qp.mkdir(parents=True, exist_ok=True) + + from multiprocessing import Pool + with Pool() as p: + if isinstance(app.config["VSCAN_INTERVAL"], datetime.timedelta): + scandate = datetime.datetime.now() - app.config["VSCAN_INTERVAL"] + res = File.query.filter(or_(File.last_vscan < scandate, + File.last_vscan == None), + File.removed == False) + else: + res = File.query.filter(File.last_vscan == None, File.removed == False) - for v in vals: - if not v.startswith("_sa"): - print("{}: {}".format(v, vals[v])) + work = [{"path" : f.getpath(), "name" : f.getname(), "id" : f.id} for f in res] -@manager.command -def queryhash(h): - f = File.query.filter_by(sha256=h).first() - if f: - query(su.enbase(f.id, 1)) + results = [] + for i, r in enumerate(p.imap_unordered(do_vscan, work)): + if r["result"][0] != "OK": + print(f"{r['name']}: {r['result'][0]} {r['result'][1] or ''}") -@manager.command -def queryaddr(a): - res = File.query.filter_by(addr=a) + found = False + if r["result"][0] == "FOUND": + if not r["result"][1] in app.config["VSCAN_IGNORE"]: + r["path"].rename(qp / r["name"]) + found = True - for f in 
res: - query(su.enbase(f.id, 1)) + results.append({ + "id" : r["id"], + "last_vscan" : None if r["result"][0] == "SCAN FAILED" else datetime.datetime.now(), + "removed" : found}) -if __name__ == "__main__": - manager.run() + db.session.bulk_update_mappings(File, results) + db.session.commit() diff --git a/instance/config.example.py b/instance/config.example.py new file mode 100644 index 0000000..9740ca2 --- /dev/null +++ b/instance/config.example.py @@ -0,0 +1,226 @@ + + + ################################################################################ + # This is a configuration file for 0x0 / The Null Pointer # + # # + # The default values here are set to generally reasonable defaults, but a # + # couple of things need your attention. Specifically, make sure you set # + # SQLALCHEMY_DATABASE_URI. You'll also probably want to configure # + # FHOST_USE_X_SENDFILE and FHOST_USE_X_ACCEL_REDIRECT to match your webserver. # + # # + # Need help, or find anything confusing? Try opening up an issue! # + # https://git.0x0.st/mia/0x0/issues/new # + ################################################################################ + + + +# The database URL for the database 0x0 should use +# +# See https://docs.sqlalchemy.org/en/20/core/engines.html#backend-specific-urls +# for help configuring these for your database. +# +# For small and medium servers, it's plenty sufficient to just use an sqlite +# database. In this case, the database URI you want to use is just +# +# sqlite:/// + /path/to/your/database.db +# +# Until https://git.0x0.st/mia/0x0/issues/70 is resolved, it's recommended that +# any sqlite databases use an absolute path, as relative paths aren't consistently +# resolved. +SQLALCHEMY_DATABASE_URI = 'sqlite:///' + '/path/to/database.sqlite' + + +# The maximum allowable upload size, in bytes +# +# Keep in mind that this affects the expiration of files as well! The closer a +# file is to the max content length, the less time it will last before being +# deleted. 
+MAX_CONTENT_LENGTH = 256 * 1024 * 1024 # Default: 256MiB + + +# The maximum length of URLs we'll shorten, in characters +# +# If a user tries to submit a URL longer than this, we'll reject their request +# with a 414 REQUEST URI TOO LONG. +MAX_URL_LENGTH = 4096 + + +# The minimum and maximum amount of time we'll retain a file for +# +# Small files (nearing zero bytes) are stored for the longest possible expiration date, +# while larger files (nearing MAX_CONTENT_LENGTH bytes) are stored for the shortest amount +# of time. Values between these two extremes are interpolated with an exponential curve, +# like the one shown on the index page. +# +# All times are in milliseconds. If you want all files to be stored for the same amount +# of time, set these to the same value. +FHOST_MIN_EXPIRATION = 30 * 24 * 60 * 60 * 1000 +FHOST_MAX_EXPIRATION = 365 * 24 * 60 * 60 * 1000 + + +# This should be detected automatically when running behind a reverse proxy, but needs +# to be set for URL resolution to work in e.g. the moderation UI. +# SERVER_NAME = "example.com" + + +# Specifies which graphics protocol to use for the media previews in the moderation UI. +# Requires pympv with libmpv >= 0.36.0 and terminal support. +# Available choices are "sixel" and "kitty". +# MOD_PREVIEW_PROTO = "sixel" + + +# Use the X-SENDFILE header to speed up serving files w/ compatible webservers +# +# Some webservers can be configured use the X-Sendfile header to handle sending +# large files on behalf of the application. If your server is setup to do +# this, set this variable to True +USE_X_SENDFILE = False + + +# Use X-Accel-Redirect to speed up serving files w/ compatible webservers +# +# Other webservers, like nginx and Caddy, use the X-Accel-Redirect header to +# accomplish a very similar thing to X-Sendfile (above). 
If your webserver is +# configured to do this, set this variable to True +# +# Note: It's recommended that you use either X-Sendfile or X-Accel-Redirect +# when you deploy in production. +FHOST_USE_X_ACCEL_REDIRECT = True # expect nginx by default + + +# The directory that 0x0 should store uploaded files in +# +# Whenever a file is uploaded to 0x0, we store it here! Relative paths are +# resolved relative to the working directory that 0x0 is being run from. +FHOST_STORAGE_PATH = "up" + + +# The maximum acceptable user-specified file extension +# +# When a user uploads a file, in most cases, we keep the file extension they +# provide. But! If the specified file extension is longer than +# FHOST_MAX_EXT_LENGTH, we truncate it. So if a user tries to upload the file +# "myfile.withareallongext", but FHOST_MAX_EXT_LENGTH is set to 9, then the +# extension that we keep is ".withareal" +FHOST_MAX_EXT_LENGTH = 9 + + +# The number of bytes used for "secret" URLs +# +# When a user uploads a file with the "secret" option, 0x0 generates a string +# from this many bytes of random data. It is base64-encoded, so on average +# each byte results in approximately 1.3 characters. +FHOST_SECRET_BYTES = 16 + +# A list of filetypes to use when the uploader doesn't specify one +# +# When a user uploads a file with no file extension, we try to find an extension that +# works for that file. This configuration option is the first thing that we check. If +# the type of a file without an extension is in this dict, then it'll be used as the file +# extension for that file. Otherwise, we try to pick something sensible from libmagic's +# database. 
+#
+# For example, if the user uploads "myfile" with no extension, and the file is a jpeg
+# image, the file will get a URL like "eAa.jpg"
+#
+# For a list of MIME types you can use in this list, check
+# https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
+FHOST_EXT_OVERRIDE = {
+    "audio/flac" : ".flac",
+    "image/gif" : ".gif",
+    "image/jpeg" : ".jpg",
+    "image/png" : ".png",
+    "image/svg+xml" : ".svg",
+    "video/webm" : ".webm",
+    "video/x-matroska" : ".mkv",
+    "application/octet-stream" : ".bin",
+    "text/plain" : ".log",
+    "text/plain" : ".txt",
+    "text/x-diff" : ".diff",
+}
+
+
+# Control which files aren't allowed to be uploaded
+#
+# Certain kinds of files are never accepted. If the file claims to be one of
+# these types of files, or if we look at the contents of the file and it looks
+# like one of these filetypes, then we reject the file outright with a 415
+# UNSUPPORTED MEDIA EXCEPTION
+FHOST_MIME_BLACKLIST = [
+    "application/x-dosexec",
+    "application/java-archive",
+    "application/java-vm"
+]
+
+
+# A list of IP addresses which are blacklisted from uploading files
+#
+# Can be set to the path of a file with an IP address on each line. The file
+# can also include comment lines using a pound sign (#). Paths are resolved
+# relative to the instance/ directory.
+#
+# If this is set to None, then no IP blacklist will be consulted.
+FHOST_UPLOAD_BLACKLIST = None
+
+
+# Enables support for detecting NSFW images
+#
+# Consult README.md for additional dependencies before setting to True
+NSFW_DETECT = False
+
+
+# The cutoff for when an image is considered NSFW
+#
+# When the NSFW detection algorithm generates an output higher than this
+# number, an image is considered to be NSFW. NSFW images aren't declined, but
+# are marked as NSFW.
+#
+# If NSFW_DETECT is set to False, then this has no effect.
+NSFW_THRESHOLD = 0.608 + + +# If you want to scan files for viruses using ClamAV, specify the socket used +# for connections here. You will need the clamd module. +# Since this can take a very long time on larger files, it is not done +# immediately but every time you run the vscan command. It is recommended to +# configure a systemd timer or cronjob to do this periodically. +# Remember to adjust your size limits in clamd.conf, including StreamMaxLength! +# +# Example: +# from clamd import ClamdUnixSocket +# VSCAN_SOCKET = ClamdUnixSocket("/run/clamav/clamd-socket") + +# This is the directory that files flagged as malicious are moved to. +# Relative paths are resolved relative to the working directory +# of the 0x0 process. +VSCAN_QUARANTINE_PATH = "quarantine" + +# Since updated virus definitions might catch some files that were previously +# reported as clean, you may want to rescan old files periodically. +# Set this to a datetime.timedelta to specify the frequency, or None to +# disable rescanning. +from datetime import timedelta +VSCAN_INTERVAL = timedelta(days=7) + +# Some files flagged by ClamAV are usually not malicious, especially if the +# DetectPUA option is enabled in clamd.conf. This is a list of signatures +# that will be ignored. +VSCAN_IGNORE = [ + "Eicar-Test-Signature", + "PUA.Win.Packer.XmMusicFile", +] + +# A list of all characters which can appear in a URL +# +# If this list is too short, then URLs can very quickly become long. +# Generally, the default value for this should work for basically all usecases. +URL_ALPHABET = "DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMXy6Vx-" + + + ################################################################################# + # CONGRATULATIONS! You made it all the way through! # + # If you want to go even further to customize your instance, try checking out # + # the templates in the templates/ directory to customize your landing page, 404 # + # page, and other error pages. 
# + ################################################################################# + diff --git a/instance/config.py b/instance/config.py new file mode 100644 index 0000000..7105ac4 --- /dev/null +++ b/instance/config.py @@ -0,0 +1,223 @@ + ################################################################################ + # This is a configuration file for 0x0 / The Null Pointer # + # # + # The default values here are set to generally reasonable defaults, but a # + # couple of things need your attention. Specifically, make sure you set # + # SQLALCHEMY_DATABASE_URI. You'll also probably want to configure # + # FHOST_USE_X_SENDFILE and FHOST_USE_X_ACCEL_REDIRECT to match your webserver. # + # # + # Need help, or find anything confusing? Try opening up an issue! # + # https://git.0x0.st/mia/0x0/issues/new # + ################################################################################ + + + +# The database URL for the database 0x0 should use +# +# See https://docs.sqlalchemy.org/en/20/core/engines.html#backend-specific-urls +# for help configuring these for your database. +# +# For small and medium servers, it's plenty sufficient to just use an sqlite +# database. In this case, the database URI you want to use is just +# +# sqlite:/// + /path/to/your/database.db +# +# Until https://git.0x0.st/mia/0x0/issues/70 is resolved, it's recommended that +# any sqlite databases use an absolute path, as relative paths aren't consistently +# resolved. +SQLALCHEMY_DATABASE_URI = 'sqlite:///' + '/app/mnt/database.sqlite' + + +# The maximum allowable upload size, in bytes +# +# Keep in mind that this affects the expiration of files as well! The closer a +# file is to the max content length, the less time it will last before being +# deleted. 
+MAX_CONTENT_LENGTH = 256 * 1024 * 1024 # Default: 256MiB
+
+
+# The maximum length of URLs we'll shorten, in characters
+#
+# If a user tries to submit a URL longer than this, we'll reject their request
+# with a 414 REQUEST URI TOO LONG.
+MAX_URL_LENGTH = 4096
+
+
+# The minimum and maximum amount of time we'll retain a file for
+#
+# Small files (nearing zero bytes) are stored for the longest possible expiration date,
+# while larger files (nearing MAX_CONTENT_LENGTH bytes) are stored for the shortest amount
+# of time. Values between these two extremes are interpolated with an exponential curve,
+# like the one shown on the index page.
+#
+# All times are in milliseconds. If you want all files to be stored for the same amount
+# of time, set these to the same value.
+FHOST_MIN_EXPIRATION = 30 * 24 * 60 * 60 * 1000
+FHOST_MAX_EXPIRATION = 365 * 24 * 60 * 60 * 1000
+
+
+# This should be detected automatically when running behind a reverse proxy, but needs
+# to be set for URL resolution to work in e.g. the moderation UI.
+# SERVER_NAME = "example.com"
+
+
+# Specifies which graphics protocol to use for the media previews in the moderation UI.
+# Requires pympv with libmpv >= 0.36.0 and terminal support.
+# Available choices are "sixel" and "kitty".
+# MOD_PREVIEW_PROTO = "sixel"
+
+
+# Use the X-SENDFILE header to speed up serving files w/ compatible webservers
+#
+# Some webservers can be configured to use the X-Sendfile header to handle sending
+# large files on behalf of the application. If your server is set up to do
+# this, set this variable to True
+USE_X_SENDFILE = False
+
+
+# Use X-Accel-Redirect to speed up serving files w/ compatible webservers
+#
+# Other webservers, like nginx and Caddy, use the X-Accel-Redirect header to
+# accomplish a very similar thing to X-Sendfile (above).
If your webserver is +# configured to do this, set this variable to True +# +# Note: It's recommended that you use either X-Sendfile or X-Accel-Redirect +# when you deploy in production. +FHOST_USE_X_ACCEL_REDIRECT = True # expect nginx by default + + +# The directory that 0x0 should store uploaded files in +# +# Whenever a file is uploaded to 0x0, we store it here! Relative paths are +# resolved relative to the working directory that 0x0 is being run from. +FHOST_STORAGE_PATH = "mnt/up" + + +# The maximum acceptable user-specified file extension +# +# When a user uploads a file, in most cases, we keep the file extension they +# provide. But! If the specified file extension is longer than +# FHOST_MAX_EXT_LENGTH, we truncate it. So if a user tries to upload the file +# "myfile.withareallongext", but FHOST_MAX_EXT_LENGTH is set to 9, then the +# extension that we keep is ".withareal" +FHOST_MAX_EXT_LENGTH = 9 + + +# The number of bytes used for "secret" URLs +# +# When a user uploads a file with the "secret" option, 0x0 generates a string +# from this many bytes of random data. It is base64-encoded, so on average +# each byte results in approximately 1.3 characters. +FHOST_SECRET_BYTES = 16 + +# A list of filetypes to use when the uploader doesn't specify one +# +# When a user uploads a file with no file extension, we try to find an extension that +# works for that file. This configuration option is the first thing that we check. If +# the type of a file without an extension is in this dict, then it'll be used as the file +# extension for that file. Otherwise, we try to pick something sensible from libmagic's +# database. 
#
# For example, if the user uploads "myfile" with no extension, and the file is a jpeg
# image, the file will get a URL like "eAa.jpg"
#
# For a list of MIME types you can use in this list, check
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
FHOST_EXT_OVERRIDE = {
    "audio/flac" : ".flac",
    "image/gif" : ".gif",
    "image/jpeg" : ".jpg",
    "image/png" : ".png",
    "image/svg+xml" : ".svg",
    "video/webm" : ".webm",
    "video/x-matroska" : ".mkv",
    "application/octet-stream" : ".bin",
    # NOTE: "text/plain" was previously listed twice (".log", then ".txt").
    # In a Python dict literal a later duplicate key silently overrides the
    # earlier one, so ".txt" was already the effective mapping; the dead
    # ".log" entry has been removed to make that explicit.
    "text/plain" : ".txt",
    "text/x-diff" : ".diff",
}


# Control which files aren't allowed to be uploaded
#
# Certain kinds of files are never accepted. If the file claims to be one of
# these types of files, or if we look at the contents of the file and it looks
# like one of these filetypes, then we reject the file outright with a 415
# UNSUPPORTED MEDIA TYPE error
FHOST_MIME_BLACKLIST = [
    "application/x-dosexec",
    "application/java-archive",
    "application/java-vm"
]


# A list of IP addresses which are blacklisted from uploading files
#
# Can be set to the path of a file with an IP address on each line. The file
# can also include comment lines using a pound sign (#). Paths are resolved
# relative to the instance/ directory.
#
# If this is set to None, then no IP blacklist will be consulted.
FHOST_UPLOAD_BLACKLIST = None


# Enables support for detecting NSFW images
#
# Consult README.md for additional dependencies before setting to True
NSFW_DETECT = False


# The cutoff for when an image is considered NSFW
#
# When the NSFW detection algorithm generates an output higher than this
# number, an image is considered to be NSFW. NSFW images aren't declined, but
# are marked as NSFW.
#
# If NSFW_DETECT is set to False, then this has no effect.
+NSFW_THRESHOLD = 0.608 + + +# If you want to scan files for viruses using ClamAV, specify the socket used +# for connections here. You will need the clamd module. +# Since this can take a very long time on larger files, it is not done +# immediately but every time you run the vscan command. It is recommended to +# configure a systemd timer or cronjob to do this periodically. +# Remember to adjust your size limits in clamd.conf, including StreamMaxLength! +# +# Example: +# from clamd import ClamdUnixSocket +# VSCAN_SOCKET = ClamdUnixSocket("/run/clamav/clamd-socket") + +# This is the directory that files flagged as malicious are moved to. +# Relative paths are resolved relative to the working directory +# of the 0x0 process. +VSCAN_QUARANTINE_PATH = "quarantine" + +# Since updated virus definitions might catch some files that were previously +# reported as clean, you may want to rescan old files periodically. +# Set this to a datetime.timedelta to specify the frequency, or None to +# disable rescanning. +from datetime import timedelta +VSCAN_INTERVAL = timedelta(days=7) + +# Some files flagged by ClamAV are usually not malicious, especially if the +# DetectPUA option is enabled in clamd.conf. This is a list of signatures +# that will be ignored. +VSCAN_IGNORE = [ + "Eicar-Test-Signature", + "PUA.Win.Packer.XmMusicFile", +] + +# A list of all characters which can appear in a URL +# +# If this list is too short, then URLs can very quickly become long. +# Generally, the default value for this should work for basically all usecases. +URL_ALPHABET = "DEQhd2uFteibPwq0SWBInTpA_jcZL5GKz3YCR14Ulk87Jors9vNHgfaOmMXy6Vx-" + + + ################################################################################# + # CONGRATULATIONS! You made it all the way through! # + # If you want to go even further to customize your instance, try checking out # + # the templates in the templates/ directory to customize your landing page, 404 # + # page, and other error pages. 
# + ################################################################################# diff --git a/migrations/versions/0659d7b9eea8_.py b/migrations/versions/0659d7b9eea8_.py new file mode 100644 index 0000000..2ef2151 --- /dev/null +++ b/migrations/versions/0659d7b9eea8_.py @@ -0,0 +1,26 @@ +"""add file management token + +Revision ID: 0659d7b9eea8 +Revises: 939a08e1d6e5 +Create Date: 2022-11-30 01:06:53.362973 + +""" + +# revision identifiers, used by Alembic. +revision = '0659d7b9eea8' +down_revision = '939a08e1d6e5' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file', sa.Column('mgmt_token', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('file', 'mgmt_token') + # ### end Alembic commands ### diff --git a/migrations/versions/30bfe33aa328_add_file_size_field.py b/migrations/versions/30bfe33aa328_add_file_size_field.py new file mode 100644 index 0000000..e6ac279 --- /dev/null +++ b/migrations/versions/30bfe33aa328_add_file_size_field.py @@ -0,0 +1,46 @@ +"""add file size field + +Revision ID: 30bfe33aa328 +Revises: 5cee97aab219 +Create Date: 2022-12-13 22:32:12.242394 + +""" + +# revision identifiers, used by Alembic. 
+revision = '30bfe33aa328' +down_revision = '5cee97aab219' + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.ext.automap import automap_base +from sqlalchemy.orm import Session +from flask import current_app +from pathlib import Path + +Base = automap_base() + +def upgrade(): + op.add_column('file', sa.Column('size', sa.BigInteger(), nullable=True)) + bind = op.get_bind() + Base.prepare(autoload_with=bind) + File = Base.classes.file + session = Session(bind=bind) + + storage = Path(current_app.config["FHOST_STORAGE_PATH"]) + + updates = [] + files = session.scalars(sa.select(File).where(sa.not_(File.removed))) + for f in files: + p = storage / f.sha256 + if p.is_file(): + updates.append({ + "id" : f.id, + "size" : p.stat().st_size + }) + + session.bulk_update_mappings(File, updates) + session.commit() + + +def downgrade(): + op.drop_column('file', 'size') diff --git a/migrations/versions/5cee97aab219_.py b/migrations/versions/5cee97aab219_.py new file mode 100644 index 0000000..6c1a16b --- /dev/null +++ b/migrations/versions/5cee97aab219_.py @@ -0,0 +1,26 @@ +"""add date of last virus scan + +Revision ID: 5cee97aab219 +Revises: e2e816056589 +Create Date: 2022-12-10 16:39:56.388259 + +""" + +# revision identifiers, used by Alembic. +revision = '5cee97aab219' +down_revision = 'e2e816056589' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file', sa.Column('last_vscan', sa.DateTime(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('file', 'last_vscan') + # ### end Alembic commands ### diff --git a/migrations/versions/7e246705da6a_.py b/migrations/versions/7e246705da6a_.py new file mode 100644 index 0000000..33dbf79 --- /dev/null +++ b/migrations/versions/7e246705da6a_.py @@ -0,0 +1,26 @@ +"""add NSFW score + +Revision ID: 7e246705da6a +Revises: 0cd36ecdd937 +Create Date: 2017-10-27 03:07:48.179290 + +""" + +# revision identifiers, used by Alembic. +revision = '7e246705da6a' +down_revision = '0cd36ecdd937' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file', sa.Column('nsfw_score', sa.Float(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('file', 'nsfw_score') + # ### end Alembic commands ### diff --git a/migrations/versions/939a08e1d6e5_.py b/migrations/versions/939a08e1d6e5_.py new file mode 100644 index 0000000..e389b41 --- /dev/null +++ b/migrations/versions/939a08e1d6e5_.py @@ -0,0 +1,86 @@ +"""add file expirations + +Revision ID: 939a08e1d6e5 +Revises: 7e246705da6a +Create Date: 2022-11-22 12:16:32.517184 + +""" + +# revision identifiers, used by Alembic. +revision = '939a08e1d6e5' +down_revision = '7e246705da6a' + +from alembic import op +from flask import current_app +from flask_sqlalchemy import SQLAlchemy +from pathlib import Path +import sqlalchemy as sa +from sqlalchemy.ext.automap import automap_base +from sqlalchemy.orm import Session + +import os +import time + +""" For a file of a given size, determine the largest allowed lifespan of that file + +Based on the current app's configuration: Specifically, the MAX_CONTENT_LENGTH, as well +as FHOST_{MIN,MAX}_EXPIRATION. + +This lifespan may be shortened by a user's request, but no files should be allowed to +expire at a point after this number. + +Value returned is a duration in milliseconds. 
+""" +def get_max_lifespan(filesize: int) -> int: + min_exp = current_app.config.get("FHOST_MIN_EXPIRATION", 30 * 24 * 60 * 60 * 1000) + max_exp = current_app.config.get("FHOST_MAX_EXPIRATION", 365 * 24 * 60 * 60 * 1000) + max_size = current_app.config.get("MAX_CONTENT_LENGTH", 256 * 1024 * 1024) + return min_exp + int((-max_exp + min_exp) * (filesize / max_size - 1) ** 3) + +Base = automap_base() + +def upgrade(): + op.add_column('file', sa.Column('expiration', sa.BigInteger())) + + bind = op.get_bind() + Base.prepare(autoload_with=bind) + File = Base.classes.file + session = Session(bind=bind) + + storage = Path(current_app.config["FHOST_STORAGE_PATH"]) + current_time = time.time() * 1000; + + # List of file hashes which have not expired yet + # This could get really big for some servers + try: + unexpired_files = os.listdir(storage) + except FileNotFoundError: + return # There are no currently unexpired files + + # Calculate an expiration date for all existing files + + q = session.scalars( + sa.select(File) + .where( + sa.not_(File.removed) + ) + ) + updates = [] # We coalesce updates to the database here + + # SQLite has a hard limit on the number of variables so we + # need to do this the slow way + files = [f for f in q if f.sha256 in unexpired_files] + + for file in files: + file_path = storage / file.sha256 + stat = os.stat(file_path) + max_age = get_max_lifespan(stat.st_size) # How long the file is allowed to live, in ms + file_birth = stat.st_mtime * 1000 # When the file was created, in ms + updates.append({'id': file.id, 'expiration': int(file_birth + max_age)}) + + # Apply coalesced updates + session.bulk_update_mappings(File, updates) + session.commit() + +def downgrade(): + op.drop_column('file', 'expiration') diff --git a/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py b/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py new file mode 100644 index 0000000..4af7680 --- /dev/null +++ 
b/migrations/versions/dd0766afb7d2_store_user_agent_string_with_files.py @@ -0,0 +1,30 @@ +"""Store user agent string with files + +Revision ID: dd0766afb7d2 +Revises: 30bfe33aa328 +Create Date: 2023-03-29 07:18:49.113200 + +""" + +# revision identifiers, used by Alembic. +revision = 'dd0766afb7d2' +down_revision = '30bfe33aa328' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('file', schema=None) as batch_op: + batch_op.add_column(sa.Column('ua', sa.UnicodeText(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('file', schema=None) as batch_op: + batch_op.drop_column('ua') + + # ### end Alembic commands ### diff --git a/migrations/versions/e2e816056589_.py b/migrations/versions/e2e816056589_.py new file mode 100644 index 0000000..7c31ba9 --- /dev/null +++ b/migrations/versions/e2e816056589_.py @@ -0,0 +1,26 @@ +"""add URL secret + +Revision ID: e2e816056589 +Revises: 0659d7b9eea8 +Create Date: 2022-12-01 02:16:15.976864 + +""" + +# revision identifiers, used by Alembic. +revision = 'e2e816056589' +down_revision = '0659d7b9eea8' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('file', sa.Column('secret', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('file', 'secret') + # ### end Alembic commands ### diff --git a/mod.css b/mod.css new file mode 100644 index 0000000..c001ef1 --- /dev/null +++ b/mod.css @@ -0,0 +1,56 @@ +#ftable { + width: 1fr; +} + +#infopane { + width: 50%; + outline-top: hkey $primary; + background: $panel; +} + +#finfo { + background: $boost; + height: 12; + width: 1fr; + box-sizing: content-box; +} + +#mpv { + display: none; + height: 20%; + width: 1fr; + content-align: center middle; +} + +#ftextlog { + height: 1fr; + width: 1fr; +} + +#filter_container { + height: auto; + display: none; +} + +#filter_label { + content-align: right middle; + height: 1fr; + width: 20%; + margin: 0 1 0 2; +} + +#filter_input { + width: 1fr; +} + +Notification { + dock: bottom; + layer: notification; + width: auto; + margin: 2 4; + padding: 1 2; + background: $background; + color: $text; + height: auto; + +} diff --git a/mod.py b/mod.py new file mode 100755 index 0000000..0748a42 --- /dev/null +++ b/mod.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python3 + +from itertools import zip_longest +from sys import stdout +import time + +from textual.app import App, ComposeResult +from textual.widgets import DataTable, Header, Footer, TextLog, Static, Input +from textual.containers import Horizontal, Vertical +from textual.screen import Screen +from textual import log +from rich.text import Text +from jinja2.filters import do_filesizeformat + +from fhost import db, File, su, app as fhost_app, in_upload_bl +from modui import * + +fhost_app.app_context().push() + +class NullptrMod(Screen): + BINDINGS = [ + ("q", "quit_app", "Quit"), + ("f1", "filter(1, 'Lookup name:')", "Lookup name"), + ("f2", "filter(2, 'Filter IP address:')", "Filter IP"), + ("f3", "filter(3, 'Filter MIME Type:')", "Filter MIME"), + ("f4", "filter(4, 'Filter extension:')", "Filter Ext."), + ("f5", "refresh", "Refresh"), + ("f6", "filter_clear", "Clear filter"), + ("f7", "filter(5, 'Filter user agent:')", "Filter UA"), + ("r", 
"remove_file(False)", "Remove file"), + ("ctrl+r", "remove_file(True)", "Ban file"), + ("p", "ban_ip(False)", "Ban IP"), + ("ctrl+p", "ban_ip(True)", "Nuke IP"), + ] + + async def action_quit_app(self): + self.mpvw.shutdown() + await self.app.action_quit() + + def action_refresh(self): + ftable = self.query_one("#ftable") + ftable.watch_query(None, None) + + def action_filter_clear(self): + self.query_one("#filter_container").display = False + ftable = self.query_one("#ftable") + ftable.focus() + ftable.query = ftable.base_query + + def action_filter(self, fcol: int, label: str): + self.query_one("#filter_label").update(label) + finput = self.query_one("#filter_input") + self.filter_col = fcol + self.query_one("#filter_container").display = True + finput.focus() + self._refresh_layout() + + if self.current_file: + match fcol: + case 1: finput.value = "" + case 2: finput.value = self.current_file.addr + case 3: finput.value = self.current_file.mime + case 4: finput.value = self.current_file.ext + case 5: finput.value = self.current_file.ua or "" + + def on_input_submitted(self, message: Input.Submitted) -> None: + self.query_one("#filter_container").display = False + ftable = self.query_one("#ftable") + ftable.focus() + + if len(message.value): + match self.filter_col: + case 1: + try: ftable.query = ftable.base_query.filter(File.id == su.debase(message.value)) + except ValueError: pass + case 2: ftable.query = ftable.base_query.filter(File.addr.like(message.value)) + case 3: ftable.query = ftable.base_query.filter(File.mime.like(message.value)) + case 4: ftable.query = ftable.base_query.filter(File.ext.like(message.value)) + case 5: ftable.query = ftable.base_query.filter(File.ua.like(message.value)) + else: + ftable.query = ftable.base_query + + def action_remove_file(self, permanent: bool) -> None: + if self.current_file: + self.current_file.delete(permanent) + db.session.commit() + self.mount(Notification(f"{'Banned' if permanent else 'Removed'} file 
{self.current_file.getname()}")) + self.action_refresh() + + def action_ban_ip(self, nuke: bool) -> None: + if self.current_file: + if not fhost_app.config["FHOST_UPLOAD_BLACKLIST"]: + self.mount(Notification("Failed: FHOST_UPLOAD_BLACKLIST not set!")) + return + else: + if in_upload_bl(self.current_file.addr): + txt = f"{self.current_file.addr} is already banned" + else: + with fhost_app.open_instance_resource(fhost_app.config["FHOST_UPLOAD_BLACKLIST"], "a") as bl: + print(self.current_file.addr.lstrip("::ffff:"), file=bl) + txt = f"Banned {self.current_file.addr}" + + if nuke: + tsize = 0 + trm = 0 + for f in File.query.filter(File.addr == self.current_file.addr): + if f.getpath().is_file(): + tsize += f.size or f.getpath().stat().st_size + trm += 1 + f.delete(True) + db.session.commit() + txt += f", removed {trm} {'files' if trm != 1 else 'file'} totaling {do_filesizeformat(tsize, True)}" + self.mount(Notification(txt)) + self._refresh_layout() + ftable = self.query_one("#ftable") + ftable.watch_query(None, None) + + def on_update(self) -> None: + stdout.write("\033[?25l") + stdout.flush() + + def compose(self) -> ComposeResult: + yield Header() + yield Horizontal( + FileTable(id="ftable", zebra_stripes=True), + Vertical( + DataTable(id="finfo", show_header=False), + MpvWidget(id="mpv"), + TextLog(id="ftextlog"), + id="infopane")) + yield Horizontal(Static("Filter:", id="filter_label"), Input(id="filter_input"), id="filter_container") + yield Footer() + + def on_mount(self) -> None: + self.current_file = None + + self.ftable = self.query_one("#ftable") + self.ftable.focus() + + self.finfo = self.query_one("#finfo") + self.finfo.add_columns("key", "value") + + self.mpvw = self.query_one("#mpv") + self.ftlog = self.query_one("#ftextlog") + + self.mimehandler = mime.MIMEHandler() + self.mimehandler.register(mime.MIMECategory.Archive, self.handle_libarchive) + self.mimehandler.register(mime.MIMECategory.Text, self.handle_text) + 
self.mimehandler.register(mime.MIMECategory.AV, self.handle_mpv) + self.mimehandler.register(mime.MIMECategory.Document, self.handle_mupdf) + self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_libarchive) + self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_mpv) + self.mimehandler.register(mime.MIMECategory.Fallback, self.handle_raw) + + def handle_libarchive(self, cat): + import libarchive + with libarchive.file_reader(str(self.current_file.getpath())) as a: + self.ftlog.write("\n".join(e.path for e in a)) + return True + + def handle_text(self, cat): + with open(self.current_file.getpath(), "r") as sf: + data = sf.read(1000000).replace("\033","") + self.ftlog.write(data) + return True + + def handle_mupdf(self, cat): + import fitz + with fitz.open(self.current_file.getpath(), + filetype=self.current_file.ext.lstrip(".")) as doc: + p = doc.load_page(0) + pix = p.get_pixmap(dpi=72) + imgdata = pix.tobytes("ppm").hex() + + self.mpvw.styles.height = "40%" + self.mpvw.start_mpv("hex://" + imgdata, 0) + + self.ftlog.write(Text.from_markup(f"[bold]Pages:[/bold] {doc.page_count}")) + self.ftlog.write(Text.from_markup("[bold]Metadata:[/bold]")) + for k, v in doc.metadata.items(): + self.ftlog.write(Text.from_markup(f" [bold]{k}:[/bold] {v}")) + toc = doc.get_toc() + if len(toc): + self.ftlog.write(Text.from_markup("[bold]TOC:[/bold]")) + for lvl, title, page in toc: + self.ftlog.write(f"{' ' * lvl} {page}: {title}") + return True + + def handle_mpv(self, cat): + if cat == mime.MIMECategory.AV or self.current_file.nsfw_score >= 0: + self.mpvw.styles.height = "20%" + self.mpvw.start_mpv(str(self.current_file.getpath()), 0) + + import av + with av.open(str(self.current_file.getpath())) as c: + self.ftlog.write(Text("Format:", style="bold")) + self.ftlog.write(f" {c.format.long_name}") + if len(c.metadata): + self.ftlog.write(Text("Metadata:", style="bold")) + for k, v in c.metadata.items(): + self.ftlog.write(f" {k}: {v}") + for s in c.streams: 
+ self.ftlog.write(Text(f"Stream {s.index}:", style="bold")) + self.ftlog.write(f" Type: {s.type}") + if s.base_rate: + self.ftlog.write(f" Frame rate: {s.base_rate}") + if len(s.metadata): + self.ftlog.write(Text(" Metadata:", style="bold")) + for k, v in s.metadata.items(): + self.ftlog.write(f" {k}: {v}") + return True + return False + + def handle_raw(self, cat): + def hexdump(binf, length): + def fmt(s): + if isinstance(s, str): + c = chr(int(s, 16)) + else: + c = chr(s) + s = c + if c.isalpha(): return f"\0[chartreuse1]{s}\0[/chartreuse1]" + if c.isdigit(): return f"\0[gold1]{s}\0[/gold1]" + if not c.isprintable(): + g = "grey50" if c == "\0" else "cadet_blue" + return f"\0[{g}]{s if len(s) == 2 else '.'}\0[/{g}]" + return s + return Text.from_markup("\n".join(f"{' '.join(map(fmt, map(''.join, zip(*[iter(c.hex())] * 2))))}" + f"{' ' * (16 - len(c))}" + f" {''.join(map(fmt, c))}" + for c in map(lambda x: bytes([n for n in x if n != None]), + zip_longest(*[iter(binf.read(min(length, 16 * 10)))] * 16)))) + + with open(self.current_file.getpath(), "rb") as binf: + self.ftlog.write(hexdump(binf, self.current_file.size)) + if self.current_file.size > 16*10*2: + binf.seek(self.current_file.size-16*10) + self.ftlog.write(" [...] 
".center(64, '─')) + self.ftlog.write(hexdump(binf, self.current_file.size - binf.tell())) + + return True + + def on_file_table_selected(self, message: FileTable.Selected) -> None: + f = message.file + self.current_file = f + self.finfo.clear() + self.finfo.add_rows([ + ("ID:", str(f.id)), + ("File name:", f.getname()), + ("URL:", f.geturl() if fhost_app.config["SERVER_NAME"] else "⚠ Set SERVER_NAME in config.py to display"), + ("File size:", do_filesizeformat(f.size, True)), + ("MIME type:", f.mime), + ("SHA256 checksum:", f.sha256), + ("Uploaded by:", Text(f.addr)), + ("User agent:", Text(f.ua or "")), + ("Management token:", f.mgmt_token), + ("Secret:", f.secret), + ("Is NSFW:", ("Yes" if f.is_nsfw else "No") + (f" (Score: {f.nsfw_score:0.4f})" if f.nsfw_score else " (Not scanned)")), + ("Is banned:", "Yes" if f.removed else "No"), + ("Expires:", time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(File.get_expiration(f.expiration, f.size)/1000))) + ]) + + self.mpvw.stop_mpv(True) + self.ftlog.remove() + self.query_one("#infopane").mount(TextLog(id="ftextlog")) + self.ftlog = self.query_one("#ftextlog") + + if f.getpath().is_file(): + self.mimehandler.handle(f.mime, f.ext) + self.ftlog.scroll_home(animate=False) + +class NullptrModApp(App): + CSS_PATH = "mod.css" + + def on_mount(self) -> None: + self.title = "0x0 File Moderation Interface" + self.main_screen = NullptrMod() + self.install_screen(self.main_screen, name="main") + self.push_screen("main") + +if __name__ == "__main__": + app = NullptrModApp() + app.run() diff --git a/modui.webp b/modui.webp new file mode 100644 index 0000000..4ce8769 Binary files /dev/null and b/modui.webp differ diff --git a/modui/__init__.py b/modui/__init__.py new file mode 100644 index 0000000..2e190ea --- /dev/null +++ b/modui/__init__.py @@ -0,0 +1,3 @@ +from .filetable import FileTable +from .notification import Notification +from .mpvwidget import MpvWidget diff --git a/modui/filetable.py b/modui/filetable.py new file mode 100644 
index 0000000..7be0f1b --- /dev/null +++ b/modui/filetable.py @@ -0,0 +1,72 @@ +from textual.widgets import DataTable, Static +from textual.reactive import Reactive +from textual.message import Message, MessageTarget +from textual import events, log +from jinja2.filters import do_filesizeformat + +from fhost import File +from modui import mime + +class FileTable(DataTable): + query = Reactive(None) + order_col = Reactive(0) + order_desc = Reactive(True) + limit = 10000 + colmap = [File.id, File.removed, File.nsfw_score, None, File.ext, File.size, File.mime] + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.add_columns("#", "☣️", "🔞", "📂", "name", "size", "mime") + self.base_query = File.query.filter(File.size != None) + self.query = self.base_query + + class Selected(Message): + def __init__(self, sender: MessageTarget, f: File) -> None: + self.file = f + super().__init__(sender) + + def watch_order_col(self, old, value) -> None: + self.watch_query(None, None) + + def watch_order_desc(self, old, value) -> None: + self.watch_query(None, None) + + def watch_query(self, old, value) -> None: + def fmt_file(f: File) -> tuple: + return ( + str(f.id), + "🔴" if f.removed else " ", + "🚩" if f.is_nsfw else " ", + "👻" if not f.getpath().is_file() else " ", + f.getname(), + do_filesizeformat(f.size, True), + f"{mime.mimemoji.get(f.mime.split('/')[0], mime.mimemoji.get(f.mime)) or ' '} " + f.mime, + ) + + if (self.query): + self.clear() + order = FileTable.colmap[self.order_col] + q = self.query + if order: q = q.order_by(order.desc() if self.order_desc else order, File.id) + self.add_rows(map(fmt_file, q.limit(self.limit))) + + def _scroll_cursor_in_to_view(self, animate: bool = False) -> None: + region = self._get_cell_region(self.cursor_row, 0) + spacing = self._get_cell_border() + self.scroll_to_region(region, animate=animate, spacing=spacing) + + async def watch_cursor_cell(self, old, value) -> None: + super().watch_cursor_cell(old, value) + if value[0] 
< len(self.data) and value[0] >= 0: + f = File.query.get(int(self.data[value[0]][0])) + await self.emit(self.Selected(self, f)) + + def on_click(self, event: events.Click) -> None: + super().on_click(event) + meta = self.get_style_at(event.x, event.y).meta + if meta: + if meta["row"] == -1: + qi = FileTable.colmap[meta["column"]] + if meta["column"] == self.order_col: + self.order_desc = not self.order_desc + self.order_col = meta["column"] diff --git a/modui/mime.py b/modui/mime.py new file mode 100644 index 0000000..fae51af --- /dev/null +++ b/modui/mime.py @@ -0,0 +1,126 @@ +from enum import Enum +from textual import log + +mimemoji = { + "audio" : "🔈", + "video" : "🎞", + "text" : "📄", + "image" : "🖼", + "application/zip" : "🗜️", + "application/x-zip-compressed" : "🗜️", + "application/x-tar" : "🗄", + "application/x-cpio" : "🗄", + "application/x-xz" : "🗜️", + "application/x-7z-compressed" : "🗜️", + "application/gzip" : "🗜️", + "application/zstd" : "🗜️", + "application/x-rar" : "🗜️", + "application/x-rar-compressed" : "🗜️", + "application/vnd.ms-cab-compressed" : "🗜️", + "application/x-bzip2" : "🗜️", + "application/x-lzip" : "🗜️", + "application/x-iso9660-image" : "💿", + "application/pdf" : "📕", + "application/epub+zip" : "📕", + "application/mxf" : "🎞", + "application/vnd.android.package-archive" : "📦", + "application/vnd.debian.binary-package" : "📦", + "application/x-rpm" : "📦", + "application/x-dosexec" : "⚙", + "application/x-execuftable" : "⚙", + "application/x-sharedlib" : "⚙", + "application/java-archive" : "☕", + "application/x-qemu-disk" : "🖴", + "application/pgp-encrypted" : "🔏", +} + +MIMECategory = Enum("MIMECategory", + ["Archive", "Text", "AV", "Document", "Fallback"] +) + +class MIMEHandler: + def __init__(self): + self.handlers = { + MIMECategory.Archive : [[ + "application/zip", + "application/x-zip-compressed", + "application/x-tar", + "application/x-cpio", + "application/x-xz", + "application/x-7z-compressed", + "application/gzip", + 
"application/zstd", + "application/x-rar", + "application/x-rar-compressed", + "application/vnd.ms-cab-compressed", + "application/x-bzip2", + "application/x-lzip", + "application/x-iso9660-image", + "application/vnd.android.package-archive", + "application/vnd.debian.binary-package", + "application/x-rpm", + "application/java-archive", + "application/vnd.openxmlformats" + ], []], + MIMECategory.Text : [[ + "text", + "application/json", + "application/xml", + ], []], + MIMECategory.AV : [[ + "audio", "video", "image", + "application/mxf" + ], []], + MIMECategory.Document : [[ + "application/pdf", + "application/epub", + "application/x-mobipocket-ebook", + ], []], + MIMECategory.Fallback : [[], []] + } + + self.exceptions = { + MIMECategory.Archive : { + ".cbz" : MIMECategory.Document, + ".xps" : MIMECategory.Document, + ".epub" : MIMECategory.Document, + }, + MIMECategory.Text : { + ".fb2" : MIMECategory.Document, + } + } + + def register(self, category, handler): + self.handlers[category][1].append(handler) + + def handle(self, mime, ext): + def getcat(s): + cat = MIMECategory.Fallback + for k, v in self.handlers.items(): + s = s.split(";")[0] + if s in v[0] or s.split("/")[0] in v[0]: + cat = k + break + + for x in v[0]: + if s.startswith(x): + cat = k + break + + if cat in self.exceptions: + cat = self.exceptions[cat].get(ext) or cat + + return cat + + cat = getcat(mime) + for handler in self.handlers[cat][1]: + try: + if handler(cat): return + except: pass + + for handler in self.handlers[MIMECategory.Fallback][1]: + try: + if handler(None): return + except: pass + + raise RuntimeError(f"Unhandled MIME type category: {cat}") diff --git a/modui/mpvwidget.py b/modui/mpvwidget.py new file mode 100644 index 0000000..50e5859 --- /dev/null +++ b/modui/mpvwidget.py @@ -0,0 +1,88 @@ +import time +import fcntl, struct, termios +from sys import stdout + +from textual import events, log +from textual.widgets import Static + +from fhost import app as fhost_app + +class 
class MpvWidget(Static):
    """Textual widget that renders media previews in the terminal by
    driving libmpv with a terminal video output (sixel or kitty
    graphics, chosen via the MOD_PREVIEW_PROTO config key).

    If previews are unavailable (unset/unknown protocol, or python-mpv /
    libmpv missing), the widget displays an explanatory message instead.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Handle to the mpv.MPV player instance; stays None when
        # previews are disabled or mpv failed to initialize.
        self.mpv = None
        self.vo = fhost_app.config.get("MOD_PREVIEW_PROTO")

        if not self.vo in ["sixel", "kitty"]:
            self.update("⚠ Previews not enabled. \n\nSet MOD_PREVIEW_PROTO to 'sixel' or 'kitty' in config.py,\nwhichever is supported by your terminal.")
        else:
            try:
                # Imported lazily so the module loads even without
                # python-mpv installed.
                import mpv
                self.mpv = mpv.MPV()
                self.mpv.profile = "sw-fast"
                self.mpv["vo"] = self.vo
                # Keep the terminal contents around between frames and
                # stay on the main screen (no alt-screen switch), so the
                # preview coexists with the TUI.
                self.mpv[f"vo-{self.vo}-config-clear"] = False
                self.mpv[f"vo-{self.vo}-alt-screen"] = False
                self.mpv[f"vo-sixel-buffered"] = True
                self.mpv["audio"] = False
                self.mpv["loop-file"] = "inf"
                # Still images: cycle quickly for sixel, hold forever
                # for kitty.
                self.mpv["image-display-duration"] = 0.5 if self.vo == "sixel" else "inf"
            except Exception as e:
                self.mpv = None
                self.update(f"⚠ Previews require python-mpv with libmpv 0.36.0 or later \n\nError was:\n{type(e).__name__}: {e}")

    def start_mpv(self, f: str|None = None, pos: float|str|None = None) -> None:
        """Show the widget and start playback.

        f: URL/path to load; when None, (re)plays playlist entry 0.
        pos: optional start position passed to mpv's "start" option.
        """
        self.display = True
        self.screen._refresh_layout()

        if self.mpv:
            if self.content_region.x:
                # TIOCGWINSZ: rows, cols, pixel width, pixel height of
                # the terminal; used to convert the widget's cell region
                # into pixel dimensions for mpv.
                r, c, w, h = struct.unpack('hhhh', fcntl.ioctl(0, termios.TIOCGWINSZ, '12345678'))
                width = int((w / c) * self.content_region.width)
                # sixel gets one extra row — presumably to cover the
                # full cell area of that protocol; TODO confirm.
                height = int((h / r) * (self.content_region.height + (1 if self.vo == "sixel" else 0)))
                # mpv's vo-*-left/top are 1-based terminal coordinates.
                self.mpv[f"vo-{self.vo}-left"] = self.content_region.x + 1
                self.mpv[f"vo-{self.vo}-top"] = self.content_region.y + 1
                self.mpv[f"vo-{self.vo}-rows"] = self.content_region.height + (1 if self.vo == "sixel" else 0)
                self.mpv[f"vo-{self.vo}-cols"] = self.content_region.width
                self.mpv[f"vo-{self.vo}-width"] = width
                self.mpv[f"vo-{self.vo}-height"] = height

            if pos != None:
                self.mpv["start"] = pos

            if f:
                self.mpv.loadfile(f)
            else:
                self.mpv.playlist_play_index(0)

    def stop_mpv(self, wait: bool = False) -> None:
        """Stop playback, clear any drawn graphics, and hide the widget.

        wait: sleep briefly after stopping — gives mpv time to finish
        writing to the terminal before the graphics are cleared.
        """
        if self.mpv:
            if not self.mpv.idle_active:
                self.mpv.stop(True)
                if wait:
                    time.sleep(0.1)
                self.clear_mpv()
        self.display = False

    def on_resize(self, size) -> None:
        # Restart playback at the previous position so mpv picks up the
        # new widget geometry.
        if self.mpv:
            if not self.mpv.idle_active:
                t = self.mpv.time_pos
                self.stop_mpv()
                if t:
                    self.mpv["start"] = t
                self.start_mpv()

    def clear_mpv(self) -> None:
        # Kitty graphics protocol: "a=d" deletes all displayed images.
        # (Sixel output needs no explicit clearing here.)
        if self.vo == "kitty":
            stdout.write("\033_Ga=d;\033\\")
            stdout.flush()

    def shutdown(self) -> None:
        """Tear down the player on app exit and clean up the terminal."""
        if self.mpv:
            self.mpv.stop()
            del self.mpv
            if self.vo == "kitty":
                # Delete kitty images and hide the cursor (CSI ?25l).
                stdout.write("\033_Ga=d;\033\\\033[?25l")
                stdout.flush()
class NSFWDetector:
    """Scores media files for NSFW content using the Yahoo open_nsfw
    Caffe model (ResNet-50 1by2), loaded from the sibling ``nsfw_model``
    directory.
    """

    def __init__(self):
        npath = Path(__file__).parent / "nsfw_model"
        self.nsfw_net = caffe.Net(
            str(npath / "deploy.prototxt"),
            caffe.TEST,
            weights = str(npath / "resnet_50_1by2_nsfw.caffemodel")
        )
        self.caffe_transformer = caffe.io.Transformer({
            'data': self.nsfw_net.blobs['data'].data.shape
        })
        # move image channels to outermost
        self.caffe_transformer.set_transpose('data', (2, 0, 1))
        # subtract the dataset-mean value in each channel
        self.caffe_transformer.set_mean('data', np.array([104, 117, 123]))
        # rescale from [0, 1] to [0, 255]
        self.caffe_transformer.set_raw_scale('data', 255)
        # swap channels from RGB to BGR
        self.caffe_transformer.set_channel_swap('data', (2, 1, 0))

    def _compute(self, img):
        """Run the network on one image (path or file-like object) and
        return the raw probability vector from the "prob" output layer.
        """
        image = caffe.io.load_image(img)

        # Center-crop the image to the network's input height/width.
        H, W, _ = image.shape
        _, _, h, w = self.nsfw_net.blobs["data"].data.shape
        h_off = int(max((H - h) / 2, 0))
        w_off = int(max((W - w) / 2, 0))
        crop = image[h_off:h_off + h, w_off:w_off + w, :]

        transformed_image = self.caffe_transformer.preprocess('data', crop)
        # Prepend a batch dimension of 1.
        transformed_image.shape = (1,) + transformed_image.shape

        input_name = self.nsfw_net.inputs[0]
        output_layers = ["prob"]
        all_outputs = self.nsfw_net.forward_all(
            blobs=output_layers, **{input_name: transformed_image})

        return all_outputs[output_layers[0]][0].astype(float)

    def detect(self, fpath):
        """Return the NSFW probability for the media file at *fpath*,
        or -1.0 if the file cannot be decoded or scored.
        """
        try:
            with av.open(fpath) as container:
                # Sample a frame from the middle of the stream; fall
                # back to the first frame if seeking fails.
                try:
                    container.seek(int(container.duration / 2))
                # FIX: was a bare `except:` — keep the fallback but
                # don't swallow KeyboardInterrupt/SystemExit.
                except Exception:
                    container.seek(0)

                frame = next(container.decode(video=0))

                # Scale so the longer edge is 256 px, preserving the
                # aspect ratio, then hand mpv-independent RGB to caffe.
                if frame.width >= frame.height:
                    w = 256
                    h = int(frame.height * (256 / frame.width))
                else:
                    w = int(frame.width * (256 / frame.height))
                    h = 256
                frame = frame.reformat(width=w, height=h, format="rgb24")
                img = BytesIO()
                frame.to_image().save(img, format="ppm")

                scores = self._compute(img)
        # FIX: was a bare `except:`. Decode/inference failure of any
        # kind deliberately yields the -1.0 sentinel (best-effort scan).
        except Exception:
            return -1.0

        # Index 1 is the NSFW class probability per the open_nsfw model's
        # output ordering (index 0 = SFW) — see model documentation.
        return scores[1]


if __name__ == "__main__":
    n = NSFWDetector()

    for inf in sys.argv[1:]:
        score = n.detect(inf)
        print(inf, score)
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/nsfw_model/deploy.prototxt b/nsfw_model/deploy.prototxt new file mode 100644 index 0000000..16fb53e --- /dev/null +++ b/nsfw_model/deploy.prototxt @@ -0,0 +1,3488 @@ +name: "ResNet_50_1by2_nsfw" +layer { + name: "data" + type: "Input" + top: "data" + input_param { shape: { dim: 1 dim: 3 dim: 224 dim: 224 } } +} +layer { + name: "conv_1" + type: "Convolution" + bottom: "data" + top: "conv_1" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 3 + kernel_size: 7 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_1" + type: "BatchNorm" + bottom: "conv_1" + top: "conv_1" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_1" + type: "Scale" + bottom: "conv_1" + top: "conv_1" + scale_param { + bias_term: true + } +} +layer { + name: "relu_1" + type: "ReLU" + bottom: "conv_1" + top: "conv_1" +} +layer { + name: "pool1" + type: "Pooling" + bottom: "conv_1" + top: "pool1" + pooling_param { + pool: MAX + kernel_size: 3 + stride: 2 + } +} +layer { + name: "conv_stage0_block0_proj_shortcut" + type: "Convolution" + bottom: "pool1" + top: "conv_stage0_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 
0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage0_block0_proj_shortcut" + top: "conv_stage0_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage0_block0_proj_shortcut" + top: "conv_stage0_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage0_block0_branch2a" + type: "Convolution" + bottom: "pool1" + top: "conv_stage0_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2a" + type: "Scale" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block0_branch2a" + type: "ReLU" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2a" +} +layer { + name: "conv_stage0_block0_branch2b" + type: "Convolution" + bottom: "conv_stage0_block0_branch2a" + top: "conv_stage0_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param 
{ + num_output: 32 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2b" + type: "Scale" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block0_branch2b" + type: "ReLU" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2b" +} +layer { + name: "conv_stage0_block0_branch2c" + type: "Convolution" + bottom: "conv_stage0_block0_branch2b" + top: "conv_stage0_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block0_branch2c" + top: "conv_stage0_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block0_branch2c" + type: "Scale" + bottom: "conv_stage0_block0_branch2c" + top: "conv_stage0_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block0" + type: "Eltwise" + bottom: "conv_stage0_block0_proj_shortcut" + bottom: "conv_stage0_block0_branch2c" + top: "eltwise_stage0_block0" +} +layer { + name: "relu_stage0_block0" + type: "ReLU" + bottom: "eltwise_stage0_block0" + top: 
"eltwise_stage0_block0" +} +layer { + name: "conv_stage0_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block0" + top: "conv_stage0_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2a" + type: "Scale" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block1_branch2a" + type: "ReLU" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2a" +} +layer { + name: "conv_stage0_block1_branch2b" + type: "Convolution" + bottom: "conv_stage0_block1_branch2a" + top: "conv_stage0_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2b" + type: "Scale" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + 
name: "relu_stage0_block1_branch2b" + type: "ReLU" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2b" +} +layer { + name: "conv_stage0_block1_branch2c" + type: "Convolution" + bottom: "conv_stage0_block1_branch2b" + top: "conv_stage0_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block1_branch2c" + top: "conv_stage0_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block1_branch2c" + type: "Scale" + bottom: "conv_stage0_block1_branch2c" + top: "conv_stage0_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block1" + type: "Eltwise" + bottom: "eltwise_stage0_block0" + bottom: "conv_stage0_block1_branch2c" + top: "eltwise_stage0_block1" +} +layer { + name: "relu_stage0_block1" + type: "ReLU" + bottom: "eltwise_stage0_block1" + top: "eltwise_stage0_block1" +} +layer { + name: "conv_stage0_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block1" + top: "conv_stage0_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + 
} + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2a" + type: "Scale" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block2_branch2a" + type: "ReLU" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2a" +} +layer { + name: "conv_stage0_block2_branch2b" + type: "Convolution" + bottom: "conv_stage0_block2_branch2a" + top: "conv_stage0_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 32 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2b" + type: "Scale" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage0_block2_branch2b" + type: "ReLU" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2b" +} +layer { + name: "conv_stage0_block2_branch2c" + type: "Convolution" + bottom: "conv_stage0_block2_branch2b" + top: "conv_stage0_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage0_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage0_block2_branch2c" + top: "conv_stage0_block2_branch2c" + param { + 
lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage0_block2_branch2c" + type: "Scale" + bottom: "conv_stage0_block2_branch2c" + top: "conv_stage0_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage0_block2" + type: "Eltwise" + bottom: "eltwise_stage0_block1" + bottom: "conv_stage0_block2_branch2c" + top: "eltwise_stage0_block2" +} +layer { + name: "relu_stage0_block2" + type: "ReLU" + bottom: "eltwise_stage0_block2" + top: "eltwise_stage0_block2" +} +layer { + name: "conv_stage1_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage0_block2" + top: "conv_stage1_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage1_block0_proj_shortcut" + top: "conv_stage1_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage1_block0_proj_shortcut" + top: "conv_stage1_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage1_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage0_block2" + top: "conv_stage1_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + 
name: "bn_stage1_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2a" + type: "Scale" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block0_branch2a" + type: "ReLU" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2a" +} +layer { + name: "conv_stage1_block0_branch2b" + type: "Convolution" + bottom: "conv_stage1_block0_branch2a" + top: "conv_stage1_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2b" + type: "Scale" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block0_branch2b" + type: "ReLU" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2b" +} +layer { + name: "conv_stage1_block0_branch2c" + type: "Convolution" + bottom: "conv_stage1_block0_branch2b" + top: "conv_stage1_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 
+ weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block0_branch2c" + top: "conv_stage1_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block0_branch2c" + type: "Scale" + bottom: "conv_stage1_block0_branch2c" + top: "conv_stage1_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block0" + type: "Eltwise" + bottom: "conv_stage1_block0_proj_shortcut" + bottom: "conv_stage1_block0_branch2c" + top: "eltwise_stage1_block0" +} +layer { + name: "relu_stage1_block0" + type: "ReLU" + bottom: "eltwise_stage1_block0" + top: "eltwise_stage1_block0" +} +layer { + name: "conv_stage1_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block0" + top: "conv_stage1_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2a" + type: "Scale" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block1_branch2a" + type: "ReLU" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2a" +} +layer { + name: "conv_stage1_block1_branch2b" + 
type: "Convolution" + bottom: "conv_stage1_block1_branch2a" + top: "conv_stage1_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2b" + type: "Scale" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block1_branch2b" + type: "ReLU" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2b" +} +layer { + name: "conv_stage1_block1_branch2c" + type: "Convolution" + bottom: "conv_stage1_block1_branch2b" + top: "conv_stage1_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block1_branch2c" + top: "conv_stage1_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block1_branch2c" + type: "Scale" + bottom: "conv_stage1_block1_branch2c" + top: "conv_stage1_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block1" + type: "Eltwise" + bottom: 
"eltwise_stage1_block0" + bottom: "conv_stage1_block1_branch2c" + top: "eltwise_stage1_block1" +} +layer { + name: "relu_stage1_block1" + type: "ReLU" + bottom: "eltwise_stage1_block1" + top: "eltwise_stage1_block1" +} +layer { + name: "conv_stage1_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block1" + top: "conv_stage1_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block2_branch2a" + type: "Scale" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block2_branch2a" + type: "ReLU" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2a" +} +layer { + name: "conv_stage1_block2_branch2b" + type: "Convolution" + bottom: "conv_stage1_block2_branch2a" + top: "conv_stage1_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} 
+layer { + name: "scale_stage1_block2_branch2b" + type: "Scale" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block2_branch2b" + type: "ReLU" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2b" +} +layer { + name: "conv_stage1_block2_branch2c" + type: "Convolution" + bottom: "conv_stage1_block2_branch2b" + top: "conv_stage1_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block2_branch2c" + top: "conv_stage1_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block2_branch2c" + type: "Scale" + bottom: "conv_stage1_block2_branch2c" + top: "conv_stage1_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block2" + type: "Eltwise" + bottom: "eltwise_stage1_block1" + bottom: "conv_stage1_block2_branch2c" + top: "eltwise_stage1_block2" +} +layer { + name: "relu_stage1_block2" + type: "ReLU" + bottom: "eltwise_stage1_block2" + top: "eltwise_stage1_block2" +} +layer { + name: "conv_stage1_block3_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block2" + top: "conv_stage1_block3_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2a" + type: "BatchNorm" + bottom: 
"conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2a" + type: "Scale" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block3_branch2a" + type: "ReLU" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2a" +} +layer { + name: "conv_stage1_block3_branch2b" + type: "Convolution" + bottom: "conv_stage1_block3_branch2a" + top: "conv_stage1_block3_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 64 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2b" + type: "BatchNorm" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2b" + type: "Scale" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage1_block3_branch2b" + type: "ReLU" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2b" +} +layer { + name: "conv_stage1_block3_branch2c" + type: "Convolution" + bottom: "conv_stage1_block3_branch2b" + top: "conv_stage1_block3_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: 
"constant" + value: 0 + } + } +} +layer { + name: "bn_stage1_block3_branch2c" + type: "BatchNorm" + bottom: "conv_stage1_block3_branch2c" + top: "conv_stage1_block3_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage1_block3_branch2c" + type: "Scale" + bottom: "conv_stage1_block3_branch2c" + top: "conv_stage1_block3_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage1_block3" + type: "Eltwise" + bottom: "eltwise_stage1_block2" + bottom: "conv_stage1_block3_branch2c" + top: "eltwise_stage1_block3" +} +layer { + name: "relu_stage1_block3" + type: "ReLU" + bottom: "eltwise_stage1_block3" + top: "eltwise_stage1_block3" +} +layer { + name: "conv_stage2_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage1_block3" + top: "conv_stage2_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage2_block0_proj_shortcut" + top: "conv_stage2_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage2_block0_proj_shortcut" + top: "conv_stage2_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage2_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage1_block3" + top: "conv_stage2_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } 
+ convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2a" + type: "Scale" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block0_branch2a" + type: "ReLU" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2a" +} +layer { + name: "conv_stage2_block0_branch2b" + type: "Convolution" + bottom: "conv_stage2_block0_branch2a" + top: "conv_stage2_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2b" + type: "Scale" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block0_branch2b" + type: "ReLU" + bottom: "conv_stage2_block0_branch2b" + top: "conv_stage2_block0_branch2b" +} +layer { + name: "conv_stage2_block0_branch2c" + type: "Convolution" + bottom: "conv_stage2_block0_branch2b" + top: 
"conv_stage2_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block0_branch2c" + top: "conv_stage2_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block0_branch2c" + type: "Scale" + bottom: "conv_stage2_block0_branch2c" + top: "conv_stage2_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block0" + type: "Eltwise" + bottom: "conv_stage2_block0_proj_shortcut" + bottom: "conv_stage2_block0_branch2c" + top: "eltwise_stage2_block0" +} +layer { + name: "relu_stage2_block0" + type: "ReLU" + bottom: "eltwise_stage2_block0" + top: "eltwise_stage2_block0" +} +layer { + name: "conv_stage2_block1_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block0" + top: "conv_stage2_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2a" + type: "Scale" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" + scale_param { + bias_term: true + } 
+} +layer { + name: "relu_stage2_block1_branch2a" + type: "ReLU" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2a" +} +layer { + name: "conv_stage2_block1_branch2b" + type: "Convolution" + bottom: "conv_stage2_block1_branch2a" + top: "conv_stage2_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2b" + type: "Scale" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block1_branch2b" + type: "ReLU" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2b" +} +layer { + name: "conv_stage2_block1_branch2c" + type: "Convolution" + bottom: "conv_stage2_block1_branch2b" + top: "conv_stage2_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block1_branch2c" + top: "conv_stage2_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block1_branch2c" + type: "Scale" + 
bottom: "conv_stage2_block1_branch2c" + top: "conv_stage2_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block1" + type: "Eltwise" + bottom: "eltwise_stage2_block0" + bottom: "conv_stage2_block1_branch2c" + top: "eltwise_stage2_block1" +} +layer { + name: "relu_stage2_block1" + type: "ReLU" + bottom: "eltwise_stage2_block1" + top: "eltwise_stage2_block1" +} +layer { + name: "conv_stage2_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block1" + top: "conv_stage2_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2a" + type: "Scale" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block2_branch2a" + type: "ReLU" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2a" +} +layer { + name: "conv_stage2_block2_branch2b" + type: "Convolution" + bottom: "conv_stage2_block2_branch2a" + top: "conv_stage2_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" 
+ param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2b" + type: "Scale" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block2_branch2b" + type: "ReLU" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2b" +} +layer { + name: "conv_stage2_block2_branch2c" + type: "Convolution" + bottom: "conv_stage2_block2_branch2b" + top: "conv_stage2_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block2_branch2c" + top: "conv_stage2_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block2_branch2c" + type: "Scale" + bottom: "conv_stage2_block2_branch2c" + top: "conv_stage2_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block2" + type: "Eltwise" + bottom: "eltwise_stage2_block1" + bottom: "conv_stage2_block2_branch2c" + top: "eltwise_stage2_block2" +} +layer { + name: "relu_stage2_block2" + type: "ReLU" + bottom: "eltwise_stage2_block2" + top: "eltwise_stage2_block2" +} +layer { + name: "conv_stage2_block3_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block2" + top: "conv_stage2_block3_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + 
stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2a" + type: "Scale" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block3_branch2a" + type: "ReLU" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2a" +} +layer { + name: "conv_stage2_block3_branch2b" + type: "Convolution" + bottom: "conv_stage2_block3_branch2a" + top: "conv_stage2_block3_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2b" + type: "Scale" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block3_branch2b" + type: "ReLU" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2b" +} +layer { + name: "conv_stage2_block3_branch2c" + type: "Convolution" + bottom: "conv_stage2_block3_branch2b" + top: "conv_stage2_block3_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + 
param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block3_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block3_branch2c" + top: "conv_stage2_block3_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block3_branch2c" + type: "Scale" + bottom: "conv_stage2_block3_branch2c" + top: "conv_stage2_block3_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block3" + type: "Eltwise" + bottom: "eltwise_stage2_block2" + bottom: "conv_stage2_block3_branch2c" + top: "eltwise_stage2_block3" +} +layer { + name: "relu_stage2_block3" + type: "ReLU" + bottom: "eltwise_stage2_block3" + top: "eltwise_stage2_block3" +} +layer { + name: "conv_stage2_block4_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block3" + top: "conv_stage2_block4_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2a" + type: "Scale" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block4_branch2a" + type: "ReLU" + bottom: 
"conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2a" +} +layer { + name: "conv_stage2_block4_branch2b" + type: "Convolution" + bottom: "conv_stage2_block4_branch2a" + top: "conv_stage2_block4_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2b" + type: "Scale" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block4_branch2b" + type: "ReLU" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2b" +} +layer { + name: "conv_stage2_block4_branch2c" + type: "Convolution" + bottom: "conv_stage2_block4_branch2b" + top: "conv_stage2_block4_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block4_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block4_branch2c" + top: "conv_stage2_block4_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block4_branch2c" + type: "Scale" + bottom: "conv_stage2_block4_branch2c" + top: "conv_stage2_block4_branch2c" + 
scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block4" + type: "Eltwise" + bottom: "eltwise_stage2_block3" + bottom: "conv_stage2_block4_branch2c" + top: "eltwise_stage2_block4" +} +layer { + name: "relu_stage2_block4" + type: "ReLU" + bottom: "eltwise_stage2_block4" + top: "eltwise_stage2_block4" +} +layer { + name: "conv_stage2_block5_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block4" + top: "conv_stage2_block5_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2a" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2a" + type: "Scale" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block5_branch2a" + type: "ReLU" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2a" +} +layer { + name: "conv_stage2_block5_branch2b" + type: "Convolution" + bottom: "conv_stage2_block5_branch2a" + top: "conv_stage2_block5_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 128 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2b" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + 
decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2b" + type: "Scale" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage2_block5_branch2b" + type: "ReLU" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2b" +} +layer { + name: "conv_stage2_block5_branch2c" + type: "Convolution" + bottom: "conv_stage2_block5_branch2b" + top: "conv_stage2_block5_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 512 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage2_block5_branch2c" + type: "BatchNorm" + bottom: "conv_stage2_block5_branch2c" + top: "conv_stage2_block5_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage2_block5_branch2c" + type: "Scale" + bottom: "conv_stage2_block5_branch2c" + top: "conv_stage2_block5_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage2_block5" + type: "Eltwise" + bottom: "eltwise_stage2_block4" + bottom: "conv_stage2_block5_branch2c" + top: "eltwise_stage2_block5" +} +layer { + name: "relu_stage2_block5" + type: "ReLU" + bottom: "eltwise_stage2_block5" + top: "eltwise_stage2_block5" +} +layer { + name: "conv_stage3_block0_proj_shortcut" + type: "Convolution" + bottom: "eltwise_stage2_block5" + top: "conv_stage3_block0_proj_shortcut" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + 
bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_proj_shortcut" + type: "BatchNorm" + bottom: "conv_stage3_block0_proj_shortcut" + top: "conv_stage3_block0_proj_shortcut" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_proj_shortcut" + type: "Scale" + bottom: "conv_stage3_block0_proj_shortcut" + top: "conv_stage3_block0_proj_shortcut" + scale_param { + bias_term: true + } +} +layer { + name: "conv_stage3_block0_branch2a" + type: "Convolution" + bottom: "eltwise_stage2_block5" + top: "conv_stage3_block0_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 2 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2a" + type: "Scale" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block0_branch2a" + type: "ReLU" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2a" +} +layer { + name: "conv_stage3_block0_branch2b" + type: "Convolution" + bottom: "conv_stage3_block0_branch2a" + top: "conv_stage3_block0_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + 
bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2b" + type: "Scale" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block0_branch2b" + type: "ReLU" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2b" +} +layer { + name: "conv_stage3_block0_branch2c" + type: "Convolution" + bottom: "conv_stage3_block0_branch2b" + top: "conv_stage3_block0_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block0_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block0_branch2c" + top: "conv_stage3_block0_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block0_branch2c" + type: "Scale" + bottom: "conv_stage3_block0_branch2c" + top: "conv_stage3_block0_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block0" + type: "Eltwise" + bottom: "conv_stage3_block0_proj_shortcut" + bottom: "conv_stage3_block0_branch2c" + top: "eltwise_stage3_block0" +} +layer { + name: "relu_stage3_block0" + type: "ReLU" + bottom: "eltwise_stage3_block0" + top: "eltwise_stage3_block0" +} +layer { + name: "conv_stage3_block1_branch2a" + type: "Convolution" + bottom: 
"eltwise_stage3_block0" + top: "conv_stage3_block1_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2a" + type: "Scale" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block1_branch2a" + type: "ReLU" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2a" +} +layer { + name: "conv_stage3_block1_branch2b" + type: "Convolution" + bottom: "conv_stage3_block1_branch2a" + top: "conv_stage3_block1_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2b" + type: "Scale" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block1_branch2b" + type: "ReLU" + bottom: "conv_stage3_block1_branch2b" + top: 
"conv_stage3_block1_branch2b" +} +layer { + name: "conv_stage3_block1_branch2c" + type: "Convolution" + bottom: "conv_stage3_block1_branch2b" + top: "conv_stage3_block1_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block1_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block1_branch2c" + top: "conv_stage3_block1_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block1_branch2c" + type: "Scale" + bottom: "conv_stage3_block1_branch2c" + top: "conv_stage3_block1_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block1" + type: "Eltwise" + bottom: "eltwise_stage3_block0" + bottom: "conv_stage3_block1_branch2c" + top: "eltwise_stage3_block1" +} +layer { + name: "relu_stage3_block1" + type: "ReLU" + bottom: "eltwise_stage3_block1" + top: "eltwise_stage3_block1" +} +layer { + name: "conv_stage3_block2_branch2a" + type: "Convolution" + bottom: "eltwise_stage3_block1" + top: "conv_stage3_block2_branch2a" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2a" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: 
"scale_stage3_block2_branch2a" + type: "Scale" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block2_branch2a" + type: "ReLU" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2a" +} +layer { + name: "conv_stage3_block2_branch2b" + type: "Convolution" + bottom: "conv_stage3_block2_branch2a" + top: "conv_stage3_block2_branch2b" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 256 + pad: 1 + kernel_size: 3 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2b" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block2_branch2b" + type: "Scale" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" + scale_param { + bias_term: true + } +} +layer { + name: "relu_stage3_block2_branch2b" + type: "ReLU" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2b" +} +layer { + name: "conv_stage3_block2_branch2c" + type: "Convolution" + bottom: "conv_stage3_block2_branch2b" + top: "conv_stage3_block2_branch2c" + param { + lr_mult: 1 + decay_mult: 1 + } + param { + lr_mult: 2 + decay_mult: 0 + } + convolution_param { + num_output: 1024 + pad: 0 + kernel_size: 1 + stride: 1 + weight_filler { + type: "xavier" + } + bias_filler { + type: "constant" + value: 0 + } + } +} +layer { + name: "bn_stage3_block2_branch2c" + type: "BatchNorm" + bottom: "conv_stage3_block2_branch2c" + top: "conv_stage3_block2_branch2c" + param { + lr_mult: 0 + decay_mult: 0 + } + param { + lr_mult: 0 + decay_mult: 0 + } + 
param { + lr_mult: 0 + decay_mult: 0 + } + batch_norm_param { + use_global_stats: true + } +} +layer { + name: "scale_stage3_block2_branch2c" + type: "Scale" + bottom: "conv_stage3_block2_branch2c" + top: "conv_stage3_block2_branch2c" + scale_param { + bias_term: true + } +} +layer { + name: "eltwise_stage3_block2" + type: "Eltwise" + bottom: "eltwise_stage3_block1" + bottom: "conv_stage3_block2_branch2c" + top: "eltwise_stage3_block2" +} +layer { + name: "relu_stage3_block2" + type: "ReLU" + bottom: "eltwise_stage3_block2" + top: "eltwise_stage3_block2" +} +layer { + name: "pool" + type: "Pooling" + bottom: "eltwise_stage3_block2" + top: "pool" + pooling_param { + pool: AVE + kernel_size: 7 + stride: 1 + } +} +layer { + name: "fc_nsfw" + type: "InnerProduct" + bottom: "pool" + top: "fc_nsfw" + param { + lr_mult: 5 + decay_mult: 1 + } + param { + lr_mult: 10 + decay_mult: 0 + } + inner_product_param{ + num_output: 2 + weight_filler { + type: "xavier" + std: 0.01 + } + bias_filler { + type: "xavier" + value: 0 + } + } +} +layer { + name: "prob" + type: "Softmax" + bottom: "fc_nsfw" + top: "prob" +} + diff --git a/nsfw_model/resnet_50_1by2_nsfw.caffemodel b/nsfw_model/resnet_50_1by2_nsfw.caffemodel new file mode 100644 index 0000000..c4f3105 Binary files /dev/null and b/nsfw_model/resnet_50_1by2_nsfw.caffemodel differ diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..21f57cf --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.pytest.ini_options] +log_level = "INFO" diff --git a/requirements.txt b/requirements.txt index 3004091..b765697 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,20 +1,25 @@ -alembic==0.8.8 -click==6.6 -decorator==4.0.10 -Flask==0.11.1 -Flask-Migrate==2.0.0 -Flask-Script==2.0.5 -Flask-SQLAlchemy==2.1 -humanize==0.5.1 -itsdangerous==0.24 -Jinja2==2.8 -Mako==1.0.4 -MarkupSafe==0.23 -python-editor==1.0.1 -python-magic==0.4.12 -requests==2.11.1 -short-url==1.2.2 -six==1.10.0 -SQLAlchemy==1.1.3 
-validators==0.11.0 -Werkzeug==0.11.11 +click +Flask_Migrate +validators +alembic +requests +Jinja2 +Flask +flask_sqlalchemy +python_magic + +# vscan +clamd + +# nsfw detection +numpy + +# mod ui +av +PyMuPDF +libarchive_c +textual +python-mpv + +# dev +pytest diff --git a/templates/401.html b/templates/401.html new file mode 100644 index 0000000..672c7e4 --- /dev/null +++ b/templates/401.html @@ -0,0 +1,2 @@ +rm: cannot remove '{{ request.path.split("/")[1] }}': Permission denied + diff --git a/templates/404.html b/templates/404.html new file mode 100644 index 0000000..88ea3cc --- /dev/null +++ b/templates/404.html @@ -0,0 +1,15 @@ +{% set pid = range(20,100)|random %} +
Process {{ pid }} stopped
+* thread #1: tid = {{ pid }}, {{ "{:#018x}".format(id(g)) }}, name = 'fhost'
+    frame #0:
+Process {{ pid }} stopped
+* thread #8: tid = {{ pid }}, {{ "{:#018x}".format(id(request)) }} fhost`get(path='{{ request.path }}') + 27 at fhost.c:139, name = 'fhost/responder', stop reason = invalid address (fault address: 0x30)
+    frame #0: {{ "{:#018x}".format(id(request)) }} fhost`get(path='{{ request.path }}') + 27 at fhost.c:139
+   136   get(SrvContext *ctx, const char *path)
+   137   {
+   138       StoredObj *obj = ctx->store->query(shurl_debase(path));
+-> 139       switch (obj->type) {
+   140           case ObjTypeFile:
+   141               ctx->serve_file_id(obj->id);
+   142               break;
+(lldb) q
diff --git a/templates/411.html b/templates/411.html new file mode 100644 index 0000000..8c57608 --- /dev/null +++ b/templates/411.html @@ -0,0 +1 @@ +Could not determine remote file size (no Content-Length in response header; shoot admin). diff --git a/templates/413.html b/templates/413.html new file mode 100644 index 0000000..0e5c32a --- /dev/null +++ b/templates/413.html @@ -0,0 +1 @@ +Remote file too large ({{ request.headers["content-length"]|filesizeformat(True) }} > {{ config["MAX_CONTENT_LENGTH"]|filesizeformat(True) }}). diff --git a/templates/451.html b/templates/451.html new file mode 100644 index 0000000..2d631a4 --- /dev/null +++ b/templates/451.html @@ -0,0 +1 @@ +451 Unavailable For Legal Reasons diff --git a/templates/index.html b/templates/index.html new file mode 100644 index 0000000..a2add39 --- /dev/null +++ b/templates/index.html @@ -0,0 +1,69 @@ +
+THE NULL POINTER
+================
+{% set fhost_url = url_for("fhost", _external=True).rstrip("/") %}
+HTTP POST files here:
+    curl -F'file=@yourfile.png' {{ fhost_url }}
+You can also POST remote URLs:
+    curl -F'url=http://example.com/image.jpg' {{ fhost_url }}
+If you don't want the resulting URL to be easy to guess:
+    curl -F'file=@yourfile.png' -Fsecret= {{ fhost_url }}
+    curl -F'url=http://example.com/image.jpg' -Fsecret= {{ fhost_url }}
+Or you can shorten URLs:
+    curl -F'shorten=http://example.com/some/long/url' {{ fhost_url }}
+
+It is possible to append your own file name to the URL:
+    {{ fhost_url }}/aaa.jpg/image.jpeg
+
+File URLs are valid for at least 30 days and up to a year (see below).
+Shortened URLs do not expire.
+
+Files can be set to expire sooner by adding an "expires" parameter (in hours):
+    curl -F'file=@yourfile.png' -Fexpires=24 {{ fhost_url }}
+OR by setting "expires" to a timestamp in epoch milliseconds:
+    curl -F'file=@yourfile.png' -Fexpires=1681996320000 {{ fhost_url }}
+
+Expired files won't be removed immediately, but will be removed as part of
+the next purge.
+
+Whenever a file that does not already exist or has expired is uploaded,
+the HTTP response header includes an X-Token field. You can use this
+to perform management operations on the file.
+
+To delete the file immediately:
+    curl -Ftoken=token_here -Fdelete= {{ fhost_url }}/abc.txt
+To change the expiration date (see above):
+    curl -Ftoken=token_here -Fexpires=3 {{ fhost_url }}/abc.txt
+
+{% set max_size = config["MAX_CONTENT_LENGTH"]|filesizeformat(True) %}
+Maximum file size: {{ max_size }}
+Not allowed: {{ config["FHOST_MIME_BLACKLIST"]|join(", ") }}
+
+
+FILE RETENTION PERIOD
+---------------------
+
+retention = min_age + (-max_age + min_age) * pow((file_size / max_size - 1), 3)
+
+   days
+ {{'{: 6}'.format(config.get("FHOST_MAX_EXPIRATION", 31536000000)//86400000)}} |  \
+        |   \
+        |    \
+        |     \
+        |      \
+        |       \
+        |        ..
+        |          \
+ {{'{: 6.1f}'.format((config.get("FHOST_MIN_EXPIRATION", 2592000000)/2 + config.get("FHOST_MAX_EXPIRATION", 31536000000)/2)/86400000)}} | ----------..-------------------------------------------
+        |             ..
+        |               \
+        |                ..
+        |                  ...
+        |                     ..
+        |                       ...
+        |                          ....
+        |                              ......
+ {{'{: 6}'.format(config.get("FHOST_MIN_EXPIRATION", 2592000000)//86400000)}} |                                    ....................
+          0{{ ((config["MAX_CONTENT_LENGTH"]/2)|filesizeformat(True)).split(" ")[0].rjust(27) }}{{ max_size.split(" ")[0].rjust(27) }}
+           {{ max_size.split(" ")[1].rjust(54) }}
+
diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..0b29e00 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,81 @@ +import pytest +import tempfile +import os +from flask_migrate import upgrade as db_upgrade +from io import BytesIO + +from fhost import app, db, url_for, File, URL + +@pytest.fixture +def client(): + with tempfile.TemporaryDirectory() as tmpdir: + app.config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{tmpdir}/db.sqlite" + app.config["FHOST_STORAGE_PATH"] = os.path.join(tmpdir, "up") + app.config["TESTING"] = True + + with app.test_client() as client: + with app.app_context(): + db_upgrade() + yield client + +def test_client(client): + payloads = [ + ({ "file" : (BytesIO(b"hello"), "hello.txt") }, 200, b"https://localhost/E.txt\n"), + ({ "file" : (BytesIO(b"hello"), "hello.ignorethis") }, 200, b"https://localhost/E.txt\n"), + ({ "file" : (BytesIO(b"bye"), "bye.truncatethis") }, 200, b"https://localhost/Q.truncate\n"), + ({ "file" : (BytesIO(b"hi"), "hi.tar.gz") }, 200, b"https://localhost/h.tar.gz\n"), + ({ "file" : (BytesIO(b"lea!"), "lea!") }, 200, b"https://localhost/d.txt\n"), + ({ "file" : (BytesIO(b"why?"), "balls", "application/x-dosexec") }, 415, None), + ({ "shorten" : "https://0x0.st" }, 200, b"https://localhost/E\n"), + ({ "shorten" : "https://localhost" }, 400, None), + ({}, 400, None), + ] + + for p, s, r in payloads: + rv = client.post("/", buffered=True, + content_type="multipart/form-data", + data=p) + assert rv.status_code == s + if r: + assert rv.data == r + + f = File.query.get(2) + f.removed = True + db.session.add(f) + db.session.commit() + + rq = [ + (200, [ + "/", + "robots.txt", + "E.txt", + "E.txt/test", + "E.txt/test.py", + "d.txt", + "h.tar.gz", + ]), + (302, [ + "E", + ]), + (404, [ + "test.bin", + "test.bin/test", + "test.bin/test.py", + "test", + "test/test", + "test.bin/test.py", + 
"E.bin", + "E/test", + "E/test.bin", + ]), + (451, [ + "Q.truncate", + ]), + ] + + for code, paths in rq: + for p in paths: + app.logger.info(f"GET {p}") + rv = client.get(p) + assert rv.status_code == code + diff --git a/uwsgi.ini b/uwsgi.ini new file mode 100644 index 0000000..f40ec59 --- /dev/null +++ b/uwsgi.ini @@ -0,0 +1,3 @@ +[uwsgi] +module = fhost +callable = app