From a449dbf0137b8c15928fbbe664f77ddd8eec6fe3 Mon Sep 17 00:00:00 2001 From: LxL Date: Mon, 5 Aug 2019 00:46:38 +0800 Subject: [PATCH] Bump version: 1.2.0 to 1.3.0 (#80) --- .circleci/config.yml | 12 ++++++------ HISTORY.md | 15 +++++++++++++++ README.md | 6 +++++- README_CN.md | 6 +++++- requirements.txt | 2 +- scrapydweb/__init__.py | 5 +++-- scrapydweb/__version__.py | 2 +- scrapydweb/default_settings.py | 3 +++ scrapydweb/run.py | 4 +++- .../static/{v121dev => v130}/css/dropdown.css | 0 .../{v121dev => v130}/css/dropdown_mobileui.css | 0 .../css/icon_upload_icon_right.css | 0 .../static/{v121dev => v130}/css/multinode.css | 0 .../static/{v121dev => v130}/css/stacktable.css | 0 .../static/{v121dev => v130}/css/stats.css | 0 .../static/{v121dev => v130}/css/style.css | 0 .../{v121dev => v130}/css/style_mobileui.css | 0 scrapydweb/static/{v121dev => v130}/css/utf8.css | 0 .../{v121dev => v130}/css/utf8_mobileui.css | 0 .../element-ui@2.4.6/lib/index.js | 0 .../lib/theme-chalk/fonts/element-icons.woff | Bin .../element-ui@2.4.6/lib/theme-chalk/index.css | 0 scrapydweb/static/{v121dev => v130}/icon/fav.ico | Bin scrapydweb/static/{v121dev => v130}/icon/fav.png | Bin ...-man-spiderman-face-mask-round-avatar-512.png | Bin .../static/{v121dev => v130}/icon/spiderman.png | Bin scrapydweb/static/{v121dev => v130}/js/common.js | 0 .../static/{v121dev => v130}/js/echarts.min.js | 0 .../{v121dev => v130}/js/github_buttons.html | 0 .../{v121dev => v130}/js/github_buttons.js | 0 .../static/{v121dev => v130}/js/icons_menu.js | 0 .../static/{v121dev => v130}/js/jquery.min.js | 0 .../static/{v121dev => v130}/js/multinode.js | 0 .../static/{v121dev => v130}/js/stacktable.js | 0 scrapydweb/static/{v121dev => v130}/js/stats.js | 0 .../static/{v121dev => v130}/js/vue.min.js | 0 .../templates/scrapydweb/cluster_reports.html | 6 +++++- scrapydweb/utils/check_app_config.py | 2 +- scrapydweb/utils/setup_database.py | 7 +++++-- scrapydweb/vars.py | 2 +- scrapydweb/views/api.py | 4 ++-- scrapydweb/views/dashboard/jobs.py | 7 ++++--- scrapydweb/views/files/log.py | 8 ++++---- scrapydweb/views/operations/execute_task.py | 6 +++--- setup.py | 4 ++-- tests/test_aa_logparser.py | 2 +- tests/test_reports.py | 4 ++-- tests/test_schedule.py | 8 +++++--- 48 files changed, 77 insertions(+), 38 deletions(-) rename scrapydweb/static/{v121dev => v130}/css/dropdown.css (100%) rename scrapydweb/static/{v121dev => v130}/css/dropdown_mobileui.css (100%) rename scrapydweb/static/{v121dev => v130}/css/icon_upload_icon_right.css (100%) rename scrapydweb/static/{v121dev => v130}/css/multinode.css (100%) rename scrapydweb/static/{v121dev => v130}/css/stacktable.css (100%) rename scrapydweb/static/{v121dev => v130}/css/stats.css (100%) rename scrapydweb/static/{v121dev => v130}/css/style.css (100%) rename scrapydweb/static/{v121dev => v130}/css/style_mobileui.css (100%) rename scrapydweb/static/{v121dev => v130}/css/utf8.css (100%) rename scrapydweb/static/{v121dev => v130}/css/utf8_mobileui.css (100%) rename scrapydweb/static/{v121dev => v130}/element-ui@2.4.6/lib/index.js (100%) rename scrapydweb/static/{v121dev => v130}/element-ui@2.4.6/lib/theme-chalk/fonts/element-icons.woff (100%) rename scrapydweb/static/{v121dev => v130}/element-ui@2.4.6/lib/theme-chalk/index.css (100%) rename scrapydweb/static/{v121dev => v130}/icon/fav.ico (100%) rename scrapydweb/static/{v121dev => v130}/icon/fav.png (100%) rename scrapydweb/static/{v121dev => v130}/icon/spider-man-spiderman-face-mask-round-avatar-512.png (100%) rename 
scrapydweb/static/{v121dev => v130}/icon/spiderman.png (100%) rename scrapydweb/static/{v121dev => v130}/js/common.js (100%) rename scrapydweb/static/{v121dev => v130}/js/echarts.min.js (100%) rename scrapydweb/static/{v121dev => v130}/js/github_buttons.html (100%) rename scrapydweb/static/{v121dev => v130}/js/github_buttons.js (100%) rename scrapydweb/static/{v121dev => v130}/js/icons_menu.js (100%) rename scrapydweb/static/{v121dev => v130}/js/jquery.min.js (100%) rename scrapydweb/static/{v121dev => v130}/js/multinode.js (100%) rename scrapydweb/static/{v121dev => v130}/js/stacktable.js (100%) rename scrapydweb/static/{v121dev => v130}/js/stats.js (100%) rename scrapydweb/static/{v121dev => v130}/js/vue.min.js (100%) diff --git a/.circleci/config.yml b/.circleci/config.yml index cc25581..0a4147f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -69,14 +69,14 @@ jobs: - run: name: Setup DATA_PATH command: | - printf "\nDATA_PATH = '"$DATA_PATH"'\n" >> scrapydweb_settings_v8.py + printf "\nDATA_PATH = '"$DATA_PATH"'\n" >> scrapydweb_settings_v9.py - when: condition: <> steps: - run: name: Set DATABASE_URL to sqlite command: | - printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v8.py + printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v9.py - when: condition: <> steps: @@ -91,7 +91,7 @@ jobs: name: Set DATABASE_URL to postgresql command: | # postgres://circleci@127.0.0.1:5432 - printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v8.py + printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v9.py - when: condition: <> steps: @@ -121,7 +121,7 @@ jobs: name: Set DATABASE_URL to mysql command: | # mysql://user:passw0rd@127.0.0.1:3306 - printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v8.py + printf "\nDATABASE_URL = '"$DATABASE_URL"'\n" >> scrapydweb_settings_v9.py - run: name: Install dependencies @@ -168,8 +168,8 @@ jobs: - run: name: Generate report command: | - touch scrapydweb_settings_v8.py - cat scrapydweb_settings_v8.py + touch scrapydweb_settings_v9.py + cat scrapydweb_settings_v9.py echo $DATA_PATH echo $DATABASE_URL . 
venv/bin/activate diff --git a/HISTORY.md b/HISTORY.md index 90f9bfa..d753fd8 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,20 @@ Release History =============== +[1.3.0](https://github.com/my8100/scrapydweb/issues?q=is%3Aclosed+milestone%3A1.3.0) (2019-08-04) +------------------ +- New Features + - Add new pages Node Reports and Cluster Reports for aggregating jobs stats [(issue #72)](https://github.com/my8100/scrapydweb/issues/72) +- Improvements + - Adapt to [:link: *LogParser*](https://github.com/my8100/logparser) v0.8.2 + - Add DATA_PATH option for customizing path to save program data [(issue #40)](https://github.com/my8100/scrapydweb/issues/40) + - Add DATABASE_URL option to support MySQL or PostgreSQL backend [(issue #42)](https://github.com/my8100/scrapydweb/issues/42) + - Support specify the latest version of Scrapy project in the Run Spider page [(issue #4)](https://github.com/my8100/scrapydweb/issues/4#issuecomment-475145676) + - Support specify default values of settings & arguments in the Run Spider page [(issue #55)](https://github.com/my8100/scrapydweb/issues/55) +- Others + - Update config file to scrapydweb_settings_v9.py + - Support continuous integration (CI) on [CircleCI](https://circleci.com/) + + 1.2.0 (2019-03-12) ------------------ - New Features diff --git a/README.md b/README.md index c2d32c8..19b999f 100644 --- a/README.md +++ b/README.md @@ -66,10 +66,14 @@ and restart Scrapyd to make it visible externally. ```bash pip install scrapydweb ``` -:heavy_exclamation_mark: Note that you may need to execute `pip install -U pip` first in order to get the latest version of scrapydweb, or download the tar.gz file from https://pypi.org/project/scrapydweb/#files and get it installed via `pip install scrapydweb-x.x.x.tar.gz` +:heavy_exclamation_mark: Note that you may need to execute `python -m pip install --upgrade pip` first in order to get the latest version of scrapydweb, or download the tar.gz file from https://pypi.org/project/scrapydweb/#files and get it installed via `pip install scrapydweb-x.x.x.tar.gz` - Use git: ```bash +pip install --upgrade git+https://github.com/my8100/scrapydweb.git +``` +Or: +```bash git clone https://github.com/my8100/scrapydweb.git cd scrapydweb python setup.py install diff --git a/README_CN.md b/README_CN.md index 1057a06..863dac6 100644 --- a/README_CN.md +++ b/README_CN.md @@ -66,10 +66,14 @@ ```bash pip install scrapydweb ``` -:heavy_exclamation_mark: 如果 pip 安装结果不是最新版本的 scrapydweb,请先执行`pip install -U pip`,或者前往 https://pypi.org/project/scrapydweb/#files 下载 tar.gz 文件并执行安装命令 `pip install scrapydweb-x.x.x.tar.gz` +:heavy_exclamation_mark: 如果 pip 安装结果不是最新版本的 scrapydweb,请先执行`python -m pip install --upgrade pip`,或者前往 https://pypi.org/project/scrapydweb/#files 下载 tar.gz 文件并执行安装命令 `pip install scrapydweb-x.x.x.tar.gz` - 通过 git: ```bash +pip install --upgrade git+https://github.com/my8100/scrapydweb.git +``` +或: +```bash git clone https://github.com/my8100/scrapydweb.git cd scrapydweb python setup.py install diff --git a/requirements.txt b/requirements.txt index d05010a..8b9d417 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ APScheduler>=3.5.3 flask>=1.0.2 flask-compress>=1.4.0 Flask-SQLAlchemy>=2.3.2 -logparser==0.8.1 +logparser==0.8.2 requests>=2.21.0 setuptools>=40.6.3 six>=1.12.0 diff --git a/scrapydweb/__init__.py b/scrapydweb/__init__.py index 4ba53aa..f6ac950 100644 --- a/scrapydweb/__init__.py +++ b/scrapydweb/__init__.py @@ -215,7 +215,8 @@ def register_view(view, endpoint, url_defaults_list): 
register_view(DeployUploadView, 'deploy.upload', [('deploy/upload', None)]) register_view(DeployXhrView, 'deploy.xhr', [('deploy/xhr///', None)]) - from .views.operations.schedule import ScheduleView, ScheduleCheckView, ScheduleRunView, ScheduleXhrView, ScheduleTaskView + from .views.operations.schedule import (ScheduleView, ScheduleCheckView, ScheduleRunView, + ScheduleXhrView, ScheduleTaskView) register_view(ScheduleView, 'schedule', [ ('schedule///', None), ('schedule//', dict(spider=None)), @@ -271,7 +272,7 @@ def handle_template_context(app): STATIC = 'static' VERSION = 'v' + __version__.replace('.', '') # MUST be commented out for released version - VERSION = 'v121dev' + # VERSION = 'v131dev' @app.context_processor def inject_variable(): diff --git a/scrapydweb/__version__.py b/scrapydweb/__version__.py index 17d9166..3175ae2 100644 --- a/scrapydweb/__version__.py +++ b/scrapydweb/__version__.py @@ -1,7 +1,7 @@ # coding: utf-8 __title__ = 'scrapydweb' -__version__ = '1.2.0' +__version__ = '1.3.0' __author__ = 'my8100' __author_email__ = 'my8100@gmail.com' __url__ = 'https://github.com/my8100/scrapydweb' diff --git a/scrapydweb/default_settings.py b/scrapydweb/default_settings.py index aaa7f41..ed0e41a 100644 --- a/scrapydweb/default_settings.py +++ b/scrapydweb/default_settings.py @@ -61,11 +61,14 @@ # to the Scrapyd server. # e.g. '127.0.0.1:6800' or 'localhost:6801', do not forget the port number. LOCAL_SCRAPYD_SERVER = '' + # Enter the directory when you run Scrapyd, run the command below # to find out where the Scrapy logs are stored: # python -c "from os.path import abspath, isdir; from scrapyd.config import Config; path = abspath(Config().get('logs_dir')); print(path); print(isdir(path))" +# Check out https://scrapyd.readthedocs.io/en/stable/config.html#logs-dir for more info. # e.g. 'C:/Users/username/logs' or '/home/username/logs' LOCAL_SCRAPYD_LOGS_DIR = '' + # The default is False, set it to True to automatically run LogParser as a subprocess at startup. # Note that you can run the LogParser service separately via command 'logparser' as you like. # Run 'logparser -h' to find out the config file of LogParser for more advanced settings. 
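For reference, a minimal sketch of how the options touched by this release might look in a user's scrapydweb_settings_v9.py; the values are placeholders taken from the example comments above (and from the CircleCI/MySQL/PostgreSQL URLs shown elsewhere in this patch), not settings shipped by the patch itself:

```python
# scrapydweb_settings_v9.py -- illustrative placeholder values only
LOCAL_SCRAPYD_SERVER = '127.0.0.1:6800'           # do not forget the port number
LOCAL_SCRAPYD_LOGS_DIR = '/home/username/logs'    # path printed by the python -c command above

# New in v1.3.0: customize the path used to save program data (issue #40)
DATA_PATH = '/home/username/scrapydweb_data'

# New in v1.3.0: switch the backend from SQLite to MySQL or PostgreSQL (issue #42)
DATABASE_URL = 'mysql://user:passw0rd@127.0.0.1:3306'
# DATABASE_URL = 'postgres://circleci@127.0.0.1:5432'
```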
diff --git a/scrapydweb/run.py b/scrapydweb/run.py index 090a219..a71d743 100644 --- a/scrapydweb/run.py +++ b/scrapydweb/run.py @@ -133,7 +133,9 @@ def load_custom_settings(config): "Then add your SCRAPYD_SERVERS in the config file and restart scrapydweb.\n".format( file=SCRAPYDWEB_SETTINGS_PY)) else: - sys.exit("\nThe config file '{file}' has been copied to current working directory.\n" + sys.exit("\nATTENTION:\nYou may encounter ERROR if there are any timer tasks added in v1.2.0,\n" + "and you have to restart scrapydweb and manually restart the stopped tasks.\n" + "\nThe config file '{file}' has been copied to current working directory.\n" "Please add your SCRAPYD_SERVERS in the config file and restart scrapydweb.\n".format( file=SCRAPYDWEB_SETTINGS_PY)) diff --git a/scrapydweb/static/v121dev/css/dropdown.css b/scrapydweb/static/v130/css/dropdown.css similarity index 100% rename from scrapydweb/static/v121dev/css/dropdown.css rename to scrapydweb/static/v130/css/dropdown.css diff --git a/scrapydweb/static/v121dev/css/dropdown_mobileui.css b/scrapydweb/static/v130/css/dropdown_mobileui.css similarity index 100% rename from scrapydweb/static/v121dev/css/dropdown_mobileui.css rename to scrapydweb/static/v130/css/dropdown_mobileui.css diff --git a/scrapydweb/static/v121dev/css/icon_upload_icon_right.css b/scrapydweb/static/v130/css/icon_upload_icon_right.css similarity index 100% rename from scrapydweb/static/v121dev/css/icon_upload_icon_right.css rename to scrapydweb/static/v130/css/icon_upload_icon_right.css diff --git a/scrapydweb/static/v121dev/css/multinode.css b/scrapydweb/static/v130/css/multinode.css similarity index 100% rename from scrapydweb/static/v121dev/css/multinode.css rename to scrapydweb/static/v130/css/multinode.css diff --git a/scrapydweb/static/v121dev/css/stacktable.css b/scrapydweb/static/v130/css/stacktable.css similarity index 100% rename from scrapydweb/static/v121dev/css/stacktable.css rename to scrapydweb/static/v130/css/stacktable.css diff --git a/scrapydweb/static/v121dev/css/stats.css b/scrapydweb/static/v130/css/stats.css similarity index 100% rename from scrapydweb/static/v121dev/css/stats.css rename to scrapydweb/static/v130/css/stats.css diff --git a/scrapydweb/static/v121dev/css/style.css b/scrapydweb/static/v130/css/style.css similarity index 100% rename from scrapydweb/static/v121dev/css/style.css rename to scrapydweb/static/v130/css/style.css diff --git a/scrapydweb/static/v121dev/css/style_mobileui.css b/scrapydweb/static/v130/css/style_mobileui.css similarity index 100% rename from scrapydweb/static/v121dev/css/style_mobileui.css rename to scrapydweb/static/v130/css/style_mobileui.css diff --git a/scrapydweb/static/v121dev/css/utf8.css b/scrapydweb/static/v130/css/utf8.css similarity index 100% rename from scrapydweb/static/v121dev/css/utf8.css rename to scrapydweb/static/v130/css/utf8.css diff --git a/scrapydweb/static/v121dev/css/utf8_mobileui.css b/scrapydweb/static/v130/css/utf8_mobileui.css similarity index 100% rename from scrapydweb/static/v121dev/css/utf8_mobileui.css rename to scrapydweb/static/v130/css/utf8_mobileui.css diff --git a/scrapydweb/static/v121dev/element-ui@2.4.6/lib/index.js b/scrapydweb/static/v130/element-ui@2.4.6/lib/index.js similarity index 100% rename from scrapydweb/static/v121dev/element-ui@2.4.6/lib/index.js rename to scrapydweb/static/v130/element-ui@2.4.6/lib/index.js diff --git a/scrapydweb/static/v121dev/element-ui@2.4.6/lib/theme-chalk/fonts/element-icons.woff 
b/scrapydweb/static/v130/element-ui@2.4.6/lib/theme-chalk/fonts/element-icons.woff similarity index 100% rename from scrapydweb/static/v121dev/element-ui@2.4.6/lib/theme-chalk/fonts/element-icons.woff rename to scrapydweb/static/v130/element-ui@2.4.6/lib/theme-chalk/fonts/element-icons.woff diff --git a/scrapydweb/static/v121dev/element-ui@2.4.6/lib/theme-chalk/index.css b/scrapydweb/static/v130/element-ui@2.4.6/lib/theme-chalk/index.css similarity index 100% rename from scrapydweb/static/v121dev/element-ui@2.4.6/lib/theme-chalk/index.css rename to scrapydweb/static/v130/element-ui@2.4.6/lib/theme-chalk/index.css diff --git a/scrapydweb/static/v121dev/icon/fav.ico b/scrapydweb/static/v130/icon/fav.ico similarity index 100% rename from scrapydweb/static/v121dev/icon/fav.ico rename to scrapydweb/static/v130/icon/fav.ico diff --git a/scrapydweb/static/v121dev/icon/fav.png b/scrapydweb/static/v130/icon/fav.png similarity index 100% rename from scrapydweb/static/v121dev/icon/fav.png rename to scrapydweb/static/v130/icon/fav.png diff --git a/scrapydweb/static/v121dev/icon/spider-man-spiderman-face-mask-round-avatar-512.png b/scrapydweb/static/v130/icon/spider-man-spiderman-face-mask-round-avatar-512.png similarity index 100% rename from scrapydweb/static/v121dev/icon/spider-man-spiderman-face-mask-round-avatar-512.png rename to scrapydweb/static/v130/icon/spider-man-spiderman-face-mask-round-avatar-512.png diff --git a/scrapydweb/static/v121dev/icon/spiderman.png b/scrapydweb/static/v130/icon/spiderman.png similarity index 100% rename from scrapydweb/static/v121dev/icon/spiderman.png rename to scrapydweb/static/v130/icon/spiderman.png diff --git a/scrapydweb/static/v121dev/js/common.js b/scrapydweb/static/v130/js/common.js similarity index 100% rename from scrapydweb/static/v121dev/js/common.js rename to scrapydweb/static/v130/js/common.js diff --git a/scrapydweb/static/v121dev/js/echarts.min.js b/scrapydweb/static/v130/js/echarts.min.js similarity index 100% rename from scrapydweb/static/v121dev/js/echarts.min.js rename to scrapydweb/static/v130/js/echarts.min.js diff --git a/scrapydweb/static/v121dev/js/github_buttons.html b/scrapydweb/static/v130/js/github_buttons.html similarity index 100% rename from scrapydweb/static/v121dev/js/github_buttons.html rename to scrapydweb/static/v130/js/github_buttons.html diff --git a/scrapydweb/static/v121dev/js/github_buttons.js b/scrapydweb/static/v130/js/github_buttons.js similarity index 100% rename from scrapydweb/static/v121dev/js/github_buttons.js rename to scrapydweb/static/v130/js/github_buttons.js diff --git a/scrapydweb/static/v121dev/js/icons_menu.js b/scrapydweb/static/v130/js/icons_menu.js similarity index 100% rename from scrapydweb/static/v121dev/js/icons_menu.js rename to scrapydweb/static/v130/js/icons_menu.js diff --git a/scrapydweb/static/v121dev/js/jquery.min.js b/scrapydweb/static/v130/js/jquery.min.js similarity index 100% rename from scrapydweb/static/v121dev/js/jquery.min.js rename to scrapydweb/static/v130/js/jquery.min.js diff --git a/scrapydweb/static/v121dev/js/multinode.js b/scrapydweb/static/v130/js/multinode.js similarity index 100% rename from scrapydweb/static/v121dev/js/multinode.js rename to scrapydweb/static/v130/js/multinode.js diff --git a/scrapydweb/static/v121dev/js/stacktable.js b/scrapydweb/static/v130/js/stacktable.js similarity index 100% rename from scrapydweb/static/v121dev/js/stacktable.js rename to scrapydweb/static/v130/js/stacktable.js diff --git a/scrapydweb/static/v121dev/js/stats.js 
b/scrapydweb/static/v130/js/stats.js similarity index 100% rename from scrapydweb/static/v121dev/js/stats.js rename to scrapydweb/static/v130/js/stats.js diff --git a/scrapydweb/static/v121dev/js/vue.min.js b/scrapydweb/static/v130/js/vue.min.js similarity index 100% rename from scrapydweb/static/v121dev/js/vue.min.js rename to scrapydweb/static/v130/js/vue.min.js diff --git a/scrapydweb/templates/scrapydweb/cluster_reports.html b/scrapydweb/templates/scrapydweb/cluster_reports.html index 2642743..6e7dcbf 100644 --- a/scrapydweb/templates/scrapydweb/cluster_reports.html +++ b/scrapydweb/templates/scrapydweb/cluster_reports.html @@ -12,7 +12,11 @@ {% endblock %} {% block body %} -

{{ selected_nodes|length }} Reports of /{{ project }}/{{ spider }}/{{ job }}/
+
+ {% if selected_nodes %}{% endif %}
+ {{ selected_nodes|length }} Reports of /{{ project }}/{{ spider }}/{{ job }}/
+ {% if selected_nodes %}{% endif %}
+
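The heading above only consumes a few template variables. As a small standalone sketch (plain Jinja2, made-up values, and only the surviving text of the heading, since the surrounding markup is not reproduced in this copy of the patch), it renders like this:

```python
from jinja2 import Template  # pip install jinja2

# Only the visible text of the heading is reproduced; the values are illustrative.
heading = Template("{{ selected_nodes|length }} Reports of /{{ project }}/{{ spider }}/{{ job }}/")
print(heading.render(selected_nodes=[1, 2], project='demo', spider='test', job='2019-08-04T00_00_01'))
# -> 2 Reports of /demo/test/2019-08-04T00_00_01/
```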
diff --git a/scrapydweb/utils/check_app_config.py b/scrapydweb/utils/check_app_config.py index 295be3e..0ee9323 100644 --- a/scrapydweb/utils/check_app_config.py +++ b/scrapydweb/utils/check_app_config.py @@ -334,7 +334,7 @@ def check_connectivity(server): (_group, _ip, _port, _auth) = server try: url = 'http://%s:%s' % (_ip, _port) - r = session.get(url, auth=_auth, timeout=30) + r = session.get(url, auth=_auth, timeout=10) assert r.status_code == 200, "%s got status_code %s" % (url, r.status_code) except Exception as err: logger.error(err) diff --git a/scrapydweb/utils/setup_database.py b/scrapydweb/utils/setup_database.py index ab7361c..0d2523e 100644 --- a/scrapydweb/utils/setup_database.py +++ b/scrapydweb/utils/setup_database.py @@ -51,6 +51,7 @@ def setup_database(database_url, database_path): 'jobs': '/'.join([database_url, DB_JOBS]) } else: + # db names for backward compatibility APSCHEDULER_DATABASE_URI = 'sqlite:///' + '/'.join([database_path, 'apscheduler.db']) # http://flask-sqlalchemy.pocoo.org/2.3/binds/#binds SQLALCHEMY_DATABASE_URI = 'sqlite:///' + '/'.join([database_path, 'timer_tasks.db']) @@ -80,7 +81,8 @@ def setup_mysql(username, password, host, port): """ ModuleNotFoundError: No module named 'MySQLdb' pip install mysqlclient - Python 2: pip install mysqlclient -> MySQLdb/_mysql.c(29) : fatal error C1083: Cannot open include file: 'mysql.h': No such file or directory + Python 2: pip install mysqlclient -> MySQLdb/_mysql.c(29) : + fatal error C1083: Cannot open include file: 'mysql.h': No such file or directory https://stackoverflow.com/questions/51294268/pip-install-mysqlclient-returns-fatal-error-c1083-cannot-open-file-mysql-h https://www.lfd.uci.edu/~gohlke/pythonlibs/#mysqlclient pip install "path to the downloaded mysqlclient.whl file" @@ -148,7 +150,8 @@ def setup_postgresql(username, password, host, port): # creating-utf-8-database-in-postgresql-on-windows10 # cur.execute("CREATE DATABASE %s ENCODING 'UTF8' LC_COLLATE 'en-US' LC_CTYPE 'en-US'" % dbname) - # psycopg2.DataError: new collation (en-US) is incompatible with the collation of the template database (Chinese (Simplified)_People's Republic of China.936) + # psycopg2.DataError: new collation (en-US) is incompatible with the collation of the template database + # (Chinese (Simplified)_People's Republic of China.936) # HINT: Use the same collation as in the template database, or use template0 as template. 
try: cur.execute("CREATE DATABASE %s ENCODING 'UTF8' LC_COLLATE 'en_US.UTF-8' LC_CTYPE 'en_US.UTF-8'" % dbname) diff --git a/scrapydweb/vars.py b/scrapydweb/vars.py index 63544d0..bd20776 100644 --- a/scrapydweb/vars.py +++ b/scrapydweb/vars.py @@ -15,7 +15,7 @@ PYTHON_VERSION = '.'.join([str(n) for n in sys.version_info[:3]]) PY2 = sys.version_info.major < 3 -SCRAPYDWEB_SETTINGS_PY = 'scrapydweb_settings_v8.py' +SCRAPYDWEB_SETTINGS_PY = 'scrapydweb_settings_v9.py' try: custom_settings_module = importlib.import_module(os.path.splitext(SCRAPYDWEB_SETTINGS_PY)[0]) except ImportError: diff --git a/scrapydweb/views/api.py b/scrapydweb/views/api.py index 12a3204..abd4920 100644 --- a/scrapydweb/views/api.py +++ b/scrapydweb/views/api.py @@ -89,10 +89,10 @@ def handle_result(self): elif self.opt == 'liststats': if self.js.get('logparser_version') != self.LOGPARSER_VERSION: if self.project and self.version_spider_job: # 'List Stats' in the Servers page - tip = "'pip install -U logparser' to update LogParser to v%s" % self.LOGPARSER_VERSION + tip = "'pip install --upgrade logparser' to update LogParser to v%s" % self.LOGPARSER_VERSION self.js = dict(status=self.OK, tip=tip) else: # XMLHttpRequest in the Jobs page; POST in jobs.py - self.js['tip'] = ("'pip install -U logparser' on host '%s' and run command 'logparser' " + self.js['tip'] = ("'pip install --upgrade logparser' on host '%s' and run command 'logparser' " "to update LogParser to v%s") % (self.SCRAPYD_SERVER, self.LOGPARSER_VERSION) self.js['status'] = self.ERROR elif self.project and self.version_spider_job: # 'List Stats' in the Servers page diff --git a/scrapydweb/views/dashboard/jobs.py b/scrapydweb/views/dashboard/jobs.py index 46e2740..18d8b53 100644 --- a/scrapydweb/views/dashboard/jobs.py +++ b/scrapydweb/views/dashboard/jobs.py @@ -367,8 +367,9 @@ def handle_jobs_without_db(self): else: if job['finish']: self.finished_jobs.append(job) - job['url_multinode_run'] = url_for('servers', node=self.node, opt='schedule', project=job['project'], - version_job=self.DEFAULT_LATEST_VERSION, spider=job['spider']) + job['url_multinode_run'] = url_for('servers', node=self.node, opt='schedule', + project=job['project'], version_job=self.DEFAULT_LATEST_VERSION, + spider=job['spider']) job['url_schedule'] = url_for('schedule', node=self.node, project=job['project'], version=self.DEFAULT_LATEST_VERSION, spider=job['spider']) job['url_start'] = url_for('api', node=self.node, opt='start', project=job['project'], @@ -384,7 +385,7 @@ def handle_jobs_without_db(self): job['url_stats'] = url_for('log', node=self.node, opt='stats', project=job['project'], ui=self.UI, spider=job['spider'], job=job['job'], job_finished=job_finished) job['url_clusterreports'] = url_for('clusterreports', node=self.node, project=job['project'], - spider=job['spider'], job=job['job']) + spider=job['spider'], job=job['job']) # Items m = re.search(HREF_PATTERN, job['href_items']) if m: diff --git a/scrapydweb/views/files/log.py b/scrapydweb/views/files/log.py index 0a8531f..7e94547 100644 --- a/scrapydweb/views/files/log.py +++ b/scrapydweb/views/files/log.py @@ -33,8 +33,8 @@ job_finished_key_dict = defaultdict(OrderedDict) # For /log/report/ job_finished_report_dict = defaultdict(OrderedDict) -REPORT_KEYS_SET = set(['from_memory', 'status', 'pages', 'items', 'shutdown_reason', 'finish_reason', - 'runtime', 'first_log_time', 'latest_log_time', 'log_categories', 'latest_matches']) +REPORT_KEYS_SET = {'from_memory', 'status', 'pages', 'items', 'shutdown_reason', 'finish_reason', 
'runtime', + 'first_log_time', 'latest_log_time', 'log_categories', 'latest_matches'} # http://flask.pocoo.org/docs/1.0/api/#flask.views.View @@ -110,7 +110,7 @@ def __init__(self): self.email_content_kwargs = {} self.flag = '' - self.jobs_to_keep = self.JOBS_FINISHED_JOBS_LIMIT or 1000 + self.jobs_to_keep = self.JOBS_FINISHED_JOBS_LIMIT or 200 def dispatch_request(self, **kwargs): if self.report_logparser: @@ -205,7 +205,7 @@ def request_stats_by_logparser(self): "Or wait until LogParser parses the log. ") % self.SCRAPYD_SERVER, self.WARN) return elif js.get('logparser_version') != self.LOGPARSER_VERSION: - msg = "'pip install -U logparser' on host '%s' to update LogParser to v%s" % ( + msg = "'pip install --upgrade logparser' on host '%s' to update LogParser to v%s" % ( self.SCRAPYD_SERVER, self.LOGPARSER_VERSION) self.logger.warning(msg) flash(msg, self.WARN) diff --git a/scrapydweb/views/operations/execute_task.py b/scrapydweb/views/operations/execute_task.py index 9b3d0bd..282bd1a 100644 --- a/scrapydweb/views/operations/execute_task.py +++ b/scrapydweb/views/operations/execute_task.py @@ -16,7 +16,7 @@ EXTRACT_URL_SERVER_PATTERN = re.compile(r'//(.+?:\d+)') -class TaskExecuter(object): +class TaskExecutor(object): def __init__(self, task_id, task_name, url_scrapydweb, url_schedule_task, url_delete_task_result, auth, selected_nodes): @@ -159,7 +159,7 @@ def execute_task(task_id): username = metadata.get('username', '') password = metadata.get('password', '') url_delete_task_result = metadata.get('url_delete_task_result', '/1/tasks/xhr/delete/1/1/') - task_executer = TaskExecuter(task_id=task_id, + task_executor = TaskExecutor(task_id=task_id, task_name=task.name, url_scrapydweb=metadata.get('url_scrapydweb', 'http://127.0.0.1:5000'), url_schedule_task=metadata.get('url_schedule_task', '/1/schedule/task/'), @@ -167,6 +167,6 @@ def execute_task(task_id): auth=(username, password) if username and password else None, selected_nodes=json.loads(task.selected_nodes)) try: - task_executer.main() + task_executor.main() except Exception: apscheduler_logger.error(traceback.format_exc()) diff --git a/setup.py b/setup.py index f4f88d1..7b56ae0 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ exec(f.read(), about) with io.open("README.md", 'r', encoding='utf-8') as f: - long_description = re.sub(r':\w+:\s', '', f.read()) # Remove emojis + long_description = re.sub(r':\w+:\s', '', f.read()) # Remove emoji setup( @@ -37,7 +37,7 @@ "flask >= 1.0.2", # May 2, 2018 "flask-compress >= 1.4.0", # Jan 5, 2017 "Flask-SQLAlchemy >= 2.3.2", # Oct 11, 2017 - "logparser == 0.8.1", + "logparser == 0.8.2", "requests >= 2.21.0", # Dec 10, 2018 "setuptools >= 40.6.3", # Dec 11, 2018 "six >= 1.12.0", # Dec 10, 2018 diff --git a/tests/test_aa_logparser.py b/tests/test_aa_logparser.py index b6f34b4..f0af0eb 100644 --- a/tests/test_aa_logparser.py +++ b/tests/test_aa_logparser.py @@ -128,7 +128,7 @@ def rename(name, restore=False): replace_file_content(app.config['DEMO_JSON_PATH'], old, new) req(app, client, view='log', kws=kws, ins=["Mismatching logparser_version 0.0.0 in local stats", - "pip install -U logparser", "Using local logfile:", tab]) + "pip install --upgrade logparser", "Using local logfile:", tab]) replace_file_content(app.config['DEMO_JSON_PATH'], new, old) # delete ScrapydWeb_demo.json in logs diff --git a/tests/test_reports.py b/tests/test_reports.py index 78cbd75..b62fde0 100644 --- a/tests/test_reports.py +++ b/tests/test_reports.py @@ -28,7 +28,7 @@ def test_cluster_reports(app, client): 
                                              spider=cst.SPIDER, job=cst.JOBID)
     url_redirect_to_clusterreports = url_for('clusterreports', node=1, project=cst.PROJECT,
                                              spider=cst.SPIDER, job=cst.JOBID)
-    ins = ['0 Reports of ////', '>Select a job', url_jobs, 'selected_nodes: [],']
+    ins = ['0 Reports of ////', '>Select a job', url_jobs, 'selected_nodes: [],']
     nos = ['>Select nodes']
     req(app, client, view='clusterreports', kws=dict(node=1), ins=ins)
@@ -37,7 +37,7 @@
         '1': 'on',
         '2': 'on',
     }
-    ins[0] = '%s Reports of /%s/%s/%s/' % (len(data), cst.PROJECT, cst.SPIDER, cst.JOBID)
+    ins[0] = '%s Reports of /%s/%s/%s/' % (len(data), cst.PROJECT, cst.SPIDER, cst.JOBID)
     ins[-1] = 'selected_nodes: [1, 2],'
     ins.extend(nos)
     ins.append(url_servers)
diff --git a/tests/test_schedule.py b/tests/test_schedule.py
index efc2179..67b3df1 100644
--- a/tests/test_schedule.py
+++ b/tests/test_schedule.py
@@ -1,4 +1,5 @@
 # coding: utf-8
+import platform
 import re
 from scrapy import __version__ as scrapy_version
@@ -129,7 +130,7 @@ def test_run(app, client):
         nos=[flash, 'class="table wrap"', "start: '%s'," % last_but_two_finished_job_start])
-# Note that in LogParser is enabled in test_enable_logparser(), with PARSE_ROUND_INTERVAL defaults to 60.
+# Note that in LogParser is enabled in test_enable_logparser(), with PARSE_ROUND_INTERVAL defaults to 10.
 # And LOGSTATS_INTERVAL is set to 10 in test_check() above.
 # This test would fail if Scrapy >= 1.5.2 since telnet console now requires username and password
 # https://doc.scrapy.org/en/latest/news.html#scrapy-1-5-2-2019-01-22
@@ -143,7 +144,7 @@ def test_telnet_in_stats(app, client):
     req(app, client, view='schedule.run', kws=dict(node=NODE), data=run_data, ins="run results - ScrapydWeb")
     kws = dict(node=node, opt='stats', project=cst.PROJECT, spider=cst.SPIDER, job=cst.JOBID)
-    for i in range(1, 10):
+    for i in range(1, 4):
         sleep(10)
         print(i * 10)
         text, __ = req(app, client, view='log', kws=kws)
@@ -154,7 +155,8 @@ def test_telnet_in_stats(app, client):
     __, js = req(app, client, view='jobs', kws=dict(node=node), data={})
     assert isinstance(js[KEY]['pages'], int)  # and js[KEY]['pages'] > 0
-    if scrapy_version > '1.5.1':
+    # Linux-5.0.9-301.fc30.x86_64-x86_64-with-fedora-30-Thirty'
+    if (platform.system() == 'Windows' or 'fedora' in platform.platform()) and scrapy_version > '1.5.1':
         print("telnet not available for scrapy_version: %s" % scrapy_version)
         telnet_ins = []
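For clarity, a standalone sketch of the gate the test now applies before asserting on telnet output; only the condition mirrors the hunk above, and the telnet_ins contents here are hypothetical, since the real expectations are not shown in this patch:

```python
import platform

from scrapy import __version__ as scrapy_version

# e.g. 'Linux-5.0.9-301.fc30.x86_64-x86_64-with-fedora-30-Thirty' on the CI image
print(platform.platform())

# Skip the telnet assertions on Windows and on the Fedora-based CI image once
# Scrapy >= 1.5.2, whose telnet console requires a username and password.
# Note the plain string comparison of versions, exactly as in the test.
if (platform.system() == 'Windows' or 'fedora' in platform.platform()) and scrapy_version > '1.5.1':
    telnet_ins = []                    # expect no telnet details in the stats page
else:
    telnet_ins = ['telnet console']    # hypothetical marker, for illustration only
```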