diff --git a/ppr-api/manage.py b/ppr-api/manage.py
new file mode 100644
index 000000000..4ad5de0b7
--- /dev/null
+++ b/ppr-api/manage.py
@@ -0,0 +1,77 @@
+# Copyright © 2019 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Manage the database and some other items required to run the API.
+"""
+import logging
+import os
+
+from flask.cli import FlaskGroup  # replaces flask_script Manager
+from sqlalchemy.sql import text
+
+# models included so that migrate can build the database migrations
+from ppr_api import models  # pylint: disable=unused-import
+from ppr_api import create_app
+from ppr_api.models import db
+
+APP = create_app()
+CLI = FlaskGroup(APP)  # replaces MANAGER
+
+
+def execute_script(session, file_name):
+    """Execute a SQL script as one or more SQL statements in a single file."""
+    print("Executing SQL statements in file " + file_name)
+    with open(file_name, "r") as sql_file:
+        sql_command = ""
+        # Iterate over all lines in the sql file
+        for line in sql_file:
+            # Ignore commented lines
+            if not line.startswith("--") and line.strip("\n"):
+                # Append line to the command string
+                sql_command += line.strip("\n")
+
+                # If the command string ends with ';', it is a full statement
+                if sql_command.endswith(";"):
+                    sql_command = sql_command.replace(";", "")
+                    # print(sql_command)
+                    # Try to execute statement and commit it
+                    try:
+                        session.execute(text(sql_command))
+
+                    # Print the error and continue
+                    except Exception as ex:
+                        print(repr(ex))
+
+                    # Finally, clear command string
+                    finally:
+                        sql_command = ""
+
+        session.commit()
+        sql_file.close()
+
+
+@CLI.command("create_test_data")
+def create_test_data():
+    """Load unit test data in the dev/local environment.
+    Delete all existing test data as a first step."""
+    execute_script(db.session, "test_data/postgres_test_reset.sql")
+    execute_script(db.session, "test_data/postgres_create_first.sql")
+    filenames = os.listdir(os.path.join(os.getcwd(), "test_data/postgres_data_files"))
+    sorted_names = sorted(filenames)
+    for filename in sorted_names:
+        execute_script(db.session, os.path.join(os.getcwd(), ("test_data/postgres_data_files/" + filename)))
+
+
+if __name__ == "__main__":
+    logging.log(logging.INFO, "Running the Flask CLI")
+    CLI()
diff --git a/ppr-api/src/ppr_api/__init__.py b/ppr-api/src/ppr_api/__init__.py
index e5800aceb..26178e5e4 100644
--- a/ppr-api/src/ppr_api/__init__.py
+++ b/ppr-api/src/ppr_api/__init__.py
@@ -35,9 +35,7 @@
 from ppr_api.utils.auth import jwt
 from ppr_api.utils.logging import logger, setup_logging

-setup_logging(
-    os.path.join(os.path.abspath(os.path.dirname(__file__)), "logging.yaml")
-)  # important to do this first
+setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), "logging.yaml"))  # important to do this first


 def create_app(service_environment=APP_RUNNING_ENVIRONMENT, **kwargs):
@@ -50,16 +48,12 @@ def create_app(service_environment=APP_RUNNING_ENVIRONMENT, **kwargs):
     db.init_app(app)
     Migrate(app, db)

-    if (
-        app.config.get("DEPLOYMENT_ENV", "") == "testing"
-    ):  # CI only run upgrade for unit testing.
+    if app.config.get("DEPLOYMENT_ENV", "") == "testing":  # CI only run upgrade for unit testing.
         logger.info("Running migration upgrade.")
         with app.app_context():
             upgrade(directory="migrations", revision="head", sql=False, tag=None)
         # Alembic has it's own logging config, we'll need to restore our logging here.
-        setup_logging(
-            os.path.join(os.path.abspath(os.path.dirname(__file__)), "logging.yaml")
-        )
+        setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), "logging.yaml"))
         logger.info("Finished migration upgrade.")
     else:
         logger.info("Logging, migrate set up.")
@@ -114,9 +108,7 @@ def setup_test_data():
     """Load unit test data in the dev/local environment. Delete all existing test data as a first step."""
     try:
         test_path = os.getcwd()
-        logger.info(
-            f"Executing DB scripts to create test data from test data dir {test_path}..."
-        )
+        logger.info(f"Executing DB scripts to create test data from test data dir {test_path}...")
         # execute_script(db.session, os.path.join(test_path, "test_data/postgres_test_reset.sql"))
         execute_script(db.session, "test_data/postgres_create_first.sql")
         filenames = os.listdir(os.path.join(test_path, "test_data/postgres_data_files"))
diff --git a/ppr-api/src/ppr_api/config.py b/ppr-api/src/ppr_api/config.py
index cb6803bb0..761473b3c 100644
--- a/ppr-api/src/ppr_api/config.py
+++ b/ppr-api/src/ppr_api/config.py
@@ -101,9 +101,7 @@ class Config:  # pylint: disable=too-few-public-methods
     if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None):
         SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}"
     else:
-        SQLALCHEMY_DATABASE_URI = (
-            f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
-        )
+        SQLALCHEMY_DATABASE_URI = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"

     # Connection pool settings
     DB_MIN_POOL_SIZE = os.getenv("DATABASE_MIN_POOL_SIZE", "2")
@@ -143,25 +141,15 @@ class Config:  # pylint: disable=too-few-public-methods
     # ACCOUNT_SVC_TIMEOUT = os.g

     # DB Query limits on result set sizes
-    ACCOUNT_REGISTRATIONS_MAX_RESULTS = os.getenv(
-        "ACCOUNT_REGISTRATIONS_MAX_RESULTS", "100"
-    )
+    ACCOUNT_REGISTRATIONS_MAX_RESULTS = os.getenv("ACCOUNT_REGISTRATIONS_MAX_RESULTS", "100")
     ACCOUNT_DRAFTS_MAX_RESULTS = os.getenv("ACCOUNT_DRAFTS_MAX_RESULTS", "1000")
     ACCOUNT_SEARCH_MAX_RESULTS = os.getenv("ACCOUNT_SEARCH_MAX_RESULTS", "1000")

     # DEBTOR search trgram similarity quotients
-    SIMILARITY_QUOTIENT_BUSINESS_NAME: float = float(
-        os.getenv("SIMILARITY_QUOTIENT_BUSINESS_NAME", "0.6")
-    )
-    SIMILARITY_QUOTIENT_FIRST_NAME: float = float(
-        os.getenv("SIMILARITY_QUOTIENT_FIRST_NAME", "0.4")
-    )
-    SIMILARITY_QUOTIENT_LAST_NAME: float = float(
-        os.getenv("SIMILARITY_QUOTIENT_LAST_NAME", "0.29")
-    )
-    SIMILARITY_QUOTIENT_DEFAULT: float = float(
-        os.getenv("SIMILARITY_QUOTIENT_DEFAULT", "0.5")
-    )
+    SIMILARITY_QUOTIENT_BUSINESS_NAME: float = float(os.getenv("SIMILARITY_QUOTIENT_BUSINESS_NAME", "0.6"))
+    SIMILARITY_QUOTIENT_FIRST_NAME: float = float(os.getenv("SIMILARITY_QUOTIENT_FIRST_NAME", "0.4"))
+    SIMILARITY_QUOTIENT_LAST_NAME: float = float(os.getenv("SIMILARITY_QUOTIENT_LAST_NAME", "0.29"))
+    SIMILARITY_QUOTIENT_DEFAULT: float = float(os.getenv("SIMILARITY_QUOTIENT_DEFAULT", "0.5"))

     # Search results report number of financing statements threshold for async requests.
     SEARCH_PDF_ASYNC_THRESHOLD: int = int(os.getenv("SEARCH_PDF_ASYNC_THRESHOLD", "75"))
@@ -174,36 +162,22 @@ class Config:  # pylint: disable=too-few-public-methods

     # Google APIs and cloud storage
     GOOGLE_DEFAULT_SA = os.getenv("GOOGLE_DEFAULT_SA")
-    GCP_CS_SA_SCOPES = os.getenv(
-        "GCP_CS_SA_SCOPES", "https://www.googleapis.com/auth/cloud-platform"
-    )
+    GCP_CS_SA_SCOPES = os.getenv("GCP_CS_SA_SCOPES", "https://www.googleapis.com/auth/cloud-platform")
     # Storage of search reports
     GCP_CS_BUCKET_ID = os.getenv("GCP_CS_BUCKET_ID", "ppr_search_results_dev")
     # Storage of verification mail reports
-    GCP_CS_BUCKET_ID_VERIFICATION = os.getenv(
-        "GCP_CS_BUCKET_ID_VERIFICATION", "ppr_verification_report_dev"
-    )
+    GCP_CS_BUCKET_ID_VERIFICATION = os.getenv("GCP_CS_BUCKET_ID_VERIFICATION", "ppr_verification_report_dev")
     # Storage of registration verification reports
-    GCP_CS_BUCKET_ID_REGISTRATION = os.getenv(
-        "GCP_CS_BUCKET_ID_REGISTRATION", "ppr_registration_report_dev"
-    )
+    GCP_CS_BUCKET_ID_REGISTRATION = os.getenv("GCP_CS_BUCKET_ID_REGISTRATION", "ppr_registration_report_dev")
     # Storage of mail verification reports
     GCP_CS_BUCKET_ID_MAIL = os.getenv("GCP_CS_BUCKET_ID_MAIL", "ppr_mail_report_dev")

     # Pub/Sub
     GCP_PS_PROJECT_ID = os.getenv("DEPLOYMENT_PROJECT", "eogruh-dev")
-    GCP_PS_SEARCH_REPORT_TOPIC = os.getenv(
-        "GCP_PS_SEARCH_REPORT_TOPIC", "ppr-search-report"
-    )
-    GCP_PS_NOTIFICATION_TOPIC = os.getenv(
-        "GCP_PS_NOTIFICATION_TOPIC", "ppr-api-notification"
-    )
-    GCP_PS_VERIFICATION_REPORT_TOPIC = os.getenv(
-        "GCP_PS_VERIFICATION_REPORT_TOPIC", "ppr-mail-report"
-    )
-    GCP_PS_REGISTRATION_REPORT_TOPIC = os.getenv(
-        "GCP_PS_REGISTRATION_REPORT_TOPIC", "ppr-registration-report"
-    )
+    GCP_PS_SEARCH_REPORT_TOPIC = os.getenv("GCP_PS_SEARCH_REPORT_TOPIC", "ppr-search-report")
+    GCP_PS_NOTIFICATION_TOPIC = os.getenv("GCP_PS_NOTIFICATION_TOPIC", "ppr-api-notification")
+    GCP_PS_VERIFICATION_REPORT_TOPIC = os.getenv("GCP_PS_VERIFICATION_REPORT_TOPIC", "ppr-mail-report")
+    GCP_PS_REGISTRATION_REPORT_TOPIC = os.getenv("GCP_PS_REGISTRATION_REPORT_TOPIC", "ppr-registration-report")

     GATEWAY_URL = os.getenv("GATEWAY_URL", "https://bcregistry-dev.apigee.net")
     SUBSCRIPTION_API_KEY = os.getenv("SUBSCRIPTION_API_KEY")
@@ -215,9 +189,7 @@ class Config:  # pylint: disable=too-few-public-methods
     MAX_SIZE_SEARCH_RT: int = int(os.getenv("MAX_SIZE_SEARCH_RT", "225000"))
     # Default 2, set to 1 to revert to original report api client
     REPORT_VERSION = os.getenv("REPORT_VERSION", "2")
-    REPORT_API_AUDIENCE = os.getenv(
-        "REPORT_API_AUDIENCE", "https://gotenberg-p56lvhvsqa-nn.a.run.app"
-    )
+    REPORT_API_AUDIENCE = os.getenv("REPORT_API_AUDIENCE", "https://gotenberg-p56lvhvsqa-nn.a.run.app")
     # Number of registrations threshold for search report light format.
     REPORT_SEARCH_LIGHT: int = int(os.getenv("REPORT_SEARCH_LIGHT", "700"))

@@ -257,9 +229,7 @@ class UnitTestingConfig(Config):  # pylint: disable=too-few-public-methods
     DB_HOST = os.getenv("DATABASE_TEST_HOST", "")
     DB_PORT = os.getenv("DATABASE_TEST_PORT", "5432")
     # SQLALCHEMY_DATABASE_URI = f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
-    SQLALCHEMY_DATABASE_URI = (
-        f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
-    )
+    SQLALCHEMY_DATABASE_URI = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"

     # JWT OIDC settings
     # JWT_OIDC_TEST_MODE will set jwt_manager to use
diff --git a/ppr-api/test_data/create_test_data.py b/ppr-api/test_data/create_test_data.py
deleted file mode 100644
index 9118b137c..000000000
--- a/ppr-api/test_data/create_test_data.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Create all unit test data.
-   The scripts run in the following order:
-   1. ./test_reset.sql
-   2. ./create_first.sql
-   3. All the files in ./test_data sorted by name.
-"""
-import os
-
-from sqlalchemy.sql import text
-
-from ppr_api import create_app
-from ppr_api.models import db
-#from test_data.test_single import execute_file
-
-
-def execute_script(session, file_name):
-    print('Executing SQL statements in file ' + file_name)
-    with open(file_name, 'r') as sql_file:
-        sql_command = ''
-        # Iterate over all lines in the sql file
-        for line in sql_file:
-            # Ignore commented lines
-            if not line.startswith('--') and line.strip('\n'):
-                # Append line to the command string
-                sql_command += line.strip('\n')
-
-                # If the command string ends with ';', it is a full statement
-                if sql_command.endswith(';'):
-                    sql_command = sql_command.replace(';', '')
-                    # print('Executing SQL: ' + sql_command)
-                    # Try to execute statement and commit it
-                    try:
-                        session.execute(text(sql_command))
-
-                    # Assert in case of error
-                    except Exception as ex:
-                        print(repr(ex))
-
-                    # Finally, clear command string
-                    finally:
-                        sql_command = ''
-
-        session.commit()
-        sql_file.close()
-
-
-app = create_app('testing')
-with app.app_context():
-    conn = db.engine.connect()
-    options = dict(bind=conn, binds={})
-    session = db.create_scoped_session(options=options)
-
-    execute_script(session, 'test_data/postgres_test_reset.sql')
-    execute_script(session, 'test_data/postgres_create_first.sql')
-    filenames = os.listdir(os.path.join(os.getcwd(), 'test_data/postgres_data_files'))
-    sorted_names = sorted(filenames)
-    for filename in sorted_names:
-        execute_script(session, os.path.join(os.getcwd(), ('test_data/postgres_data_files/' + filename)))
-
-    conn.close()
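
A minimal sketch of the accumulate-until-";" statement splitting that `execute_script` performs, run against an in-memory SQLite engine rather than the project's PostgreSQL database; the table and SQL below are illustrative only and assume SQLAlchemy 1.4+.

```python
# Sketch of execute_script's statement splitting, assuming SQLAlchemy 1.4+ and
# an in-memory SQLite engine; SAMPLE_SQL stands in for a real test_data file.
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from sqlalchemy.sql import text

SAMPLE_SQL = """\
-- comment lines are skipped, just like in the data files
CREATE TABLE example (id INTEGER PRIMARY KEY,
                      name TEXT);
INSERT INTO example (id, name) VALUES (1, 'first');
"""

engine = create_engine("sqlite:///:memory:")
with Session(engine) as session:
    sql_command = ""
    for line in SAMPLE_SQL.splitlines():
        if not line.startswith("--") and line.strip():
            # Accumulate lines until the statement terminator is reached.
            sql_command += line
            if sql_command.endswith(";"):
                session.execute(text(sql_command.replace(";", "")))
                sql_command = ""
    session.commit()
    print(session.execute(text("SELECT name FROM example")).scalars().all())
```

And a hypothetical way to drive the new Click group from a test or CI step instead of running `python manage.py create_test_data` in a shell; it assumes the working directory is `ppr-api`, a test database is configured, and that pointing `FLASK_APP` at `manage.py` lets the command's app context load.

```python
# Hypothetical CI/test helper: invokes the new command through Click's runner.
# Assumes ppr-api is the working directory and a test database is configured.
from click.testing import CliRunner

from manage import CLI

runner = CliRunner()
result = runner.invoke(CLI, ["create_test_data"], env={"FLASK_APP": "manage.py"})
print(result.exit_code, result.output)
```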