Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

moving pre_prod_v15 to pre-prod #34

Closed
wants to merge 45 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
873709e
fix: connection pool install new-site error fix
tohidmalik002 Oct 19, 2024
9865091
fix: connection pool error fix for setup db
tohidmalik002 Oct 19, 2024
96f2765
fix: Change in condition builder
Oct 22, 2024
ac338e7
Merge pull request #21 from 8848digital/350_condition_builder
tinadn Oct 22, 2024
32249df
Merge pull request #20 from 8848digital/connection_pool_postgres_v2
aasif-patel Oct 22, 2024
45ace3a
Revert "fix: Change in condition builder"
aasif-patel Oct 22, 2024
3f6c3c4
Merge pull request #23 from 8848digital/revert-21-350_condition_builder
aasif-patel Oct 22, 2024
007fa30
fix: 250 Condition Builder for
Oct 23, 2024
56d2dfd
fix: removed filter to select mandatory table only for data improt se…
anandk-spyke-o1 Oct 23, 2024
b26b3e9
Merge pull request #24 from 8848digital/fix_condition_builder_
tinadn Oct 23, 2024
b86495a
fix: 'in' type condition from Filter
Oct 23, 2024
711edbf
Merge pull request #25 from 8848digital/345-data_import_select_mendat…
tinadn Oct 23, 2024
897f56b
Merge pull request #26 from 8848digital/fix_condition_builder_
tinadn Oct 23, 2024
7319a74
Fixed: Added unique validation for postgres
Oct 24, 2024
ee0e719
Fixed: Added validation msg for updation in postgress
Oct 24, 2024
d23e8da
fix: Restrict Link field with filter
Oct 24, 2024
faaf5c2
Merge pull request #28 from 8848digital/340_link_field
aasif-patel Oct 25, 2024
dd8e3b4
feat: Handle list type filter
Oct 28, 2024
cdf6610
Merge pull request #29 from 8848digital/fix_link_filter_446
aasif-patel Oct 29, 2024
ad04ed7
fix: avoid creating connection pool for test cases
tohidmalik002 Oct 29, 2024
5e5c76c
Merge pull request #30 from 8848digital/connection_pool
aasif-patel Oct 29, 2024
664d531
fix: replace comma to the blank value
bansodegaurav8848 Oct 31, 2024
5b931aa
fix: fix drop site error
tohidmalik002 Oct 31, 2024
40430ce
Merge pull request #31 from 8848digital/461_resolve_search_link_field
tinadn Nov 4, 2024
38a89d6
fix: error returning connecting pool on site restore
tohidmalik002 Nov 6, 2024
1a3afcd
fix: set_query filter for
Nov 6, 2024
89a5865
Merge pull request #33 from 8848digital/issue_558
tinadn Nov 6, 2024
ae77fb2
Merge pull request #32 from 8848digital/connection_pool
tinadn Nov 6, 2024
e378de5
Merge pull request #27 from 8848digital/248_fixing_unique_validation
aasif-patel Nov 6, 2024
dce73ab
Revert "248 fixing unique validation"
tinadn Nov 7, 2024
4fa45ab
Merge pull request #35 from 8848digital/revert-27-248_fixing_unique_v…
tinadn Nov 7, 2024
6360f15
Fixed: Added validation msg for updation in postgress
Oct 24, 2024
82e5492
Fixed: Added unique validation for postgres
Oct 24, 2024
7b76b2c
Fixed: validate attribute of unique
Abhishek8848 Nov 7, 2024
ae6e578
Merge pull request #37 from 8848digital/248_unique_validation_2
tinadn Nov 8, 2024
aab252b
fix: Fixed trasaction aborted issue in bulk operation
patel-asif45 Nov 11, 2024
59a3b7c
fix: Update Condition Issue Fix
chintanshah8848 Nov 11, 2024
88bb2d8
feat: validation for user link field
Nov 12, 2024
106eb23
Merge pull request #40 from 8848digital/fix_280
tinadn Nov 12, 2024
2818f2b
Merge pull request #39 from 8848digital/record_update_issue_fix
tinadn Nov 12, 2024
9594c03
fix: connection count
sibikumarkuppusamy Nov 12, 2024
5802aee
Merge pull request #38 from 8848digital/bulk_operation_issue
Satya8848 Nov 12, 2024
2fff6ff
Merge pull request #41 from 8848digital/dashboard-count-fix
tinadn Nov 12, 2024
9ccff64
fix: Error is selecting Work Order in Stock Entry
anandk-spyke-o1 Nov 12, 2024
584557c
Merge pull request #42 from 8848digital/issue-582
tinadn Nov 14, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 21 additions & 2 deletions frappe/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import frappe.utils
from frappe import _
from frappe.desk.reportview import validate_args
from frappe.desk.search import search_link
from frappe.model.db_query import check_parent_permission
from frappe.model.utils import is_virtual_doctype
from frappe.utils import get_safe_filters
Expand Down Expand Up @@ -416,7 +417,7 @@ def is_document_amended(doctype, docname):


@frappe.whitelist()
def validate_link(doctype: str, docname: str, fields=None):
def validate_link(doctype: str, docname: str, fields=None, args=None):
if not isinstance(doctype, str):
frappe.throw(_("DocType must be a string"))

Expand All @@ -432,7 +433,24 @@ def validate_link(doctype: str, docname: str, fields=None):
)

values = frappe._dict()
args = frappe.parse_json(args)
filters = args.filters or frappe._dict()

standard_queries = frappe.get_hooks().standard_queries or {}

if not args.get("query") and doctype in standard_queries:
args.update({"query" : standard_queries[doctype][-1]})

if args.get("query"):
if not search_link(doctype, docname, args.get("query"), filters):
return values
filters = {'name' : docname}
else:
if isinstance(filters, list):
filters.append(["name", "=", docname])
else:
filters.update({'name' : docname})

if is_virtual_doctype(doctype):
try:
frappe.get_doc(doctype, docname)
Expand All @@ -444,7 +462,8 @@ def validate_link(doctype: str, docname: str, fields=None):
)
return values

values.name = frappe.db.get_value(doctype, docname, cache=True)
result = frappe.db.get_list(doctype, filters = filters,pluck = 'name')
if result: values.name = result[0]

fields = frappe.parse_json(fields)
if not values.name or not fields:
Expand Down
1 change: 1 addition & 0 deletions frappe/commands/site.py
Original file line number Diff line number Diff line change
Expand Up @@ -915,6 +915,7 @@ def _drop_site(
from frappe.utils.backups import scheduled_backup

frappe.init(site=site)
frappe.flags.in_drop_site = True
frappe.connect()

try:
Expand Down
9 changes: 7 additions & 2 deletions frappe/custom/doctype/custom_field/custom_field.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,14 @@ frappe.ui.form.on("Custom Field", {
var filters = [
["DocType", "issingle", "=", 0],
["DocType", "custom", "=", 0],
["DocType", "name", "not in", frappe.model.core_doctypes_list],
["DocType", "restrict_to_domain", "in", frappe.boot.active_domains],
["DocType", "name", "not in", frappe.model.core_doctypes_list]

];

if (Array.isArray(frappe.boot.active_domains) && frappe.boot.active_domains.some(domain => domain)) {
filters.push(["DocType", "restrict_to_domain", "in", frappe.boot.active_domains]);
}

if (frappe.session.user !== "Administrator") {
filters.push(["DocType", "module", "not in", ["Core", "Custom"]]);
}
Expand Down
15 changes: 9 additions & 6 deletions frappe/custom/doctype/customize_form/customize_form.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,16 @@ frappe.ui.form.on("Customize Form", {

onload: function (frm) {
frm.set_query("doc_type", function () {
let filters = [
["DocType", "issingle", "=", 0],
["DocType", "custom", "=", 0],
["DocType", "name", "not in", frappe.model.core_doctypes_list]
]
if (Array.isArray(frappe.boot.active_domains) && frappe.boot.active_domains.some(domain => domain)) {
filters.push(["DocType", "restrict_to_domain", "in", frappe.boot.active_domains]);
}
return {
filters: [
["DocType", "issingle", "=", 0],
["DocType", "custom", "=", 0],
["DocType", "name", "not in", frappe.model.core_doctypes_list],
["DocType", "restrict_to_domain", "in", frappe.boot.active_domains],
],
filters: filters,
};
});

Expand Down
3 changes: 3 additions & 0 deletions frappe/database/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,9 @@ def sql(
try:
self._cursor.execute(query, values)
except Exception as e:
if self.db_type == "postgres":
frappe.db.rollback()

if self.is_syntax_error(e):
frappe.log(f"Syntax error in query:\n{query} {values or ''}")

Expand Down
87 changes: 62 additions & 25 deletions frappe/database/postgres/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,47 +126,79 @@ def is_interface_error(e):
class ConnectionPool:
_connection_pool = None
_lock = threading.Lock()
_init_lock = threading.Lock()

@classmethod
def _initialize(cls, conn_settings = {}):
if not cls._connection_pool:
with cls._lock:
if cls._connection_pool is None and conn_settings:
print("Initializing Connection Pool")
cls._connection_pool = pool.ThreadedConnectionPool(
minconn=5,
maxconn=100,
**conn_settings
)
def _initialize(cls, conn_settings={}):
with cls._init_lock:
if cls._connection_pool is None and conn_settings:
print("Initializing Connection Pool")
cls._connection_pool = pool.ThreadedConnectionPool(
minconn=5,
maxconn=100,
**conn_settings
)
return cls._connection_pool

@classmethod
def get_connection(cls, conn_settings = {}):
if not cls._connection_pool:
def get_connection(cls, conn_settings={}):
if cls._connection_pool is None:
cls._initialize(conn_settings)
try:
conn = cls._connection_pool.getconn()
conn = cls._connection_pool.getconn()
if not cls._is_connection_valid(conn):
cls._invalidate_pool(conn_settings)
conn = cls._connection_pool.getconn()
except Exception as e:
conn = psycopg2.connect(**conn_settings)
print(f"Error getting connection: {e}")
conn = cls._create_new_connection(conn_settings)
return conn

@classmethod
def put_connection(cls, conn):
def _is_connection_valid(cls, conn):
try:
with conn.cursor() as cursor:
cursor.execute("SELECT 1")
return True
except Exception:
return False

@classmethod
def _invalidate_pool(cls, conn_settings):
with cls._lock:
print("Invalidating connection pool")
cls._connection_pool = None # Invalidate the pool
cls._initialize(conn_settings) # Reinitialize the pool
print("Finished reinitializing the pool")

@classmethod
def _create_new_connection(cls, conn_settings):
try:
cls._connection_pool.putconn(conn)
return psycopg2.connect(**conn_settings)
except Exception as e:
conn.close()
print(f"Error creating new connection: {e}")
return None

@classmethod
def get_connection_pool(cls, conn_settings = {}):
if not cls._connection_pool:
cls._initialize(conn_settings)
def put_connection(cls, conn):
    """Return *conn* to the pool; close it outright if the pool rejects it."""
    if not conn:
        return
    try:
        cls._connection_pool.putconn(conn)
    except Exception as exc:
        print(f"Error returning connection to pool: {exc}")
        conn.close()

@classmethod
def get_connection_pool(cls):
    """Return the process-wide pool, bootstrapping it on first use."""
    pool_ref = cls._connection_pool
    if pool_ref is not None:
        return pool_ref
    cls._initialize()  # lazy bootstrap; no settings are available here
    return cls._connection_pool

@classmethod
def close_all_connections(cls):
    """Shut down every pooled connection and forget the pool object."""
    active = cls._connection_pool
    if not active:
        return
    active.closeall()
    cls._connection_pool = None  # force re-initialization on next use

class PostgresDatabase(PostgresExceptionUtil, Database):
REGEX_CHARACTER = "~"
Expand Down Expand Up @@ -218,7 +250,10 @@ def last_query(self):
def close(self):
"""Close database connection."""
if self._conn:
ConnectionPool.put_connection(self._conn)
if ConnectionPool._connection_pool and not (frappe.flags.in_install_db or frappe.flags.in_test or frappe.flags.in_drop_site):
ConnectionPool.put_connection(self._conn)
else:
self._conn.close()
self._cursor = None
self._conn = None

Expand All @@ -234,8 +269,10 @@ def get_connection(self):
}
if self.port:
conn_settings["port"] = self.port

conn = ConnectionPool.get_connection(conn_settings)
if frappe.flags.in_install_db or frappe.flags.in_test or frappe.flags.in_drop_site:
conn = psycopg2.connect(**conn_settings)
else:
conn = ConnectionPool.get_connection(conn_settings)
conn.set_isolation_level(ISOLATION_LEVEL_REPEATABLE_READ)
return conn

Expand Down
13 changes: 13 additions & 0 deletions frappe/database/postgres/setup_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,19 @@ def setup_database(force, source_sql=None, verbose=False):
root_conn = get_root_connection(frappe.flags.root_login, frappe.flags.root_password)
root_conn.commit()
root_conn.sql("end")

# Terminate existing connections
terminate_query = f"""
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE pg_stat_activity.datname = '{frappe.conf.db_name}'
AND pid <> pg_backend_pid();
"""
try:
root_conn.sql(terminate_query)
except Exception as e:
print(f"Error executing terminate_query: {e}")

root_conn.sql(f'DROP DATABASE IF EXISTS "{frappe.conf.db_name}"')

# If user exists, just update password
Expand Down
1 change: 1 addition & 0 deletions frappe/desk/notifications.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,7 @@ def get_doc_count(doctype, filters):
)
)
except:
frappe.db.rollback()
return 0


Expand Down
2 changes: 2 additions & 0 deletions frappe/desk/reportview.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,8 @@ def get_count() -> int:
if args.limit:
args.fields = [fieldname]
partial_query = execute(**args, run=0)
if isinstance(partial_query, list):
return 0
count = frappe.db.sql(f"""select count(*) from ( {partial_query} ) p""")[0][0]
else:
args.fields = [f"count({fieldname}) as total_count"]
Expand Down
2 changes: 1 addition & 1 deletion frappe/desk/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ def search_widget(
order_by = f"`tab{doctype}`.idx desc, {order_by_based_on_meta}"

if not meta.translated_doctype:
_txt = frappe.db.escape((txt or "").replace("%", "").replace("@", ""))
_txt = frappe.db.escape((txt or "").replace("%", "").replace("@", "").replace(",",""))
# locate returns 0 if string is not found, convert 0 to null and then sort null to end in order by
_relevance = f"(1 / nullif(locate({_txt}, `tab{doctype}`.`name`), 0))"
formatted_fields.append(f"""{_relevance} as `_relevance`""")
Expand Down
46 changes: 46 additions & 0 deletions frappe/model/base_document.py
Original file line number Diff line number Diff line change
Expand Up @@ -532,6 +532,9 @@ def db_insert(self, ignore_if_duplicate=False):
# name will be set by document class in most cases
set_new_name(self)

if frappe.db.db_type == "postgres":
self.show_unique_validation_message_for_postgress()

conflict_handler = ""
# On postgres we can't implcitly ignore PK collision
# So instruct pg to ignore `name` field conflicts
Expand Down Expand Up @@ -589,8 +592,51 @@ def db_insert(self, ignore_if_duplicate=False):

self.set("__islocal", False)

def show_unique_validation_message_for_postgress(self):
    """Pre-check unique fields and raise a readable error on duplicates.

    Postgres reports unique-constraint violations with an opaque driver
    error, so before inserting we check each unique column of this
    document (case-insensitively) against its table and surface a
    friendly message instead.

    Raises:
        frappe.DuplicateEntryError: when a case-insensitive match exists
            for any populated unique field.
    """
    for column in self.get_unique_columns():
        value = self.get_value(column)
        # Skip unset fields: NULLs never conflict under a unique
        # constraint, and the previous `.lower()` call crashed on None.
        if value is None or value == "":
            continue

        # Check each unique column independently. Combining them with
        # AND (as before) missed a duplicate in one column whenever any
        # other unique column differed.
        # NOTE(review): assumes unique fields are text columns — LOWER()
        # on a non-text column errors in Postgres; confirm against usage.
        existing = frappe.db.sql(
            f"""SELECT COUNT(*) FROM "tab{self.doctype}" WHERE LOWER({column}) = LOWER(%s)""",
            (value,),
        )[0][0]

        if existing > 0:
            # Name the column that actually conflicted, humanized.
            label = column.replace("_", " ").title()
            frappe.msgprint(
                _("{0} must be unique").format(label),
                title=_("Message"),
                indicator="blue",
            )
            raise frappe.DuplicateEntryError(self.doctype, self.name)

def get_unique_columns(self):
    """Return the fieldnames of this doctype's fields flagged as unique,
    as declared in the doctype meta."""
    doctype_meta = frappe.get_meta(self.doctype)
    return [
        docfield.fieldname
        for docfield in doctype_meta.fields
        if getattr(docfield, "unique", None)
    ]

def db_update(self):
if self.get("__islocal") or not self.name:
if frappe.db.db_type == "postgres":
self.show_unique_validation_message_for_postgress()
self.db_insert()
return

Expand Down
Loading
Loading