
Commit 9e53ddb
Merge pull request #12 from navariltd/fix26
fix - handle bulk submission of items, prevent deletion of items,
maniamartial authored Nov 29, 2024
2 parents 2fcd8c8 + 0711d02 commit 9e53ddb
Showing 14 changed files with 878 additions and 549 deletions.
2 changes: 2 additions & 0 deletions kenya_compliance/hooks.py
@@ -273,7 +273,9 @@
"Item": {
"validate": [
"kenya_compliance.kenya_compliance.overrides.server.item.validate"

],
"on_trash": "kenya_compliance.kenya_compliance.overrides.server.item.prevent_item_deletion"
},
}

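The new on_trash hook wires Item deletion to prevent_item_deletion, whose body is not part of this diff. A minimal sketch of what such a Frappe on_trash handler could look like, assuming it blocks deletion once an item has been registered with eTims (the custom_item_registered check is an assumption, not taken from this commit):

import frappe
from frappe import _


def prevent_item_deletion(doc, method=None):
    # Hypothetical guard: refuse to delete an Item that was already registered with eTims
    if doc.get("custom_item_registered"):
        frappe.throw(_("This item is registered with eTims and cannot be deleted."))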
97 changes: 49 additions & 48 deletions kenya_compliance/kenya_compliance/apis/apis.py
@@ -65,10 +65,12 @@ def bulk_submit_sales_invoices(docs_list: str) -> None:
def bulk_register_item(docs_list: str) -> None:
data = json.loads(docs_list)
all_items = frappe.db.get_all("Item", {"custom_item_registered": 0}, ["name"])

for record in data:
for item in all_items:
-if record == item.item_code:
+if record == item.name:
item = frappe.get_doc("Item", record, for_update=False)
valuation_rate = item.valuation_rate if item.valuation_rate is not None else 0

request_data = {
"name": item.name,
"company_name": frappe.defaults.get_user_default("Company"),
@@ -83,7 +85,7 @@ def bulk_register_item(docs_list: str) -> None:
"taxTyCd": item.get("custom_taxation_type", "B"),
"btchNo": None,
"bcd": None,
"dftPrc": round(item.valuation_rate, 2),
"dftPrc": round(valuation_rate, 2),
"grpPrcL1": None,
"grpPrcL2": None,
"grpPrcL3": None,
@@ -98,12 +100,11 @@ def bulk_register_item(docs_list: str) -> None:
"modrId": split_user_email(item.modified_by),
"modrNm": item.modified_by,
}

perform_item_registration(request_data=json.dumps(request_data))


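A hedged usage sketch of the bulk registration entry point above: the function takes a JSON-encoded list of Item names, registers only items whose custom_item_registered flag is still 0, and now falls back to a valuation rate of 0 when none is set. The item names below are illustrative only:

import json

from kenya_compliance.kenya_compliance.apis.apis import bulk_register_item

# Hypothetical call: submit two not-yet-registered items for eTims registration
bulk_register_item(json.dumps(["Widget-001", "Widget-002"]))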
@frappe.whitelist()
-def perform_customer_search(request_data: str) -> None:
+def perform_customer_search(request_data: str, vendor="OSCU KRA") -> None:
"""Search customer details in the eTims Server
Args:
@@ -113,8 +114,8 @@ def perform_customer_search(request_data: str) -> None:

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("CustSearchReq")

if headers and server_url and route_path:
@@ -141,13 +142,13 @@ def perform_customer_search(request_data: str) -> None:


@frappe.whitelist()
-def perform_item_registration(request_data: str) -> dict | None:
+def perform_item_registration(request_data: str, vendor="OSCU KRA") -> dict | None:
data: dict = json.loads(request_data)

company_name = data.pop("company_name")

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("ItemSaveReq")

if headers and server_url and route_path:
@@ -173,13 +174,13 @@ def perform_item_registration(request_data: str) -> dict | None:


@frappe.whitelist()
-def send_insurance_details(request_data: str) -> None:
+def send_insurance_details(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("BhfInsuranceSaveReq")

if headers and server_url and route_path:
@@ -215,13 +216,13 @@ def send_insurance_details(request_data: str) -> None:


@frappe.whitelist()
-def send_branch_customer_details(request_data: str) -> None:
+def send_branch_customer_details(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("BhfCustSaveReq")

if headers and server_url and route_path:
@@ -262,11 +263,11 @@ def send_branch_customer_details(request_data: str) -> None:


@frappe.whitelist()
-def save_branch_user_details(request_data: str) -> None:
+def save_branch_user_details(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)
company_name = data["company_name"]
-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("BhfUserSaveReq")

if headers and server_url and route_path:
@@ -327,12 +328,12 @@ def create_branch_user() -> None:


@frappe.whitelist()
-def perform_item_search(request_data: str) -> None:
+def perform_item_search(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]
-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("ItemSearchReq")

if headers and server_url and route_path:
@@ -353,26 +354,27 @@ def perform_item_search(request_data: str) -> None:


@frappe.whitelist()
-def perform_import_item_search(request_data: str) -> None:
+def perform_import_item_search(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

if "branch_code" in data:
-headers = build_headers(company_name, data["branch_code"])
-server_url = get_server_url(company_name, data["branch_code"])
+headers = build_headers(company_name,vendor, data["branch_code"])
+server_url = get_server_url(company_name,vendor, data["branch_code"])

else:
-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)

route_path, last_request_date = get_route_path("ImportItemSearchReq")

# Use last_request_date as the last request date if it exists, else fall back to the current date minus one year
if headers and server_url and route_path:
request_date = add_to_date(datetime.now(), years=-1).strftime("%Y%m%d%H%M%S")
last_request_date = last_request_date.strftime("%Y%m%d%H%M%S") if last_request_date else request_date
url = f"{server_url}{route_path}"
-payload = {"lastReqDt": request_date}
+payload = {"lastReqDt": last_request_date}
endpoints_builder.headers = headers
endpoints_builder.url = url
endpoints_builder.payload = payload
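As a small worked example of the fallback above (the timestamp is illustrative, not taken from the commit), both branches yield a lastReqDt string in yyyyMMddHHmmss form:

from datetime import datetime

from frappe.utils import add_to_date

# Hypothetical case: no stored last request date, so fall back to one year ago
request_date = add_to_date(datetime.now(), years=-1).strftime("%Y%m%d%H%M%S")
payload = {"lastReqDt": request_date}  # e.g. {"lastReqDt": "20231129101500"}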
@@ -396,7 +398,6 @@ def perform_import_item_search_all_branches() -> None:

perform_import_item_search(request_data)


@frappe.whitelist()
def perform_purchases_search(request_data: str) -> None:
data: dict = json.loads(request_data)
@@ -466,13 +467,13 @@ def submit_inventory(request_data: str) -> None:


@frappe.whitelist()
-def perform_item_classification_search(request_data: str) -> None:
+def perform_item_classification_search(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("ItemClsSearchReq")

if headers and server_url and route_path:
@@ -495,13 +496,13 @@ def perform_item_classification_search(request_data: str) -> None:


@frappe.whitelist()
-def search_branch_request(request_data: str) -> None:
+def search_branch_request(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("BhfSearchReq")

if headers and server_url and route_path:
@@ -523,12 +524,12 @@ def search_branch_request(request_data: str) -> None:


@frappe.whitelist()
-def send_imported_item_request(request_data: str) -> None:
+def send_imported_item_request(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]
-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("ImportItemUpdateReq")

if headers and server_url and route_path:
@@ -570,13 +571,13 @@ def send_imported_item_request(request_data: str) -> None:


@frappe.whitelist()
-def perform_notice_search(request_data: str) -> None:
+def perform_notice_search(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)

route_path, last_request_date = get_route_path("NoticeSearchReq")
request_date = add_to_date(datetime.now(), years=-1).strftime("%Y%m%d%H%M%S")
@@ -597,13 +598,13 @@ def perform_notice_search(request_data: str) -> None:


@frappe.whitelist()
-def perform_stock_movement_search(request_data: str) -> None:
+def perform_stock_movement_search(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name, data["branch_id"])
-server_url = get_server_url(company_name, data["branch_id"])
+headers = build_headers(company_name,vendor, data["branch_id"])
+server_url = get_server_url(company_name,vendor, data["branch_id"])

route_path, last_request_date = get_route_path("StockMoveReq")
request_date = last_request_date.strftime("%Y%m%d%H%M%S")
@@ -628,13 +629,13 @@ def perform_stock_movement_search(request_data: str) -> None:


@frappe.whitelist()
-def submit_item_composition(request_data: str) -> None:
+def submit_item_composition(request_data: str, vendor="OSCU KRA") -> None:
data: dict = json.loads(request_data)

company_name = data["company_name"]

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)
route_path, last_request_date = get_route_path("SaveItemComposition")

if headers and server_url and route_path:
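Across this file, the whitelisted endpoints gain a vendor argument defaulting to "OSCU KRA" and forward it to build_headers and get_server_url. A minimal sketch of calling one of them against a non-default vendor; the vendor name and every payload key other than company_name are illustrative assumptions, not values from this commit:

import json

from kenya_compliance.kenya_compliance.apis.apis import perform_customer_search

# Hypothetical call: look up a customer using a non-default vendor configuration
request_data = json.dumps({"company_name": "Navari Ltd", "customer_pin": "A000000000Z"})
perform_customer_search(request_data, vendor="Other OSCU")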
13 changes: 6 additions & 7 deletions kenya_compliance/kenya_compliance/background_tasks/tasks.py
@@ -78,7 +78,6 @@ def send_stock_information() -> None:
{"docstatus": 1, "custom_submitted_successfully": 0},
["name"],
)

for entry in all_stock_ledger_entries:
doc = frappe.get_doc(
"Stock Ledger Entry", entry.name, for_update=False
@@ -149,11 +148,11 @@ def send_item_inventory_information() -> None:


@frappe.whitelist()
-def refresh_code_lists() -> str | None:
+def refresh_code_lists(vendor: str="OSCU KRA") -> str | None:
company_name: str | None = frappe.defaults.get_user_default("Company")

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)

code_search_route_path, last_request_date = get_route_path(
"CodeSearchReq"
@@ -178,11 +177,11 @@ def refresh_code_lists() -> str | None:


@frappe.whitelist()
-def get_item_classification_codes() -> str | None:
+def get_item_classification_codes(vendor="OSCU KRA") -> str | None:
company_name: str | None = frappe.defaults.get_user_default("Company")

-headers = build_headers(company_name)
-server_url = get_server_url(company_name)
+headers = build_headers(company_name, vendor)
+server_url = get_server_url(company_name, vendor)

item_cls_route_path, last_request_date = get_route_path(
"ItemClsSearchReq"
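Both helpers above now accept the same vendor argument and thread it through to build_headers and get_server_url. A hedged sketch of invoking them manually, for example from a bench console, assuming the module path mirrors the file path shown in this diff:

from kenya_compliance.kenya_compliance.background_tasks.tasks import (
    get_item_classification_codes,
    refresh_code_lists,
)

# Hypothetical manual refresh against the default vendor
refresh_code_lists(vendor="OSCU KRA")
get_item_classification_codes(vendor="OSCU KRA")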