diff --git a/apps/accounting_exports/models.py b/apps/accounting_exports/models.py index a0c26f8..34a237e 100644 --- a/apps/accounting_exports/models.py +++ b/apps/accounting_exports/models.py @@ -1,20 +1,23 @@ -from django.db import models -from django.contrib.postgres.fields import ArrayField +from typing import List +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db import models +from django.db.models import Count from fyle_accounting_mappings.models import ExpenseAttribute +from apps.fyle.models import Expense +from apps.workspaces.models import BaseForeignWorkspaceModel, BaseModel, ExportSetting from ms_business_central_api.models.fields import ( + BooleanFalseField, + CustomDateTimeField, + CustomJsonField, + IntegerNullField, StringNotNullField, StringNullField, - CustomJsonField, - CustomDateTimeField, StringOptionsField, - IntegerNullField, - BooleanFalseField, - TextNotNullField + TextNotNullField, ) -from apps.workspaces.models import BaseForeignWorkspaceModel, BaseModel -from apps.fyle.models import Expense TYPE_CHOICES = ( ('INVOICES', 'INVOICES'), @@ -31,6 +34,55 @@ ) +def _group_expenses(expenses: List[Expense], export_setting: ExportSetting, fund_source: str): + """ + Group expenses based on specified fields + """ + + credit_card_expense_grouped_by = export_setting.credit_card_expense_grouped_by + credit_card_expense_date = export_setting.credit_card_expense_date + reimbursable_expense_grouped_by = export_setting.reimbursable_expense_grouped_by + reimbursable_expense_date = export_setting.reimbursable_expense_date + + default_fields = ['employee_email', 'fund_source'] + report_grouping_fields = ['report_id', 'claim_number'] + expense_grouping_fields = ['expense_id', 'expense_number'] + + # Define a mapping for fund sources and their associated group fields + fund_source_mapping = { + 'CCC': { + 'group_by': report_grouping_fields if credit_card_expense_grouped_by == 'REPORT' else expense_grouping_fields, + 'date_field': credit_card_expense_date.lower() if credit_card_expense_date != 'LAST_SPENT_AT' else None + }, + 'PERSONAL': { + 'group_by': report_grouping_fields if reimbursable_expense_grouped_by == 'REPORT' else expense_grouping_fields, + 'date_field': reimbursable_expense_date.lower() if reimbursable_expense_date != 'LAST_SPENT_AT' else None + } + } + + # Update expense_group_fields based on the fund_source + fund_source_data = fund_source_mapping.get(fund_source) + group_by_field = fund_source_data.get('group_by') + date_field = fund_source_data.get('date_field') + + default_fields.extend(group_by_field) + + if date_field: + default_fields.append(date_field) + + # Extract expense IDs from the provided expenses + expense_ids = [expense.id for expense in expenses] + # Retrieve expenses from the database + expenses = Expense.objects.filter(id__in=expense_ids).all() + + # Create expense groups by grouping expenses based on specified fields + expense_groups = list(expenses.values(*default_fields).annotate( + total=Count('*'), expense_ids=ArrayAgg('id')) + ) + + return expense_groups + + class AccountingExport(BaseForeignWorkspaceModel): """ Table to store accounting exports @@ -50,6 +102,48 @@ class AccountingExport(BaseForeignWorkspaceModel): class Meta: db_table = 'accounting_exports' + @staticmethod + def create_accounting_export(expense_objects: List[Expense], fund_source: str, workspace_id): + """ + Group expenses by report_id and fund_source, format date fields, and create AccountingExport 
objects. + """ + # Retrieve the ExportSetting for the workspace + export_setting = ExportSetting.objects.get(workspace_id=workspace_id) + + # Group expenses based on specified fields and fund_source + accounting_exports = _group_expenses(expense_objects, export_setting, fund_source) + + fund_source_map = { + 'PERSONAL': 'reimbursable', + 'CCC': 'credit_card' + } + + for accounting_export in accounting_exports: + # Determine the date field based on fund_source + date_field = getattr(export_setting, f"{fund_source_map.get(fund_source)}_expense_date", None) + + # Calculate and assign 'last_spent_at' based on the chosen date field + if date_field == 'last_spent_at': + latest_expense = Expense.objects.filter(id__in=accounting_export['expense_ids']).order_by('-spent_at').first() + accounting_export['last_spent_at'] = latest_expense.spent_at if latest_expense else None + + # Store expense IDs and remove unnecessary keys + expense_ids = accounting_export['expense_ids'] + accounting_export[date_field] = accounting_export[date_field].strftime('%Y-%m-%dT%H:%M:%S') + accounting_export.pop('total') + accounting_export.pop('expense_ids') + + # Create an AccountingExport object for the expense group + accounting_export_instance = AccountingExport.objects.create( + workspace_id=workspace_id, + fund_source=accounting_export['fund_source'], + description=accounting_export, + status='EXPORT_READY' + ) + + # Add related expenses to the AccountingExport object + accounting_export_instance.expenses.add(*expense_ids) + class AccountingExportSummary(BaseModel): """ diff --git a/apps/business_central/utils.py b/apps/business_central/utils.py index 6334b90..0fd0acf 100644 --- a/apps/business_central/utils.py +++ b/apps/business_central/utils.py @@ -17,7 +17,7 @@ def __init__(self, credentials_object: BusinessCentralCredentials, workspace_id: refresh_token = credentials_object.refresh_token self.connection = Dynamics( - enviroment=environment, + environment=environment, client_id=client_id, client_secret=client_secret, refresh_token=refresh_token, @@ -59,18 +59,20 @@ def _sync_data(self, data, attribute_type, display_name, workspace_id, field_nam """ destination_attributes = [] - for item in data: - detail = {field: getattr(item, field) for field in field_names} + detail = {field: item[field] for field in field_names} + if (attribute_type == 'EMPLOYEE' and item['status'] == 'Active') or attribute_type == 'LOCATION' or item['blocked'] != True: + active = True + else: + active = False destination_attributes.append(self._create_destination_attribute( attribute_type, display_name, - item.name, - item.id, - item.is_active, + item['displayName'], + item['id'], + active, detail )) - DestinationAttribute.bulk_create_or_update_destination_attributes( destination_attributes, attribute_type, workspace_id, True) @@ -89,9 +91,10 @@ def sync_accounts(self): """ workspace = Workspace.objects.get(id=self.workspace_id) self.connection.company_id = workspace.business_central_company_id + field_names = ['category', 'subCategory', 'accountType', 'directPosting', 'lastModifiedDateTime'] accounts = self.connection.accounts.get_all() - self._sync_data(accounts, 'ACCOUNT', 'accounts', self.workspace_id) + self._sync_data(accounts, 'ACCOUNT', 'accounts', self.workspace_id, field_names) return [] def sync_vendors(self): @@ -100,9 +103,10 @@ def sync_vendors(self): """ workspace = Workspace.objects.get(id=self.workspace_id) self.connection.company_id = workspace.business_central_company_id + field_names = ['email', 'currencyId', 
'currencyCode', 'lastModifiedDateTime'] vendors = self.connection.vendors.get_all() - self._sync_data(vendors, 'VENDOR', 'vendor', self.workspace_id) + self._sync_data(vendors, 'VENDOR', 'vendor', self.workspace_id, field_names) return [] def sync_employees(self): @@ -111,9 +115,10 @@ def sync_employees(self): """ workspace = Workspace.objects.get(id=self.workspace_id) self.connection.company_id = workspace.business_central_company_id + field_names = ['email', 'email', 'personalEmail', 'lastModifiedDateTime'] employees = self.connection.employees.get_all() - self._sync_data(employees, 'EMPLOYEE', 'employee', self.workspace_id) + self._sync_data(employees, 'EMPLOYEE', 'employee', self.workspace_id, field_names) return [] def sync_locations(self): @@ -122,7 +127,8 @@ def sync_locations(self): """ workspace = Workspace.objects.get(id=self.workspace_id) self.connection.company_id = workspace.business_central_company_id + field_names = ['code', 'city', 'country'] locations = self.connection.locations.get_all() - self._sync_data(locations, 'LOCATION', 'location', self.workspace_id) + self._sync_data(locations, 'LOCATION', 'location', self.workspace_id, field_names) return [] diff --git a/apps/fyle/queue.py b/apps/fyle/queue.py index 0ce5573..b32096f 100644 --- a/apps/fyle/queue.py +++ b/apps/fyle/queue.py @@ -25,8 +25,8 @@ def queue_import_reimbursable_expenses(workspace_id: int, synchronous: bool = Fa if not synchronous: async_task( - 'apps.fyle.tasks.import_reimbursable_expenses', - workspace_id, accounting_export, + 'apps.fyle.tasks.import_expenses', + workspace_id, accounting_export, 'PERSONAL_CASH_ACCOUNT', 'PERSONAL' ) return @@ -49,9 +49,9 @@ def queue_import_credit_card_expenses(workspace_id: int, synchronous: bool = Fal if not synchronous: async_task( - 'apps.fyle.tasks.import_credit_card_expenses', - workspace_id, accounting_export, + 'apps.fyle.tasks.import_expenses', + workspace_id, accounting_export, 'PERSONAL_CORPORATE_CREDIT_CARD_ACCOUNT', 'CCC' ) return - import_expenses(workspace_id, accounting_export, 'PERSONAL_CASH_ACCOUNT', 'PERSONAL') + import_expenses(workspace_id, accounting_export, 'PERSONAL_CORPORATE_CREDIT_CARD_ACCOUNT', 'CCC') diff --git a/apps/fyle/serializers.py b/apps/fyle/serializers.py index 5b39a2a..1ea4a65 100644 --- a/apps/fyle/serializers.py +++ b/apps/fyle/serializers.py @@ -3,18 +3,18 @@ """ import logging from datetime import datetime, timezone + from django.db.models import Q +from fyle_accounting_mappings.models import ExpenseAttribute +from fyle_integrations_platform_connector import PlatformConnector from rest_framework import serializers -from rest_framework.response import Response from rest_framework.exceptions import APIException +from rest_framework.response import Response from rest_framework.views import status -from fyle_integrations_platform_connector import PlatformConnector - -from fyle_accounting_mappings.models import ExpenseAttribute -from apps.fyle.models import ExpenseFilter -from apps.workspaces.models import Workspace, FyleCredential from apps.fyle.helpers import get_expense_fields +from apps.fyle.models import ExpenseFilter +from apps.workspaces.models import FyleCredential, Workspace logger = logging.getLogger(__name__) logger.level = logging.INFO @@ -112,14 +112,15 @@ class ExpenseFieldSerializer(serializers.Serializer): """ Workspace Admin Serializer """ - expense_fields = serializers.SerializerMethodField() + field_name = serializers.CharField() + type = serializers.CharField() + is_custom = serializers.BooleanField() - def 
get_expense_fields(self, validated_data): + def get_expense_fields(self, workspace_id:int): """ Get Expense Fields """ - workspace_id = self.context['request'].parser_context.get('kwargs').get('workspace_id') expense_fields = get_expense_fields(workspace_id=workspace_id) return expense_fields diff --git a/apps/fyle/tasks.py b/apps/fyle/tasks.py index 59fe79c..aab3070 100644 --- a/apps/fyle/tasks.py +++ b/apps/fyle/tasks.py @@ -61,6 +61,6 @@ def import_expenses(workspace_id, accounting_export: AccountingExport, source_ac ) accounting_export.status = 'COMPLETE' - accounting_export.errors = None + accounting_export.business_central_errors = None accounting_export.save() diff --git a/apps/fyle/views.py b/apps/fyle/views.py index 225260d..b1e1407 100644 --- a/apps/fyle/views.py +++ b/apps/fyle/views.py @@ -13,7 +13,6 @@ FyleFieldsSerializer, ImportFyleAttributesSerializer, ) -from apps.workspaces.models import Workspace from ms_business_central_api.utils import LookupFieldMixin logger = logging.getLogger(__name__) @@ -62,7 +61,10 @@ class CustomFieldView(generics.ListAPIView): """ serializer_class = ExpenseFieldSerializer - queryset = Workspace.objects.all() + pagination_class = None + + def get_queryset(self): + return ExpenseFieldSerializer().get_expense_fields(self.kwargs["workspace_id"]) class ExportableExpenseGroupsView(generics.RetrieveAPIView): diff --git a/apps/mappings/apps.py b/apps/mappings/apps.py index 7ef2ccf..41645b4 100644 --- a/apps/mappings/apps.py +++ b/apps/mappings/apps.py @@ -4,3 +4,7 @@ class MappingsConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" name = "apps.mappings" + + def ready(self): + super(MappingsConfig, self).ready() + import apps.mappings.signals # noqa diff --git a/apps/mappings/constants.py b/apps/mappings/constants.py new file mode 100644 index 0000000..7597c83 --- /dev/null +++ b/apps/mappings/constants.py @@ -0,0 +1,45 @@ +FYLE_EXPENSE_SYSTEM_FIELDS = [ + 'employee id', + 'organisation name', + 'employee name', + 'employee email', + 'expense date', + 'expense id', + 'report id', + 'employee id', + 'department', + 'state', + 'reporter', + 'report', + 'purpose', + 'vendor', + 'category', + 'category code', + 'mileage distance', + 'mileage unit', + 'flight from city', + 'flight to city', + 'flight from date', + 'flight to date', + 'flight from class', + 'flight to class', + 'hotel checkin', + 'hotel checkout', + 'hotel location', + 'hotel breakfast', + 'currency', + 'amount', + 'foreign currency', + 'foreign amount', + 'tax', + 'approver', + 'project', + 'billable', + 'cost center', + 'cost center code', + 'approved on', + 'reimbursable', + 'receipts', + 'paid date', + 'expense created date' +] diff --git a/apps/mappings/exceptions.py b/apps/mappings/exceptions.py new file mode 100644 index 0000000..f04a74d --- /dev/null +++ b/apps/mappings/exceptions.py @@ -0,0 +1,65 @@ +import logging +import traceback + +from dynamics.exceptions.dynamics_exceptions import InvalidTokenError +from fyle.platform.exceptions import InternalServerError +from fyle.platform.exceptions import InvalidTokenError as FyleInvalidTokenError +from fyle.platform.exceptions import WrongParamsError + +from apps.mappings.models import ImportLog +from apps.workspaces.models import BusinessCentralCredentials + +logger = logging.getLogger(__name__) +logger.level = logging.INFO + + +def handle_import_exceptions(func): + def new_fn(expense_attribute_instance, *args): + import_log: ImportLog = args[0] + workspace_id = import_log.workspace_id + attribute_type = 
import_log.attribute_type + error = { + 'task': 'Import {0} to Fyle and Auto Create Mappings'.format(attribute_type), + 'workspace_id': workspace_id, + 'message': None, + 'response': None + } + try: + return func(expense_attribute_instance, *args) + except WrongParamsError as exception: + error['message'] = exception.message + error['response'] = exception.response + error['alert'] = True + import_log.status = 'FAILED' + + except (BusinessCentralCredentials.DoesNotExist, InvalidTokenError): + error['message'] = 'Invalid Token or Business central credentials does not exist workspace_id - {0}'.format(workspace_id) + error['alert'] = False + import_log.status = 'FAILED' + + except FyleInvalidTokenError: + error['message'] = 'Invalid Token for fyle' + error['alert'] = False + import_log.status = 'FAILED' + + except InternalServerError: + error['message'] = 'Internal server error while importing to Fyle' + error['alert'] = True + import_log.status = 'FAILED' + + except Exception: + response = traceback.format_exc() + error['message'] = 'Something went wrong' + error['response'] = response + error['alert'] = False + import_log.status = 'FATAL' + + if error['alert']: + logger.error(error) + else: + logger.info(error) + + import_log.error_log = error + import_log.save() + + return new_fn diff --git a/apps/mappings/imports/modules/__init__.py b/apps/mappings/imports/modules/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/apps/mappings/imports/modules/base.py b/apps/mappings/imports/modules/base.py new file mode 100644 index 0000000..82af8bf --- /dev/null +++ b/apps/mappings/imports/modules/base.py @@ -0,0 +1,318 @@ +import math +from datetime import datetime, timedelta, timezone +from typing import List + +from fyle_accounting_mappings.models import DestinationAttribute, ExpenseAttribute, Mapping +from fyle_integrations_platform_connector import PlatformConnector + +from apps.accounting_exports.models import Error +from apps.business_central.utils import BusinessCentralConnector +from apps.mappings.exceptions import handle_import_exceptions +from apps.mappings.models import ImportLog +from apps.workspaces.models import BusinessCentralCredentials, FyleCredential + + +class Base: + """ + The Base class for all the modules + """ + def __init__( + self, + workspace_id: int, + source_field: str, + destination_field: str, + platform_class_name: str, + sync_after:datetime, + ): + self.workspace_id = workspace_id + self.source_field = source_field + self.destination_field = destination_field + self.platform_class_name = platform_class_name + self.sync_after = sync_after + + def get_platform_class(self, platform: PlatformConnector): + """ + Get the platform class + :param platform: PlatformConnector object + :return: platform class + """ + return getattr(platform, self.platform_class_name) + + def get_auto_sync_permission(self): + """ + Get the auto sync permission + :return: bool + """ + is_auto_sync_status_allowed = False + if (self.destination_field == 'PROJECT' and self.source_field == 'PROJECT') or self.source_field == 'CATEGORY': + is_auto_sync_status_allowed = True + + return is_auto_sync_status_allowed + + def construct_attributes_filter(self, attribute_type: str, paginated_destination_attribute_values: List[str] = []): + """ + Construct the attributes filter + :param attribute_type: attribute type + :param paginated_destination_attribute_values: paginated destination attribute values + :return: dict + """ + filters = { + 'attribute_type': attribute_type, + 'workspace_id': 
self.workspace_id + } + + if self.sync_after and self.platform_class_name != 'expense_custom_fields': + filters['updated_at__gte'] = self.sync_after + + if paginated_destination_attribute_values: + filters['value__in'] = paginated_destination_attribute_values + + return filters + + def remove_duplicate_attributes(self, destination_attributes: List[DestinationAttribute]): + """ + Remove duplicate attributes + :param destination_attributes: destination attributes + :return: list[DestinationAttribute] + """ + unique_attributes = [] + attribute_values = [] + + for destination_attribute in destination_attributes: + if destination_attribute.value.lower() not in attribute_values: + unique_attributes.append(destination_attribute) + attribute_values.append(destination_attribute.value.lower()) + + return unique_attributes + + def resolve_expense_attribute_errors(self): + """ + Resolve Expense Attribute Errors + :return: None + """ + errored_attribute_ids: List[int] = Error.objects.filter( + is_resolved=False, + workspace_id=self.workspace_id, + type='{}_MAPPING'.format(self.source_field) + ).values_list('expense_attribute_id', flat=True) + + if errored_attribute_ids: + mapped_attribute_ids = self.__get_mapped_attributes_ids(errored_attribute_ids) + if mapped_attribute_ids: + Error.objects.filter(expense_attribute_id__in=mapped_attribute_ids).update(is_resolved=True) + + @handle_import_exceptions + def import_destination_attribute_to_fyle(self, import_log: ImportLog): + """ + Import destiantion_attributes field to Fyle and Auto Create Mappings + :param import_log: ImportLog object + """ + fyle_credentials = FyleCredential.objects.get(workspace_id=self.workspace_id) + platform = PlatformConnector(fyle_credentials=fyle_credentials) + + self.sync_expense_attributes(platform) + + self.sync_destination_attributes(self.destination_field) + + self.construct_payload_and_import_to_fyle(platform, import_log) + + self.sync_expense_attributes(platform) + + self.create_mappings() + + self.resolve_expense_attribute_errors() + + def create_mappings(self): + """ + Create mappings + """ + destination_attributes_without_duplicates = [] + destination_attributes = DestinationAttribute.objects.filter( + workspace_id=self.workspace_id, + attribute_type=self.destination_field, + mapping__isnull=True + ).order_by('value', 'id') + destination_attributes_without_duplicates = self.remove_duplicate_attributes(destination_attributes) + if destination_attributes_without_duplicates: + Mapping.bulk_create_mappings( + destination_attributes_without_duplicates, + self.source_field, + self.destination_field, + self.workspace_id + ) + + def sync_expense_attributes(self, platform: PlatformConnector): + """ + Sync expense attributes + :param platform: PlatformConnector object + """ + platform_class = self.get_platform_class(platform) + if self.platform_class_name in ['expense_custom_fields', 'merchants']: + platform_class.sync() + else: + platform_class.sync(sync_after=self.sync_after if self.sync_after else None) + + def sync_destination_attributes(self, business_central_attribute_type: str): + """ + Sync destination attributes + :param business_central_attribute_type: Business Central attribute type + """ + business_central_credentials = BusinessCentralCredentials.objects.get(workspace_id=self.workspace_id) + business_central_connection = BusinessCentralConnector(credentials_object=business_central_credentials, workspace_id=self.workspace_id) + + sync_methods = { + 'ACCOUNT': business_central_connection.sync_accounts, + 'COMPANY': 
business_central_connection.sync_companies, + 'LOCATION': business_central_connection.sync_locations, + 'EMPLOYEE': business_central_connection.sync_employees, + 'VENDOR': business_central_connection.sync_vendors, + } + + sync_method = sync_methods.get(business_central_attribute_type) + sync_method() + + def construct_payload_and_import_to_fyle( + self, + platform: PlatformConnector, + import_log: ImportLog + ): + """ + Construct Payload and Import to fyle in Batches + """ + is_auto_sync_status_allowed = self.get_auto_sync_permission() + + filters = self.construct_attributes_filter(self.destination_field) + + destination_attributes_count = DestinationAttribute.objects.filter(**filters).count() + + is_auto_sync_status_allowed = self.get_auto_sync_permission() + + # If there are no destination attributes, mark the import as complete + if destination_attributes_count == 0: + import_log.status = 'COMPLETE' + import_log.last_successful_run_at = datetime.now() + import_log.error_log = [] + import_log.total_batches_count = 0 + import_log.processed_batches_count = 0 + import_log.save() + return + else: + import_log.total_batches_count = math.ceil(destination_attributes_count / 200) + import_log.save() + + destination_attributes_generator = self.get_destination_attributes_generator(destination_attributes_count, filters) + platform_class = self.get_platform_class(platform) + + for paginated_destination_attributes, is_last_batch in destination_attributes_generator: + fyle_payload = self.setup_fyle_payload_creation( + paginated_destination_attributes=paginated_destination_attributes, + is_auto_sync_status_allowed=is_auto_sync_status_allowed + ) + + self.post_to_fyle_and_sync( + fyle_payload=fyle_payload, + resource_class=platform_class, + is_last_batch=is_last_batch, + import_log=import_log + ) + + def get_destination_attributes_generator(self, destination_attributes_count: int, filters: dict): + """ + Get destination attributes generator + :param destination_attributes_count: Destination attributes count + :param filters: dict + :return: Generator of destination_attributes + """ + for offset in range(0, destination_attributes_count, 200): + limit = offset + 200 + paginated_destination_attributes = DestinationAttribute.objects.filter(**filters).order_by('value', 'id')[offset:limit] + paginated_destination_attributes_without_duplicates = self.remove_duplicate_attributes(paginated_destination_attributes) + is_last_batch = True if limit >= destination_attributes_count else False + + yield paginated_destination_attributes_without_duplicates, is_last_batch + + def setup_fyle_payload_creation( + self, + paginated_destination_attributes: List[DestinationAttribute], + is_auto_sync_status_allowed: bool + ): + """ + Setup Fyle Payload Creation + :param paginated_destination_attributes: List of DestinationAttribute objects + :param is_auto_sync_status_allowed: bool + :return: Fyle Payload + """ + paginated_destination_attribute_values = [attribute.value for attribute in paginated_destination_attributes] + existing_expense_attributes_map = self.get_existing_fyle_attributes(paginated_destination_attribute_values) + + return self.construct_fyle_payload(paginated_destination_attributes, existing_expense_attributes_map, is_auto_sync_status_allowed) + + def get_existing_fyle_attributes(self, paginated_destination_attribute_values: List[str]): + """ + Get Existing Fyle Attributes + :param paginated_destination_attribute_values: List of DestinationAttribute values + :return: Map of attribute value to attribute source_id 
+ """ + filters = self.construct_attributes_filter(self.source_field, paginated_destination_attribute_values) + existing_expense_attributes_values = ExpenseAttribute.objects.filter(**filters).values('value', 'source_id') + # This is a map of attribute name to attribute source_id + return {attribute['value'].lower(): attribute['source_id'] for attribute in existing_expense_attributes_values} + + def post_to_fyle_and_sync(self, fyle_payload: List[object], resource_class, is_last_batch: bool, import_log: ImportLog): + """ + Post to Fyle and Sync + :param fyle_payload: List of Fyle Payload + :param resource_class: Platform Class + :param is_last_batch: bool + :param import_log: ImportLog object + """ + if fyle_payload and self.platform_class_name in ['expense_custom_fields', 'merchants']: + resource_class.post(fyle_payload) + elif fyle_payload: + resource_class.post_bulk(fyle_payload) + + self.update_import_log_post_import(is_last_batch, import_log) + + def update_import_log_post_import(self, is_last_batch: bool, import_log: ImportLog): + """ + Update Import Log Post Import + :param is_last_batch: bool + :param import_log: ImportLog object + """ + if is_last_batch: + import_log.last_successful_run_at = datetime.now() + import_log.processed_batches_count += 1 + import_log.status = 'COMPLETE' + import_log.error_log = [] + else: + import_log.processed_batches_count += 1 + + import_log.save() + + def check_import_log_and_start_import(self): + """ + Checks if the import is already in progress and if not, starts the import process + """ + import_log, is_created = ImportLog.objects.get_or_create( + workspace_id=self.workspace_id, + attribute_type=self.source_field, + defaults={ + 'status': 'IN_PROGRESS' + } + ) + time_difference = datetime.now() - timedelta(minutes=30) + offset_aware_time_difference = time_difference.replace(tzinfo=timezone.utc) + + # If the import is already in progress or if the last successful run is within 30 minutes, don't start the import process + if (import_log.status == 'IN_PROGRESS' and not is_created) \ + or (self.sync_after and (self.sync_after > offset_aware_time_difference)): + return + + # Update the required values since we're beginning the import process + else: + import_log.status = 'IN_PROGRESS' + import_log.processed_batches_count = 0 + import_log.total_batches_count = 0 + import_log.save() + + self.import_destination_attribute_to_fyle(import_log) diff --git a/apps/mappings/imports/modules/categories.py b/apps/mappings/imports/modules/categories.py new file mode 100644 index 0000000..99fcdf1 --- /dev/null +++ b/apps/mappings/imports/modules/categories.py @@ -0,0 +1,85 @@ +from datetime import datetime +from typing import List + +from fyle_accounting_mappings.models import CategoryMapping, DestinationAttribute + +from apps.mappings.imports.modules.base import Base + + +class Category(Base): + """ + Class for Category module + """ + + def __init__(self, workspace_id: int, destination_field: str, sync_after: datetime): + super().__init__( + workspace_id=workspace_id, + source_field="CATEGORY", + destination_field=destination_field, + platform_class_name="categories", + sync_after=sync_after, + ) + + def trigger_import(self): + """ + Trigger import for Category module + """ + self.check_import_log_and_start_import() + + def construct_fyle_payload( + self, + paginated_destination_attributes: List[DestinationAttribute], + existing_fyle_attributes_map: object, + is_auto_sync_status_allowed: bool + ): + """ + Construct Fyle payload for Category module + :param 
paginated_destination_attributes: List of paginated destination attributes + :param existing_fyle_attributes_map: Existing Fyle attributes map + :param is_auto_sync_status_allowed: Is auto sync status allowed + :return: Fyle payload + """ + payload = [] + + for attribute in paginated_destination_attributes: + category = { + "name": attribute.value, + "code": attribute.destination_id, + "is_enabled": attribute.active, + } + + # Create a new category if it does not exist in Fyle + if attribute.value.lower() not in existing_fyle_attributes_map: + payload.append(category) + # Disable the existing category in Fyle if auto-sync status is allowed and the destination_attributes is inactive + elif is_auto_sync_status_allowed and not attribute.active: + category['id'] = existing_fyle_attributes_map[attribute.value.lower()] + payload.append(category) + + return payload + + def create_mappings(self): + """ + Create mappings for Category module + """ + filters = { + "workspace_id": self.workspace_id, + "attribute_type": self.destination_field, + "destination_account__isnull": True + } + + # get all the destination attributes that have category mappings as null + destination_attributes: List[ + DestinationAttribute + ] = DestinationAttribute.objects.filter(**filters) + + destination_attributes_without_duplicates = [] + destination_attributes_without_duplicates = self.remove_duplicate_attributes( + destination_attributes + ) + + CategoryMapping.bulk_create_mappings( + destination_attributes_without_duplicates, + self.destination_field, + self.workspace_id, + ) diff --git a/apps/mappings/imports/modules/cost_centers.py b/apps/mappings/imports/modules/cost_centers.py new file mode 100644 index 0000000..9e6b8c4 --- /dev/null +++ b/apps/mappings/imports/modules/cost_centers.py @@ -0,0 +1,59 @@ +from datetime import datetime +from typing import List + +from fyle_accounting_mappings.models import DestinationAttribute + +from apps.mappings.imports.modules.base import Base + + +class CostCenter(Base): + """ + Class for Cost Center module + """ + + def __init__(self, workspace_id: int, destination_field: str, sync_after: datetime): + super().__init__( + workspace_id=workspace_id, + source_field="COST_CENTER", + destination_field=destination_field, + platform_class_name="cost_centers", + sync_after=sync_after, + ) + + def trigger_import(self): + """ + Trigger import for Cost Center module + """ + self.check_import_log_and_start_import() + + def construct_fyle_payload( + self, + paginated_destination_attributes: List[DestinationAttribute], + existing_fyle_attributes_map: object, + is_auto_sync_status_allowed: bool + ): + """ + Construct Fyle payload for CostCenter module + :param paginated_destination_attributes: List of paginated destination attributes + :param existing_fyle_attributes_map: Existing Fyle attributes map + :param is_auto_sync_status_allowed: Is auto sync status allowed + :return: Fyle payload + """ + payload = [] + + for attribute in paginated_destination_attributes: + cost_center = { + 'name': attribute.value, + 'code': attribute.destination_id, + 'is_enabled': True if attribute.active is None else attribute.active, + 'description': 'Cost Center - {0}, Id - {1}'.format( + attribute.value, + attribute.destination_id + ) + } + + # Create a new cost-center if it does not exist in Fyle + if attribute.value.lower() not in existing_fyle_attributes_map: + payload.append(cost_center) + + return payload diff --git a/apps/mappings/imports/modules/expense_custom_fields.py 
b/apps/mappings/imports/modules/expense_custom_fields.py new file mode 100644 index 0000000..a4142f6 --- /dev/null +++ b/apps/mappings/imports/modules/expense_custom_fields.py @@ -0,0 +1,176 @@ +from datetime import datetime +from typing import Dict, List + +from fyle_accounting_mappings.models import DestinationAttribute, ExpenseAttribute +from fyle_integrations_platform_connector import PlatformConnector + +from apps.mappings.constants import FYLE_EXPENSE_SYSTEM_FIELDS +from apps.mappings.exceptions import handle_import_exceptions +from apps.mappings.imports.modules.base import Base +from apps.mappings.models import ImportLog +from apps.workspaces.models import FyleCredential + + +class ExpenseCustomField(Base): + """ + Class for ExepenseCustomField module + """ + def __init__(self, workspace_id: int, source_field: str, destination_field: str, sync_after: datetime): + super().__init__( + workspace_id=workspace_id, + source_field=source_field, + destination_field=destination_field, + platform_class_name='expense_custom_fields', + sync_after=sync_after + ) + + def trigger_import(self): + """ + Trigger import for ExepenseCustomField module + """ + self.check_import_log_and_start_import() + + def construct_custom_field_placeholder(self, source_placeholder: str, fyle_attribute: str, existing_attribute: Dict): + """ + Construct placeholder for custom field + :param source_placeholder: Placeholder from mapping settings + :param fyle_attribute: Fyle attribute + :param existing_attribute: Existing attribute + """ + new_placeholder = None + placeholder = None + + if existing_attribute: + placeholder = existing_attribute['placeholder'] if 'placeholder' in existing_attribute else None + + # Here is the explanation of what's happening in the if-else ladder below + # source_field is the field that's save in mapping settings, this field user may or may not fill in the custom field form + # placeholder is the field that's saved in the detail column of destination attributes + # fyle_attribute is what we're constructing when both of these fields would not be available + + if not (source_placeholder or placeholder): + # If source_placeholder and placeholder are both None, then we're creating adding a self constructed placeholder + new_placeholder = 'Select {0}'.format(fyle_attribute) + elif not source_placeholder and placeholder: + # If source_placeholder is None but placeholder is not, then we're choosing same place holder as 1 in detail section + new_placeholder = placeholder + elif source_placeholder and not placeholder: + # If source_placeholder is not None but placeholder is None, then we're choosing the placeholder as filled by user in form + new_placeholder = source_placeholder + else: + # Else, we're choosing the placeholder as filled by user in form or None + new_placeholder = source_placeholder + + return new_placeholder + + def construct_fyle_expense_custom_field_payload( + self, + business_central_attributes: List[DestinationAttribute], + platform: PlatformConnector, + source_placeholder: str = None + ): + """ + Construct payload for expense custom fields + :param business_central_attributes: List of destination attributes + :param platform: PlatformConnector object + :param source_placeholder: Placeholder from mapping settings + """ + fyle_expense_custom_field_options = [] + fyle_attribute = self.source_field + + [fyle_expense_custom_field_options.append(business_central_attribute.value) for business_central_attribute in business_central_attributes] + + if fyle_attribute.lower() not in 
FYLE_EXPENSE_SYSTEM_FIELDS: + existing_attribute = ExpenseAttribute.objects.filter( + attribute_type=fyle_attribute, workspace_id=self.workspace_id).values_list('detail', flat=True).first() + + custom_field_id = None + + if existing_attribute is not None: + custom_field_id = existing_attribute['custom_field_id'] + + fyle_attribute = fyle_attribute.replace('_', ' ').title() + placeholder = self.construct_custom_field_placeholder(source_placeholder, fyle_attribute, existing_attribute) + + expense_custom_field_payload = { + 'field_name': fyle_attribute, + 'type': 'SELECT', + 'is_enabled': True, + 'is_mandatory': False, + 'placeholder': placeholder, + 'options': fyle_expense_custom_field_options, + 'code': None + } + + if custom_field_id: + expense_field = platform.expense_custom_fields.get_by_id(custom_field_id) + expense_custom_field_payload['id'] = custom_field_id + expense_custom_field_payload['is_mandatory'] = expense_field['is_mandatory'] + + return expense_custom_field_payload + + # construct_payload_and_import_to_fyle method is overridden + def construct_payload_and_import_to_fyle( + self, + platform: PlatformConnector, + import_log: ImportLog, + source_placeholder: str = None + ): + """ + Construct Payload and Import to fyle in Batches + """ + filters = self.construct_attributes_filter(self.destination_field) + + destination_attributes_count = DestinationAttribute.objects.filter(**filters).count() + + # If there are no destination attributes, mark the import as complete + if destination_attributes_count == 0: + import_log.status = 'COMPLETE' + import_log.last_successful_run_at = datetime.now() + import_log.error_log = [] + import_log.total_batches_count = 0 + import_log.processed_batches_count = 0 + import_log.save() + return + else: + import_log.total_batches_count = 1 + import_log.save() + + destination_attributes = DestinationAttribute.objects.filter(**filters) + destination_attributes_without_duplicates = self.remove_duplicate_attributes(destination_attributes) + platform_class = self.get_platform_class(platform) + + fyle_payload = self.construct_fyle_expense_custom_field_payload( + destination_attributes_without_duplicates, + platform, + source_placeholder + ) + + self.post_to_fyle_and_sync( + fyle_payload=fyle_payload, + resource_class=platform_class, + is_last_batch=True, + import_log=import_log + ) + + # import_destination_attribute_to_fyle method is overridden + @handle_import_exceptions + def import_destination_attribute_to_fyle(self, import_log: ImportLog): + """ + Import destiantion_attributes field to Fyle and Auto Create Mappings + :param import_log: ImportLog object + """ + + fyle_credentials = FyleCredential.objects.get(workspace_id=self.workspace_id) + platform = PlatformConnector(fyle_credentials=fyle_credentials) + + self.sync_destination_attributes(self.destination_field) + + self.construct_payload_and_import_to_fyle( + platform=platform, + import_log=import_log + ) + + self.sync_expense_attributes(platform) + + self.create_mappings() diff --git a/apps/mappings/imports/modules/merchants.py b/apps/mappings/imports/modules/merchants.py new file mode 100644 index 0000000..02ac821 --- /dev/null +++ b/apps/mappings/imports/modules/merchants.py @@ -0,0 +1,71 @@ +from datetime import datetime +from typing import List + +from fyle_accounting_mappings.models import DestinationAttribute +from fyle_integrations_platform_connector import PlatformConnector + +from apps.mappings.exceptions import handle_import_exceptions +from apps.mappings.imports.modules.base import Base +from 
apps.mappings.models import ImportLog +from apps.workspaces.models import FyleCredential + + +class Merchant(Base): + """ + Class for Merchant module + """ + def __init__(self, workspace_id: int, destination_field: str, sync_after: datetime): + super().__init__( + workspace_id=workspace_id, + source_field='MERCHANT', + destination_field=destination_field, + platform_class_name='merchants', + sync_after=sync_after + ) + + def trigger_import(self): + """ + Trigger import for Merchant module + """ + self.check_import_log_and_start_import() + + # remove the is_auto_sync_status_allowed parameter + def construct_fyle_payload( + self, + paginated_destination_attributes: List[DestinationAttribute], + existing_fyle_attributes_map: object, + is_auto_sync_status_allowed: bool + ): + """ + Construct Fyle payload for Merchant module + :param paginated_destination_attributes: List of paginated destination attributes + :param existing_fyle_attributes_map: Existing Fyle attributes map + :param is_auto_sync_status_allowed: Is auto sync status allowed + :return: Fyle payload + """ + payload = [] + + for attribute in paginated_destination_attributes: + # Create a new merchant if it does not exist in Fyle + if attribute.value.lower() not in existing_fyle_attributes_map: + payload.append(attribute.value) + + return payload + + # import_destination_attribute_to_fyle method is overridden + @handle_import_exceptions + def import_destination_attribute_to_fyle(self, import_log: ImportLog): + """ + Import destiantion_attributes field to Fyle and Auto Create Mappings + :param import_log: ImportLog object + """ + fyle_credentials = FyleCredential.objects.get(workspace_id=self.workspace_id) + platform = PlatformConnector(fyle_credentials=fyle_credentials) + + self.sync_expense_attributes(platform) + + self.sync_destination_attributes(self.destination_field) + + self.construct_payload_and_import_to_fyle(platform, import_log) + + self.sync_expense_attributes(platform) diff --git a/apps/mappings/imports/modules/projects.py b/apps/mappings/imports/modules/projects.py new file mode 100644 index 0000000..65f469a --- /dev/null +++ b/apps/mappings/imports/modules/projects.py @@ -0,0 +1,63 @@ +from datetime import datetime +from typing import List + +from fyle_accounting_mappings.models import DestinationAttribute + +from apps.mappings.imports.modules.base import Base + + +class Project(Base): + """ + Class for Project module + """ + + def __init__(self, workspace_id: int, destination_field: str, sync_after: datetime): + super().__init__( + workspace_id=workspace_id, + source_field="PROJECT", + destination_field=destination_field, + platform_class_name="projects", + sync_after=sync_after, + ) + + def trigger_import(self): + """ + Trigger import for Project module + """ + self.check_import_log_and_start_import() + + def construct_fyle_payload( + self, + paginated_destination_attributes: List[DestinationAttribute], + existing_fyle_attributes_map: object, + is_auto_sync_status_allowed: bool + ): + """ + Construct Fyle payload for Project module + :param paginated_destination_attributes: List of paginated destination attributes + :param existing_fyle_attributes_map: Existing Fyle attributes map + :param is_auto_sync_status_allowed: Is auto sync status allowed + :return: Fyle payload + """ + payload = [] + + for attribute in paginated_destination_attributes: + project = { + 'name': attribute.value, + 'code': attribute.destination_id, + 'description': 'Business Central Project - {0}, Id - {1}'.format( + attribute.value, + 
attribute.destination_id + ), + 'is_enabled': True if attribute.active is None else attribute.active + } + + # Create a new project if it does not exist in Fyle + if attribute.value.lower() not in existing_fyle_attributes_map: + payload.append(project) + # Disable the existing project in Fyle if auto-sync status is allowed and the destination_attributes is inactive + elif is_auto_sync_status_allowed and not attribute.active: + project['id'] = existing_fyle_attributes_map[attribute.value.lower()] + payload.append(project) + + return payload diff --git a/apps/mappings/imports/queues.py b/apps/mappings/imports/queues.py new file mode 100644 index 0000000..7c71d52 --- /dev/null +++ b/apps/mappings/imports/queues.py @@ -0,0 +1,52 @@ +from django_q.tasks import Chain +from fyle_accounting_mappings.models import MappingSetting + +from apps.workspaces.models import ImportSetting + + +def chain_import_fields_to_fyle(workspace_id): + """ + Chain import fields to Fyle + :param workspace_id: Workspace Id + """ + mapping_settings = MappingSetting.objects.filter(workspace_id=workspace_id, import_to_fyle=True) + custom_field_mapping_settings = MappingSetting.objects.filter(workspace_id=workspace_id, is_custom=True, import_to_fyle=True) + import_settings = ImportSetting.objects.get(workspace_id=workspace_id) + chain = Chain() + + if import_settings.import_categories: + chain.append( + 'apps.mappings.imports.tasks.trigger_import_via_schedule', + workspace_id, + 'ACCOUNT', + 'CATEGORY' + ) + + if import_settings.import_vendors_as_merchants: + chain.append( + 'apps.mappings.imports.tasks.trigger_import_via_schedule', + workspace_id, + 'VENDOR', + 'MERCHANT' + ) + + for mapping_setting in mapping_settings: + if mapping_setting.source_field in ['PROJECT', 'COST_CENTER']: + chain.append( + 'apps.mappings.imports.tasks.trigger_import_via_schedule', + workspace_id, + mapping_setting.destination_field, + mapping_setting.source_field + ) + + for custom_fields_mapping_setting in custom_field_mapping_settings: + chain.append( + 'apps.mappings.imports.tasks.trigger_import_via_schedule', + workspace_id, + custom_fields_mapping_setting.destination_field, + custom_fields_mapping_setting.source_field, + True + ) + + if chain.length() > 0: + chain.run() diff --git a/apps/mappings/imports/schedules.py b/apps/mappings/imports/schedules.py new file mode 100644 index 0000000..c37f3b0 --- /dev/null +++ b/apps/mappings/imports/schedules.py @@ -0,0 +1,50 @@ +from datetime import datetime + +from django_q.models import Schedule +from fyle_accounting_mappings.models import MappingSetting + +from apps.workspaces.models import ImportSetting + + +def schedule_or_delete_fyle_import_tasks(import_settings: ImportSetting, mapping_setting_instance: MappingSetting = None): + """ + Schedule or delete Fyle import tasks based on the configuration. 
+ :param configuration: Workspace Configuration Instance + :param instance: Mapping Setting Instance + :return: None + """ + task_to_be_scheduled = None + # Check if there is a task to be scheduled + if mapping_setting_instance and mapping_setting_instance.import_to_fyle: + task_to_be_scheduled = mapping_setting_instance + + if task_to_be_scheduled or import_settings.import_categories: + Schedule.objects.update_or_create( + func='apps.mappings.imports.queues.chain_import_fields_to_fyle', + args='{}'.format(import_settings.workspace_id), + defaults={ + 'schedule_type': Schedule.MINUTES, + 'minutes': 24 * 60, + 'next_run': datetime.now() + } + ) + return + + import_fields_count = MappingSetting.objects.filter( + import_to_fyle=True, + workspace_id=import_settings.workspace_id, + source_field__in=['CATEGORY', 'PROJECT', 'COST_CENTER'] + ).count() + + custom_field_import_fields_count = MappingSetting.objects.filter( + import_to_fyle=True, + workspace_id=import_settings.workspace_id, + is_custom=True + ).count() + + # If the import fields count is 0, delete the schedule + if import_fields_count == 0 and custom_field_import_fields_count == 0: + Schedule.objects.filter( + func='apps.mappings.imports.queues.chain_import_fields_to_fyle', + args='{}'.format(import_settings.workspace_id) + ).delete() diff --git a/apps/mappings/imports/tasks.py b/apps/mappings/imports/tasks.py new file mode 100644 index 0000000..9cb3a6e --- /dev/null +++ b/apps/mappings/imports/tasks.py @@ -0,0 +1,32 @@ +from apps.mappings.imports.modules.categories import Category +from apps.mappings.imports.modules.cost_centers import CostCenter +from apps.mappings.imports.modules.expense_custom_fields import ExpenseCustomField +from apps.mappings.imports.modules.merchants import Merchant +from apps.mappings.imports.modules.projects import Project +from apps.mappings.models import ImportLog + +SOURCE_FIELD_CLASS_MAP = { + 'CATEGORY': Category, + 'MERCHANT': Merchant, + 'COST_CENTER': CostCenter, + 'PROJECT': Project, +} + + +def trigger_import_via_schedule(workspace_id: int, destination_field: str, source_field: str, is_custom: bool = False): + """ + Trigger import via schedule + :param workspace_id: Workspace id + :param destination_field: Destination field + :param source_field: Type of attribute (e.g. 
'CATEGORY', 'MERCHANT', 'COST_CENTER', 'PROJECT') + """ + import_log = ImportLog.objects.filter(workspace_id=workspace_id, attribute_type=source_field).first() + sync_after = import_log.last_successful_run_at if import_log else None + + if is_custom: + item = ExpenseCustomField(workspace_id, source_field, destination_field, sync_after) + item.trigger_import() + else: + module_class = SOURCE_FIELD_CLASS_MAP[source_field] + item = module_class(workspace_id, destination_field, sync_after) + item.trigger_import() diff --git a/apps/mappings/models.py b/apps/mappings/models.py index e69de29..a680906 100644 --- a/apps/mappings/models.py +++ b/apps/mappings/models.py @@ -0,0 +1,35 @@ +from django.db import models + +from apps.workspaces.models import BaseForeignWorkspaceModel +from ms_business_central_api.models.fields import ( + CustomDateTimeField, + CustomJsonField, + IntegerNotNullField, + StringNotNullField, + StringOptionsField, +) + +IMPORT_STATUS_CHOICES = ( + ('FATAL', 'FATAL'), + ('COMPLETE', 'COMPLETE'), + ('IN_PROGRESS', 'IN_PROGRESS'), + ('FAILED', 'FAILED') +) + + +class ImportLog(BaseForeignWorkspaceModel): + """ + Table to store import logs + """ + + id = models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False) + attribute_type = StringNotNullField(max_length=150, help_text='Attribute type') + status = StringOptionsField(help_text='Status', choices=IMPORT_STATUS_CHOICES) + error_log = CustomJsonField(help_text='Emails Selected For Email Notification') + total_batches_count = IntegerNotNullField(help_text='Queued batches', default=0) + processed_batches_count = IntegerNotNullField(help_text='Processed batches', default=0) + last_successful_run_at = CustomDateTimeField(help_text='Last successful run') + + class Meta: + db_table = 'import_logs' + unique_together = ('workspace', 'attribute_type') diff --git a/apps/mappings/signals.py b/apps/mappings/signals.py new file mode 100644 index 0000000..782928f --- /dev/null +++ b/apps/mappings/signals.py @@ -0,0 +1,112 @@ +import logging +from datetime import datetime, timedelta, timezone + +from django.db.models.signals import post_save, pre_save +from django.dispatch import receiver +from fyle.platform.exceptions import WrongParamsError +from fyle_accounting_mappings.models import MappingSetting +from fyle_integrations_platform_connector import PlatformConnector +from rest_framework.exceptions import ValidationError + +from apps.mappings.imports.modules.expense_custom_fields import ExpenseCustomField +from apps.mappings.imports.schedules import schedule_or_delete_fyle_import_tasks +from apps.mappings.models import ImportLog +from apps.workspaces.models import FyleCredential, ImportSetting + +logger = logging.getLogger(__name__) + + +@receiver(post_save, sender=MappingSetting) +def run_post_mapping_settings_triggers(sender, instance: MappingSetting, **kwargs): + """ + :param sender: Sender Class + :param instance: Row instance of Sender Class + :return: None + """ + configuration = ImportSetting.objects.filter(workspace_id=instance.workspace_id).first() + + if instance.source_field == 'PROJECT': + schedule_or_delete_fyle_import_tasks(configuration, instance) + + if instance.source_field == 'COST_CENTER': + schedule_or_delete_fyle_import_tasks(configuration, instance) + + if instance.is_custom: + schedule_or_delete_fyle_import_tasks(configuration, instance) + + +@receiver(pre_save, sender=MappingSetting) +def run_pre_mapping_settings_triggers(sender, instance: MappingSetting, **kwargs): + """ + :param sender: 
Sender Class + :param instance: Row instance of Sender Class + :return: None + """ + default_attributes = ['EMPLOYEE', 'CATEGORY', 'PROJECT', 'COST_CENTER', 'TAX_GROUP', 'CORPORATE_CARD'] + + instance.source_field = instance.source_field.upper().replace(' ', '_') + + if instance.source_field not in default_attributes and instance.import_to_fyle: + # TODO: sync intacct fields before we upload custom field + try: + workspace_id = int(instance.workspace_id) + # Checking is import_log exists or not if not create one + import_log, is_created = ImportLog.objects.get_or_create( + workspace_id=workspace_id, + attribute_type=instance.source_field, + defaults={ + 'status': 'IN_PROGRESS' + } + ) + + last_successful_run_at = None + if import_log and not is_created: + last_successful_run_at = import_log.last_successful_run_at if import_log.last_successful_run_at else None + time_difference = datetime.now() - timedelta(minutes=32) + offset_aware_time_difference = time_difference.replace(tzinfo=timezone.utc) + + # if the import_log is present and the last_successful_run_at is less than 30mins then we need to update it + # so that the schedule can run + if last_successful_run_at and offset_aware_time_difference and (offset_aware_time_difference < last_successful_run_at): + import_log.last_successful_run_at = offset_aware_time_difference + last_successful_run_at = offset_aware_time_difference + import_log.save() + + # Creating the expense_custom_field object with the correct last_successful_run_at value + expense_custom_field = ExpenseCustomField( + workspace_id=workspace_id, + source_field=instance.source_field, + destination_field=instance.destination_field, + sync_after=last_successful_run_at + ) + + fyle_credentials = FyleCredential.objects.get(workspace_id=workspace_id) + platform = PlatformConnector(fyle_credentials=fyle_credentials) + + # setting the import_log status to IN_PROGRESS + import_log.status = 'IN_PROGRESS' + import_log.save() + + expense_custom_field.construct_payload_and_import_to_fyle(platform, import_log) + expense_custom_field.sync_expense_attributes(platform) + + # NOTE: We are not setting the import_log status to COMPLETE + # since the post_save trigger will run the import again in async manner + + except WrongParamsError as error: + logger.error( + 'Error while creating %s workspace_id - %s in Fyle %s %s', + instance.source_field, instance.workspace_id, error.message, {'error': error.response} + ) + if error.response and 'message' in error.response: + raise ValidationError({ + 'message': error.response['message'], + 'field_name': instance.source_field + }) + + # setting the import_log.last_successful_run_at to -30mins for the post_save_trigger + import_log = ImportLog.objects.filter(workspace_id=workspace_id, attribute_type=instance.source_field).first() + if import_log.last_successful_run_at: + last_successful_run_at = import_log.last_successful_run_at - timedelta(minutes=30) + import_log.last_successful_run_at = last_successful_run_at + import_log.save() diff --git a/apps/mappings/tasks.py b/apps/mappings/tasks.py new file mode 100644 index 0000000..82f1e48 --- /dev/null +++ b/apps/mappings/tasks.py @@ -0,0 +1,22 @@ +from apps.business_central.utils import BusinessCentralConnector +from apps.workspaces.models import BusinessCentralCredentials + + +def sync_business_central_attributes(business_central_attribute_type: str, workspace_id: int): + business_central_credentials: BusinessCentralCredentials = BusinessCentralCredentials.objects.get(workspace_id=workspace_id) + + 
business_central_connection = BusinessCentralConnector( + credentials_object=business_central_credentials, + workspace_id=workspace_id + ) + + sync_functions = { + 'ACCOUNT': business_central_connection.sync_accounts, + 'COMPANY': business_central_connection.sync_companies, + 'LOCATION': business_central_connection.sync_locations, + 'EMPLOYEE': business_central_connection.sync_employees, + 'VENDOR': business_central_connection.sync_vendors, + } + + sync_function = sync_functions[business_central_attribute_type] + sync_function() diff --git a/apps/mappings/urls.py b/apps/mappings/urls.py index e69de29..727c034 100644 --- a/apps/mappings/urls.py +++ b/apps/mappings/urls.py @@ -0,0 +1,20 @@ +"""ms_business_central_api URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.urls import include, path + +urlpatterns = [ + path('', include('fyle_accounting_mappings.urls')) +] diff --git a/apps/users/helpers.py b/apps/users/helpers.py index 1d91ba3..35ededb 100644 --- a/apps/users/helpers.py +++ b/apps/users/helpers.py @@ -1,5 +1,6 @@ from typing import Tuple +from fyle_integrations_platform_connector import PlatformConnector from fyle_rest_auth.models import AuthToken from apps.fyle.helpers import get_cluster_domain @@ -20,3 +21,21 @@ def get_cluster_domain_and_refresh_token(user) -> Tuple[str, str]: cluster_domain = get_cluster_domain(refresh_token) return cluster_domain, refresh_token + + +def get_user_profile(request): + """ + Get user profile + """ + refresh_token = AuthToken.objects.get(user__user_id=request.user).refresh_token + cluster_domain, _ = get_cluster_domain_and_refresh_token(request.user) + + fyle_credentials = FyleCredential( + cluster_domain=cluster_domain, + refresh_token=refresh_token + ) + + platform = PlatformConnector(fyle_credentials) + employee_profile = platform.connection.v1beta.spender.my_profile.get() + + return employee_profile diff --git a/apps/users/urls.py b/apps/users/urls.py index 6475fc7..d264fce 100644 --- a/apps/users/urls.py +++ b/apps/users/urls.py @@ -15,8 +15,9 @@ """ from django.urls import path -from apps.users.views import FyleOrgsView +from apps.users.views import FyleOrgsView, UserProfileView urlpatterns = [ + path('profile/', UserProfileView.as_view(), name='user-profile'), path('orgs/', FyleOrgsView.as_view(), name='fyle-orgs') ] diff --git a/apps/users/views.py b/apps/users/views.py index 05dfd87..768317a 100644 --- a/apps/users/views.py +++ b/apps/users/views.py @@ -3,7 +3,7 @@ from rest_framework.response import Response from apps.fyle.helpers import get_fyle_orgs -from apps.users.helpers import get_cluster_domain_and_refresh_token +from apps.users.helpers import get_cluster_domain_and_refresh_token, get_user_profile class FyleOrgsView(generics.ListCreateAPIView): @@ -24,3 +24,18 @@ def get(self, request, *args, **kwargs): data=fyle_orgs, status=status.HTTP_200_OK ) + + +class UserProfileView(generics.RetrieveAPIView): + + permission_classes = [IsAuthenticated] + + def get(self, request, 
+    def get(self, request, *args, **kwargs):
+        """
+        Get User Details
+        """
+        employee_profile = get_user_profile(request)
+        return Response(
+            data=employee_profile,
+            status=status.HTTP_200_OK
+        )
diff --git a/apps/workspaces/helpers.py b/apps/workspaces/helpers.py
index 9671147..9cf1a44 100644
--- a/apps/workspaces/helpers.py
+++ b/apps/workspaces/helpers.py
@@ -1,15 +1,14 @@
-import logging
 import base64
-import requests
 import json
+import logging
+import requests
 from django.conf import settings
-from future.moves.urllib.parse import urlencode
 from dynamics.exceptions.dynamics_exceptions import InternalServerError, InvalidTokenError
+from future.moves.urllib.parse import urlencode
 
-from apps.workspaces.models import BusinessCentralCredentials, Workspace
 from apps.business_central.utils import BusinessCentralConnector
-
+from apps.workspaces.models import BusinessCentralCredentials, Workspace
 
 logger = logging.getLogger(__name__)
@@ -119,8 +118,8 @@ def connect_business_central(authorization_code, redirect_uri, workspace_id):
     workspace.business_central_company_id = connection[0]["id"]
     workspace.save()
 
-    if workspace.onboarding_state == "CONNECTION":
-        # If workspace's onboarding state is "CONNECTION", update it to "EXPORT_SETTINGS"
+    if workspace.onboarding_state == "COMPANY_SELECTION":
+        # If workspace's onboarding state is "COMPANY_SELECTION", update it to "EXPORT_SETTINGS"
         workspace.onboarding_state = "EXPORT_SETTINGS"
         workspace.save()
diff --git a/apps/workspaces/models.py b/apps/workspaces/models.py
index 36b720c..3347cb9 100644
--- a/apps/workspaces/models.py
+++ b/apps/workspaces/models.py
@@ -1,23 +1,24 @@
-from django.db import models
 from django.contrib.auth import get_user_model
 from django.contrib.postgres.fields import ArrayField
+from django.db import models
 
 from ms_business_central_api.models.fields import (
-    StringNotNullField,
+    BooleanFalseField,
+    BooleanTrueField,
     CustomDateTimeField,
+    CustomJsonField,
+    IntegerNullField,
+    StringNotNullField,
+    StringNullField,
     StringOptionsField,
     TextNotNullField,
-    StringNullField,
-    BooleanTrueField,
-    BooleanFalseField,
-    IntegerNullField,
-    CustomJsonField
 )
 
 User = get_user_model()
 
 ONBOARDING_STATE_CHOICES = (
     ('CONNECTION', 'CONNECTION'),
+    ('COMPANY_SELECTION', 'COMPANY_SELECTION'),
    ('EXPORT_SETTINGS', 'EXPORT_SETTINGS'),
     ('IMPORT_SETTINGS', 'IMPORT_SETTINGS'),
     ('ADVANCED_CONFIGURATION', 'ADVANCED_CONFIGURATION'),
@@ -26,7 +27,7 @@
 
 
 def get_default_onboarding_state():
-    return 'EXPORT_SETTINGS'
+    return 'CONNECTION'
 
 
 class Workspace(models.Model):
@@ -37,8 +38,8 @@ class Workspace(models.Model):
     name = StringNotNullField(help_text='Name of the workspace')
     user = models.ManyToManyField(User, help_text='Reference to users table')
     org_id = models.CharField(max_length=255, help_text='org id', unique=True)
-    last_synced_at = CustomDateTimeField(help_text='Datetime when expenses were pulled last')
-    ccc_last_synced_at = CustomDateTimeField(help_text='Datetime when ccc expenses were pulled last')
+    reimbursable_last_synced_at = CustomDateTimeField(help_text='Datetime when expenses were pulled last')
+    credit_card_last_synced_at = CustomDateTimeField(help_text='Datetime when ccc expenses were pulled last')
     source_synced_at = CustomDateTimeField(help_text='Datetime when source dimensions were pulled')
     destination_synced_at = CustomDateTimeField(help_text='Datetime when destination dimensions were pulled')
     onboarding_state = StringOptionsField(
diff --git a/apps/workspaces/serializers.py b/apps/workspaces/serializers.py
index a77e280..2eb7331 100644
--- a/apps/workspaces/serializers.py
+++ b/apps/workspaces/serializers.py
@@ -2,23 +2,23 @@
 Workspace Serializers
 """
 from django.core.cache import cache
-from rest_framework import serializers
+from fyle_accounting_mappings.models import ExpenseAttribute
 from fyle_rest_auth.helpers import get_fyle_admin
 from fyle_rest_auth.models import AuthToken
-from fyle_accounting_mappings.models import ExpenseAttribute
+from rest_framework import serializers
 
+from apps.fyle.helpers import get_cluster_domain
+from apps.users.models import User
 from apps.workspaces.helpers import connect_business_central
-from ms_business_central_api.utils import assert_valid
 from apps.workspaces.models import (
-    Workspace,
-    FyleCredential,
+    AdvancedSetting,
     BusinessCentralCredentials,
     ExportSetting,
+    FyleCredential,
     ImportSetting,
-    AdvancedSetting
+    Workspace,
 )
-from apps.users.models import User
-from apps.fyle.helpers import get_cluster_domain
+from ms_business_central_api.utils import assert_valid
 
 
 class WorkspaceSerializer(serializers.ModelSerializer):
@@ -69,6 +69,10 @@ def create(self, validated_data):
                 cluster_domain=cluster_domain
             )
 
+            if workspace.onboarding_state == 'CONNECTION':
+                workspace.onboarding_state = 'COMPANY_SELECTION'
+                workspace.save()
+
         return workspace
@@ -206,13 +210,13 @@ class WorkspaceAdminSerializer(serializers.Serializer):
     """
     Workspace Admin Serializer
     """
-    admin_emails = serializers.SerializerMethodField()
+    email = serializers.CharField()
+    name = serializers.CharField()
 
-    def get_admin_emails(self, validated_data):
+    def get_admin_emails(self, workspace_id):
         """
         Get Workspace Admins
         """
-        workspace_id = self.context['request'].parser_context.get('kwargs').get('workspace_id')
         workspace = Workspace.objects.get(id=workspace_id)
 
         admin_emails = []
diff --git a/apps/workspaces/urls.py b/apps/workspaces/urls.py
index cef5585..f4dc691 100644
--- a/apps/workspaces/urls.py
+++ b/apps/workspaces/urls.py
@@ -1,16 +1,15 @@
-from django.urls import path, include
+from django.urls import include, path
 
 from apps.workspaces.views import (
-    ReadyView,
-    WorkspaceView,
+    AdvancedSettingView,
+    ConnectBusinessCentralView,
     ExportSettingView,
     ImportSettingView,
-    AdvancedSettingView,
+    ReadyView,
     WorkspaceAdminsView,
-    ConnectBusinessCentralView
+    WorkspaceView,
 )
-
 
 workspace_app_paths = [
     path('', WorkspaceView.as_view(), name='workspaces'),
     path('ready/', ReadyView.as_view(), name='ready'),
@@ -26,6 +25,7 @@
     path('/accounting_exports/', include('apps.accounting_exports.urls')),
     path('/fyle/', include('apps.fyle.urls')),
     path('/business_central/', include('apps.business_central.urls')),
+    path('/mappings/', include('apps.mappings.urls'))
 ]
 
 urlpatterns = []
diff --git a/apps/workspaces/views.py b/apps/workspaces/views.py
index 9b8272b..510e9ef 100644
--- a/apps/workspaces/views.py
+++ b/apps/workspaces/views.py
@@ -1,29 +1,20 @@
 import logging
-from django.contrib.auth import get_user_model
 
+from django.contrib.auth import get_user_model
+from fyle_rest_auth.utils import AuthUtils
 from rest_framework import generics
 from rest_framework.views import Response, status
-from fyle_rest_auth.utils import AuthUtils
-
-
-from ms_business_central_api.utils import assert_valid
-from apps.workspaces.models import (
-    Workspace,
-    ExportSetting,
-    ImportSetting,
-    AdvancedSetting,
-    BusinessCentralCredentials
-)
+from apps.workspaces.models import AdvancedSetting, BusinessCentralCredentials, ExportSetting, ImportSetting, Workspace
 from apps.workspaces.serializers import (
-    WorkspaceSerializer,
+    AdvancedSettingSerializer,
     BusinessCentralCredentialSerializer,
     ExportSettingsSerializer,
     ImportSettingsSerializer,
-    AdvancedSettingSerializer,
-    WorkspaceAdminSerializer
+    WorkspaceAdminSerializer,
+    WorkspaceSerializer,
 )
-
+from ms_business_central_api.utils import assert_valid
 
 logger = logging.getLogger(__name__)
 logger.level = logging.INFO
@@ -125,4 +116,7 @@ class WorkspaceAdminsView(generics.ListAPIView):
     Retrieve Workspace Admins
     """
     serializer_class = WorkspaceAdminSerializer
-    queryset = Workspace.objects.all()
+    pagination_class = None
+
+    def get_queryset(self):
+        return WorkspaceAdminSerializer().get_admin_emails(self.kwargs['workspace_id'])
diff --git a/tests/conftest.py b/tests/conftest.py
index 57d60fe..4d359e1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -115,8 +115,8 @@ def create_temp_workspace():
             id=workspace_id,
             name='Fyle For Testing {}'.format(workspace_id),
             org_id='riseabovehate{}'.format(workspace_id),
-            last_synced_at=None,
-            ccc_last_synced_at=None,
+            reimbursable_last_synced_at=None,
+            credit_card_last_synced_at=None,
             created_at=datetime.now(tz=timezone.utc),
             updated_at=datetime.now(tz=timezone.utc)
         )
@@ -307,7 +307,7 @@ def add_export_settings():
             default_bank_account_name='Accounts Payable',
             default_back_account_id='1',
             reimbursable_expense_state='PAYMENT_PROCESSING',
-            reimbursable_expense_date='current_date' if workspace_id == 1 else 'last_spent_at',
+            reimbursable_expense_date='spent_at' if workspace_id == 1 else 'last_spent_at',
             reimbursable_expense_grouped_by='REPORT' if workspace_id == 1 else 'EXPENSE',
             credit_card_expense_export_type='CREDIT_CARD_PURCHASE' if workspace_id in [1, 2] else 'JOURNAL_ENTRY',
             credit_card_expense_state='PAYMENT_PROCESSING',
diff --git a/tests/test_fyle/fixtures.py b/tests/test_fyle/fixtures.py
index 94138d7..15eb706 100644
--- a/tests/test_fyle/fixtures.py
+++ b/tests/test_fyle/fixtures.py
@@ -221,4 +221,66 @@
             ]
         }
     ],
+    'expenses':[
+        {
+            'id':91,
+            'employee_email':'ashwin.t@fyle.in',
+            'employee_name':'Joanna',
+            'category':'Food',
+            'sub_category':None,
+            'project':'Aaron Abbott',
+            'org_id':'or79Cob97KSh',
+            'expense_id':'txxTi9ZfdepC',
+            'expense_number':'E/2022/05/T/16',
+            'claim_number':'C/2022/05/R/4',
+            'report_title':'R/2022/05/R/4',
+            'amount':50.0,
+            'currency':'USD',
+            'foreign_amount':None,
+            'foreign_currency':None,
+            'tax_amount':None,
+            'tax_group_id':None,
+            'settlement_id':'setDiksMn83K7',
+            'reimbursable':True,
+            'billable':False,
+            'exported':False,
+            'state':'PAYMENT_PROCESSING',
+            'vendor':'Ashwin',
+            'cost_center':'Marketing',
+            'purpose':None,
+            'report_id':'rpViBmuYmAgw',
+            'corporate_card_id':None,
+            'file_ids':[
+
+            ],
+            'spent_at':'2022-05-13T17:00:00Z',
+            'approved_at':'2022-05-13T09:30:13.484000Z',
+            'posted_at': '2021-12-22T07:30:26.289842+00:00',
+            'expense_created_at':'2022-05-13T09:29:43.535468Z',
+            'expense_updated_at':'2022-05-13T09:32:06.643941Z',
+            'created_at':'2022-05-23T11:11:28.241406Z',
+            'updated_at':'2022-05-23T11:11:28.241440Z',
+            'fund_source':'PERSONAL',
+            'source_account_type': 'PERSONAL_CASH_ACCOUNT',
+            'verified_at':'2022-05-23T11:11:28.241440Z',
+            'custom_properties':{
+                'Team':'',
+                'Class':'',
+                'Klass':'',
+                'Location':'',
+                'Team Copy':'',
+                'Tax Groups':'',
+                'Departments':'',
+                'Team 2 Postman':'',
+                'User Dimension':'',
+                'Location Entity':'',
+                'Operating System':'',
+                'System Operating':'',
+                'User Dimension Copy':'',
+                'Custom Expense Field':'None'
+            },
+            'paid_on_qbo':False,
+            'payment_number':'P/2022/05/R/7'
+        }
+    ],
 }
diff --git a/tests/test_fyle/test_views.py b/tests/test_fyle/test_views.py
index ac2b7ba..afd405a 100644
--- a/tests/test_fyle/test_views.py
+++ b/tests/test_fyle/test_views.py
@@ -3,6 +3,8 @@
 from django.urls import reverse
 
+from apps.accounting_exports.models import AccountingExport
+from apps.fyle.models import Expense
 from apps.workspaces.models import FyleCredential, Workspace
 from tests.helpers import dict_compare_keys
 from tests.test_fyle.fixtures import fixtures as data
@@ -100,7 +102,7 @@ def test_fyle_expense_fields(api_client, test_connection, create_temp_workspace,
     response = json.loads(response.content)
 
     assert (
-        dict_compare_keys(response['results'], data['fyle_expense_custom_fields']) == []
+        dict_compare_keys(response, data['fyle_expense_custom_fields']) == []
     ), 'expense group api return diffs in keys'
@@ -114,11 +116,24 @@ def test_exportable_expense_group_view(api_client, test_connection, create_temp_
     assert response.status_code == 200
 
 
-def test_accounting_export_sync_view(api_client, test_connection, create_temp_workspace, add_fyle_credentials, add_export_settings):
+def test_accounting_export_sync_view(api_client, test_connection, create_temp_workspace, add_fyle_credentials, add_export_settings, mocker):
     access_token = test_connection.access_token
 
     url = reverse('sync-accounting-exports', kwargs={'workspace_id': 1})
+
+    mocker.patch(
+        'fyle_integrations_platform_connector.apis.Expenses.get',
+        return_value = data['expenses'],
+    )
+
     api_client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(access_token))
 
     response = api_client.post(url)
     assert response.status_code == 200
+
+    accounting_exports = AccountingExport.objects.filter(workspace_id=1)
+
+    assert accounting_exports[0].status == 'COMPLETE'
+
+    expenses = Expense.objects.filter(org_id='or79Cob97KSh')
+    assert len(expenses) == 1
diff --git a/tests/test_users/test_views.py b/tests/test_users/test_views.py
index 19f17b0..5e94935 100644
--- a/tests/test_users/test_views.py
+++ b/tests/test_users/test_views.py
@@ -10,7 +10,9 @@ def test_setup():
 
 
 def test_get_all_orgs_view(api_client, test_connection, create_temp_workspace, add_fyle_credentials, mocker):
-
+    """
+    Test get all orgs view
+    """
     access_token = test_connection.access_token
     url = reverse('fyle-orgs')
@@ -24,3 +26,16 @@ def test_get_all_orgs_view(api_client, test_connection, create_temp_workspace, a
     response = api_client.get(url)
     assert response.status_code == 200
     assert response.data == fyle_data['get_all_orgs']
+
+
+def test_get_user_profile(api_client, test_connection, create_temp_workspace, add_fyle_credentials):
+    """
+    Test get user profile
+    """
+    access_token = test_connection.access_token
+    url = reverse('user-profile')
+
+    api_client.credentials(HTTP_AUTHORIZATION='Bearer {}'.format(access_token))
+
+    response = api_client.get(url)
+    assert response.status_code == 200
diff --git a/tests/test_workspaces/test_view.py b/tests/test_workspaces/test_view.py
index ccb29c7..fa9ff69 100644
--- a/tests/test_workspaces/test_view.py
+++ b/tests/test_workspaces/test_view.py
@@ -1,13 +1,9 @@
 import json
+
 import pytest # noqa
 from django.urls import reverse
-from apps.workspaces.models import (
-    Workspace,
-    ExportSetting,
-    ImportSetting,
-    AdvancedSetting,
-    BusinessCentralCredentials
-)
+
+from apps.workspaces.models import AdvancedSetting, BusinessCentralCredentials, ExportSetting, ImportSetting, Workspace
 
 
 def test_post_of_workspace(api_client, test_connection):
@@ -23,6 +19,7 @@ def test_post_of_workspace(api_client, test_connection):
     assert response.status_code == 201
     assert workspace.name == response.data['name']
     assert workspace.org_id == response.data['org_id']
+    assert workspace.onboarding_state == response.data['onboarding_state']
 
     response = json.loads(response.content)