Skip to content

Commit

Permalink
feat: Added limits to attributes (#649)
Browse files Browse the repository at this point in the history
* feat: Added limits to attributes

* resolved comments and added test

* added test

* renamed function

* fixed test
  • Loading branch information
Ashutosh619-sudo authored Oct 8, 2024
1 parent f231952 commit bffcf72
Show file tree
Hide file tree
Showing 4 changed files with 222 additions and 65 deletions.
172 changes: 110 additions & 62 deletions apps/netsuite/connector.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import re
import json
from datetime import datetime, timedelta
from django.utils import timezone
from typing import List, Dict
import logging

Expand Down Expand Up @@ -29,8 +30,13 @@
logger.level = logging.INFO

SYNC_UPPER_LIMIT = {
'projects': 25000,
'customers': 25000
'projects': 10000,
'customers': 25000,
'classes': 2000,
'accounts': 2000,
'locations': 2000,
'departments': 2000,
'vendors': 20000,
}


Expand Down Expand Up @@ -76,11 +82,31 @@ def get_tax_code_name(item_id, tax_type, rate):
return '{0}: {1} @{2}%'.format(tax_type, item_id, rate)
else:
return '{0} @{1}%'.format(item_id, rate)

def is_sync_allowed(self, attribute_type: str, attribute_count: int) -> bool:
    """
    Check whether syncing the given attribute type is allowed for this workspace.

    Syncs are always allowed while the attribute count is within the configured
    upper limit. Once the count exceeds the limit, only workspaces created on or
    before 2024-10-01 (pre-existing workspaces) are grandfathered in and may
    still sync; newer workspaces are blocked.

    :param attribute_type: key into SYNC_UPPER_LIMIT (e.g. 'projects', 'vendors');
        raises KeyError for unknown types, matching the original behavior
    :param attribute_count: number of records reported by the upstream connection
    :return: True if the sync should proceed, False otherwise
    """
    # Within the limit -> always allowed, regardless of workspace age
    if attribute_count <= SYNC_UPPER_LIMIT[attribute_type]:
        return True

    # Over the limit: only workspaces created before the cutoff may proceed
    cutoff = timezone.make_aware(datetime(2024, 10, 1), timezone.get_current_timezone())
    workspace_created_at = Workspace.objects.get(id=self.workspace_id).created_at
    return workspace_created_at <= cutoff

def sync_accounts(self):
"""
Sync accounts
"""
attribute_count = self.connection.accounts.count()
if not self.is_sync_allowed(attribute_type = 'accounts', attribute_count=attribute_count):
logger.info('Skipping sync of accounts for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return
accounts_generator = self.connection.accounts.get_all_generator()
for accounts in accounts_generator:
attributes = {
Expand Down Expand Up @@ -517,6 +543,11 @@ def sync_locations(self):
"""
Sync locations
"""
attribute_count = self.connection.locations.count()
if not self.is_sync_allowed(attribute_type = 'locations', attribute_count = attribute_count):
logger.info('Skipping sync of locations for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return

subsidiary_mapping = SubsidiaryMapping.objects.get(workspace_id=self.workspace_id)

location_generator = self.connection.locations.get_all_generator()
Expand Down Expand Up @@ -556,6 +587,11 @@ def sync_classifications(self):
"""
Sync classification
"""
attribute_count = self.connection.classifications.count()
if not self.is_sync_allowed(attribute_type = 'classes', attribute_count = attribute_count):
logger.info('Skipping sync of classes for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return

classification_generator = self.connection.classifications.get_all_generator()

classification_attributes = []
Expand All @@ -580,6 +616,10 @@ def sync_departments(self):
"""
Sync departments
"""
attribute_count = self.connection.departments.count()
if not self.is_sync_allowed(attribute_type = 'departments', attribute_count = attribute_count):
logger.info('Skipping sync of department for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return
department_generator = self.connection.departments.get_all_generator()

department_attributes = []
Expand All @@ -604,6 +644,11 @@ def sync_vendors(self):
"""
Sync vendors
"""
attribute_count = self.connection.vendors.count()
if not self.is_sync_allowed(attribute_type = 'vendors', attribute_count=attribute_count):
logger.info('Skipping sync of vendors for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return

subsidiary_mapping = SubsidiaryMapping.objects.get(workspace_id=self.workspace_id)
configuration = Configuration.objects.filter(workspace_id=self.workspace_id).first()
if not configuration:
Expand Down Expand Up @@ -1036,77 +1081,80 @@ def sync_projects(self):
"""
Sync projects
"""
projects_count = self.connection.projects.count()

if projects_count <= SYNC_UPPER_LIMIT['projects']:
projects_generator = self.connection.projects.get_all_generator()

for projects in projects_generator:
attributes = []
destination_ids = DestinationAttribute.objects.filter(
workspace_id=self.workspace_id,
attribute_type= 'PROJECT',
display_name='Project'
).values_list('destination_id', flat=True)
attribute_count = self.connection.projects.count()
if not self.is_sync_allowed(attribute_type = 'projects', attribute_count = attribute_count):
logger.info('Skipping sync of projects for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return

projects_generator = self.connection.projects.get_all_generator()
for projects in projects_generator:
attributes = []
destination_ids = DestinationAttribute.objects.filter(
workspace_id=self.workspace_id,
attribute_type= 'PROJECT',
display_name='Project'
).values_list('destination_id', flat=True)

for project in projects:
value = self.__decode_project_or_customer_name(project['entityId'])
for project in projects:
value = self.__decode_project_or_customer_name(project['entityId'])

if project['internalId'] in destination_ids :
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Project',
'value': value,
'destination_id': project['internalId'],
'active': not project['isInactive']
})
elif not project['isInactive']:
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Project',
'value': value,
'destination_id': project['internalId'],
'active': True
})
DestinationAttribute.bulk_create_or_update_destination_attributes(
attributes, 'PROJECT', self.workspace_id, True)
if project['internalId'] in destination_ids :
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Project',
'value': value,
'destination_id': project['internalId'],
'active': not project['isInactive']
})
elif not project['isInactive']:
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Project',
'value': value,
'destination_id': project['internalId'],
'active': True
})
DestinationAttribute.bulk_create_or_update_destination_attributes(
attributes, 'PROJECT', self.workspace_id, True)

return []

def sync_customers(self):
"""
Sync customers
"""
customers_count = self.connection.customers.count()

if customers_count <= SYNC_UPPER_LIMIT['customers']:
customers_generator = self.connection.customers.get_all_generator()
attribute_count = self.connection.customers.count()
if not self.is_sync_allowed(attribute_type = 'customers', attribute_count = attribute_count):
logger.info('Skipping sync of customers for workspace %s as it has %s counts which is over the limit', self.workspace_id, attribute_count)
return

customers_generator = self.connection.customers.get_all_generator()

for customers in customers_generator:
attributes = []
destination_ids = DestinationAttribute.objects.filter(workspace_id=self.workspace_id,\
attribute_type= 'PROJECT', display_name='Customer').values_list('destination_id', flat=True)
for customer in customers:
value = self.__decode_project_or_customer_name(customer['entityId'])
if customer['internalId'] in destination_ids :
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Customer',
'value': value,
'destination_id': customer['internalId'],
'active': not customer['isInactive']
})
elif not customer['isInactive']:
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Customer',
'value': value,
'destination_id': customer['internalId'],
'active': True
})
for customers in customers_generator:
attributes = []
destination_ids = DestinationAttribute.objects.filter(workspace_id=self.workspace_id,\
attribute_type= 'PROJECT', display_name='Customer').values_list('destination_id', flat=True)
for customer in customers:
value = self.__decode_project_or_customer_name(customer['entityId'])
if customer['internalId'] in destination_ids :
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Customer',
'value': value,
'destination_id': customer['internalId'],
'active': not customer['isInactive']
})
elif not customer['isInactive']:
attributes.append({
'attribute_type': 'PROJECT',
'display_name': 'Customer',
'value': value,
'destination_id': customer['internalId'],
'active': True
})

DestinationAttribute.bulk_create_or_update_destination_attributes(
attributes, 'PROJECT', self.workspace_id, True)
DestinationAttribute.bulk_create_or_update_destination_attributes(
attributes, 'PROJECT', self.workspace_id, True)

return []

Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ isort==5.10.1
lazy-object-proxy==1.6.0
lxml==4.6.5
mccabe==0.6.1
netsuitesdk==2.21.3
netsuitesdk==2.23.0
oauthlib==3.2.1
packaging==21.3
platformdirs==2.4.0
Expand Down
5 changes: 5 additions & 0 deletions tests/test_mappings/test_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,11 @@ def test_remove_duplicates(db):


def test_async_auto_map_employees(mocker, db):
mocker.patch(
'netsuitesdk.api.vendors.Vendors.count',
return_value=5
)

mocker.patch(
'netsuitesdk.api.vendors.Vendors.get_all_generator',
return_value=netsuite_data['get_all_vendors']
Expand Down
Loading

0 comments on commit bffcf72

Please sign in to comment.