Skip to content

Commit

Permalink
delete requests and use aiohttp
Browse files Browse the repository at this point in the history
  • Loading branch information
Morgan Diverrez authored and Benvii committed Aug 27, 2024
1 parent 5c3fae2 commit 550bb9b
Showing 1 changed file with 33 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@
import logging
import os
import sys
import urllib
from functools import partial
from pathlib import Path
from time import time
Expand All @@ -48,17 +47,15 @@
import aiohttp
import aiometer
import pandas as pd
import requests
from aiohttp_socks import ProxyConnector
from docopt import docopt
from dotenv import load_dotenv


async def _get_number_page(row, token):
# request numberPage of question with length page's equal to 200
url_base_api, headers, connector = await prep_call(token)
url_base_api, headers = await prep_call(token)

async with aiohttp.ClientSession(connector=connector) as session:
async with aiohttp.ClientSession(trust_env=True) as session:
url = f'{url_base_api}knowledge-bases/{row[1]}/search'

body = dict(knowledgeType=['question'], channel='faq')
Expand All @@ -67,7 +64,7 @@ async def _get_number_page(row, token):
params = dict(limit=200)

async with session.post(
url=url, json=body, headers=headers, params=params
url=url, json=body, headers=headers, params=params
) as response:
if response.status != 200:
logging.error(await response.text(), response.status)
Expand All @@ -83,17 +80,17 @@ async def _get_number_page(row, token):

async def _get_question(token, row, current_page):
# request documentId and question by page with length page's equal to 200
url_base_api, headers, connector = await prep_call(token)
url_base_api, headers = await prep_call(token)

async with aiohttp.ClientSession(connector=connector) as session:
async with aiohttp.ClientSession(trust_env=True) as session:
url = f'{url_base_api}knowledge-bases/{row.iloc[0]}/search'
body = dict(knowledgeType=['question'], channel='faq')
if cli_args.get('--tag_title') is not None:
body['filters'] = [{'name': cli_args.get('--tag_title'), 'type': 'tag'}]
params = dict(limit=200, page=current_page)

async with session.post(
url, json=body, headers=headers, params=params
url, json=body, headers=headers, params=params
) as response:
if response.status != 200:
logging.error(await response.text(), response.status)
Expand All @@ -112,13 +109,13 @@ async def _get_question(token, row, current_page):


async def _get_answer(token, row):
url_base_api, headers, connector = await prep_call(token)
url_base_api, headers = await prep_call(token)
if cli_args.get('--tag_title') is not None:
headers['customResponses'] = cli_args.get('--tag_title')
# Définir l'URL de la requête
url = f"{url_base_api}knowledge-bases/{row.get('knowledge_base_id')}/questions/{row.get('documentId')}/channels/{row.get('channel_id')}/responses"

async with aiohttp.ClientSession(connector=connector) as session:
async with aiohttp.ClientSession(trust_env=True) as session:
async with session.get(url, headers=headers) as response:
if response.status != 200:
row['Text'] = None
Expand Down Expand Up @@ -154,7 +151,7 @@ async def _get_answer(token, row):
def receipt_id_from_allowed_desired_knowledge_base(allowed_knowledge_bases):
filtered_data = filter(
lambda item: item.get('name') in cli_args.get('--knowledge_base')
and any(channel.get('systemName') == 'faq' for channel in item.get('channels')),
and any(channel.get('systemName') == 'faq' for channel in item.get('channels')),
allowed_knowledge_bases,
)
knowledge_bases_id_list = [
Expand All @@ -180,16 +177,19 @@ async def prep_call(token=None):
headers = {'Content-Type': 'application/json', 'Accept-Language': 'fr'}
if token:
headers['Authorization'] = f'Bearer {token}'
if urllib.request.getproxies().get('https'):
proxy = urllib.request.getproxies().get('https')
connector = ProxyConnector.from_url(url=proxy, rdns=True)
elif urllib.request.getproxies().get('http'):
proxy = urllib.request.getproxies().get('http')
connector = ProxyConnector.from_url(url=proxy, rdns=True)
else:
connector = None

return url_base_api, headers, connector
return url_base_api, headers


async def fetch_auth_token(session, url, headers, json=None):
    """POST the credential payload to *url* on *session* and return the decoded JSON body."""
    async with session.post(url, json=json, headers=headers) as response:
        return await response.json()

async def fetch_allowed_knowledge_bases(session, url, headers):
    """GET *url* on *session* with *headers* and return the decoded JSON body."""
    async with session.get(url, headers=headers) as response:
        return await response.json()


async def _main(args, body_credentials):
Expand All @@ -211,38 +211,38 @@ async def _main(args, body_credentials):

# receipt auth token
_start = time()
url_base_api, headers, connector = await prep_call()
url_base_api, headers = await prep_call()

logging.debug('request token with apiKey and apiSecret')
url = f'{url_base_api}auth'
headers = {'Content-Type': 'application/json'}
response_auth = requests.post(url, json=body_credentials, headers=headers)

if not response_auth.ok:
logging.error(response_auth.text, response_auth.status_code)
async with aiohttp.ClientSession(trust_env=True) as session:
response_auth = await fetch_auth_token(session=session, json=body_credentials, url=url, headers=headers)
if not response_auth:
logging.error(response_auth.get('text'), response_auth.get('status_code'))
sys.exit(1)

# save token
token = response_auth.json().get('token')

token = response_auth.get('token')
# request knowledge bases accessible with this token
logging.debug('request allowed knowledge bases list and associated channels')
url = f'{url_base_api}knowledge-bases?limit=200'
headers['Authorization'] = f'Bearer {token}'
response_allowed_knowledge_bases = requests.get(url, headers=headers)

if not response_allowed_knowledge_bases.ok:
async with aiohttp.ClientSession(trust_env=True) as session:
response_allowed_knowledge_bases = await fetch_allowed_knowledge_bases(session=session, url=url, headers=headers)
if not response_allowed_knowledge_bases.get('data'):
logging.error(
response_allowed_knowledge_bases.text,
response_allowed_knowledge_bases.status_code,
response_allowed_knowledge_bases.get('text'),
response_allowed_knowledge_bases.get('status_code'),
)
sys.exit(1)

# filter knowledge base id and faq channel id associated
logging.debug(
'filtering knowledge base allowed for take knowledge_base_id and channel_id associated'
)
results_allowed_knowledge_bases = response_allowed_knowledge_bases.json().get(
results_allowed_knowledge_bases = response_allowed_knowledge_bases.get(
'data'
)
df_knowledge_bases = receipt_id_from_allowed_desired_knowledge_base(
Expand Down

0 comments on commit 550bb9b

Please sign in to comment.