From 0df87afcba8cd84bab032a2ae08eaf161550e4fd Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 21 Jun 2021 16:36:06 -0700 Subject: [PATCH 01/66] test: add tests for basicMessage admin protocol get and delete Signed-off-by: Char --- int/tests/test_basicmessage.py | 121 +++++++++++++++++++++++++++++++-- 1 file changed, 117 insertions(+), 4 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index fdb17018..6efc1859 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -4,7 +4,6 @@ from aries_staticagent import StaticConnection - @pytest.mark.asyncio async def test_send(connection: StaticConnection, connection_id: str): with connection.next() as future_recip_message: @@ -13,13 +12,127 @@ async def test_send(connection: StaticConnection, connection_id: str): { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Your hovercraft is full of eels.", + "content": "Message #1: Your hovercraft is full of eels.", }, return_route="all", ), timeout=60, ) recip_message = await asyncio.wait_for(future_recip_message, 60) + assert recip_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" + assert recip_message["message"]["content"] == "Message #1: Your hovercraft is full of eels." + # Delete messages to clear the state between tests + await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + } + ) + + +@pytest.mark.asyncio +async def test_delete(connection: StaticConnection, connection_id: str): + for i in range(6): + with connection.next() as future_recip_message: + sent_message = await asyncio.wait_for( + connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Test Message #{}".format(i), + }, + return_route="all", + ), + timeout=60, + ) + recip_message = await asyncio.wait_for(future_recip_message, 60) + delete_message = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + } + ) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + } + ) + assert delete_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" + assert get_messages["count"] == 0 + - assert recip_message - assert sent_message +@pytest.mark.asyncio +async def test_get(connection: StaticConnection, connection_id: str): + with connection.next() as future_recip_message: + sent_message = await asyncio.wait_for( + connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Message #2: Are you suggesting coconuts migrate?", + }, + return_route="all", + ), + timeout=60, + ) + recip_message = await asyncio.wait_for(future_recip_message, 60) + with connection.next() as future_recip_message: + sent_message = await asyncio.wait_for( + connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + 
"connection_id": connection_id, + "content": "Message #3: 'Tis but a flesh wound.", + }, + return_route="all", + ), + timeout=60, + ) + recip_message = await asyncio.wait_for(future_recip_message, 60) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + } + ) + assert get_messages["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + assert get_messages["count"] == 2 + # Delete messages to clear the state between tests + await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + } + ) + + +@pytest.mark.asyncio +async def test_get_limit_offset(connection: StaticConnection, connection_id: str): + for i in range(6): + with connection.next() as future_recip_message: + sent_message = await asyncio.wait_for( + connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Test Message #{}".format(i), + }, + return_route="all", + ), + timeout=60, + ) + recip_message = await asyncio.wait_for(future_recip_message, 60) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "limit": 3, + "offset": 2 + } + ) + assert get_messages["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + assert get_messages["count"] == 3 + assert get_messages["messages"][0]["content"] == "Test Message #3" + assert get_messages["messages"][1]["content"] == "Test Message #2" + assert get_messages["messages"][2]["content"] == "Test Message #1" + # Delete messages to clear the state between tests + await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + } + ) From e62d2afb2f2a773862d7b5768c06c3965661d241 Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 25 Jun 2021 11:40:02 -0700 Subject: [PATCH 02/66] test: add test_connections.py Signed-off-by: Char --- int/tests/__init__.py | 12 +- int/tests/test_basicmessage.py | 9 +- int/tests/test_connections.py | 208 +++++++++++++++++++++++++++++++++ int/tests/test_invitations.py | 1 + 4 files changed, 225 insertions(+), 5 deletions(-) create mode 100644 int/tests/test_connections.py diff --git a/int/tests/__init__.py b/int/tests/__init__.py index f8e2bcda..18d0b56e 100644 --- a/int/tests/__init__.py +++ b/int/tests/__init__.py @@ -2,7 +2,7 @@ from aiohttp import web from aries_staticagent import StaticConnection, Module - +import logging class BaseAgent: """Simple Agent class. 
@@ -22,6 +22,16 @@ async def handle_web_request(self, request: web.Request): with self.connection.session(response.append) as session: await self.connection.handle(await request.read(), session) + # create logger + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + console_handler = logging.StreamHandler() + console_handler.setLevel(logging.DEBUG) + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + logger.warning('No suitable message handler for this type') + if response: return web.Response(body=response.pop()) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 6efc1859..06a55322 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -4,6 +4,7 @@ from aries_staticagent import StaticConnection + @pytest.mark.asyncio async def test_send(connection: StaticConnection, connection_id: str): with connection.next() as future_recip_message: @@ -12,7 +13,7 @@ async def test_send(connection: StaticConnection, connection_id: str): { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Message #1: Your hovercraft is full of eels.", + "content": "Your hovercraft is full of eels.", }, return_route="all", ), @@ -20,7 +21,7 @@ async def test_send(connection: StaticConnection, connection_id: str): ) recip_message = await asyncio.wait_for(future_recip_message, 60) assert recip_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" - assert recip_message["message"]["content"] == "Message #1: Your hovercraft is full of eels." + assert recip_message["message"]["content"] == "Your hovercraft is full of eels." 
# Delete messages to clear the state between tests await connection.send_and_await_reply_async( { @@ -67,7 +68,7 @@ async def test_get(connection: StaticConnection, connection_id: str): { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Message #2: Are you suggesting coconuts migrate?", + "content": "Are you suggesting coconuts migrate?", }, return_route="all", ), @@ -80,7 +81,7 @@ async def test_get(connection: StaticConnection, connection_id: str): { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Message #3: 'Tis but a flesh wound.", + "content": "'Tis but a flesh wound.", }, return_route="all", ), diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py new file mode 100644 index 00000000..b7f660e5 --- /dev/null +++ b/int/tests/test_connections.py @@ -0,0 +1,208 @@ +"""Connection Tests""" +import asyncio +import pytest +from acapy_backchannel import Client +from acapy_backchannel.api.connection import delete_connection, get_connections +import time +import logging + +logging.basicConfig(level=logging.INFO) + + +@pytest.mark.asyncio +async def clear_connections(client: Client): + """Clear all connections, if any.""" + connections = await get_connections.asyncio(client=client) + for connection in connections.results: + if connection.state == "connection": + await delete_connection.asyncio( + client=client, conn_id=connection.connection_id + ) + # return(connections) + + +@pytest.mark.asyncio +@pytest.fixture(autouse=True) +async def clear_connection_state(backchannel: Client): + """Clear invitations after each test.""" + # yield + # await clear_connections(backchannel) + yield await clear_connections(backchannel) + + +time.sleep(3) + + +# Temporary Test: before connection +@pytest.mark.asyncio +async def test_get_list_before_connection(connection): + get_list_before_connection = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO) + logging.warning('Log of test_get_list_before_connection') + print("get_list before connection: ",get_list_before_connection["connections"]) + assert True#False + + +@pytest.mark.asyncio +async def test_create_connection(connection): + """Send an invitation and receive it to create a new connection""" + invitation = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }, + return_route="all", + ) + received = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + assert received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + + +# Temporary Test: after connection +@pytest.mark.asyncio +async def test_get_list_after_connection(connection): + get_list_after_connection = await connection.send_and_await_reply_async( + { + "@type": 
"https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + print("get_list after connection: ",get_list_after_connection["connections"]) + assert True#False + + +@pytest.mark.asyncio +async def test_get_list(connection): + invitation = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }, + return_route="all", + ) + received = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + print("Invitation: ",invitation) + print("Received: ",received) + invitation2 = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Second invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }, + return_route="all", + ) + received2 = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation2["invitation_url"], + "auto_accept": True, + } + ) + get_list = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + assert get_list["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" + + +@pytest.mark.asyncio +async def test_update(connection): + """Update connection attribute""" + invitation = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }, + return_route="all", + ) + received = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + update = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", + "connection_id": received["connection_id"], + "label": "Updated label", + "role": "Updated role", + } + ) + assert update["label"] == "Updated label" + + +@pytest.mark.asyncio +async def test_delete(connection): + invitation = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }, + return_route="all", + ) + received = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + get_list_beforedelete = await connection.send_and_await_reply_async( + { + "@type": 
"https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + print('Connections before delete: ',get_list_beforedelete["connections"]) + assert received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + delete_connection = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", + "connection_id": received["connection_id"] + } + ) + get_list_afterdelete = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + print("List after delete",get_list_afterdelete["connections"]) + # for i in get_list_beforedelete["connections"]: + # if i not in get_list_afterdelete["connections"]: + # print(i) + assert delete_connection["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" diff --git a/int/tests/test_invitations.py b/int/tests/test_invitations.py index 3be94e21..401474da 100644 --- a/int/tests/test_invitations.py +++ b/int/tests/test_invitations.py @@ -45,6 +45,7 @@ async def test_create_invitation(connection): }, return_route="all", ) + print(reply) assert ( reply["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/invitation" From 618c46b5c31bf4437914c7b611c15a8b94362531 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Fri, 25 Jun 2021 20:46:05 -0500 Subject: [PATCH 03/66] refactor: simplify logging Signed-off-by: Daniel Bluhm --- int/tests/__init__.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/int/tests/__init__.py b/int/tests/__init__.py index 18d0b56e..20430124 100644 --- a/int/tests/__init__.py +++ b/int/tests/__init__.py @@ -1,8 +1,12 @@ -# 1. Copy BaseAgent implementation from agent-testing into int/tests/__init__.py +"""Common helpers.""" + +import logging from aiohttp import web from aries_staticagent import StaticConnection, Module -import logging + +LOGGER = logging.getLogger(__name__) + class BaseAgent: """Simple Agent class. 
@@ -20,17 +24,10 @@ async def handle_web_request(self, request: web.Request): """Handle HTTP POST.""" response = [] with self.connection.session(response.append) as session: - await self.connection.handle(await request.read(), session) - - # create logger - logger = logging.getLogger(__name__) - logger.setLevel(logging.DEBUG) - console_handler = logging.StreamHandler() - console_handler.setLevel(logging.DEBUG) - formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') - console_handler.setFormatter(formatter) - logger.addHandler(console_handler) - logger.warning('No suitable message handler for this type') + try: + await self.connection.handle(await request.read(), session) + except: + LOGGER.exception("Message handling failed") if response: return web.Response(body=response.pop()) From 819c12ef7cc6c00234a21609449e602b0160a39c Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Fri, 25 Jun 2021 20:58:32 -0500 Subject: [PATCH 04/66] style: reformat with black Signed-off-by: Daniel Bluhm --- int/tests/test_basicmessage.py | 32 ++++++++++++++++------- int/tests/test_connections.py | 48 +++++++++++++++++++++------------- 2 files changed, 52 insertions(+), 28 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 06a55322..6d31621c 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -20,7 +20,10 @@ async def test_send(connection: StaticConnection, connection_id: str): timeout=60, ) recip_message = await asyncio.wait_for(future_recip_message, 60) - assert recip_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" + assert ( + recip_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" + ) assert recip_message["message"]["content"] == "Your hovercraft is full of eels." 
# Delete messages to clear the state between tests await connection.send_and_await_reply_async( @@ -39,7 +42,7 @@ async def test_delete(connection: StaticConnection, connection_id: str): { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Test Message #{}".format(i), + "content": "Test Message #{}".format(i), }, return_route="all", ), @@ -53,10 +56,13 @@ async def test_delete(connection: StaticConnection, connection_id: str): ) get_messages = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", } ) - assert delete_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" + assert ( + delete_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" + ) assert get_messages["count"] == 0 @@ -90,10 +96,13 @@ async def test_get(connection: StaticConnection, connection_id: str): recip_message = await asyncio.wait_for(future_recip_message, 60) get_messages = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", } ) - assert get_messages["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + assert ( + get_messages["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + ) assert get_messages["count"] == 2 # Delete messages to clear the state between tests await connection.send_and_await_reply_async( @@ -112,7 +121,7 @@ async def test_get_limit_offset(connection: StaticConnection, connection_id: str { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, - "content": "Test Message #{}".format(i), + "content": "Test Message #{}".format(i), }, return_route="all", ), @@ -121,12 +130,15 @@ async def test_get_limit_offset(connection: StaticConnection, connection_id: str recip_message = await asyncio.wait_for(future_recip_message, 60) get_messages = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", "limit": 3, - "offset": 2 + "offset": 2, } ) - assert get_messages["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + assert ( + get_messages["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + ) assert get_messages["count"] == 3 assert get_messages["messages"][0]["content"] == "Test Message #3" assert get_messages["messages"][1]["content"] == "Test Message #2" diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index b7f660e5..376e5d29 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -38,13 +38,13 @@ async def clear_connection_state(backchannel: Client): async def test_get_list_before_connection(connection): get_list_before_connection = await 
connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO) - logging.warning('Log of test_get_list_before_connection') - print("get_list before connection: ",get_list_before_connection["connections"]) - assert True#False + logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO) + logging.warning("Log of test_get_list_before_connection") + print("get_list before connection: ", get_list_before_connection["connections"]) + assert True # False @pytest.mark.asyncio @@ -68,7 +68,10 @@ async def test_create_connection(connection): "auto_accept": True, } ) - assert received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + assert ( + received["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + ) # Temporary Test: after connection @@ -79,8 +82,8 @@ async def test_get_list_after_connection(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - print("get_list after connection: ",get_list_after_connection["connections"]) - assert True#False + print("get_list after connection: ", get_list_after_connection["connections"]) + assert True # False @pytest.mark.asyncio @@ -103,8 +106,8 @@ async def test_get_list(connection): "auto_accept": True, } ) - print("Invitation: ",invitation) - print("Received: ",received) + print("Invitation: ", invitation) + print("Received: ", received) invitation2 = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", @@ -125,10 +128,13 @@ async def test_get_list(connection): ) get_list = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - assert get_list["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" + assert ( + get_list["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" + ) @pytest.mark.asyncio @@ -185,15 +191,18 @@ async def test_delete(connection): ) get_list_beforedelete = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - print('Connections before delete: ',get_list_beforedelete["connections"]) - assert received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + print("Connections before delete: ", get_list_beforedelete["connections"]) + assert ( + received["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + ) delete_connection = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", - "connection_id": received["connection_id"] + "connection_id": 
received["connection_id"], } ) get_list_afterdelete = await connection.send_and_await_reply_async( @@ -201,8 +210,11 @@ async def test_delete(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - print("List after delete",get_list_afterdelete["connections"]) + print("List after delete", get_list_afterdelete["connections"]) # for i in get_list_beforedelete["connections"]: # if i not in get_list_afterdelete["connections"]: # print(i) - assert delete_connection["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + assert ( + delete_connection["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + ) From f703b3b016cf32091bcd7dc769d9b305b79b1352 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Fri, 25 Jun 2021 20:59:55 -0500 Subject: [PATCH 05/66] refactor: drop logging in test_connections for now Signed-off-by: Daniel Bluhm --- int/tests/test_connections.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 376e5d29..32a8d35a 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -4,9 +4,6 @@ from acapy_backchannel import Client from acapy_backchannel.api.connection import delete_connection, get_connections import time -import logging - -logging.basicConfig(level=logging.INFO) @pytest.mark.asyncio @@ -41,8 +38,6 @@ async def test_get_list_before_connection(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO) - logging.warning("Log of test_get_list_before_connection") print("get_list before connection: ", get_list_before_connection["connections"]) assert True # False From 9083a6cbb7113ddc62c47d3cbd4ed0f90ce58858 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 28 Jun 2021 13:46:32 -0700 Subject: [PATCH 06/66] fix: add condition to send_and_await_reply_async() function Signed-off-by: Char --- int/tests/test_connections.py | 253 +++++++++++++++++----------------- int/tests/test_invitations.py | 8 +- 2 files changed, 135 insertions(+), 126 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 32a8d35a..8a42d227 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -4,6 +4,7 @@ from acapy_backchannel import Client from acapy_backchannel.api.connection import delete_connection, get_connections import time +from aries_staticagent import Message @pytest.mark.asyncio @@ -15,7 +16,6 @@ async def clear_connections(client: Client): await delete_connection.asyncio( client=client, conn_id=connection.connection_id ) - # return(connections) @pytest.mark.asyncio @@ -27,41 +27,42 @@ async def clear_connection_state(backchannel: Client): yield await clear_connections(backchannel) -time.sleep(3) - - # Temporary Test: before connection -@pytest.mark.asyncio -async def test_get_list_before_connection(connection): - get_list_before_connection = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - print("get_list before connection: ", get_list_before_connection["connections"]) - assert True # False +# @pytest.mark.asyncio +# async def test_get_list_before_connection(connection): +# get_list_before_connection = await 
connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" +# } +# ) +# print("get_list before connection: ", get_list_before_connection["connections"]) +# assert True # False @pytest.mark.asyncio async def test_create_connection(connection): """Send an invitation and receive it to create a new connection""" - invitation = await connection.send_and_await_reply_async( - { + sent_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }, + }) + invitation = await connection.send_and_await_reply_async( + sent_invitation, + condition=lambda reply: reply.thread["thid"] == sent_invitation.id, return_route="all", ) - received = await connection.send_and_await_reply_async( - { + sent_msg = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - } + }) + received = await connection.send_and_await_reply_async( + sent_msg, + condition=lambda reply: reply.thread["thid"] == sent_msg.id ) assert ( received["@type"] @@ -69,57 +70,63 @@ async def test_create_connection(connection): ) -# Temporary Test: after connection -@pytest.mark.asyncio -async def test_get_list_after_connection(connection): - get_list_after_connection = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - print("get_list after connection: ", get_list_after_connection["connections"]) - assert True # False +# # Temporary Test: after connection +# @pytest.mark.asyncio +# async def test_get_list_after_connection(connection): +# get_list_after_connection = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" +# } +# ) +# print("get_list after connection: ", get_list_after_connection["connections"]) +# assert True # False @pytest.mark.asyncio async def test_get_list(connection): - invitation = await connection.send_and_await_reply_async( - { + sent_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }, + }) + invitation = await connection.send_and_await_reply_async( + sent_invitation, + condition=lambda reply: reply.thread["thid"] == sent_invitation.id, return_route="all", ) - received = await connection.send_and_await_reply_async( - { + sent_msg = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - } + }) + received = await connection.send_and_await_reply_async( + sent_msg, + condition=lambda reply: reply.thread["thid"] == sent_msg.id ) - print("Invitation: ", invitation) - print("Received: ", received) - invitation2 = await connection.send_and_await_reply_async( - { + sent_invitation2 = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Second invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, 
"multi_use": True, - }, + }) + invitation2 = await connection.send_and_await_reply_async( + sent_invitation2, + condition=lambda reply: reply.thread["thid"] == sent_invitation2.id, return_route="all", ) - received2 = await connection.send_and_await_reply_async( - { + sent_msg2 = { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation2["invitation_url"], "auto_accept": True, } + received2 = await connection.send_and_await_reply_async( + sent_msg2, + condition=lambda reply: reply.thread["thid"] == sent_msg2.id ) get_list = await connection.send_and_await_reply_async( { @@ -132,84 +139,84 @@ async def test_get_list(connection): ) -@pytest.mark.asyncio -async def test_update(connection): - """Update connection attribute""" - invitation = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "admin", - "auto_accept": True, - "multi_use": True, - }, - return_route="all", - ) - received = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - update = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", - "connection_id": received["connection_id"], - "label": "Updated label", - "role": "Updated role", - } - ) - assert update["label"] == "Updated label" - - -@pytest.mark.asyncio -async def test_delete(connection): - invitation = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "admin", - "auto_accept": True, - "multi_use": True, - }, - return_route="all", - ) - received = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - get_list_beforedelete = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - print("Connections before delete: ", get_list_beforedelete["connections"]) - assert ( - received["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" - ) - delete_connection = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", - "connection_id": received["connection_id"], - } - ) - get_list_afterdelete = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - print("List after delete", get_list_afterdelete["connections"]) - # for i in get_list_beforedelete["connections"]: - # if i not in get_list_afterdelete["connections"]: - # print(i) - assert ( - delete_connection["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" - ) +# @pytest.mark.asyncio +# async def 
test_update(connection): +# """Update connection attribute""" +# invitation = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", +# "alias": "Invitation I sent to Alice", +# "label": "Bob", +# "group": "admin", +# "auto_accept": True, +# "multi_use": True, +# }, +# return_route="all", +# ) +# received = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", +# "invitation": invitation["invitation_url"], +# "auto_accept": True, +# } +# ) +# update = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", +# "connection_id": received["connection_id"], +# "label": "Updated label", +# "role": "Updated role", +# } +# ) +# assert update["label"] == "Updated label" + + +# @pytest.mark.asyncio +# async def test_delete(connection): +# invitation = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", +# "alias": "Invitation I sent to Alice", +# "label": "Bob", +# "group": "admin", +# "auto_accept": True, +# "multi_use": True, +# }, +# return_route="all", +# ) +# received = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", +# "invitation": invitation["invitation_url"], +# "auto_accept": True, +# } +# ) +# get_list_beforedelete = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" +# } +# ) +# print("Connections before delete: ", get_list_beforedelete["connections"]) +# assert ( +# received["@type"] +# == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" +# ) +# delete_connection = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", +# "connection_id": received["connection_id"], +# } +# ) +# get_list_afterdelete = await connection.send_and_await_reply_async( +# { +# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" +# } +# ) +# print("List after delete", get_list_afterdelete["connections"]) +# # for i in get_list_beforedelete["connections"]: +# # if i not in get_list_afterdelete["connections"]: +# # print(i) +# assert ( +# delete_connection["@type"] +# == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" +# ) diff --git a/int/tests/test_invitations.py b/int/tests/test_invitations.py index 401474da..c91323d3 100644 --- a/int/tests/test_invitations.py +++ b/int/tests/test_invitations.py @@ -34,15 +34,17 @@ async def clear_invitation_state(backchannel: Client): @pytest.mark.asyncio async def test_create_invitation(connection): - reply = await connection.send_and_await_reply_async( - { + msg = { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }, + } + reply = await connection.send_and_await_reply_async( + msg, + # condition=lambda reply: reply.thread["thid"] == 
msg.id, return_route="all", ) print(reply) From 29bd24eba22fc2ed43275a5ad34945b384c68510 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 28 Jun 2021 15:02:01 -0700 Subject: [PATCH 07/66] fix: add assign_thread_from function to ReceiveInvitationHandler Signed-off-by: Char --- acapy_plugin_toolbox/connections.py | 1 + 1 file changed, 1 insertion(+) diff --git a/acapy_plugin_toolbox/connections.py b/acapy_plugin_toolbox/connections.py index 3e193a9f..a581c8a0 100644 --- a/acapy_plugin_toolbox/connections.py +++ b/acapy_plugin_toolbox/connections.py @@ -311,4 +311,5 @@ async def handle(self, context: RequestContext, responder: BaseResponder): mediation_id=context.message.mediation_id, ) connection_resp = Connection(**conn_record_to_message_repr(connection)) + connection_resp.assign_thread_from(context.message) await responder.send_reply(connection_resp) From a7402958a10f328b5c5b4bf71a371dfadbc322e7 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 28 Jun 2021 15:02:01 -0700 Subject: [PATCH 08/66] fix: add assign_thread_from function to ReceiveInvitationHandler Signed-off-by: Char --- acapy_plugin_toolbox/connections.py | 1 + 1 file changed, 1 insertion(+) diff --git a/acapy_plugin_toolbox/connections.py b/acapy_plugin_toolbox/connections.py index 3a207785..3ab2766e 100644 --- a/acapy_plugin_toolbox/connections.py +++ b/acapy_plugin_toolbox/connections.py @@ -331,4 +331,5 @@ async def handle(self, context: RequestContext, responder: BaseResponder): mediation_id=context.message.mediation_id, ) connection_resp = Connection(**conn_record_to_message_repr(connection)) + connection_resp.assign_thread_from(context.message) await responder.send_reply(connection_resp) From 03f8eba5b38c077e7c7a29e74ffee408f41ceea9 Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 30 Jun 2021 16:03:51 -0700 Subject: [PATCH 09/66] fix: clear connection state between tests Signed-off-by: Char --- int/tests/test_connections.py | 248 +++++++++++++++------------------- 1 file changed, 108 insertions(+), 140 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 8a42d227..db72164f 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -3,46 +3,25 @@ import pytest from acapy_backchannel import Client from acapy_backchannel.api.connection import delete_connection, get_connections -import time from aries_staticagent import Message -@pytest.mark.asyncio -async def clear_connections(client: Client): - """Clear all connections, if any.""" - connections = await get_connections.asyncio(client=client) +@pytest.fixture(autouse=True) +async def clear_connection_state(backchannel: Client, connection_id: str): + """Clear connections after each test.""" + yield + connections = await get_connections.asyncio(client=backchannel) for connection in connections.results: - if connection.state == "connection": + if connection.connection_id != connection_id: await delete_connection.asyncio( - client=client, conn_id=connection.connection_id + client=backchannel, conn_id=connection.connection_id ) -@pytest.mark.asyncio -@pytest.fixture(autouse=True) -async def clear_connection_state(backchannel: Client): - """Clear invitations after each test.""" - # yield - # await clear_connections(backchannel) - yield await clear_connections(backchannel) - - -# Temporary Test: before connection -# @pytest.mark.asyncio -# async def test_get_list_before_connection(connection): -# get_list_before_connection = await connection.send_and_await_reply_async( -# { -# "@type": 
"https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" -# } -# ) -# print("get_list before connection: ", get_list_before_connection["connections"]) -# assert True # False - - @pytest.mark.asyncio async def test_create_connection(connection): """Send an invitation and receive it to create a new connection""" - sent_invitation = Message({ + msg_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", @@ -51,40 +30,29 @@ async def test_create_connection(connection): "multi_use": True, }) invitation = await connection.send_and_await_reply_async( - sent_invitation, - condition=lambda reply: reply.thread["thid"] == sent_invitation.id, + msg_invitation, + condition=lambda reply: reply.thread["thid"] == msg_invitation.id, return_route="all", ) - sent_msg = Message({ + msg_received = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, }) received = await connection.send_and_await_reply_async( - sent_msg, - condition=lambda reply: reply.thread["thid"] == sent_msg.id + msg_received, + condition=lambda reply: reply.thread["thid"] == msg_received.id ) assert ( received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" ) - - -# # Temporary Test: after connection -# @pytest.mark.asyncio -# async def test_get_list_after_connection(connection): -# get_list_after_connection = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" -# } -# ) -# print("get_list after connection: ", get_list_after_connection["connections"]) -# assert True # False + assert received["label"] == msg_invitation["label"] @pytest.mark.asyncio async def test_get_list(connection): - sent_invitation = Message({ + msg_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", @@ -93,20 +61,20 @@ async def test_get_list(connection): "multi_use": True, }) invitation = await connection.send_and_await_reply_async( - sent_invitation, - condition=lambda reply: reply.thread["thid"] == sent_invitation.id, + msg_invitation, + condition=lambda reply: reply.thread["thid"] == msg_invitation.id, return_route="all", ) - sent_msg = Message({ + msg_received = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, }) received = await connection.send_and_await_reply_async( - sent_msg, - condition=lambda reply: reply.thread["thid"] == sent_msg.id + msg_received, + condition=lambda reply: reply.thread["thid"] == msg_received.id ) - sent_invitation2 = Message({ + msg_invitation2 = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Second invitation I sent to Alice", "label": "Bob", @@ -115,108 +83,108 @@ async def test_get_list(connection): "multi_use": True, }) invitation2 = await connection.send_and_await_reply_async( - sent_invitation2, - condition=lambda reply: reply.thread["thid"] == sent_invitation2.id, + msg_invitation2, + condition=lambda reply: reply.thread["thid"] == 
msg_invitation2.id, return_route="all", ) - sent_msg2 = { + msg_received2 = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation2["invitation_url"], "auto_accept": True, - } + }) received2 = await connection.send_and_await_reply_async( - sent_msg2, - condition=lambda reply: reply.thread["thid"] == sent_msg2.id + msg_received2, + condition=lambda reply: reply.thread["thid"] == msg_received2.id ) get_list = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - assert ( - get_list["@type"] + assert (get_list["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" ) + assert received["connection_id"] in [connection_item["connection_id"] for connection_item in get_list["connections"]] + assert received2["connection_id"] in [connection_item["connection_id"] for connection_item in get_list["connections"]] -# @pytest.mark.asyncio -# async def test_update(connection): -# """Update connection attribute""" -# invitation = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", -# "alias": "Invitation I sent to Alice", -# "label": "Bob", -# "group": "admin", -# "auto_accept": True, -# "multi_use": True, -# }, -# return_route="all", -# ) -# received = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", -# "invitation": invitation["invitation_url"], -# "auto_accept": True, -# } -# ) -# update = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", -# "connection_id": received["connection_id"], -# "label": "Updated label", -# "role": "Updated role", -# } -# ) -# assert update["label"] == "Updated label" +@pytest.mark.asyncio +async def test_update(connection): + """Update connection attribute""" + msg_invitation = Message({ + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + }) + invitation = await connection.send_and_await_reply_async( + msg_invitation, + condition=lambda reply: reply.thread["thid"] == msg_invitation.id, + return_route="all", + ) + msg_received = Message({ + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + }) + received = await connection.send_and_await_reply_async( + msg_received, + condition=lambda reply: reply.thread["thid"] == msg_received.id, + ) + msg_update = Message({ + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", + "connection_id": received["connection_id"], + "label": "Updated label", + "role": "Updated role", + }) + update = await connection.send_and_await_reply_async( + msg_update, + condition=lambda reply: reply.thread["thid"] == msg_update.id, + ) + assert update["label"] == "Updated label" -# @pytest.mark.asyncio -# async def test_delete(connection): -# invitation = await connection.send_and_await_reply_async( -# { -# "@type": 
"https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", -# "alias": "Invitation I sent to Alice", -# "label": "Bob", -# "group": "admin", -# "auto_accept": True, -# "multi_use": True, -# }, -# return_route="all", -# ) -# received = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", -# "invitation": invitation["invitation_url"], -# "auto_accept": True, -# } -# ) -# get_list_beforedelete = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" -# } -# ) -# print("Connections before delete: ", get_list_beforedelete["connections"]) -# assert ( -# received["@type"] -# == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" -# ) -# delete_connection = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", -# "connection_id": received["connection_id"], -# } -# ) -# get_list_afterdelete = await connection.send_and_await_reply_async( -# { -# "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" -# } -# ) -# print("List after delete", get_list_afterdelete["connections"]) -# # for i in get_list_beforedelete["connections"]: -# # if i not in get_list_afterdelete["connections"]: -# # print(i) -# assert ( -# delete_connection["@type"] -# == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" -# ) +@pytest.mark.asyncio +async def test_delete(connection): + invitation_msg = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "admin", + "auto_accept": True, + "multi_use": True, + } + ) + invitation = await connection.send_and_await_reply_async( + invitation_msg, + condition=lambda reply: reply.thread["thid"] == invitation_msg.id, + return_route="all", + ) + msg_received = Message({ + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + }) + received = await connection.send_and_await_reply_async( + msg_received, + condition=lambda reply: reply.thread["thid"] == msg_received.id, + ) + delete_connection = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", + "connection_id": received["connection_id"], + } + ) + assert delete_connection["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + assert delete_connection["connection_id"] == received["connection_id"] + get_list = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + for connection_item in get_list["connections"]: + assert connection_item["label"] != invitation_msg["label"] + assert connection_item["connection_id"] != received["connection_id"] From cef9709fd76a592ec6936888335df1d0759b654a Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 30 Jun 2021 17:17:15 -0700 Subject: [PATCH 10/66] chore: fold clear_invitations into clear_invitation_state 
Signed-off-by: Char --- int/tests/test_invitations.py | 48 ++++++++++++----------------------- 1 file changed, 16 insertions(+), 32 deletions(-) diff --git a/int/tests/test_invitations.py b/int/tests/test_invitations.py index c91323d3..ecf8f40d 100644 --- a/int/tests/test_invitations.py +++ b/int/tests/test_invitations.py @@ -1,50 +1,33 @@ -"""Example tests.""" +"""Invitations tests""" import pytest - from acapy_backchannel import Client from acapy_backchannel.api.connection import delete_connection, get_connections -async def clear_invitations(client: Client): - """Clear all invitations, if any.""" - connections = await get_connections.asyncio(client=client) +@pytest.fixture(autouse=True) +async def clear_invitation_state(backchannel: Client, connection_id: str): + """Clear invitation after each test.""" + yield + connections = await get_connections.asyncio(client=backchannel) for connection in connections.results: if connection.state == "invitation": await delete_connection.asyncio( - client=client, conn_id=connection.connection_id + client=backchannel, conn_id=connection.connection_id ) -@pytest.fixture(autouse=True) -async def clear_invitation_state(backchannel: Client): - """Clear invitations after each test.""" - # We don't need to do any setup tasks for this fixture. - # Normally we would do some setup to create a value and then yield it for - # use in the test method. This fixture is special in that it doesn't require - # that setup and does not need to yield a value for use in a test method. - # Just need to clear state that may have been triggered by the test method. - - yield - - # Everything that follows the yield is executed after the test method and - # is where we perform tear down. - - await clear_invitations(backchannel) - - @pytest.mark.asyncio async def test_create_invitation(connection): - msg = { + """Test create invitation protocol""" + reply = await connection.send_and_await_reply_async( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - } - reply = await connection.send_and_await_reply_async( - msg, - # condition=lambda reply: reply.thread["thid"] == msg.id, + }, return_route="all", ) print(reply) @@ -56,6 +39,7 @@ async def test_create_invitation(connection): @pytest.mark.asyncio async def test_get_list(connection): + """Test get list protocol""" reply = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/get-list" @@ -70,9 +54,9 @@ async def test_get_list(connection): @pytest.mark.asyncio async def test_num_results(connection): + """Test that create message causes new item in results list""" # Input number of messages to add to the list - added_num = 1 - # Add new messages + added_num = 2 for i in range(added_num): await connection.send_and_await_reply_async( { @@ -85,7 +69,6 @@ async def test_num_results(connection): }, return_route="all", ) - # Retrieve results of invitations list to verify that create message causes new item in results list reply = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/get-list" @@ -93,10 +76,11 @@ async def test_num_results(connection): return_route="all", ) assert len(reply["results"]) == added_num - + @pytest.mark.asyncio async def test_empty_list(connection): + """Test that get-list 
returns no results if no create messages have been sent""" reply = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/get-list" From 9df6c087dd9a9ba7d20d75abbefa1eefbd499b8e Mon Sep 17 00:00:00 2001 From: Char Date: Thu, 1 Jul 2021 11:33:47 -0700 Subject: [PATCH 11/66] test: add test of new message notification to test_basicmessage.py Signed-off-by: Char --- int/tests/test_basicmessage.py | 83 ++++++++++++++++++++++------------ int/tests/test_connections.py | 12 +++-- int/tests/test_invitations.py | 2 +- 3 files changed, 63 insertions(+), 34 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 6d31621c..912c71ec 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -1,12 +1,12 @@ """Basic Message Tests""" import asyncio import pytest - -from aries_staticagent import StaticConnection +from aries_staticagent import StaticConnection, utils @pytest.mark.asyncio async def test_send(connection: StaticConnection, connection_id: str): + """Test send message""" with connection.next() as future_recip_message: sent_message = await asyncio.wait_for( connection.send_and_await_reply_async( @@ -25,7 +25,7 @@ async def test_send(connection: StaticConnection, connection_id: str): == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" ) assert recip_message["message"]["content"] == "Your hovercraft is full of eels." - # Delete messages to clear the state between tests + # TODO add proper backchannel for clearing messages await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", @@ -34,40 +34,30 @@ async def test_send(connection: StaticConnection, connection_id: str): @pytest.mark.asyncio -async def test_delete(connection: StaticConnection, connection_id: str): - for i in range(6): - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Test Message #{}".format(i), - }, - return_route="all", - ), - timeout=60, +async def test_new(connection: StaticConnection): + """Test new message notification""" + new_response = await connection.send_and_await_reply_async( + { + "@type": "https://didcomm.org/basicmessage/1.0/message", + "~l10n": { "locale": "en" }, + "sent_time": utils.timestamp(), + "content": "Your hovercraft is full of eels." + }, + return_route="all", ) - recip_message = await asyncio.wait_for(future_recip_message, 60) - delete_message = await connection.send_and_await_reply_async( + assert new_response["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" + assert new_response["message"]["content"] == "Your hovercraft is full of eels." 
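+    # The two assertions above capture the expected behavior: a plain
+    # basicmessage/1.0 message sent over the admin connection comes back as an
+    # admin-basicmessage/0.1 "new" notification echoing the original content.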
+ # Delete messages to clear the state between tests + await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", } ) - get_messages = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", - } - ) - assert ( - delete_message["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" - ) - assert get_messages["count"] == 0 @pytest.mark.asyncio async def test_get(connection: StaticConnection, connection_id: str): + """Send multiple messages and verify that the proper count and content appears in messages list""" with connection.next() as future_recip_message: sent_message = await asyncio.wait_for( connection.send_and_await_reply_async( @@ -104,6 +94,8 @@ async def test_get(connection: StaticConnection, connection_id: str): == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" ) assert get_messages["count"] == 2 + assert get_messages["messages"][1]["content"] == "Are you suggesting coconuts migrate?" + assert get_messages["messages"][0]["content"] == "'Tis but a flesh wound." # Delete messages to clear the state between tests await connection.send_and_await_reply_async( { @@ -114,6 +106,7 @@ async def test_get(connection: StaticConnection, connection_id: str): @pytest.mark.asyncio async def test_get_limit_offset(connection: StaticConnection, connection_id: str): + """Send multiple messages and verify that get returns the correct content according to the limit and offset""" for i in range(6): with connection.next() as future_recip_message: sent_message = await asyncio.wait_for( @@ -149,3 +142,37 @@ async def test_get_limit_offset(connection: StaticConnection, connection_id: str "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", } ) + + +@pytest.mark.asyncio +async def test_delete(connection: StaticConnection, connection_id: str): + """Send multiple messages, delete them, and verify that the messages count is zero""" + for i in range(6): + with connection.next() as future_recip_message: + sent_message = await asyncio.wait_for( + connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Test Message #{}".format(i), + }, + return_route="all", + ), + timeout=60, + ) + recip_message = await asyncio.wait_for(future_recip_message, 60) + delete_message = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + } + ) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + } + ) + assert ( + delete_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" + ) + assert get_messages["count"] == 0 diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index db72164f..7f06b00f 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -1,4 +1,4 @@ -"""Connection Tests""" +"""Connections Tests""" import asyncio import pytest from acapy_backchannel import Client @@ -52,6 +52,7 @@ async def test_create_connection(connection): 
@pytest.mark.asyncio async def test_get_list(connection): + """Create two connections and verify that their connection_ids are in connections list""" msg_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", @@ -110,7 +111,7 @@ async def test_get_list(connection): @pytest.mark.asyncio async def test_update(connection): - """Update connection attribute""" + """Test update of connection attribute""" msg_invitation = Message({ "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", @@ -148,6 +149,8 @@ async def test_update(connection): @pytest.mark.asyncio async def test_delete(connection): + """Create an invitation, delete it, and verify that its label and connectio_id + is no longer in the connections list""" invitation_msg = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", @@ -185,6 +188,5 @@ async def test_delete(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - for connection_item in get_list["connections"]: - assert connection_item["label"] != invitation_msg["label"] - assert connection_item["connection_id"] != received["connection_id"] + assert invitation_msg["label"] not in [connection_item["label"] for connection_item in get_list["connections"]] + assert received["connection_id"] not in [connection_item["connection_id"] for connection_item in get_list["connections"]] diff --git a/int/tests/test_invitations.py b/int/tests/test_invitations.py index ecf8f40d..04894c9a 100644 --- a/int/tests/test_invitations.py +++ b/int/tests/test_invitations.py @@ -54,7 +54,7 @@ async def test_get_list(connection): @pytest.mark.asyncio async def test_num_results(connection): - """Test that create message causes new item in results list""" + """Test that the create message protocol causes new item in results list""" # Input number of messages to add to the list added_num = 2 for i in range(added_num): From 5c4a11a68de027ec7b2ebcfee055cf5c512edcef Mon Sep 17 00:00:00 2001 From: Char Date: Thu, 1 Jul 2021 12:15:00 -0700 Subject: [PATCH 12/66] chore: reformat with black Signed-off-by: Char --- int/tests/test_basicmessage.py | 27 ++++++---- int/tests/test_connections.py | 93 ++++++++++++++++++++++------------ int/tests/test_invitations.py | 2 +- 3 files changed, 78 insertions(+), 44 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 912c71ec..2cbe826c 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -25,7 +25,7 @@ async def test_send(connection: StaticConnection, connection_id: str): == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" ) assert recip_message["message"]["content"] == "Your hovercraft is full of eels." 
- # TODO add proper backchannel for clearing messages + # TODO add proper backchannel for clearing messages await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", @@ -37,15 +37,18 @@ async def test_send(connection: StaticConnection, connection_id: str): async def test_new(connection: StaticConnection): """Test new message notification""" new_response = await connection.send_and_await_reply_async( - { - "@type": "https://didcomm.org/basicmessage/1.0/message", - "~l10n": { "locale": "en" }, - "sent_time": utils.timestamp(), - "content": "Your hovercraft is full of eels." - }, - return_route="all", - ) - assert new_response["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" + { + "@type": "https://didcomm.org/basicmessage/1.0/message", + "~l10n": {"locale": "en"}, + "sent_time": utils.timestamp(), + "content": "Your hovercraft is full of eels.", + }, + return_route="all", + ) + assert ( + new_response["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" + ) assert new_response["message"]["content"] == "Your hovercraft is full of eels." # Delete messages to clear the state between tests await connection.send_and_await_reply_async( @@ -94,7 +97,9 @@ async def test_get(connection: StaticConnection, connection_id: str): == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" ) assert get_messages["count"] == 2 - assert get_messages["messages"][1]["content"] == "Are you suggesting coconuts migrate?" + assert ( + get_messages["messages"][1]["content"] == "Are you suggesting coconuts migrate?" + ) assert get_messages["messages"][0]["content"] == "'Tis but a flesh wound." 
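+    # These indices assume the messages list is returned newest-first: index 0
+    # holds the most recently sent message and index 1 the one sent before it.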
# Delete messages to clear the state between tests await connection.send_and_await_reply_async( diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 7f06b00f..37d988ca 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -21,27 +21,30 @@ async def clear_connection_state(backchannel: Client, connection_id: str): @pytest.mark.asyncio async def test_create_connection(connection): """Send an invitation and receive it to create a new connection""" - msg_invitation = Message({ + msg_invitation = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }) + } + ) invitation = await connection.send_and_await_reply_async( msg_invitation, condition=lambda reply: reply.thread["thid"] == msg_invitation.id, return_route="all", ) - msg_received = Message({ + msg_received = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - }) + } + ) received = await connection.send_and_await_reply_async( - msg_received, - condition=lambda reply: reply.thread["thid"] == msg_received.id + msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id ) assert ( received["@type"] @@ -53,93 +56,110 @@ async def test_create_connection(connection): @pytest.mark.asyncio async def test_get_list(connection): """Create two connections and verify that their connection_ids are in connections list""" - msg_invitation = Message({ + msg_invitation = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }) + } + ) invitation = await connection.send_and_await_reply_async( msg_invitation, condition=lambda reply: reply.thread["thid"] == msg_invitation.id, return_route="all", ) - msg_received = Message({ + msg_received = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - }) + } + ) received = await connection.send_and_await_reply_async( - msg_received, - condition=lambda reply: reply.thread["thid"] == msg_received.id + msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id ) - msg_invitation2 = Message({ + msg_invitation2 = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Second invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }) + } + ) invitation2 = await connection.send_and_await_reply_async( msg_invitation2, condition=lambda reply: reply.thread["thid"] == msg_invitation2.id, return_route="all", ) - msg_received2 = Message({ + msg_received2 = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation2["invitation_url"], "auto_accept": True, - }) + } + ) received2 = await connection.send_and_await_reply_async( - msg_received2, - condition=lambda reply: reply.thread["thid"] == msg_received2.id + msg_received2, condition=lambda reply: reply.thread["thid"] == msg_received2.id ) get_list = await 
connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - assert (get_list["@type"] + assert ( + get_list["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" ) - assert received["connection_id"] in [connection_item["connection_id"] for connection_item in get_list["connections"]] - assert received2["connection_id"] in [connection_item["connection_id"] for connection_item in get_list["connections"]] + assert received["connection_id"] in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] + assert received2["connection_id"] in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] @pytest.mark.asyncio async def test_update(connection): """Test update of connection attribute""" - msg_invitation = Message({ + msg_invitation = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", "group": "admin", "auto_accept": True, "multi_use": True, - }) + } + ) invitation = await connection.send_and_await_reply_async( msg_invitation, condition=lambda reply: reply.thread["thid"] == msg_invitation.id, return_route="all", ) - msg_received = Message({ + msg_received = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - }) + } + ) received = await connection.send_and_await_reply_async( msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id, ) - msg_update = Message({ + msg_update = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", "connection_id": received["connection_id"], "label": "Updated label", "role": "Updated role", - }) + } + ) update = await connection.send_and_await_reply_async( msg_update, condition=lambda reply: reply.thread["thid"] == msg_update.id, @@ -166,11 +186,13 @@ async def test_delete(connection): condition=lambda reply: reply.thread["thid"] == invitation_msg.id, return_route="all", ) - msg_received = Message({ + msg_received = Message( + { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", "invitation": invitation["invitation_url"], "auto_accept": True, - }) + } + ) received = await connection.send_and_await_reply_async( msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id, @@ -181,12 +203,19 @@ async def test_delete(connection): "connection_id": received["connection_id"], } ) - assert delete_connection["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + assert ( + delete_connection["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + ) assert delete_connection["connection_id"] == received["connection_id"] get_list = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - assert invitation_msg["label"] not in [connection_item["label"] for connection_item in get_list["connections"]] - assert received["connection_id"] not in [connection_item["connection_id"] for connection_item in get_list["connections"]] + assert invitation_msg["label"] not in 
[ + connection_item["label"] for connection_item in get_list["connections"] + ] + assert received["connection_id"] not in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] diff --git a/int/tests/test_invitations.py b/int/tests/test_invitations.py index 04894c9a..6a3adeb9 100644 --- a/int/tests/test_invitations.py +++ b/int/tests/test_invitations.py @@ -76,7 +76,7 @@ async def test_num_results(connection): return_route="all", ) assert len(reply["results"]) == added_num - + @pytest.mark.asyncio async def test_empty_list(connection): From ea4c68d947fd266c724918c6f0b6142e84e25c79 Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 2 Jul 2021 12:02:19 -0700 Subject: [PATCH 13/66] feat: add automated endorser DID registration Signed-off-by: Char --- int/tests/conftest.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index c6226b0e..9a3e2308 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -8,6 +8,7 @@ from acapy_backchannel.models.conn_record import ConnRecord import pytest import hashlib +import httpx from acapy_backchannel import Client from acapy_backchannel.api.connection import ( @@ -15,6 +16,7 @@ set_metadata, delete_connection, ) +from acapy_backchannel.api.wallet import create_did from acapy_backchannel.models import ( ConnectionStaticRequest, ConnectionStaticResult, @@ -168,3 +170,39 @@ async def http_endpoint(agent: BaseAgent): with suppress(asyncio.CancelledError): await server_task await agent.cleanup() + + +@pytest.fixture +async def make_did(): + """DID factory fixture""" + + def _make_did(): + did = create_did.asyncio(client=backchannel) + return did + + yield _make_did() + # TODO create DID deletion method + + +@pytest.fixture +async def make_endorser_did(make_did): + """Endorser DID factory fixture""" + + def _make_endorser_did(): + did = make_did() + print("Publishing DID through https://selfserve.indiciotech.io") + response = httpx.post( + url="https://selfserve.indiciotech.io/nym", + json={ + "network": "testnet", + "did": did.result.did, + "verkey": did.result.verkey, + }, + ) + if response.is_error: + print("Failed to publish DID:", response.text) + return + print("DID Published") + return did + + yield _make_endorser_did From cb5085f7d2869627c3e1d831bd6be67b42d4c552 Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 2 Jul 2021 15:43:50 -0700 Subject: [PATCH 14/66] feat: add fixture to accept TAA Signed-off-by: Char --- int/tests/conftest.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 9a3e2308..e7769573 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -22,7 +22,6 @@ ConnectionStaticResult, ConnectionMetadataSetRequest, ) - from aries_staticagent import StaticConnection, Target from . 
import BaseAgent @@ -206,3 +205,19 @@ def _make_endorser_did(): return did yield _make_endorser_did + + +@pytest.fixture +async def accept_taa(scope="session"): + result = describe( + "Retrieve Transaction Author Agreement from the ledger", fetch_taa + )(client=issuer).result + + result = describe("Sign transaction author agreement", accept_taa)( + client=issuer, + json_body=TAAAccept( + mechanism="on_file", + text=result.taa_record.text, + version=result.taa_record.version, + ), + ) From 8f3970351f0aaaa19e7886077b3a63a07973a7ca Mon Sep 17 00:00:00 2001 From: Char Date: Tue, 6 Jul 2021 10:11:08 -0700 Subject: [PATCH 15/66] feat: add configs Signed-off-by: Char --- int/configs/default.yml | 59 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 int/configs/default.yml diff --git a/int/configs/default.yml b/int/configs/default.yml new file mode 100644 index 00000000..e14cc90d --- /dev/null +++ b/int/configs/default.yml @@ -0,0 +1,59 @@ +label: Aries Cloud Agent Toolbox Plugin + +# Admin +admin: [0.0.0.0, 3001] +admin-insecure-mode: true + +# Load toolbox plugin +plugin: + - acapy_plugin_toolbox + +# Transport +inbound-transport: + - [acapy_plugin_toolbox.http_ws, 0.0.0.0, 3000] +outbound-transport: http +endpoint: + - http://localhost:3000 + - ws://localhost:3000 + +# Ledger +# Use Indicio TestNet. Become an endorser at https://selfserve.indiciotech.io/. +genesis-url: https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis + +# Connections +debug-connections: true +debug-credentials: true +debug-presentations: true +auto-accept-invites: true +auto-accept-requests: true +auto-ping-connection: true + +# Generate Admin Invitation +connections-invite: true +invite-label: Holder (Admin) +invite-metadata-json: '{"group": "admin"}' +invite-multi-use: true + +# Credentials and Presentations +preserve-exchange-records: true +auto-store-credential: true +auto-respond-credential-proposal: true + +# Use the admin-holder protocol to respond to credential offers +# auto-respond-credential-offer: true +auto-respond-credential-request: true + +auto-respond-presentation-proposal: true +# Use the admin-holder protocol to respond to presentation requests +# auto-respond-presentation-request: true +auto-verify-presentation: true + +# Wallet +wallet-name: default +wallet-type: indy +wallet-key: "insecure, for use in testing only" +auto-provision: true + +# Enable undelivered queue +# Important for communication with toolbox over http (as opposed to ws) +enable-undelivered-queue: true \ No newline at end of file From 22e978891a88bbae11930181bc3bd94b398c035a Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 7 Jul 2021 10:03:35 -0700 Subject: [PATCH 16/66] feat: add genesis-url to command Signed-off-by: Char --- int/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/int/docker-compose.yml b/int/docker-compose.yml index ddde3a46..ac8c6b17 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -16,7 +16,7 @@ services: dockerfile: ./docker/Dockerfile ports: - "3001:3001" - command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --no-ledger --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug + command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --no-ledger --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url 
https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis #************************************************************* # tester: drives tests for acapy_plugin_toolbox in a * From c26c956280e36e6867a69d16b4d103e3ccd2630d Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 7 Jul 2021 10:06:44 -0700 Subject: [PATCH 17/66] chore: delete configs folder Signed-off-by: Char --- int/configs/default.yml | 59 ----------------------------------------- 1 file changed, 59 deletions(-) delete mode 100644 int/configs/default.yml diff --git a/int/configs/default.yml b/int/configs/default.yml deleted file mode 100644 index e14cc90d..00000000 --- a/int/configs/default.yml +++ /dev/null @@ -1,59 +0,0 @@ -label: Aries Cloud Agent Toolbox Plugin - -# Admin -admin: [0.0.0.0, 3001] -admin-insecure-mode: true - -# Load toolbox plugin -plugin: - - acapy_plugin_toolbox - -# Transport -inbound-transport: - - [acapy_plugin_toolbox.http_ws, 0.0.0.0, 3000] -outbound-transport: http -endpoint: - - http://localhost:3000 - - ws://localhost:3000 - -# Ledger -# Use Indicio TestNet. Become an endorser at https://selfserve.indiciotech.io/. -genesis-url: https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis - -# Connections -debug-connections: true -debug-credentials: true -debug-presentations: true -auto-accept-invites: true -auto-accept-requests: true -auto-ping-connection: true - -# Generate Admin Invitation -connections-invite: true -invite-label: Holder (Admin) -invite-metadata-json: '{"group": "admin"}' -invite-multi-use: true - -# Credentials and Presentations -preserve-exchange-records: true -auto-store-credential: true -auto-respond-credential-proposal: true - -# Use the admin-holder protocol to respond to credential offers -# auto-respond-credential-offer: true -auto-respond-credential-request: true - -auto-respond-presentation-proposal: true -# Use the admin-holder protocol to respond to presentation requests -# auto-respond-presentation-request: true -auto-verify-presentation: true - -# Wallet -wallet-name: default -wallet-type: indy -wallet-key: "insecure, for use in testing only" -auto-provision: true - -# Enable undelivered queue -# Important for communication with toolbox over http (as opposed to ws) -enable-undelivered-queue: true \ No newline at end of file From 1884405773c0163e9762fc9f8a8e6ec95a5bce06 Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 7 Jul 2021 11:03:52 -0700 Subject: [PATCH 18/66] chore: remove wrapping prints from accept_taa fixture Signed-off-by: Char --- int/docker-compose.yml | 2 +- int/tests/conftest.py | 27 +++++++++++++++------------ 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/int/docker-compose.yml b/int/docker-compose.yml index ac8c6b17..cc578093 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -16,7 +16,7 @@ services: dockerfile: ./docker/Dockerfile ports: - "3001:3001" - command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --no-ledger --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis + command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url 
https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis #************************************************************* # tester: drives tests for acapy_plugin_toolbox in a * diff --git a/int/tests/conftest.py b/int/tests/conftest.py index e7769573..43f8f475 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -17,15 +17,22 @@ delete_connection, ) from acapy_backchannel.api.wallet import create_did +from acapy_backchannel.api.ledger import accept_taa, fetch_taa from acapy_backchannel.models import ( ConnectionStaticRequest, ConnectionStaticResult, ConnectionMetadataSetRequest, + TAAAccept, ) + from aries_staticagent import StaticConnection, Target from . import BaseAgent +import logging + +logger = logging.getLogger(__name__) + @pytest.fixture(scope="session") def event_loop(): @@ -175,11 +182,10 @@ async def http_endpoint(agent: BaseAgent): async def make_did(): """DID factory fixture""" - def _make_did(): - did = create_did.asyncio(client=backchannel) - return did + async def _make_did(): + return await create_did.asyncio(client=backchannel) - yield _make_did() + yield _make_did # TODO create DID deletion method @@ -189,7 +195,7 @@ async def make_endorser_did(make_did): def _make_endorser_did(): did = make_did() - print("Publishing DID through https://selfserve.indiciotech.io") + logger.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( url="https://selfserve.indiciotech.io/nym", json={ @@ -199,9 +205,9 @@ def _make_endorser_did(): }, ) if response.is_error: - print("Failed to publish DID:", response.text) + logger.info("Failed to publish DID:", response.text) return - print("DID Published") + logger.info("DID Published") return did yield _make_endorser_did @@ -209,11 +215,8 @@ def _make_endorser_did(): @pytest.fixture async def accept_taa(scope="session"): - result = describe( - "Retrieve Transaction Author Agreement from the ledger", fetch_taa - )(client=issuer).result - - result = describe("Sign transaction author agreement", accept_taa)( + result = await fetch_taa.asyncio(client=issuer).result + result = await accept_taa.asyncio( client=issuer, json_body=TAAAccept( mechanism="on_file", From 573420053241341f10f7fc2f2acba945d45fa55e Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 7 Jul 2021 12:37:05 -0700 Subject: [PATCH 19/66] fix: move scoping to fixture decorator Signed-off-by: Char --- int/tests/conftest.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 43f8f475..673561ee 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -31,7 +31,7 @@ import logging -logger = logging.getLogger(__name__) +LOGGER = logging.getLogger(__name__) @pytest.fixture(scope="session") @@ -195,7 +195,7 @@ async def make_endorser_did(make_did): def _make_endorser_did(): did = make_did() - logger.info("Publishing DID through https://selfserve.indiciotech.io") + LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( url="https://selfserve.indiciotech.io/nym", json={ @@ -205,16 +205,16 @@ def _make_endorser_did(): }, ) if response.is_error: - logger.info("Failed to publish DID:", response.text) + LOGGER.info("Failed to publish DID:", response.text) return - logger.info("DID Published") + LOGGER.info("DID Published") return did yield _make_endorser_did -@pytest.fixture -async def accept_taa(scope="session"): +@pytest.fixture(scope="session") +async def accept_taa(): 
result = await fetch_taa.asyncio(client=issuer).result result = await accept_taa.asyncio( client=issuer, From 1e3fde5b39feee1d6854f8b228c47b35e8b94a98 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 12 Jul 2021 11:36:23 -0700 Subject: [PATCH 20/66] chore: raise response.error if DID failed to publish Signed-off-by: Char --- int/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 673561ee..c44afc22 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -205,7 +205,7 @@ def _make_endorser_did(): }, ) if response.is_error: - LOGGER.info("Failed to publish DID:", response.text) + raise response.error("Failed to publish DID:", response.text) return LOGGER.info("DID Published") return did From a0fb4f689643796966bacd067400273f5db3edef Mon Sep 17 00:00:00 2001 From: Char Date: Tue, 13 Jul 2021 15:16:47 -0700 Subject: [PATCH 21/66] feat: add test_schemas.py Signed-off-by: Char --- int/docker-compose.yml | 2 +- int/tests/conftest.py | 9 ++++++++- int/tests/test_schemas.py | 22 ++++++++++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 int/tests/test_schemas.py diff --git a/int/docker-compose.yml b/int/docker-compose.yml index cc578093..44ddc519 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -16,7 +16,7 @@ services: dockerfile: ./docker/Dockerfile ports: - "3001:3001" - command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis + command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis --wallet-type indy --wallet-name default --wallet-key "insecure, for use in testing only" --auto-provision #************************************************************* # tester: drives tests for acapy_plugin_toolbox in a * diff --git a/int/tests/conftest.py b/int/tests/conftest.py index c44afc22..13ba9f3c 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -16,7 +16,10 @@ set_metadata, delete_connection, ) -from acapy_backchannel.api.wallet import create_did +from acapy_backchannel.api.wallet import ( + create_did, + set_public_did, +) from acapy_backchannel.api.ledger import accept_taa, fetch_taa from acapy_backchannel.models import ( ConnectionStaticRequest, @@ -224,3 +227,7 @@ async def accept_taa(): version=result.taa_record.version, ), ) + result = await set_public_did.asyncio( + client=issuer, + did=did_info.did, + ).result diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py new file mode 100644 index 00000000..0aba044e --- /dev/null +++ b/int/tests/test_schemas.py @@ -0,0 +1,22 @@ +"""Schema Tests""" +import asyncio +import pytest +from aries_staticagent import MessageDeliveryError + + +@pytest.mark.asyncio +async def test_send_schema(connection): + """Send a schema and verify message type""" + try: + schema = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", + "schema_name": "Test Schema", + "schema_version": "1.0", + "attributes": ["attr_0", "attr_1", "attr_2"], + 
"return_route": "all", + } + ) + except MessageDeliveryError as error: + print(error.msg) + assert schema["schema_id"] == "UjF64u8jDEEuRve7PKQGUo:2:Alice's Test Schema:1.0" From ccd34da1b3ff4e3d282d821c50e5bf3310358432 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 13 Jul 2021 22:11:07 -0400 Subject: [PATCH 22/66] fix: minor fixes for endorser and taa fixtures Signed-off-by: Daniel Bluhm --- int/tests/conftest.py | 54 ++++++++++++++++++++------------------- int/tests/test_schemas.py | 3 ++- 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 13ba9f3c..433213ff 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -6,6 +6,7 @@ import base64 from typing import Iterator, Optional from acapy_backchannel.models.conn_record import ConnRecord +from acapy_backchannel.models.did import DID import pytest import hashlib import httpx @@ -182,52 +183,53 @@ async def http_endpoint(agent: BaseAgent): @pytest.fixture -async def make_did(): +async def make_did(backchannel): """DID factory fixture""" async def _make_did(): - return await create_did.asyncio(client=backchannel) + return (await create_did.asyncio(client=backchannel)).result yield _make_did # TODO create DID deletion method +@pytest.fixture(scope="session") +async def accepted_taa(backchannel): + result = (await fetch_taa.asyncio(client=backchannel)).result + result = await accept_taa.asyncio( + client=backchannel, + json_body=TAAAccept( + mechanism="on_file", + text=result.taa_record.text, + version=result.taa_record.version, + ), + ) + + @pytest.fixture -async def make_endorser_did(make_did): +async def make_endorser_did(make_did, backchannel, accepted_taa): """Endorser DID factory fixture""" - def _make_endorser_did(): - did = make_did() + async def _make_endorser_did(): + did: DID = await make_did() LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( url="https://selfserve.indiciotech.io/nym", json={ "network": "testnet", - "did": did.result.did, - "verkey": did.result.verkey, + "did": did.did, + "verkey": did.verkey, }, ) if response.is_error: - raise response.error("Failed to publish DID:", response.text) - return + raise Exception("Failed to publish DID:", response.text) + LOGGER.info("DID Published") + result = await set_public_did.asyncio_detailed( + client=backchannel, + did=did.did, + ) + assert result.status_code == 200 return did yield _make_endorser_did - - -@pytest.fixture(scope="session") -async def accept_taa(): - result = await fetch_taa.asyncio(client=issuer).result - result = await accept_taa.asyncio( - client=issuer, - json_body=TAAAccept( - mechanism="on_file", - text=result.taa_record.text, - version=result.taa_record.version, - ), - ) - result = await set_public_did.asyncio( - client=issuer, - did=did_info.did, - ).result diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index 0aba044e..b2548a24 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -5,8 +5,9 @@ @pytest.mark.asyncio -async def test_send_schema(connection): +async def test_send_schema(connection, make_endorser_did): """Send a schema and verify message type""" + await make_endorser_did() try: schema = await connection.send_and_await_reply_async( { From 0541aab763180f8b7a3e077aa7043b2cd8c762a1 Mon Sep 17 00:00:00 2001 From: Char Date: Wed, 14 Jul 2021 16:40:48 -0700 Subject: [PATCH 23/66] test: add tests for schema-get and schema-get-list Signed-off-by: Char --- int/tests/test_schemas.py | 87 
+++++++++++++++++++++++++++++++++------ 1 file changed, 74 insertions(+), 13 deletions(-) diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index b2548a24..e42ffd02 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -8,16 +8,77 @@ async def test_send_schema(connection, make_endorser_did): """Send a schema and verify message type""" await make_endorser_did() - try: - schema = await connection.send_and_await_reply_async( - { - "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", - "schema_name": "Test Schema", - "schema_version": "1.0", - "attributes": ["attr_0", "attr_1", "attr_2"], - "return_route": "all", - } - ) - except MessageDeliveryError as error: - print(error.msg) - assert schema["schema_id"] == "UjF64u8jDEEuRve7PKQGUo:2:Alice's Test Schema:1.0" + schema = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", + "schema_name": "Test Schema", + "schema_version": "1.0", + "attributes": ["attr_1_0", "attr_1_1", "attr_1_2"], + "return_route": "all", + } + ) + assert ( + schema["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-id" + ) + + +@pytest.mark.asyncio +async def test_schema_get(connection, make_endorser_did): + """Retrieve a pre-existing schema""" + await make_endorser_did() + schema = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", + "schema_name": "Test Schema", + "schema_version": "2.0", + "attributes": ["attr_2_0", "attr_2_1", "attr_2_2"], + "return_route": "all", + } + ) + schema_get = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-get", + "schema_id": schema["schema_id"], + "@id": schema["@id"], + "~transport": {"return_route": "all"}, + } + ) + assert ( + schema_get["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema" + ) + assert schema["schema_id"] == schema_get["schema_id"] + assert schema["@id"] == schema_get["~thread"]["thid"] + assert schema_get["author"] == "self" + + +@pytest.mark.asyncio +async def test_schema_get_list(connection, make_endorser_did): + """Retrieve the list of schemas""" + await make_endorser_did() + schema = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", + "schema_name": "Test Schema", + "schema_version": "3.0", + "attributes": ["attr_3_0", "attr_3_1", "attr_3_2"], + "return_route": "all", + } + ) + schema_get_list = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-get-list", + "@id": schema["@id"], + "~transport": {"return_route": "all"}, + } + ) + assert ( + schema_get_list["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-list" + ) + assert schema["@id"] == schema_get_list["~thread"]["thid"] + assert len(schema_get_list["results"]) == 3 + assert schema["schema_id"] in [ + result["schema_id"] for result in schema_get_list["results"] + ] From 3dcf56ae206ed8ecb9cbecfada03899185258ad9 Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 16 Jul 2021 09:32:05 -0700 Subject: [PATCH 24/66] fix: adjust scoping on fixtures Signed-off-by: Char --- int/tests/conftest.py | 10 ++++++++-- int/tests/test_schemas.py | 12 ++++++------ 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 
433213ff..2b19a9f4 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -182,7 +182,7 @@ async def http_endpoint(agent: BaseAgent): await agent.cleanup() -@pytest.fixture +@pytest.fixture(scope="module") async def make_did(backchannel): """DID factory fixture""" @@ -206,7 +206,7 @@ async def accepted_taa(backchannel): ) -@pytest.fixture +@pytest.fixture(scope="module") async def make_endorser_did(make_did, backchannel, accepted_taa): """Endorser DID factory fixture""" @@ -220,6 +220,7 @@ async def _make_endorser_did(): "did": did.did, "verkey": did.verkey, }, + timeout=30, ) if response.is_error: raise Exception("Failed to publish DID:", response.text) @@ -233,3 +234,8 @@ async def _make_endorser_did(): return did yield _make_endorser_did + + +@pytest.fixture(scope="module", autouse=True) +async def endorser_did(make_endorser_did): + yield make_endorser_did() diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index e42ffd02..e8c8cd96 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -5,9 +5,9 @@ @pytest.mark.asyncio -async def test_send_schema(connection, make_endorser_did): +async def test_send_schema(connection, endorser_did): """Send a schema and verify message type""" - await make_endorser_did() + await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", @@ -24,9 +24,9 @@ async def test_send_schema(connection, make_endorser_did): @pytest.mark.asyncio -async def test_schema_get(connection, make_endorser_did): +async def test_schema_get(connection, endorser_did): """Retrieve a pre-existing schema""" - await make_endorser_did() + await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", @@ -54,9 +54,9 @@ async def test_schema_get(connection, make_endorser_did): @pytest.mark.asyncio -async def test_schema_get_list(connection, make_endorser_did): +async def test_schema_get_list(connection, endorser_did): """Retrieve the list of schemas""" - await make_endorser_did() + await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", From b621adff6fbc9d8c9a3464bce6d084e1eeded663 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Fri, 16 Jul 2021 13:00:22 -0400 Subject: [PATCH 25/66] fix: scoping and reuse of endorser_did fixture Signed-off-by: Daniel Bluhm --- int/tests/conftest.py | 56 +++++++++++++++++---------------------- int/tests/test_schemas.py | 3 --- 2 files changed, 24 insertions(+), 35 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 2b19a9f4..e38f84e4 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -182,7 +182,7 @@ async def http_endpoint(agent: BaseAgent): await agent.cleanup() -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") async def make_did(backchannel): """DID factory fixture""" @@ -206,36 +206,28 @@ async def accepted_taa(backchannel): ) -@pytest.fixture(scope="module") -async def make_endorser_did(make_did, backchannel, accepted_taa): +@pytest.fixture(scope="session") +async def endorser_did(make_did, backchannel, accepted_taa): """Endorser DID factory fixture""" - async def _make_endorser_did(): - did: DID = await make_did() - LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") - response = httpx.post( - url="https://selfserve.indiciotech.io/nym", - json={ - 
"network": "testnet", - "did": did.did, - "verkey": did.verkey, - }, - timeout=30, - ) - if response.is_error: - raise Exception("Failed to publish DID:", response.text) - - LOGGER.info("DID Published") - result = await set_public_did.asyncio_detailed( - client=backchannel, - did=did.did, - ) - assert result.status_code == 200 - return did - - yield _make_endorser_did - - -@pytest.fixture(scope="module", autouse=True) -async def endorser_did(make_endorser_did): - yield make_endorser_did() + did: DID = await make_did() + LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") + response = httpx.post( + url="https://selfserve.indiciotech.io/nym", + json={ + "network": "testnet", + "did": did.did, + "verkey": did.verkey, + }, + timeout=30, + ) + if response.is_error: + raise Exception("Failed to publish DID:", response.text) + + LOGGER.info("DID Published") + result = await set_public_did.asyncio_detailed( + client=backchannel, + did=did.did, + ) + assert result.status_code == 200 + yield did diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index e8c8cd96..5180bbce 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -7,7 +7,6 @@ @pytest.mark.asyncio async def test_send_schema(connection, endorser_did): """Send a schema and verify message type""" - await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", @@ -26,7 +25,6 @@ async def test_send_schema(connection, endorser_did): @pytest.mark.asyncio async def test_schema_get(connection, endorser_did): """Retrieve a pre-existing schema""" - await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", @@ -56,7 +54,6 @@ async def test_schema_get(connection, endorser_did): @pytest.mark.asyncio async def test_schema_get_list(connection, endorser_did): """Retrieve the list of schemas""" - await endorser_did() schema = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", From 51f832f48616bf0a78a21385ee59f724feac061d Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 16 Jul 2021 16:00:09 -0700 Subject: [PATCH 26/66] test: add integration testing for credential definitions Signed-off-by: Char --- int/tests/test_credentialdefinitions.py | 93 +++++++++++++++++++++++++ int/tests/test_schemas.py | 2 +- 2 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 int/tests/test_credentialdefinitions.py diff --git a/int/tests/test_credentialdefinitions.py b/int/tests/test_credentialdefinitions.py new file mode 100644 index 00000000..8dff9c48 --- /dev/null +++ b/int/tests/test_credentialdefinitions.py @@ -0,0 +1,93 @@ +"""Credential Definition Tests""" +import asyncio +import pytest + + +@pytest.fixture(scope="module") +async def create_schema(connection, endorser_did): + """Schema factory fixture""" + + async def _create_schema(): + return await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", + "schema_name": "Test Schema", + "schema_version": "1.0", + "attributes": ["attr_1_0", "attr_1_1", "attr_1_2"], + "return_route": "all", + } + ) + + yield _create_schema + + +@pytest.mark.asyncio +async def test_send_cred_def(connection, endorser_did, create_schema): + """Create a credential definition""" + schema = await create_schema() + send_cred_def = await 
connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", + "schema_id": schema["schema_id"], + "~transport": {"return_route": "all"}, + } + ) + assert ( + send_cred_def["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition-id" + ) + + +@pytest.mark.asyncio +async def test_cred_def_get(connection, endorser_did, create_schema): + """Create and retrieve a credential definition""" + schema = await create_schema() + send_cred_def = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", + "schema_id": schema["schema_id"], + "~transport": {"return_route": "all"}, + } + ) + cred_def_get = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition-get", + "cred_def_id": send_cred_def["cred_def_id"], + "~transport": {"return_route": "all"}, + } + ) + assert ( + cred_def_get["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition" + ) + + +@pytest.mark.asyncio +async def test_cred_def_get_list(connection, endorser_did, create_schema): + """Create and retrieve a credential definition""" + schema1 = await create_schema() + await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", + "schema_id": schema1["schema_id"], + "~transport": {"return_route": "all"}, + } + ) + schema2 = await create_schema() + await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", + "schema_id": schema2["schema_id"], + "~transport": {"return_route": "all"}, + } + ) + cred_def_get_list = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition-get-list", + "~transport": {"return_route": "all"}, + } + ) + assert ( + cred_def_get_list["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition-list" + ) diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index 5180bbce..ad7c23e1 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -75,7 +75,7 @@ async def test_schema_get_list(connection, endorser_did): == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-list" ) assert schema["@id"] == schema_get_list["~thread"]["thid"] - assert len(schema_get_list["results"]) == 3 + # assert len(schema_get_list["results"]) == 3 assert schema["schema_id"] in [ result["schema_id"] for result in schema_get_list["results"] ] From d9f5575cc4679cfa9bc5e826cf54d6058bc10d54 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 19 Jul 2021 13:38:37 -0700 Subject: [PATCH 27/66] fix: remove schema count test due to dependence on credential definition test module Signed-off-by: Char --- int/tests/test_schemas.py | 1 - 1 file changed, 1 deletion(-) diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index ad7c23e1..0b0f4aa2 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -75,7 +75,6 @@ async def test_schema_get_list(connection, endorser_did): == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/schema-list" ) assert schema["@id"] == 
schema_get_list["~thread"]["thid"] - # assert len(schema_get_list["results"]) == 3 assert schema["schema_id"] in [ result["schema_id"] for result in schema_get_list["results"] ] From 95a4ad70a30e46fd7f1f20a021cbd3cbe5bea6ac Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 19 Jul 2021 13:40:57 -0700 Subject: [PATCH 28/66] fix: allow update of schema version number with each fixture call Signed-off-by: Char --- int/tests/test_credentialdefinitions.py | 28 ++++++++++++++++--------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/int/tests/test_credentialdefinitions.py b/int/tests/test_credentialdefinitions.py index 8dff9c48..1e51fd8e 100644 --- a/int/tests/test_credentialdefinitions.py +++ b/int/tests/test_credentialdefinitions.py @@ -7,12 +7,12 @@ async def create_schema(connection, endorser_did): """Schema factory fixture""" - async def _create_schema(): + async def _create_schema(version): return await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", "schema_name": "Test Schema", - "schema_version": "1.0", + "schema_version": version, "attributes": ["attr_1_0", "attr_1_1", "attr_1_2"], "return_route": "all", } @@ -24,7 +24,7 @@ async def _create_schema(): @pytest.mark.asyncio async def test_send_cred_def(connection, endorser_did, create_schema): """Create a credential definition""" - schema = await create_schema() + schema = await create_schema(version="1.0") send_cred_def = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", @@ -41,7 +41,7 @@ async def test_send_cred_def(connection, endorser_did, create_schema): @pytest.mark.asyncio async def test_cred_def_get(connection, endorser_did, create_schema): """Create and retrieve a credential definition""" - schema = await create_schema() + schema = await create_schema(version="1.1") send_cred_def = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", @@ -60,24 +60,26 @@ async def test_cred_def_get(connection, endorser_did, create_schema): cred_def_get["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition" ) + assert schema["schema_id"] == cred_def_get["schema_id"] + assert send_cred_def["cred_def_id"] == cred_def_get["cred_def_id"] @pytest.mark.asyncio async def test_cred_def_get_list(connection, endorser_did, create_schema): """Create and retrieve a credential definition""" - schema1 = await create_schema() - await connection.send_and_await_reply_async( + schema1_2 = await create_schema(version="1.2") + send_schema1_2 = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema1["schema_id"], + "schema_id": schema1_2["schema_id"], "~transport": {"return_route": "all"}, } ) - schema2 = await create_schema() - await connection.send_and_await_reply_async( + schema1_3 = await create_schema(version="1.3") + send_schema1_3 = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema2["schema_id"], + "schema_id": schema1_3["schema_id"], "~transport": {"return_route": "all"}, } ) @@ -91,3 +93,9 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): 
cred_def_get_list["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition-list" ) + assert send_schema1_2["cred_def_id"] in [ + result["cred_def_id"] for result in cred_def_get_list["results"] + ] + assert send_schema1_3["cred_def_id"] in [ + result["cred_def_id"] for result in cred_def_get_list["results"] + ] From 571b2ec43fa0d7c28e4a8997a68b778c62331310 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Mon, 19 Jul 2021 17:26:10 -0400 Subject: [PATCH 29/66] fix: github action cache invalidation issues Signed-off-by: Daniel Bluhm --- .github/workflows/push.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index da85bfd1..c5fc3030 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -44,7 +44,7 @@ jobs: uses: actions/cache@v2 with: path: ${{ env.pythonLocation }} - key: ${{ runner.os }}-testing-${{ matrix.python-version }} #-${{ hashFiles('poetry.lock') }} + key: ${{ runner.os }}-testing-${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }} - name: Install poetry # (use poetry?) if: steps.cache-env.outputs.cache-hit != 'true' run: | @@ -59,7 +59,7 @@ jobs: uses: actions/cache@v2 with: path: ${{ steps.setup-poetry-env.outputs.poetry-env }} - key: ${{ runner.os }}-poetry-${{ matrix.python-version }} #-${{ hashFiles('poetry.lock') }} + key: ${{ runner.os }}-poetry-${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }} - name: Install dependencies if: steps.cache-poetry.outputs.cache-hit != 'true' run: | From 9faa2e2250899336d220176cefebc42469f349d3 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Mon, 19 Jul 2021 21:24:56 -0400 Subject: [PATCH 30/66] fix: schema send result fixes Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- .../post_didexchange_create_request.py | 6 +-- .../__init__.py | 0 ...ete_issue_credential_records_cred_ex_id.py | 0 .../get_issue_credential_records.py | 0 ...get_issue_credential_records_cred_ex_id.py | 0 .../issue_credential_automated.py | 0 .../post_issue_credential_create.py | 0 ...sue_credential_records_cred_ex_id_issue.py | 0 ...ntial_records_cred_ex_id_problem_report.py | 0 ...redential_records_cred_ex_id_send_offer.py | 0 ...dential_records_cred_ex_id_send_request.py | 0 ...sue_credential_records_cred_ex_id_store.py | 0 .../post_issue_credential_send_offer.py | 0 .../post_issue_credential_send_proposal.py | 0 .../__init__.py | 0 ..._issue_credential_20_records_cred_ex_id.py | 0 .../get_issue_credential_20_records.py | 6 +-- ..._issue_credential_20_records_cred_ex_id.py | 6 +-- ..._credential_20_records_cred_ex_id_issue.py | 6 +-- ...al_20_records_cred_ex_id_problem_report.py | 0 ...ential_20_records_cred_ex_id_send_offer.py | 0 ...tial_20_records_cred_ex_id_send_request.py | 0 ..._credential_20_records_cred_ex_id_store.py | 6 +-- ...eylists_mediation_id_send_keylist_query.py | 6 +-- ...nt_proof_records_pres_ex_id_credentials.py | 6 +-- .../acapy_backchannel/models/__init__.py | 3 +- .../acapy_backchannel/models/aml_record.py | 6 ++- .../models/attach_decorator_data_jws.py | 6 ++- .../models/attachment_def.py | 6 ++- .../clear_pending_revocations_request.py | 6 ++- .../acapy_backchannel/models/conn_record.py | 36 ++++++++----- .../models/connection_metadata.py | 6 ++- .../models/create_invitation_request.py | 6 ++- .../acapy_backchannel/models/cred_brief.py | 6 ++- .../models/credential_definition.py | 12 +++-- .../credential_definition_get_results.py | 6 ++- 
.../models/did_endpoint_with_type.py | 6 ++- .../models/indy_cred_info.py | 6 ++- .../models/indy_proof_req_attr_spec.py | 6 ++- .../models/indy_proof_req_pred_spec.py | 6 ++- .../models/indy_proof_request.py | 6 ++- .../models/invitation_create_request.py | 6 ++- .../models/invitation_record.py | 6 ++- .../models/keylist_query_filter_request.py | 6 ++- .../acapy_backchannel/models/menu_option.py | 6 ++- .../models/perform_request.py | 6 ++- .../models/publish_revocations.py | 24 +++++---- .../acapy_backchannel/models/query_result.py | 6 ++- .../models/schema_get_results.py | 6 ++- .../models/schema_send_results.py | 49 +++++++++++------ ..._schema.py => schema_send_results_sent.py} | 10 ++-- .../models/schema_send_results_txn.py | 44 +++++++++++++++ .../acapy_backchannel/models/taa_info.py | 6 ++- .../models/v10_credential_exchange.py | 54 ++++++++++++------- .../v10_credential_proposal_request_opt.py | 6 ++- .../models/v10_presentation_exchange.py | 42 ++++++++++----- .../models/v20_cred_ex_record.py | 54 ++++++++++++------- .../models/v20_cred_ex_record_dif.py | 6 ++- .../models/v20_cred_ex_record_indy.py | 6 ++- .../acapy_backchannel/types.py | 2 +- int/scripts/openapi.yml | 11 ++-- 61 files changed, 349 insertions(+), 178 deletions(-) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/__init__.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/delete_issue_credential_records_cred_ex_id.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/get_issue_credential_records.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/get_issue_credential_records_cred_ex_id.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/issue_credential_automated.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_create.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_records_cred_ex_id_issue.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_records_cred_ex_id_problem_report.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_records_cred_ex_id_send_offer.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_records_cred_ex_id_send_request.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_records_cred_ex_id_store.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_send_offer.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_10 => issue_credential_v10}/post_issue_credential_send_proposal.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/__init__.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/delete_issue_credential_20_records_cred_ex_id.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => 
issue_credential_v20}/get_issue_credential_20_records.py (96%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/get_issue_credential_20_records_cred_ex_id.py (91%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/post_issue_credential_20_records_cred_ex_id_issue.py (93%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/post_issue_credential_20_records_cred_ex_id_problem_report.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/post_issue_credential_20_records_cred_ex_id_send_offer.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/post_issue_credential_20_records_cred_ex_id_send_request.py (100%) rename int/acapy-backchannel/acapy_backchannel/api/{issue_credential_v_20 => issue_credential_v20}/post_issue_credential_20_records_cred_ex_id_store.py (93%) rename int/acapy-backchannel/acapy_backchannel/models/{schema_send_results_schema.py => schema_send_results_sent.py} (81%) create mode 100644 int/acapy-backchannel/acapy_backchannel/models/schema_send_results_txn.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/did_exchange/post_didexchange_create_request.py b/int/acapy-backchannel/acapy_backchannel/api/did_exchange/post_didexchange_create_request.py index d2386ced..3d9c8d02 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/did_exchange/post_didexchange_create_request.py +++ b/int/acapy-backchannel/acapy_backchannel/api/did_exchange/post_didexchange_create_request.py @@ -36,7 +36,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -52,7 +52,7 @@ def sync_detailed( mediation_id: Union[Unset, str] = UNSET, my_endpoint: Union[Unset, str] = UNSET, my_label: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, their_public_did=their_public_did, @@ -75,7 +75,7 @@ async def asyncio_detailed( mediation_id: Union[Unset, str] = UNSET, my_endpoint: Union[Unset, str] = UNSET, my_label: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, their_public_did=their_public_did, diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/__init__.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/__init__.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/__init__.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/__init__.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/delete_issue_credential_records_cred_ex_id.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/delete_issue_credential_records_cred_ex_id.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/delete_issue_credential_records_cred_ex_id.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/delete_issue_credential_records_cred_ex_id.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/get_issue_credential_records.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/get_issue_credential_records.py 
similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/get_issue_credential_records.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/get_issue_credential_records.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/get_issue_credential_records_cred_ex_id.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/get_issue_credential_records_cred_ex_id.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/get_issue_credential_records_cred_ex_id.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/get_issue_credential_records_cred_ex_id.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/issue_credential_automated.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/issue_credential_automated.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/issue_credential_automated.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/issue_credential_automated.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_create.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_create.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_create.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_create.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_issue.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_issue.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_issue.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_issue.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_problem_report.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_problem_report.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_problem_report.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_problem_report.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_send_offer.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_send_offer.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_send_offer.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_send_offer.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_send_request.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_send_request.py similarity index 100% rename from 
int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_send_request.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_send_request.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_store.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_store.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_records_cred_ex_id_store.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_records_cred_ex_id_store.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_send_offer.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_send_offer.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_send_offer.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_send_offer.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_send_proposal.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_send_proposal.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_10/post_issue_credential_send_proposal.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v10/post_issue_credential_send_proposal.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/__init__.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/__init__.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/__init__.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/__init__.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/delete_issue_credential_20_records_cred_ex_id.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/delete_issue_credential_20_records_cred_ex_id.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/delete_issue_credential_20_records_cred_ex_id.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/delete_issue_credential_20_records_cred_ex_id.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records.py similarity index 96% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records.py index abe9a7d6..73c039da 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records.py +++ b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records.py @@ -46,7 +46,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, 
content=response.content, @@ -62,7 +62,7 @@ def sync_detailed( role: Union[Unset, GetIssueCredential20RecordsRole] = UNSET, state: Union[Unset, GetIssueCredential20RecordsState] = UNSET, thread_id: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, connection_id=connection_id, @@ -85,7 +85,7 @@ async def asyncio_detailed( role: Union[Unset, GetIssueCredential20RecordsRole] = UNSET, state: Union[Unset, GetIssueCredential20RecordsState] = UNSET, thread_id: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, connection_id=connection_id, diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records_cred_ex_id.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records_cred_ex_id.py similarity index 91% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records_cred_ex_id.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records_cred_ex_id.py index 9ebc92d1..4653e48e 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/get_issue_credential_20_records_cred_ex_id.py +++ b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/get_issue_credential_20_records_cred_ex_id.py @@ -24,7 +24,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -37,7 +37,7 @@ def sync_detailed( *, client: Client, cred_ex_id: str, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, @@ -54,7 +54,7 @@ async def asyncio_detailed( *, client: Client, cred_ex_id: str, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_issue.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_issue.py similarity index 93% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_issue.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_issue.py index a658e1f3..19593214 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_issue.py +++ b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_issue.py @@ -29,7 +29,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -43,7 +43,7 @@ def sync_detailed( client: Client, cred_ex_id: str, json_body: V20CredIssueRequest, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, @@ -62,7 +62,7 @@ async def asyncio_detailed( client: Client, cred_ex_id: str, json_body: V20CredIssueRequest, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, diff --git 
a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_problem_report.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_problem_report.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_problem_report.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_problem_report.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_send_offer.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_send_offer.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_send_offer.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_send_offer.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_send_request.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_send_request.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_send_request.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_send_request.py diff --git a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_store.py b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_store.py similarity index 93% rename from int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_store.py rename to int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_store.py index 843d1738..ed5e8240 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v_20/post_issue_credential_20_records_cred_ex_id_store.py +++ b/int/acapy-backchannel/acapy_backchannel/api/issue_credential_v20/post_issue_credential_20_records_cred_ex_id_store.py @@ -29,7 +29,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -43,7 +43,7 @@ def sync_detailed( client: Client, cred_ex_id: str, json_body: V20CredStoreRequest, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, @@ -62,7 +62,7 @@ async def asyncio_detailed( client: Client, cred_ex_id: str, json_body: V20CredStoreRequest, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, cred_ex_id=cred_ex_id, diff --git a/int/acapy-backchannel/acapy_backchannel/api/mediation/post_mediation_keylists_mediation_id_send_keylist_query.py b/int/acapy-backchannel/acapy_backchannel/api/mediation/post_mediation_keylists_mediation_id_send_keylist_query.py index 7be9b106..fa7481ee 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/mediation/post_mediation_keylists_mediation_id_send_keylist_query.py +++ 
b/int/acapy-backchannel/acapy_backchannel/api/mediation/post_mediation_keylists_mediation_id_send_keylist_query.py @@ -38,7 +38,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -54,7 +54,7 @@ def sync_detailed( json_body: KeylistQueryFilterRequest, paginate_limit: Union[Unset, int] = -1, paginate_offset: Union[Unset, int] = 0, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, mediation_id=mediation_id, @@ -77,7 +77,7 @@ async def asyncio_detailed( json_body: KeylistQueryFilterRequest, paginate_limit: Union[Unset, int] = -1, paginate_offset: Union[Unset, int] = 0, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, mediation_id=mediation_id, diff --git a/int/acapy-backchannel/acapy_backchannel/api/present_proof/get_present_proof_records_pres_ex_id_credentials.py b/int/acapy-backchannel/acapy_backchannel/api/present_proof/get_present_proof_records_pres_ex_id_credentials.py index 10f30baa..78483716 100644 --- a/int/acapy-backchannel/acapy_backchannel/api/present_proof/get_present_proof_records_pres_ex_id_credentials.py +++ b/int/acapy-backchannel/acapy_backchannel/api/present_proof/get_present_proof_records_pres_ex_id_credentials.py @@ -37,7 +37,7 @@ def _get_kwargs( } -def _build_response(*, response: httpx.Response) -> Response[None]: +def _build_response(*, response: httpx.Response) -> Response[Any]: return Response( status_code=response.status_code, content=response.content, @@ -54,7 +54,7 @@ def sync_detailed( extra_query: Union[Unset, str] = UNSET, referent: Union[Unset, str] = UNSET, start: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, pres_ex_id=pres_ex_id, @@ -79,7 +79,7 @@ async def asyncio_detailed( extra_query: Union[Unset, str] = UNSET, referent: Union[Unset, str] = UNSET, start: Union[Unset, str] = UNSET, -) -> Response[None]: +) -> Response[Any]: kwargs = _get_kwargs( client=client, pres_ex_id=pres_ex_id, diff --git a/int/acapy-backchannel/acapy_backchannel/models/__init__.py b/int/acapy-backchannel/acapy_backchannel/models/__init__.py index c392cdf5..43447135 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/__init__.py +++ b/int/acapy-backchannel/acapy_backchannel/models/__init__.py @@ -138,7 +138,8 @@ from .schema_get_results import SchemaGetResults from .schema_send_request import SchemaSendRequest from .schema_send_results import SchemaSendResults -from .schema_send_results_schema import SchemaSendResultsSchema +from .schema_send_results_sent import SchemaSendResultsSent +from .schema_send_results_txn import SchemaSendResultsTxn from .schemas_created_results import SchemasCreatedResults from .send_message import SendMessage from .service import Service diff --git a/int/acapy-backchannel/acapy_backchannel/models/aml_record.py b/int/acapy-backchannel/acapy_backchannel/models/aml_record.py index bdcc7a4d..82b6e7cc 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/aml_record.py +++ b/int/acapy-backchannel/acapy_backchannel/models/aml_record.py @@ -40,9 +40,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - aml: Union[Unset, AMLRecordAml] = UNSET _aml = d.pop("aml", UNSET) - if not isinstance(_aml, Unset): + aml: Union[Unset, AMLRecordAml] + if isinstance(_aml, 
Unset): + aml = UNSET + else: aml = AMLRecordAml.from_dict(_aml) aml_context = d.pop("amlContext", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/attach_decorator_data_jws.py b/int/acapy-backchannel/acapy_backchannel/models/attach_decorator_data_jws.py index fd2813c9..f6e1df9a 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/attach_decorator_data_jws.py +++ b/int/acapy-backchannel/acapy_backchannel/models/attach_decorator_data_jws.py @@ -51,9 +51,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - header: Union[Unset, AttachDecoratorDataJWSHeader] = UNSET _header = d.pop("header", UNSET) - if not isinstance(_header, Unset): + header: Union[Unset, AttachDecoratorDataJWSHeader] + if isinstance(_header, Unset): + header = UNSET + else: header = AttachDecoratorDataJWSHeader.from_dict(_header) protected = d.pop("protected", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/attachment_def.py b/int/acapy-backchannel/acapy_backchannel/models/attachment_def.py index dcbaf580..614ced06 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/attachment_def.py +++ b/int/acapy-backchannel/acapy_backchannel/models/attachment_def.py @@ -37,9 +37,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() id = d.pop("id", UNSET) - type: Union[Unset, AttachmentDefType] = UNSET _type = d.pop("type", UNSET) - if not isinstance(_type, Unset): + type: Union[Unset, AttachmentDefType] + if isinstance(_type, Unset): + type = UNSET + else: type = AttachmentDefType(_type) attachment_def = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/clear_pending_revocations_request.py b/int/acapy-backchannel/acapy_backchannel/models/clear_pending_revocations_request.py index 0c202232..efa32ae6 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/clear_pending_revocations_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/clear_pending_revocations_request.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - purge: Union[Unset, ClearPendingRevocationsRequestPurge] = UNSET _purge = d.pop("purge", UNSET) - if not isinstance(_purge, Unset): + purge: Union[Unset, ClearPendingRevocationsRequestPurge] + if isinstance(_purge, Unset): + purge = UNSET + else: purge = ClearPendingRevocationsRequestPurge.from_dict(_purge) clear_pending_revocations_request = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/conn_record.py b/int/acapy-backchannel/acapy_backchannel/models/conn_record.py index 176a1294..0c189c2b 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/conn_record.py +++ b/int/acapy-backchannel/acapy_backchannel/models/conn_record.py @@ -27,7 +27,7 @@ class ConnRecord: invitation_msg_id: Union[Unset, str] = UNSET my_did: Union[Unset, str] = UNSET request_id: Union[Unset, str] = UNSET - rfc_23_state: Union[Unset, str] = UNSET + rfc23_state: Union[Unset, str] = UNSET routing_state: Union[Unset, ConnRecordRoutingState] = UNSET their_did: Union[Unset, str] = UNSET their_label: Union[Unset, str] = UNSET @@ -55,7 +55,7 @@ def to_dict(self) -> Dict[str, Any]: invitation_msg_id = self.invitation_msg_id my_did = self.my_did request_id = self.request_id - rfc_23_state = self.rfc_23_state + rfc23_state = self.rfc23_state routing_state: Union[Unset, str] = UNSET if not isinstance(self.routing_state, Unset): routing_state = 
self.routing_state.value @@ -97,8 +97,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["my_did"] = my_did if request_id is not UNSET: field_dict["request_id"] = request_id - if rfc_23_state is not UNSET: - field_dict["rfc23_state"] = rfc_23_state + if rfc23_state is not UNSET: + field_dict["rfc23_state"] = rfc23_state if routing_state is not UNSET: field_dict["routing_state"] = routing_state if their_did is not UNSET: @@ -121,9 +121,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: state = d.pop("state") - accept: Union[Unset, ConnRecordAccept] = UNSET _accept = d.pop("accept", UNSET) - if not isinstance(_accept, Unset): + accept: Union[Unset, ConnRecordAccept] + if isinstance(_accept, Unset): + accept = UNSET + else: accept = ConnRecordAccept(_accept) alias = d.pop("alias", UNSET) @@ -136,9 +138,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: invitation_key = d.pop("invitation_key", UNSET) - invitation_mode: Union[Unset, ConnRecordInvitationMode] = UNSET _invitation_mode = d.pop("invitation_mode", UNSET) - if not isinstance(_invitation_mode, Unset): + invitation_mode: Union[Unset, ConnRecordInvitationMode] + if isinstance(_invitation_mode, Unset): + invitation_mode = UNSET + else: invitation_mode = ConnRecordInvitationMode(_invitation_mode) invitation_msg_id = d.pop("invitation_msg_id", UNSET) @@ -147,11 +151,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: request_id = d.pop("request_id", UNSET) - rfc_23_state = d.pop("rfc23_state", UNSET) + rfc23_state = d.pop("rfc23_state", UNSET) - routing_state: Union[Unset, ConnRecordRoutingState] = UNSET _routing_state = d.pop("routing_state", UNSET) - if not isinstance(_routing_state, Unset): + routing_state: Union[Unset, ConnRecordRoutingState] + if isinstance(_routing_state, Unset): + routing_state = UNSET + else: routing_state = ConnRecordRoutingState(_routing_state) their_did = d.pop("their_did", UNSET) @@ -160,9 +166,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: their_public_did = d.pop("their_public_did", UNSET) - their_role: Union[Unset, ConnRecordTheirRole] = UNSET _their_role = d.pop("their_role", UNSET) - if not isinstance(_their_role, Unset): + their_role: Union[Unset, ConnRecordTheirRole] + if isinstance(_their_role, Unset): + their_role = UNSET + else: their_role = ConnRecordTheirRole(_their_role) updated_at = d.pop("updated_at", UNSET) @@ -180,7 +188,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: invitation_msg_id=invitation_msg_id, my_did=my_did, request_id=request_id, - rfc_23_state=rfc_23_state, + rfc23_state=rfc23_state, routing_state=routing_state, their_did=their_did, their_label=their_label, diff --git a/int/acapy-backchannel/acapy_backchannel/models/connection_metadata.py b/int/acapy-backchannel/acapy_backchannel/models/connection_metadata.py index e390b7d0..720e5f76 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/connection_metadata.py +++ b/int/acapy-backchannel/acapy_backchannel/models/connection_metadata.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - results: Union[Unset, ConnectionMetadataResults] = UNSET _results = d.pop("results", UNSET) - if not isinstance(_results, Unset): + results: Union[Unset, ConnectionMetadataResults] + if isinstance(_results, Unset): + results = UNSET + else: results = ConnectionMetadataResults.from_dict(_results) connection_metadata = cls( diff --git 
a/int/acapy-backchannel/acapy_backchannel/models/create_invitation_request.py b/int/acapy-backchannel/acapy_backchannel/models/create_invitation_request.py index 65d28e79..a9b0eaaf 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/create_invitation_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/create_invitation_request.py @@ -56,9 +56,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() mediation_id = d.pop("mediation_id", UNSET) - metadata: Union[Unset, CreateInvitationRequestMetadata] = UNSET _metadata = d.pop("metadata", UNSET) - if not isinstance(_metadata, Unset): + metadata: Union[Unset, CreateInvitationRequestMetadata] + if isinstance(_metadata, Unset): + metadata = UNSET + else: metadata = CreateInvitationRequestMetadata.from_dict(_metadata) recipient_keys = cast(List[str], d.pop("recipient_keys", UNSET)) diff --git a/int/acapy-backchannel/acapy_backchannel/models/cred_brief.py b/int/acapy-backchannel/acapy_backchannel/models/cred_brief.py index 64512a3d..314b9610 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/cred_brief.py +++ b/int/acapy-backchannel/acapy_backchannel/models/cred_brief.py @@ -52,9 +52,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - attrs: Union[Unset, CredBriefAttrs] = UNSET _attrs = d.pop("attrs", UNSET) - if not isinstance(_attrs, Unset): + attrs: Union[Unset, CredBriefAttrs] + if isinstance(_attrs, Unset): + attrs = UNSET + else: attrs = CredBriefAttrs.from_dict(_attrs) cred_def_id = d.pop("cred_def_id", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/credential_definition.py b/int/acapy-backchannel/acapy_backchannel/models/credential_definition.py index 379b46d2..a96f10cf 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/credential_definition.py +++ b/int/acapy-backchannel/acapy_backchannel/models/credential_definition.py @@ -62,14 +62,18 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: tag = d.pop("tag", UNSET) - type: Union[Unset, CredentialDefinitionType] = UNSET _type = d.pop("type", UNSET) - if not isinstance(_type, Unset): + type: Union[Unset, CredentialDefinitionType] + if isinstance(_type, Unset): + type = UNSET + else: type = CredentialDefinitionType.from_dict(_type) - value: Union[Unset, CredentialDefinitionValue] = UNSET _value = d.pop("value", UNSET) - if not isinstance(_value, Unset): + value: Union[Unset, CredentialDefinitionValue] + if isinstance(_value, Unset): + value = UNSET + else: value = CredentialDefinitionValue.from_dict(_value) ver = d.pop("ver", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/credential_definition_get_results.py b/int/acapy-backchannel/acapy_backchannel/models/credential_definition_get_results.py index 97f8f120..65114f5d 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/credential_definition_get_results.py +++ b/int/acapy-backchannel/acapy_backchannel/models/credential_definition_get_results.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - credential_definition: Union[Unset, CredentialDefinition] = UNSET _credential_definition = d.pop("credential_definition", UNSET) - if not isinstance(_credential_definition, Unset): + credential_definition: Union[Unset, CredentialDefinition] + if isinstance(_credential_definition, Unset): + credential_definition = UNSET + else: credential_definition 
= CredentialDefinition.from_dict(_credential_definition) credential_definition_get_results = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/did_endpoint_with_type.py b/int/acapy-backchannel/acapy_backchannel/models/did_endpoint_with_type.py index 7da5e29a..4f1363ff 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/did_endpoint_with_type.py +++ b/int/acapy-backchannel/acapy_backchannel/models/did_endpoint_with_type.py @@ -45,9 +45,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: endpoint = d.pop("endpoint", UNSET) - endpoint_type: Union[Unset, DIDEndpointWithTypeEndpointType] = UNSET _endpoint_type = d.pop("endpoint_type", UNSET) - if not isinstance(_endpoint_type, Unset): + endpoint_type: Union[Unset, DIDEndpointWithTypeEndpointType] + if isinstance(_endpoint_type, Unset): + endpoint_type = UNSET + else: endpoint_type = DIDEndpointWithTypeEndpointType(_endpoint_type) did_endpoint_with_type = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/indy_cred_info.py b/int/acapy-backchannel/acapy_backchannel/models/indy_cred_info.py index ff56c3be..1db45497 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/indy_cred_info.py +++ b/int/acapy-backchannel/acapy_backchannel/models/indy_cred_info.py @@ -36,9 +36,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - attrs: Union[Unset, IndyCredInfoAttrs] = UNSET _attrs = d.pop("attrs", UNSET) - if not isinstance(_attrs, Unset): + attrs: Union[Unset, IndyCredInfoAttrs] + if isinstance(_attrs, Unset): + attrs = UNSET + else: attrs = IndyCredInfoAttrs.from_dict(_attrs) referent = d.pop("referent", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_attr_spec.py b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_attr_spec.py index b80dda94..b4b13076 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_attr_spec.py +++ b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_attr_spec.py @@ -58,9 +58,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: names = cast(List[str], d.pop("names", UNSET)) - non_revoked: Union[Unset, IndyProofReqNonRevoked] = UNSET _non_revoked = d.pop("non_revoked", UNSET) - if not isinstance(_non_revoked, Unset): + non_revoked: Union[Unset, IndyProofReqNonRevoked] + if isinstance(_non_revoked, Unset): + non_revoked = UNSET + else: non_revoked = IndyProofReqNonRevoked.from_dict(_non_revoked) restrictions = [] diff --git a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_pred_spec.py b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_pred_spec.py index d1d28019..8ffe1381 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_pred_spec.py +++ b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_req_pred_spec.py @@ -63,9 +63,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: p_value = d.pop("p_value") - non_revoked: Union[Unset, IndyProofReqNonRevoked] = UNSET _non_revoked = d.pop("non_revoked", UNSET) - if not isinstance(_non_revoked, Unset): + non_revoked: Union[Unset, IndyProofReqNonRevoked] + if isinstance(_non_revoked, Unset): + non_revoked = UNSET + else: non_revoked = IndyProofReqNonRevoked.from_dict(_non_revoked) restrictions = [] diff --git a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_request.py b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_request.py index 871e3ea8..2d914b48 100644 --- 
a/int/acapy-backchannel/acapy_backchannel/models/indy_proof_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/indy_proof_request.py @@ -63,9 +63,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: version = d.pop("version") - non_revoked: Union[Unset, IndyProofReqNonRevoked] = UNSET _non_revoked = d.pop("non_revoked", UNSET) - if not isinstance(_non_revoked, Unset): + non_revoked: Union[Unset, IndyProofReqNonRevoked] + if isinstance(_non_revoked, Unset): + non_revoked = UNSET + else: non_revoked = IndyProofReqNonRevoked.from_dict(_non_revoked) nonce = d.pop("nonce", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/invitation_create_request.py b/int/acapy-backchannel/acapy_backchannel/models/invitation_create_request.py index 44d46a21..54da2f93 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/invitation_create_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/invitation_create_request.py @@ -80,9 +80,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: mediation_id = d.pop("mediation_id", UNSET) - metadata: Union[Unset, InvitationCreateRequestMetadata] = UNSET _metadata = d.pop("metadata", UNSET) - if not isinstance(_metadata, Unset): + metadata: Union[Unset, InvitationCreateRequestMetadata] + if isinstance(_metadata, Unset): + metadata = UNSET + else: metadata = InvitationCreateRequestMetadata.from_dict(_metadata) my_label = d.pop("my_label", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/invitation_record.py b/int/acapy-backchannel/acapy_backchannel/models/invitation_record.py index 03bb57de..ebd399c0 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/invitation_record.py +++ b/int/acapy-backchannel/acapy_backchannel/models/invitation_record.py @@ -64,9 +64,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: invi_msg_id = d.pop("invi_msg_id", UNSET) - invitation: Union[Unset, InvitationRecordInvitation] = UNSET _invitation = d.pop("invitation", UNSET) - if not isinstance(_invitation, Unset): + invitation: Union[Unset, InvitationRecordInvitation] + if isinstance(_invitation, Unset): + invitation = UNSET + else: invitation = InvitationRecordInvitation.from_dict(_invitation) invitation_id = d.pop("invitation_id", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/keylist_query_filter_request.py b/int/acapy-backchannel/acapy_backchannel/models/keylist_query_filter_request.py index 4e754628..ab0e6c2f 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/keylist_query_filter_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/keylist_query_filter_request.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - filter_: Union[Unset, KeylistQueryFilterRequestFilter] = UNSET _filter_ = d.pop("filter", UNSET) - if not isinstance(_filter_, Unset): + filter_: Union[Unset, KeylistQueryFilterRequestFilter] + if isinstance(_filter_, Unset): + filter_ = UNSET + else: filter_ = KeylistQueryFilterRequestFilter.from_dict(_filter_) keylist_query_filter_request = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/menu_option.py b/int/acapy-backchannel/acapy_backchannel/models/menu_option.py index af51749e..3040a284 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/menu_option.py +++ b/int/acapy-backchannel/acapy_backchannel/models/menu_option.py @@ -56,9 +56,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: disabled 
= d.pop("disabled", UNSET) - form: Union[Unset, MenuForm] = UNSET _form = d.pop("form", UNSET) - if not isinstance(_form, Unset): + form: Union[Unset, MenuForm] + if isinstance(_form, Unset): + form = UNSET + else: form = MenuForm.from_dict(_form) menu_option = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/perform_request.py b/int/acapy-backchannel/acapy_backchannel/models/perform_request.py index 3be09dc0..3779815b 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/perform_request.py +++ b/int/acapy-backchannel/acapy_backchannel/models/perform_request.py @@ -37,9 +37,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() name = d.pop("name", UNSET) - params: Union[Unset, PerformRequestParams] = UNSET _params = d.pop("params", UNSET) - if not isinstance(_params, Unset): + params: Union[Unset, PerformRequestParams] + if isinstance(_params, Unset): + params = UNSET + else: params = PerformRequestParams.from_dict(_params) perform_request = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/publish_revocations.py b/int/acapy-backchannel/acapy_backchannel/models/publish_revocations.py index 80ef61e1..e1084c3f 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/publish_revocations.py +++ b/int/acapy-backchannel/acapy_backchannel/models/publish_revocations.py @@ -12,32 +12,34 @@ class PublishRevocations: """ """ - rrid_2_crid: Union[Unset, PublishRevocationsRrid2Crid] = UNSET + rrid2crid: Union[Unset, PublishRevocationsRrid2Crid] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - rrid_2_crid: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.rrid_2_crid, Unset): - rrid_2_crid = self.rrid_2_crid.to_dict() + rrid2crid: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.rrid2crid, Unset): + rrid2crid = self.rrid2crid.to_dict() field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update({}) - if rrid_2_crid is not UNSET: - field_dict["rrid2crid"] = rrid_2_crid + if rrid2crid is not UNSET: + field_dict["rrid2crid"] = rrid2crid return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - rrid_2_crid: Union[Unset, PublishRevocationsRrid2Crid] = UNSET - _rrid_2_crid = d.pop("rrid2crid", UNSET) - if not isinstance(_rrid_2_crid, Unset): - rrid_2_crid = PublishRevocationsRrid2Crid.from_dict(_rrid_2_crid) + _rrid2crid = d.pop("rrid2crid", UNSET) + rrid2crid: Union[Unset, PublishRevocationsRrid2Crid] + if isinstance(_rrid2crid, Unset): + rrid2crid = UNSET + else: + rrid2crid = PublishRevocationsRrid2Crid.from_dict(_rrid2crid) publish_revocations = cls( - rrid_2_crid=rrid_2_crid, + rrid2crid=rrid2crid, ) publish_revocations.additional_properties = d diff --git a/int/acapy-backchannel/acapy_backchannel/models/query_result.py b/int/acapy-backchannel/acapy_backchannel/models/query_result.py index 2804dde5..4950129e 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/query_result.py +++ b/int/acapy-backchannel/acapy_backchannel/models/query_result.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - results: Union[Unset, QueryResultResults] = UNSET _results = d.pop("results", UNSET) - if not isinstance(_results, Unset): + results: Union[Unset, QueryResultResults] + if isinstance(_results, Unset): + results = UNSET + else: results = 
QueryResultResults.from_dict(_results) query_result = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_get_results.py b/int/acapy-backchannel/acapy_backchannel/models/schema_get_results.py index 2e81c2a7..dfbdcacc 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/schema_get_results.py +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_get_results.py @@ -31,9 +31,11 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - schema: Union[Unset, Schema] = UNSET _schema = d.pop("schema", UNSET) - if not isinstance(_schema, Unset): + schema: Union[Unset, Schema] + if isinstance(_schema, Unset): + schema = UNSET + else: schema = Schema.from_dict(_schema) schema_get_results = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py index fef01dd8..6c975dfc 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, Dict, List, Type, TypeVar, Union import attr -from ..models.schema_send_results_schema import SchemaSendResultsSchema +from ..models.schema_send_results_sent import SchemaSendResultsSent +from ..models.schema_send_results_txn import SchemaSendResultsTxn +from ..types import UNSET, Unset T = TypeVar("T", bound="SchemaSendResults") @@ -11,36 +13,49 @@ class SchemaSendResults: """ """ - schema: SchemaSendResultsSchema - schema_id: str + sent: Union[Unset, SchemaSendResultsSent] = UNSET + txn: Union[Unset, SchemaSendResultsTxn] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - schema = self.schema.to_dict() + sent: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.sent, Unset): + sent = self.sent.to_dict() - schema_id = self.schema_id + txn: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.txn, Unset): + txn = self.txn.to_dict() field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) - field_dict.update( - { - "schema": schema, - "schema_id": schema_id, - } - ) + field_dict.update({}) + if sent is not UNSET: + field_dict["sent"] = sent + if txn is not UNSET: + field_dict["txn"] = txn return field_dict @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - schema = SchemaSendResultsSchema.from_dict(d.pop("schema")) - - schema_id = d.pop("schema_id") + _sent = d.pop("sent", UNSET) + sent: Union[Unset, SchemaSendResultsSent] + if isinstance(_sent, Unset): + sent = UNSET + else: + sent = SchemaSendResultsSent.from_dict(_sent) + + _txn = d.pop("txn", UNSET) + txn: Union[Unset, SchemaSendResultsTxn] + if isinstance(_txn, Unset): + txn = UNSET + else: + txn = SchemaSendResultsTxn.from_dict(_txn) schema_send_results = cls( - schema=schema, - schema_id=schema_id, + sent=sent, + txn=txn, ) schema_send_results.additional_properties = d diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_schema.py b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py similarity index 81% rename from int/acapy-backchannel/acapy_backchannel/models/schema_send_results_schema.py rename to int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py index 934f1247..488dcdaa 100644 --- 
a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_schema.py +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py @@ -2,11 +2,11 @@ import attr -T = TypeVar("T", bound="SchemaSendResultsSchema") +T = TypeVar("T", bound="SchemaSendResultsSent") @attr.s(auto_attribs=True) -class SchemaSendResultsSchema: +class SchemaSendResultsSent: """Schema result""" additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) @@ -22,10 +22,10 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - schema_send_results_schema = cls() + schema_send_results_sent = cls() - schema_send_results_schema.additional_properties = d - return schema_send_results_schema + schema_send_results_sent.additional_properties = d + return schema_send_results_sent @property def additional_keys(self) -> List[str]: diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_txn.py b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_txn.py new file mode 100644 index 00000000..421f6e83 --- /dev/null +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_txn.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="SchemaSendResultsTxn") + + +@attr.s(auto_attribs=True) +class SchemaSendResultsTxn: + """Schema transaction""" + + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + schema_send_results_txn = cls() + + schema_send_results_txn.additional_properties = d + return schema_send_results_txn + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/acapy-backchannel/acapy_backchannel/models/taa_info.py b/int/acapy-backchannel/acapy_backchannel/models/taa_info.py index ef1b887d..d96084a6 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/taa_info.py +++ b/int/acapy-backchannel/acapy_backchannel/models/taa_info.py @@ -49,9 +49,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: taa_required = d.pop("taa_required") - taa_accepted = None _taa_accepted = d.pop("taa_accepted") - if _taa_accepted is not None: + taa_accepted: Optional[TAAAcceptance] + if _taa_accepted is None: + taa_accepted = None + else: taa_accepted = TAAAcceptance.from_dict(_taa_accepted) taa_info = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/v10_credential_exchange.py b/int/acapy-backchannel/acapy_backchannel/models/v10_credential_exchange.py index 6c7eb730..c4484c06 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v10_credential_exchange.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v10_credential_exchange.py @@ -174,9 +174,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: created_at = d.pop("created_at", UNSET) - credential: Union[Unset, 
V10CredentialExchangeCredential] = UNSET _credential = d.pop("credential", UNSET) - if not isinstance(_credential, Unset): + credential: Union[Unset, V10CredentialExchangeCredential] + if isinstance(_credential, Unset): + credential = UNSET + else: credential = V10CredentialExchangeCredential.from_dict(_credential) credential_definition_id = d.pop("credential_definition_id", UNSET) @@ -185,54 +187,70 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: credential_id = d.pop("credential_id", UNSET) - credential_offer: Union[Unset, V10CredentialExchangeCredentialOffer] = UNSET _credential_offer = d.pop("credential_offer", UNSET) - if not isinstance(_credential_offer, Unset): + credential_offer: Union[Unset, V10CredentialExchangeCredentialOffer] + if isinstance(_credential_offer, Unset): + credential_offer = UNSET + else: credential_offer = V10CredentialExchangeCredentialOffer.from_dict(_credential_offer) - credential_offer_dict: Union[Unset, V10CredentialExchangeCredentialOfferDict] = UNSET _credential_offer_dict = d.pop("credential_offer_dict", UNSET) - if not isinstance(_credential_offer_dict, Unset): + credential_offer_dict: Union[Unset, V10CredentialExchangeCredentialOfferDict] + if isinstance(_credential_offer_dict, Unset): + credential_offer_dict = UNSET + else: credential_offer_dict = V10CredentialExchangeCredentialOfferDict.from_dict(_credential_offer_dict) - credential_proposal_dict: Union[Unset, V10CredentialExchangeCredentialProposalDict] = UNSET _credential_proposal_dict = d.pop("credential_proposal_dict", UNSET) - if not isinstance(_credential_proposal_dict, Unset): + credential_proposal_dict: Union[Unset, V10CredentialExchangeCredentialProposalDict] + if isinstance(_credential_proposal_dict, Unset): + credential_proposal_dict = UNSET + else: credential_proposal_dict = V10CredentialExchangeCredentialProposalDict.from_dict(_credential_proposal_dict) - credential_request: Union[Unset, V10CredentialExchangeCredentialRequest] = UNSET _credential_request = d.pop("credential_request", UNSET) - if not isinstance(_credential_request, Unset): + credential_request: Union[Unset, V10CredentialExchangeCredentialRequest] + if isinstance(_credential_request, Unset): + credential_request = UNSET + else: credential_request = V10CredentialExchangeCredentialRequest.from_dict(_credential_request) - credential_request_metadata: Union[Unset, V10CredentialExchangeCredentialRequestMetadata] = UNSET _credential_request_metadata = d.pop("credential_request_metadata", UNSET) - if not isinstance(_credential_request_metadata, Unset): + credential_request_metadata: Union[Unset, V10CredentialExchangeCredentialRequestMetadata] + if isinstance(_credential_request_metadata, Unset): + credential_request_metadata = UNSET + else: credential_request_metadata = V10CredentialExchangeCredentialRequestMetadata.from_dict( _credential_request_metadata ) error_msg = d.pop("error_msg", UNSET) - initiator: Union[Unset, V10CredentialExchangeInitiator] = UNSET _initiator = d.pop("initiator", UNSET) - if not isinstance(_initiator, Unset): + initiator: Union[Unset, V10CredentialExchangeInitiator] + if isinstance(_initiator, Unset): + initiator = UNSET + else: initiator = V10CredentialExchangeInitiator(_initiator) parent_thread_id = d.pop("parent_thread_id", UNSET) - raw_credential: Union[Unset, V10CredentialExchangeRawCredential] = UNSET _raw_credential = d.pop("raw_credential", UNSET) - if not isinstance(_raw_credential, Unset): + raw_credential: Union[Unset, V10CredentialExchangeRawCredential] + if 
isinstance(_raw_credential, Unset): + raw_credential = UNSET + else: raw_credential = V10CredentialExchangeRawCredential.from_dict(_raw_credential) revoc_reg_id = d.pop("revoc_reg_id", UNSET) revocation_id = d.pop("revocation_id", UNSET) - role: Union[Unset, V10CredentialExchangeRole] = UNSET _role = d.pop("role", UNSET) - if not isinstance(_role, Unset): + role: Union[Unset, V10CredentialExchangeRole] + if isinstance(_role, Unset): + role = UNSET + else: role = V10CredentialExchangeRole(_role) schema_id = d.pop("schema_id", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/v10_credential_proposal_request_opt.py b/int/acapy-backchannel/acapy_backchannel/models/v10_credential_proposal_request_opt.py index e41f5525..4d6debe2 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v10_credential_proposal_request_opt.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v10_credential_proposal_request_opt.py @@ -82,9 +82,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: cred_def_id = d.pop("cred_def_id", UNSET) - credential_proposal: Union[Unset, CredentialPreview] = UNSET _credential_proposal = d.pop("credential_proposal", UNSET) - if not isinstance(_credential_proposal, Unset): + credential_proposal: Union[Unset, CredentialPreview] + if isinstance(_credential_proposal, Unset): + credential_proposal = UNSET + else: credential_proposal = CredentialPreview.from_dict(_credential_proposal) issuer_did = d.pop("issuer_did", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/v10_presentation_exchange.py b/int/acapy-backchannel/acapy_backchannel/models/v10_presentation_exchange.py index a3125880..f4722e27 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v10_presentation_exchange.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v10_presentation_exchange.py @@ -125,40 +125,52 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: error_msg = d.pop("error_msg", UNSET) - initiator: Union[Unset, V10PresentationExchangeInitiator] = UNSET _initiator = d.pop("initiator", UNSET) - if not isinstance(_initiator, Unset): + initiator: Union[Unset, V10PresentationExchangeInitiator] + if isinstance(_initiator, Unset): + initiator = UNSET + else: initiator = V10PresentationExchangeInitiator(_initiator) - presentation: Union[Unset, V10PresentationExchangePresentation] = UNSET _presentation = d.pop("presentation", UNSET) - if not isinstance(_presentation, Unset): + presentation: Union[Unset, V10PresentationExchangePresentation] + if isinstance(_presentation, Unset): + presentation = UNSET + else: presentation = V10PresentationExchangePresentation.from_dict(_presentation) presentation_exchange_id = d.pop("presentation_exchange_id", UNSET) - presentation_proposal_dict: Union[Unset, V10PresentationExchangePresentationProposalDict] = UNSET _presentation_proposal_dict = d.pop("presentation_proposal_dict", UNSET) - if not isinstance(_presentation_proposal_dict, Unset): + presentation_proposal_dict: Union[Unset, V10PresentationExchangePresentationProposalDict] + if isinstance(_presentation_proposal_dict, Unset): + presentation_proposal_dict = UNSET + else: presentation_proposal_dict = V10PresentationExchangePresentationProposalDict.from_dict( _presentation_proposal_dict ) - presentation_request: Union[Unset, V10PresentationExchangePresentationRequest] = UNSET _presentation_request = d.pop("presentation_request", UNSET) - if not isinstance(_presentation_request, Unset): + presentation_request: Union[Unset, 
V10PresentationExchangePresentationRequest] + if isinstance(_presentation_request, Unset): + presentation_request = UNSET + else: presentation_request = V10PresentationExchangePresentationRequest.from_dict(_presentation_request) - presentation_request_dict: Union[Unset, V10PresentationExchangePresentationRequestDict] = UNSET _presentation_request_dict = d.pop("presentation_request_dict", UNSET) - if not isinstance(_presentation_request_dict, Unset): + presentation_request_dict: Union[Unset, V10PresentationExchangePresentationRequestDict] + if isinstance(_presentation_request_dict, Unset): + presentation_request_dict = UNSET + else: presentation_request_dict = V10PresentationExchangePresentationRequestDict.from_dict( _presentation_request_dict ) - role: Union[Unset, V10PresentationExchangeRole] = UNSET _role = d.pop("role", UNSET) - if not isinstance(_role, Unset): + role: Union[Unset, V10PresentationExchangeRole] + if isinstance(_role, Unset): + role = UNSET + else: role = V10PresentationExchangeRole(_role) state = d.pop("state", UNSET) @@ -169,9 +181,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: updated_at = d.pop("updated_at", UNSET) - verified: Union[Unset, V10PresentationExchangeVerified] = UNSET _verified = d.pop("verified", UNSET) - if not isinstance(_verified, Unset): + verified: Union[Unset, V10PresentationExchangeVerified] + if isinstance(_verified, Unset): + verified = UNSET + else: verified = V10PresentationExchangeVerified(_verified) v10_presentation_exchange = cls( diff --git a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record.py b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record.py index caf9fede..1acca756 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record.py @@ -158,53 +158,71 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: cred_id_stored = d.pop("cred_id_stored", UNSET) - cred_issue: Union[Unset, V20CredExRecordCredIssue] = UNSET _cred_issue = d.pop("cred_issue", UNSET) - if not isinstance(_cred_issue, Unset): + cred_issue: Union[Unset, V20CredExRecordCredIssue] + if isinstance(_cred_issue, Unset): + cred_issue = UNSET + else: cred_issue = V20CredExRecordCredIssue.from_dict(_cred_issue) - cred_offer: Union[Unset, V20CredExRecordCredOffer] = UNSET _cred_offer = d.pop("cred_offer", UNSET) - if not isinstance(_cred_offer, Unset): + cred_offer: Union[Unset, V20CredExRecordCredOffer] + if isinstance(_cred_offer, Unset): + cred_offer = UNSET + else: cred_offer = V20CredExRecordCredOffer.from_dict(_cred_offer) - cred_preview: Union[Unset, V20CredExRecordCredPreview] = UNSET _cred_preview = d.pop("cred_preview", UNSET) - if not isinstance(_cred_preview, Unset): + cred_preview: Union[Unset, V20CredExRecordCredPreview] + if isinstance(_cred_preview, Unset): + cred_preview = UNSET + else: cred_preview = V20CredExRecordCredPreview.from_dict(_cred_preview) - cred_proposal: Union[Unset, V20CredExRecordCredProposal] = UNSET _cred_proposal = d.pop("cred_proposal", UNSET) - if not isinstance(_cred_proposal, Unset): + cred_proposal: Union[Unset, V20CredExRecordCredProposal] + if isinstance(_cred_proposal, Unset): + cred_proposal = UNSET + else: cred_proposal = V20CredExRecordCredProposal.from_dict(_cred_proposal) - cred_request: Union[Unset, V20CredExRecordCredRequest] = UNSET _cred_request = d.pop("cred_request", UNSET) - if not isinstance(_cred_request, Unset): + cred_request: Union[Unset, V20CredExRecordCredRequest] + if 
isinstance(_cred_request, Unset): + cred_request = UNSET + else: cred_request = V20CredExRecordCredRequest.from_dict(_cred_request) - cred_request_metadata: Union[Unset, V20CredExRecordCredRequestMetadata] = UNSET _cred_request_metadata = d.pop("cred_request_metadata", UNSET) - if not isinstance(_cred_request_metadata, Unset): + cred_request_metadata: Union[Unset, V20CredExRecordCredRequestMetadata] + if isinstance(_cred_request_metadata, Unset): + cred_request_metadata = UNSET + else: cred_request_metadata = V20CredExRecordCredRequestMetadata.from_dict(_cred_request_metadata) error_msg = d.pop("error_msg", UNSET) - initiator: Union[Unset, V20CredExRecordInitiator] = UNSET _initiator = d.pop("initiator", UNSET) - if not isinstance(_initiator, Unset): + initiator: Union[Unset, V20CredExRecordInitiator] + if isinstance(_initiator, Unset): + initiator = UNSET + else: initiator = V20CredExRecordInitiator(_initiator) parent_thread_id = d.pop("parent_thread_id", UNSET) - role: Union[Unset, V20CredExRecordRole] = UNSET _role = d.pop("role", UNSET) - if not isinstance(_role, Unset): + role: Union[Unset, V20CredExRecordRole] + if isinstance(_role, Unset): + role = UNSET + else: role = V20CredExRecordRole(_role) - state: Union[Unset, V20CredExRecordState] = UNSET _state = d.pop("state", UNSET) - if not isinstance(_state, Unset): + state: Union[Unset, V20CredExRecordState] + if isinstance(_state, Unset): + state = UNSET + else: state = V20CredExRecordState(_state) thread_id = d.pop("thread_id", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_dif.py b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_dif.py index fcb87b11..eeb3f29b 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_dif.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_dif.py @@ -58,9 +58,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: cred_ex_id = d.pop("cred_ex_id", UNSET) - item: Union[Unset, V20CredExRecordDIFItem] = UNSET _item = d.pop("item", UNSET) - if not isinstance(_item, Unset): + item: Union[Unset, V20CredExRecordDIFItem] + if isinstance(_item, Unset): + item = UNSET + else: item = V20CredExRecordDIFItem.from_dict(_item) state = d.pop("state", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_indy.py b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_indy.py index 09b2d32b..ab9011b5 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_indy.py +++ b/int/acapy-backchannel/acapy_backchannel/models/v20_cred_ex_record_indy.py @@ -66,9 +66,11 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: cred_ex_indy_id = d.pop("cred_ex_indy_id", UNSET) - cred_request_metadata: Union[Unset, V20CredExRecordIndyCredRequestMetadata] = UNSET _cred_request_metadata = d.pop("cred_request_metadata", UNSET) - if not isinstance(_cred_request_metadata, Unset): + cred_request_metadata: Union[Unset, V20CredExRecordIndyCredRequestMetadata] + if isinstance(_cred_request_metadata, Unset): + cred_request_metadata = UNSET + else: cred_request_metadata = V20CredExRecordIndyCredRequestMetadata.from_dict(_cred_request_metadata) cred_rev_id = d.pop("cred_rev_id", UNSET) diff --git a/int/acapy-backchannel/acapy_backchannel/types.py b/int/acapy-backchannel/acapy_backchannel/types.py index 2b1cfc5b..a6f00ece 100644 --- a/int/acapy-backchannel/acapy_backchannel/types.py +++ b/int/acapy-backchannel/acapy_backchannel/types.py @@ -40,4 +40,4 @@ class 
Response(Generic[T]): parsed: Optional[T] -__all__ = ["File", "Response"] +__all__ = ["File", "Response", "FileJsonType"] diff --git a/int/scripts/openapi.yml b/int/scripts/openapi.yml index 2a1ce0ec..cdcd7c4d 100644 --- a/int/scripts/openapi.yml +++ b/int/scripts/openapi.yml @@ -4773,15 +4773,14 @@ components: - schema_id type: object properties: - schema: + sent: type: object properties: {} description: Schema result - schema_id: - pattern: ^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}:2:.+:[0-9.]+$ - type: string - description: Schema identifier - example: WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0 + txn: + type: object + properties: {} + description: Schema transaction SchemasCreatedResults: type: object properties: From a781055c840fefc8099120121685396eb90f5f30 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Mon, 19 Jul 2021 22:08:14 -0400 Subject: [PATCH 31/66] fix: schemas for sent object of schema send results Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- .../acapy_backchannel/models/__init__.py | 3 +- .../models/schema_send_results.py | 23 +++---- .../acapy_backchannel/models/schema_sent.py | 63 +++++++++++++++++++ ..._results_sent.py => schema_sent_schema.py} | 12 ++-- int/scripts/openapi.yml | 22 +++++-- 5 files changed, 97 insertions(+), 26 deletions(-) create mode 100644 int/acapy-backchannel/acapy_backchannel/models/schema_sent.py rename int/acapy-backchannel/acapy_backchannel/models/{schema_send_results_sent.py => schema_sent_schema.py} (80%) diff --git a/int/acapy-backchannel/acapy_backchannel/models/__init__.py b/int/acapy-backchannel/acapy_backchannel/models/__init__.py index 43447135..6b0d5d8e 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/__init__.py +++ b/int/acapy-backchannel/acapy_backchannel/models/__init__.py @@ -138,8 +138,9 @@ from .schema_get_results import SchemaGetResults from .schema_send_request import SchemaSendRequest from .schema_send_results import SchemaSendResults -from .schema_send_results_sent import SchemaSendResultsSent from .schema_send_results_txn import SchemaSendResultsTxn +from .schema_sent import SchemaSent +from .schema_sent_schema import SchemaSentSchema from .schemas_created_results import SchemasCreatedResults from .send_message import SendMessage from .service import Service diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py index 6c975dfc..ad38e3d7 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_send_results.py @@ -2,8 +2,8 @@ import attr -from ..models.schema_send_results_sent import SchemaSendResultsSent from ..models.schema_send_results_txn import SchemaSendResultsTxn +from ..models.schema_sent import SchemaSent from ..types import UNSET, Unset T = TypeVar("T", bound="SchemaSendResults") @@ -13,14 +13,12 @@ class SchemaSendResults: """ """ - sent: Union[Unset, SchemaSendResultsSent] = UNSET + sent: SchemaSent txn: Union[Unset, SchemaSendResultsTxn] = UNSET additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: - sent: Union[Unset, Dict[str, Any]] = UNSET - if not isinstance(self.sent, Unset): - sent = self.sent.to_dict() + sent = self.sent.to_dict() txn: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.txn, Unset): @@ -28,9 +26,11 @@ def to_dict(self) -> Dict[str, Any]: field_dict: Dict[str, Any] = {} 
field_dict.update(self.additional_properties) - field_dict.update({}) - if sent is not UNSET: - field_dict["sent"] = sent + field_dict.update( + { + "sent": sent, + } + ) if txn is not UNSET: field_dict["txn"] = txn @@ -39,12 +39,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - _sent = d.pop("sent", UNSET) - sent: Union[Unset, SchemaSendResultsSent] - if isinstance(_sent, Unset): - sent = UNSET - else: - sent = SchemaSendResultsSent.from_dict(_sent) + sent = SchemaSent.from_dict(d.pop("sent")) _txn = d.pop("txn", UNSET) txn: Union[Unset, SchemaSendResultsTxn] diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_sent.py b/int/acapy-backchannel/acapy_backchannel/models/schema_sent.py new file mode 100644 index 00000000..f8867347 --- /dev/null +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_sent.py @@ -0,0 +1,63 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +from ..models.schema_sent_schema import SchemaSentSchema + +T = TypeVar("T", bound="SchemaSent") + + +@attr.s(auto_attribs=True) +class SchemaSent: + """ """ + + schema: SchemaSentSchema + schema_id: str + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + schema = self.schema.to_dict() + + schema_id = self.schema_id + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "schema": schema, + "schema_id": schema_id, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + schema = SchemaSentSchema.from_dict(d.pop("schema")) + + schema_id = d.pop("schema_id") + + schema_sent = cls( + schema=schema, + schema_id=schema_id, + ) + + schema_sent.additional_properties = d + return schema_sent + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py b/int/acapy-backchannel/acapy_backchannel/models/schema_sent_schema.py similarity index 80% rename from int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py rename to int/acapy-backchannel/acapy_backchannel/models/schema_sent_schema.py index 488dcdaa..3134f8c4 100644 --- a/int/acapy-backchannel/acapy_backchannel/models/schema_send_results_sent.py +++ b/int/acapy-backchannel/acapy_backchannel/models/schema_sent_schema.py @@ -2,12 +2,12 @@ import attr -T = TypeVar("T", bound="SchemaSendResultsSent") +T = TypeVar("T", bound="SchemaSentSchema") @attr.s(auto_attribs=True) -class SchemaSendResultsSent: - """Schema result""" +class SchemaSentSchema: + """schema""" additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) @@ -22,10 +22,10 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: d = src_dict.copy() - schema_send_results_sent = cls() + schema_sent_schema = cls() - schema_send_results_sent.additional_properties = d - return schema_send_results_sent + schema_sent_schema.additional_properties = 
d + return schema_sent_schema @property def additional_keys(self) -> List[str]: diff --git a/int/scripts/openapi.yml b/int/scripts/openapi.yml index cdcd7c4d..27c74a2b 100644 --- a/int/scripts/openapi.yml +++ b/int/scripts/openapi.yml @@ -4769,18 +4769,30 @@ components: example: "1.0" SchemaSendResults: required: - - schema - - schema_id + - sent type: object properties: sent: - type: object - properties: {} - description: Schema result + $ref: '#/components/schemas/SchemaSent' txn: type: object properties: {} description: Schema transaction + SchemaSent: + type: object + required: + - schema + - schema_id + properties: + schema: + type: object + properties: {} + description: schema + schema_id: + pattern: ^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}:2:.+:[0-9.]+$ + type: string + description: Schema identifier + example: WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0 SchemasCreatedResults: type: object properties: From 0717835477ce8387d01dc4704ffa9f9c11bbbf1e Mon Sep 17 00:00:00 2001 From: Char Date: Tue, 20 Jul 2021 00:18:56 -0700 Subject: [PATCH 32/66] fix: use backchannel instead of admin-schemas Signed-off-by: Char Signed-off-by: Char Howland --- int/tests/test_credentialdefinitions.py | 33 ++++++++++++++----------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/int/tests/test_credentialdefinitions.py b/int/tests/test_credentialdefinitions.py index 1e51fd8e..86d9f307 100644 --- a/int/tests/test_credentialdefinitions.py +++ b/int/tests/test_credentialdefinitions.py @@ -2,20 +2,23 @@ import asyncio import pytest +from acapy_backchannel import Client +from acapy_backchannel.models.schema_send_request import SchemaSendRequest +from acapy_backchannel.api.schema import publish_schema + @pytest.fixture(scope="module") -async def create_schema(connection, endorser_did): +async def create_schema(backchannel: Client, endorser_did): """Schema factory fixture""" async def _create_schema(version): - return await connection.send_and_await_reply_async( - { - "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-schemas/0.1/send-schema", - "schema_name": "Test Schema", - "schema_version": version, - "attributes": ["attr_1_0", "attr_1_1", "attr_1_2"], - "return_route": "all", - } + return await publish_schema.asyncio( + client=backchannel, + json_body=SchemaSendRequest( + attributes=["attr_1_0", "attr_1_1", "attr_1_2"], + schema_name="Test Schema", + schema_version=version, + ), ) yield _create_schema @@ -28,7 +31,7 @@ async def test_send_cred_def(connection, endorser_did, create_schema): send_cred_def = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema["schema_id"], + "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, } ) @@ -45,7 +48,7 @@ async def test_cred_def_get(connection, endorser_did, create_schema): send_cred_def = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema["schema_id"], + "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, } ) @@ -60,18 +63,18 @@ async def test_cred_def_get(connection, endorser_did, create_schema): cred_def_get["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/credential-definition" ) - assert schema["schema_id"] == cred_def_get["schema_id"] + assert schema.sent.schema_id == cred_def_get["schema_id"] assert 
send_cred_def["cred_def_id"] == cred_def_get["cred_def_id"] @pytest.mark.asyncio async def test_cred_def_get_list(connection, endorser_did, create_schema): - """Create and retrieve a credential definition""" + """Retrieve the list of credential definitions""" schema1_2 = await create_schema(version="1.2") send_schema1_2 = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema1_2["schema_id"], + "schema_id": schema1_2.sent.schema_id, "~transport": {"return_route": "all"}, } ) @@ -79,7 +82,7 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): send_schema1_3 = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", - "schema_id": schema1_3["schema_id"], + "schema_id": schema1_3.sent.schema_id, "~transport": {"return_route": "all"}, } ) From d382f4a80ed06efa921e706a6e9a7ff38dbbaa3d Mon Sep 17 00:00:00 2001 From: Char Date: Thu, 22 Jul 2021 15:41:34 -0700 Subject: [PATCH 33/66] feat: add connection and credential definition fixtures Signed-off-by: Char Signed-off-by: Char Howland --- int/tests/conftest.py | 6 ++-- int/tests/test_holder.py | 77 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 2 deletions(-) create mode 100644 int/tests/test_holder.py diff --git a/int/tests/conftest.py b/int/tests/conftest.py index e38f84e4..a38080f4 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -195,6 +195,7 @@ async def _make_did(): @pytest.fixture(scope="session") async def accepted_taa(backchannel): + backchannel.timeout = 15 result = (await fetch_taa.asyncio(client=backchannel)).result result = await accept_taa.asyncio( client=backchannel, @@ -209,7 +210,7 @@ async def accepted_taa(backchannel): @pytest.fixture(scope="session") async def endorser_did(make_did, backchannel, accepted_taa): """Endorser DID factory fixture""" - + backchannel.timeout = 15 did: DID = await make_did() LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( @@ -219,12 +220,13 @@ async def endorser_did(make_did, backchannel, accepted_taa): "did": did.did, "verkey": did.verkey, }, - timeout=30, + timeout=15, ) if response.is_error: raise Exception("Failed to publish DID:", response.text) LOGGER.info("DID Published") + backchannel.timeout = 15 result = await set_public_did.asyncio_detailed( client=backchannel, did=did.did, diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py new file mode 100644 index 00000000..c897ad2f --- /dev/null +++ b/int/tests/test_holder.py @@ -0,0 +1,77 @@ +"""Holder Tests""" +import asyncio +import pytest + +from acapy_backchannel import Client +from acapy_backchannel.models.create_invitation_request import CreateInvitationRequest +from acapy_backchannel.models.conn_record import ConnRecord +from acapy_backchannel.models.receive_invitation_request import ReceiveInvitationRequest +from acapy_backchannel.models.schema_send_request import SchemaSendRequest +from acapy_backchannel.models.credential_definition_send_request import ( + CredentialDefinitionSendRequest, +) +from acapy_backchannel.api.connection import ( + create_invitation, + receive_invitation, + get_connection, +) +from acapy_backchannel.api.schema import publish_schema +from acapy_backchannel.api.credential_definition import publish_cred_def + + +@pytest.fixture(scope="module") +async def 
issuer_holder_connection(backchannel: Client): + """Invitation creation fixture""" + invitation_created = await create_invitation.asyncio( + client=backchannel, json_body=CreateInvitationRequest() + ) + connection_created = await receive_invitation.asyncio( + client=backchannel, + json_body=ReceiveInvitationRequest( + id=invitation_created.invitation.id, + type=invitation_created.invitation.type, + did=invitation_created.invitation.did, + image_url=invitation_created.invitation.image_url, + label=invitation_created.invitation.label, + recipient_keys=invitation_created.invitation.recipient_keys, + routing_keys=invitation_created.invitation.routing_keys, + service_endpoint=invitation_created.invitation.service_endpoint, + ), + ) + return await get_connection.asyncio( + client=backchannel, conn_id=connection_created.connection_id + ) + + +@pytest.fixture(scope="module") +async def create_schema(backchannel: Client, endorser_did): + """Schema factory fixture""" + + async def _create_schema(version): + return await publish_schema.asyncio( + client=backchannel, + json_body=SchemaSendRequest( + attributes=["attr_1_0", "attr_1_1", "attr_1_2"], + schema_name="Test Schema", + schema_version=version, + ), + ) + + yield _create_schema + + +@pytest.fixture(scope="module") +async def create_cred_def(backchannel: Client, endorser_did, create_schema): + """Credential definition fixture""" + + async def _create_cred_def(version): + schema = await create_schema(version="1.0") + backchannel.timeout = 20 + return await publish_cred_def.asyncio( + client=backchannel, + json_body=CredentialDefinitionSendRequest( + schema_id=schema.sent.schema_id, + ), + ) + + yield _create_cred_def From bdf0542e75dce06a475186cb77a1cd47371af94c Mon Sep 17 00:00:00 2001 From: Char Date: Fri, 23 Jul 2021 08:56:17 -0700 Subject: [PATCH 34/66] test: add test of holder credential exchange Signed-off-by: Char Signed-off-by: Char Howland --- int/tests/test_holder.py | 68 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 65 insertions(+), 3 deletions(-) diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index c897ad2f..c5ceaa02 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -1,15 +1,22 @@ """Holder Tests""" import asyncio import pytest +from typing import cast from acapy_backchannel import Client from acapy_backchannel.models.create_invitation_request import CreateInvitationRequest from acapy_backchannel.models.conn_record import ConnRecord from acapy_backchannel.models.receive_invitation_request import ReceiveInvitationRequest from acapy_backchannel.models.schema_send_request import SchemaSendRequest +from acapy_backchannel.models.v10_credential_proposal_request_mand import ( + V10CredentialProposalRequestMand, +) +from acapy_backchannel.models.credential_preview import CredentialPreview from acapy_backchannel.models.credential_definition_send_request import ( CredentialDefinitionSendRequest, ) +from acapy_backchannel.models.cred_attr_spec import CredAttrSpec +from acapy_backchannel.models.v10_credential_exchange import V10CredentialExchange from acapy_backchannel.api.connection import ( create_invitation, receive_invitation, @@ -17,6 +24,7 @@ ) from acapy_backchannel.api.schema import publish_schema from acapy_backchannel.api.credential_definition import publish_cred_def +from acapy_backchannel.api.issue_credential_v10 import issue_credential_automated @pytest.fixture(scope="module") @@ -38,9 +46,14 @@ async def issuer_holder_connection(backchannel: Client): 
service_endpoint=invitation_created.invitation.service_endpoint, ), ) - return await get_connection.asyncio( + ensure_connected = await get_connection.asyncio( client=backchannel, conn_id=connection_created.connection_id ) + return invitation_created, connection_created + # To access the connection ids + # something = issuer_holder_connection + # conn_id_invitation = something[0].connection_id + # conn_id_received = something[1].connection_id @pytest.fixture(scope="module") @@ -65,8 +78,8 @@ async def create_cred_def(backchannel: Client, endorser_did, create_schema): """Credential definition fixture""" async def _create_cred_def(version): - schema = await create_schema(version="1.0") - backchannel.timeout = 20 + schema = await create_schema(version) + backchannel.timeout = 15 return await publish_cred_def.asyncio( client=backchannel, json_body=CredentialDefinitionSendRequest( @@ -75,3 +88,52 @@ async def _create_cred_def(version): ) yield _create_cred_def + + +@pytest.mark.asyncio +async def test_holder_credential_exchange( + backchannel: Client, + connection, + issuer_holder_connection, + endorser_did, + create_schema, + create_cred_def, +): + connected = issuer_holder_connection + cred_def = await create_cred_def(version="1.0") + with connection.next() as future_cred_offer_received: + if ( + future_cred_offer_received["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" + ): + issue_result = await issue_credential_automated.asyncio( + client=backchannel, + json_body=V10CredentialProposalRequestMand( + connection_id=connected[0].connection_id, + credential_proposal=CredentialPreview( + [ + CredAttrSpec(name="attr_1_0", value="test_0"), + CredAttrSpec(name="attr_1_1", value="test_1"), + CredAttrSpec(name="attr_1_2", value="test_2"), + ] + ), + cred_def_id=cred_def.additional_properties["sent"][ + "credential_definition_id" + ], + ), + timeout=60, + ) + issue_result = cast(V10CredentialExchange, issue_result) + cred_offer_received = await asyncio.wait_for(future_cred_offer_received, 60) + print("cred_offer_received: ", cred_offer_received) + assert False + + +# @pytest.mark.asyncio +# async def test_credentials_get_list(connection, endorser_did, create_schema, create_cred_def): +# credentials_get_list = await connection.send_and_await_reply_async( +# { +# "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-get-list", +# } +# ) +# assert credentials_get_list["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-list" From 2ffe30e3d3c8df551b4fb905fe79565cfeb55b00 Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 26 Jul 2021 08:26:28 -0700 Subject: [PATCH 35/66] chore: adjust message type condition on next() Signed-off-by: Char Signed-off-by: Char Howland --- int/tests/test_holder.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index c5ceaa02..cf1ef6bb 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -101,12 +101,11 @@ async def test_holder_credential_exchange( ): connected = issuer_holder_connection cred_def = await create_cred_def(version="1.0") - with connection.next() as future_cred_offer_received: - if ( - future_cred_offer_received["@type"] - == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" - ): - issue_result = await issue_credential_automated.asyncio( + with connection.next( + 
type_="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" + ) as future_cred_offer_received: + issue_result = await asyncio.wait_for( + issue_credential_automated.asyncio( client=backchannel, json_body=V10CredentialProposalRequestMand( connection_id=connected[0].connection_id, @@ -121,10 +120,11 @@ async def test_holder_credential_exchange( "credential_definition_id" ], ), - timeout=60, - ) - issue_result = cast(V10CredentialExchange, issue_result) - cred_offer_received = await asyncio.wait_for(future_cred_offer_received, 60) + ), + timeout=60, + ) + issue_result = cast(V10CredentialExchange, issue_result) + cred_offer_received = await asyncio.wait_for(future_cred_offer_received, 60) print("cred_offer_received: ", cred_offer_received) assert False From 175d99ba41595347cb14a084286a1f63960f6b0c Mon Sep 17 00:00:00 2001 From: Char Date: Mon, 26 Jul 2021 10:42:36 -0700 Subject: [PATCH 36/66] chore: add auto-ping and auto_accept to create connection Signed-off-by: Char Signed-off-by: Char Howland --- int/docker-compose.yml | 3 ++- int/tests/test_holder.py | 16 ++++++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/int/docker-compose.yml b/int/docker-compose.yml index 44ddc519..998a00e3 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -16,7 +16,7 @@ services: dockerfile: ./docker/Dockerfile ports: - "3001:3001" - command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis --wallet-type indy --wallet-name default --wallet-key "insecure, for use in testing only" --auto-provision + command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis --wallet-type indy --wallet-name default --wallet-key "insecure, for use in testing only" --auto-provision --auto-ping-connection #************************************************************* # tester: drives tests for acapy_plugin_toolbox in a * @@ -25,6 +25,7 @@ services: tests: container_name: juggernaut + command: -k test_holder_credential_exchange build: context: . 
dockerfile: ./Dockerfile.test.runner diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index cf1ef6bb..f4213273 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -28,11 +28,16 @@ @pytest.fixture(scope="module") -async def issuer_holder_connection(backchannel: Client): +async def issuer_holder_connection(backchannel: Client, connection): """Invitation creation fixture""" invitation_created = await create_invitation.asyncio( - client=backchannel, json_body=CreateInvitationRequest() + client=backchannel, + json_body=CreateInvitationRequest(), + auto_accept="true", ) + # with connection.next( + # type_="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" + # ) as future_connected_message: connection_created = await receive_invitation.asyncio( client=backchannel, json_body=ReceiveInvitationRequest( @@ -45,10 +50,9 @@ async def issuer_holder_connection(backchannel: Client): routing_keys=invitation_created.invitation.routing_keys, service_endpoint=invitation_created.invitation.service_endpoint, ), + auto_accept="true", ) - ensure_connected = await get_connection.asyncio( - client=backchannel, conn_id=connection_created.connection_id - ) + # await asyncio.wait_for(future_connected_message, 10) return invitation_created, connection_created # To access the connection ids # something = issuer_holder_connection @@ -79,7 +83,7 @@ async def create_cred_def(backchannel: Client, endorser_did, create_schema): async def _create_cred_def(version): schema = await create_schema(version) - backchannel.timeout = 15 + backchannel.timeout = 30 return await publish_cred_def.asyncio( client=backchannel, json_body=CredentialDefinitionSendRequest( From 1d274a891219fffda5e29a3e68e150d3634efa74 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Wed, 28 Jul 2021 10:15:49 -0700 Subject: [PATCH 37/66] chore: remove with connection.next() from test_holder_credential_exchange Signed-off-by: Char Howland --- int/tests/test_holder.py | 60 +++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 32 deletions(-) diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index f4213273..daa4861f 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -35,9 +35,6 @@ async def issuer_holder_connection(backchannel: Client, connection): json_body=CreateInvitationRequest(), auto_accept="true", ) - # with connection.next( - # type_="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" - # ) as future_connected_message: connection_created = await receive_invitation.asyncio( client=backchannel, json_body=ReceiveInvitationRequest( @@ -52,12 +49,7 @@ async def issuer_holder_connection(backchannel: Client, connection): ), auto_accept="true", ) - # await asyncio.wait_for(future_connected_message, 10) return invitation_created, connection_created - # To access the connection ids - # something = issuer_holder_connection - # conn_id_invitation = something[0].connection_id - # conn_id_received = something[1].connection_id @pytest.fixture(scope="module") @@ -105,32 +97,36 @@ async def test_holder_credential_exchange( ): connected = issuer_holder_connection cred_def = await create_cred_def(version="1.0") - with connection.next( - type_="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" - ) as future_cred_offer_received: - issue_result = await asyncio.wait_for( - issue_credential_automated.asyncio( - client=backchannel, - 
json_body=V10CredentialProposalRequestMand( - connection_id=connected[0].connection_id, - credential_proposal=CredentialPreview( - [ - CredAttrSpec(name="attr_1_0", value="test_0"), - CredAttrSpec(name="attr_1_1", value="test_1"), - CredAttrSpec(name="attr_1_2", value="test_2"), - ] - ), - cred_def_id=cred_def.additional_properties["sent"][ - "credential_definition_id" - ], + issue_result = await asyncio.wait_for( + issue_credential_automated.asyncio( + client=backchannel, + json_body=V10CredentialProposalRequestMand( + connection_id=connected[1].connection_id, + credential_proposal=CredentialPreview( + [ + CredAttrSpec(name="attr_1_0", value="test_0"), + CredAttrSpec(name="attr_1_1", value="test_1"), + CredAttrSpec(name="attr_1_2", value="test_2"), + ] ), + cred_def_id=cred_def.additional_properties["sent"][ + "credential_definition_id" + ], ), - timeout=60, - ) - issue_result = cast(V10CredentialExchange, issue_result) - cred_offer_received = await asyncio.wait_for(future_cred_offer_received, 60) - print("cred_offer_received: ", cred_offer_received) - assert False + ), + timeout=60, + ) + issue_result = cast(V10CredentialExchange, issue_result) + credential_offer_accept = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-accept", + "credential_exchange_id": issue_result.credential_exchange_id, + } + ) + assert ( + credential_offer_accept["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" + ) # @pytest.mark.asyncio From 3a6629c040e6afcef5923bbbe45d076c0f858765 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Mon, 26 Jul 2021 19:37:35 -0400 Subject: [PATCH 38/66] feat: add echo agent base Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/echo-agent/Dockerfile | 14 + int/echo-agent/echo.py | 28 + int/echo-agent/poetry.lock | 1100 +++++++++++++++++++++++++++++++++ int/echo-agent/pyproject.toml | 22 + 4 files changed, 1164 insertions(+) create mode 100644 int/echo-agent/Dockerfile create mode 100644 int/echo-agent/echo.py create mode 100644 int/echo-agent/poetry.lock create mode 100644 int/echo-agent/pyproject.toml diff --git a/int/echo-agent/Dockerfile b/int/echo-agent/Dockerfile new file mode 100644 index 00000000..671d03dd --- /dev/null +++ b/int/echo-agent/Dockerfile @@ -0,0 +1,14 @@ +FROM python:3.7 +WORKDIR /app + +RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py | POETRY_HOME=/opt/poetry python - && \ + cd /usr/local/bin && \ + ln -s /opt/poetry/bin/poetry && \ + poetry config virtualenvs.create false + +COPY ./pyproject.toml ./poetry.lock /app/ + +RUN poetry install --no-root + +COPY ./echo.py /app/ +CMD poetry run python -m uvicorn echo:app --host 0.0.0.0 --port 80 diff --git a/int/echo-agent/echo.py b/int/echo-agent/echo.py new file mode 100644 index 00000000..511885b2 --- /dev/null +++ b/int/echo-agent/echo.py @@ -0,0 +1,28 @@ +""" +Echo Agent. + +The goal of this agent is to implement an agent that can create new static +connections, receive messages, and send messages while minimizing logic and, +therefore (hopefully) how much code needs to be maintained. 
+""" + +from pydantic import BaseModel +from fastapi import FastAPI +from aries_staticagent import crypto + +app = FastAPI() + + +class Keypair(BaseModel): + public: str + private: str + + +@app.get("/") +def read_root() -> Keypair: + """Return root.""" + keypair_bytes = crypto.create_keypair() + return Keypair( + public=crypto.bytes_to_b58(keypair_bytes[0]), + private=crypto.bytes_to_b58(keypair_bytes[1]), + ) diff --git a/int/echo-agent/poetry.lock b/int/echo-agent/poetry.lock new file mode 100644 index 00000000..b6d0bf43 --- /dev/null +++ b/int/echo-agent/poetry.lock @@ -0,0 +1,1100 @@ +[[package]] +name = "aiohttp" +version = "3.7.4.post0" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=3.0,<4.0" +attrs = ">=17.3.0" +chardet = ">=2.0,<5.0" +idna-ssl = {version = ">=1.0", markers = "python_version < \"3.7\""} +multidict = ">=4.5,<7.0" +typing-extensions = ">=3.6.5" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotlipy", "cchardet"] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "aries-staticagent" +version = "0.8.0" +description = "Python Static Agent Library and Examples for Aries" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aiohttp = "*" +base58 = "*" +msgpack = "*" +pynacl = "*" +semver = "*" +sortedcontainers = "*" + +[package.extras] +test = ["coverage", "flake8", "pytest", "pytest-asyncio"] + +[[package]] +name = "asgiref" +version = "3.4.1" +description = "ASGI specs, helper code, and adapters" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] + +[[package]] +name = "async-timeout" +version = "3.0.1" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.5.3" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "backports.entry-points-selectable" +version = "1.1.0" +description = "Compatibility shim providing selectable entry points for older implementations" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs 
(>=2.4)", "pytest-enabler (>=1.0.1)"] + +[[package]] +name = "base58" +version = "2.1.0" +description = "Base58 and Base58Check implementation." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +tests = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "PyHamcrest (>=2.0.2)", "coveralls", "pytest-benchmark"] + +[[package]] +name = "black" +version = "21.7b0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +mypy-extensions = ">=0.4.3" +pathspec = ">=0.8.1,<1" +regex = ">=2020.1.8" +tomli = ">=0.2.6,<2.0.0" +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] +python2 = ["typed-ast (>=1.4.2)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cffi" +version = "1.14.6" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.3.0" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "click" +version = "8.0.1" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "dataclasses" +version = "0.8" +description = "A backport of the dataclasses module for Python 3.6" +category = "main" +optional = false +python-versions = ">=3.6, <3.7" + +[[package]] +name = "distlib" +version = "0.3.2" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "fastapi" +version = "0.67.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.14.2" + +[package.extras] +all = ["requests (>=2.24.0,<3.0.0)", "aiofiles (>=0.5.0,<0.6.0)", "jinja2 (>=2.11.2,<3.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<2.0.0)", "pyyaml (>=5.3.1,<6.0.0)", "graphene (>=2.1.8,<3.0.0)", "ujson (>=4.0.1,<5.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.14.0)", "async_exit_stack (>=1.0.1,<2.0.0)", "async_generator (>=1.10,<2.0.0)"] +dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.14.0)", "graphene (>=2.1.8,<3.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=7.1.9,<8.0.0)", "markdown-include (>=0.6.0,<0.7.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.2.0)", "typer-cli (>=0.0.12,<0.0.13)", "pyyaml (>=5.3.1,<6.0.0)"] +test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<3.0.0)", "pytest-asyncio (>=0.14.0,<0.15.0)", "mypy (==0.812)", "flake8 (>=3.8.3,<4.0.0)", "black (==20.8b1)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.15.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.4.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.4.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,<5.0.0)", "async_exit_stack (>=1.0.1,<2.0.0)", "async_generator (>=1.10,<2.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "aiofiles (>=0.5.0,<0.6.0)", "flask (>=1.1.2,<2.0.0)"] + +[[package]] +name = "filelock" +version = "3.0.12" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "identify" +version = "2.2.11" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.extras] +license = ["editdistance-s"] + +[[package]] +name = "idna" +version = "3.2" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "idna-ssl" +version = "1.1.0" +description = "Patch ssl.match_hostname for Unicode(idna) domains support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = ">=2.0" + +[[package]] +name = "importlib-metadata" +version = "4.6.1" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "importlib-resources" +version = "5.2.0" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "msgpack" +version = "1.0.2" +description = "MessagePack (de)serializer." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "multidict" +version = "5.1.0" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "nodeenv" +version = "1.6.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.1.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pre-commit" +version = "2.13.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pydantic" +version = "1.8.2" +description = "Data validation and settings management using python 3.6 type hinting" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pynacl" +version = "1.4.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +cffi = ">=1.4.1" +six = "*" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] + +[[package]] +name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[[package]] +name = "regex" +version = "2021.7.6" +description = "Alternative regular expression module, to replace re." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "semver" +version = "2.13.0" +description = "Python helper for Semantic Versioning (http://semver.org/)" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "starlette" +version = "0.14.2" +description = "The little ASGI library that shines." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +full = ["aiofiles", "graphene", "itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "1.1.0" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "3.10.0.0" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "uvicorn" +version = "0.14.0" +description = "The lightning-fast ASGI server." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asgiref = ">=3.3.4" +click = ">=7" +h11 = ">=0.8" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +standard = ["websockets (>=9.1)", "httptools (>=0.2.0,<0.3.0)", "watchgod (>=0.6)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"] + +[[package]] +name = "virtualenv" +version = "20.6.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +"backports.entry-points-selectable" = ">=1.0.4" +distlib = ">=0.3.1,<1" +filelock = ">=3.0.0,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} +platformdirs = ">=2,<3" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] + +[[package]] +name = "yarl" +version = "1.6.3" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.5.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.6.9" +content-hash = "57338394345a3cc115f82cdc8bbda2e27b8520280e6f7c043e8fe182bfe3715b" + +[metadata.files] +aiohttp = [ + {file = "aiohttp-3.7.4.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe"}, + {file = "aiohttp-3.7.4.post0-cp36-cp36m-win32.whl", hash = "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287"}, + {file = 
"aiohttp-3.7.4.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win32.whl", hash = "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f"}, + {file = "aiohttp-3.7.4.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win32.whl", hash = "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16"}, + {file = "aiohttp-3.7.4.post0-cp38-cp38-win_amd64.whl", hash = "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a"}, + {file = 
"aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win32.whl", hash = "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9"}, + {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, + {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +aries-staticagent = [ + {file = "aries-staticagent-0.8.0.tar.gz", hash = "sha256:0672d479ca64fc085f55012cdad68c43871c417f371f7669634d5300646c0ff9"}, + {file = "aries_staticagent-0.8.0-py3-none-any.whl", hash = "sha256:648fec4a630421df27a0db652d3ca3d0ccaca8b2eac06c61ccdfad4b8f19ec78"}, +] +asgiref = [ + {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, + {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, +] +async-timeout = [ + {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, + {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +"backports.entry-points-selectable" = [ + {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, + {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, +] +base58 = [ + {file = "base58-2.1.0-py3-none-any.whl", hash = "sha256:8225891d501b68c843ffe30b86371f844a21c6ba00da76f52f9b998ba771fb48"}, + {file = "base58-2.1.0.tar.gz", hash = "sha256:171a547b4a3c61e1ae3807224a6f7aec75e364c4395e7562649d7335768001a2"}, +] +black = [ + {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"}, + {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, +] +cffi = [ + {file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"}, + {file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"}, + {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = 
"sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, + {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"}, + {file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"}, + {file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"}, + {file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"}, + {file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = "sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"}, + {file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = "sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"}, + {file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"}, + {file = 
"cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"}, + {file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"}, + {file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"}, + {file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"}, + {file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"}, + {file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"}, + {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, +] +cfgv = [ + {file = "cfgv-3.3.0-py2.py3-none-any.whl", hash = "sha256:b449c9c6118fe8cca7fa5e00b9ec60ba08145d281d52164230a69211c5d597a1"}, + {file = "cfgv-3.3.0.tar.gz", hash = "sha256:9e600479b3b99e8af981ecdfc80a0296104ee610cab48a5ae4ffd0b668650eb1"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, + {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +dataclasses = [ + {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, + {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, +] +distlib = [ + {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, + {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, +] +fastapi = [ + {file = "fastapi-0.67.0-py3-none-any.whl", hash = "sha256:b05f5af77af3b21cab896b8dade8b383b2d2f254caae4681a56313e29196f1ac"}, + {file = "fastapi-0.67.0.tar.gz", hash = "sha256:24f45d65e589db3bab162c02a1e2e8b798c098861b1fa3e266efeb71b4faa8e2"}, +] +filelock = [ + 
{file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, + {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, +] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +identify = [ + {file = "identify-2.2.11-py2.py3-none-any.whl", hash = "sha256:7abaecbb414e385752e8ce02d8c494f4fbc780c975074b46172598a28f1ab839"}, + {file = "identify-2.2.11.tar.gz", hash = "sha256:a0e700637abcbd1caae58e0463861250095dfe330a8371733a471af706a4a29a"}, +] +idna = [ + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, +] +idna-ssl = [ + {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.6.1-py3-none-any.whl", hash = "sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e"}, + {file = "importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, +] +importlib-resources = [ + {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = "sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, + {file = "importlib_resources-5.2.0.tar.gz", hash = "sha256:22a2c42d8c6a1d30aa8a0e1f57293725bfd5c013d562585e46aff469e0ff78b3"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +msgpack = [ + {file = "msgpack-1.0.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:b6d9e2dae081aa35c44af9c4298de4ee72991305503442a5c74656d82b581fe9"}, + {file = "msgpack-1.0.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:a99b144475230982aee16b3d249170f1cccebf27fb0a08e9f603b69637a62192"}, + {file = "msgpack-1.0.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1026dcc10537d27dd2d26c327e552f05ce148977e9d7b9f1718748281b38c841"}, + {file = "msgpack-1.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:fe07bc6735d08e492a327f496b7850e98cb4d112c56df69b0c844dbebcbb47f6"}, + {file = "msgpack-1.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9ea52fff0473f9f3000987f313310208c879493491ef3ccf66268eff8d5a0326"}, + {file = "msgpack-1.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:26a1759f1a88df5f1d0b393eb582ec022326994e311ba9c5818adc5374736439"}, + {file = "msgpack-1.0.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:497d2c12426adcd27ab83144057a705efb6acc7e85957a51d43cdcf7f258900f"}, + {file = "msgpack-1.0.2-cp36-cp36m-win32.whl", hash = "sha256:e89ec55871ed5473a041c0495b7b4e6099f6263438e0bd04ccd8418f92d5d7f2"}, + {file = "msgpack-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a4355d2193106c7aa77c98fc955252a737d8550320ecdb2e9ac701e15e2943bc"}, + {file 
= "msgpack-1.0.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:d6c64601af8f3893d17ec233237030e3110f11b8a962cb66720bf70c0141aa54"}, + {file = "msgpack-1.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f484cd2dca68502de3704f056fa9b318c94b1539ed17a4c784266df5d6978c87"}, + {file = "msgpack-1.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f3e6aaf217ac1c7ce1563cf52a2f4f5d5b1f64e8729d794165db71da57257f0c"}, + {file = "msgpack-1.0.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8521e5be9e3b93d4d5e07cb80b7e32353264d143c1f072309e1863174c6aadb1"}, + {file = "msgpack-1.0.2-cp37-cp37m-win32.whl", hash = "sha256:31c17bbf2ae5e29e48d794c693b7ca7a0c73bd4280976d408c53df421e838d2a"}, + {file = "msgpack-1.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8ffb24a3b7518e843cd83538cf859e026d24ec41ac5721c18ed0c55101f9775b"}, + {file = "msgpack-1.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b28c0876cce1466d7c2195d7658cf50e4730667196e2f1355c4209444717ee06"}, + {file = "msgpack-1.0.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:87869ba567fe371c4555d2e11e4948778ab6b59d6cc9d8460d543e4cfbbddd1c"}, + {file = "msgpack-1.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b55f7db883530b74c857e50e149126b91bb75d35c08b28db12dcb0346f15e46e"}, + {file = "msgpack-1.0.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:ac25f3e0513f6673e8b405c3a80500eb7be1cf8f57584be524c4fa78fe8e0c83"}, + {file = "msgpack-1.0.2-cp38-cp38-win32.whl", hash = "sha256:0cb94ee48675a45d3b86e61d13c1e6f1696f0183f0715544976356ff86f741d9"}, + {file = "msgpack-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:e36a812ef4705a291cdb4a2fd352f013134f26c6ff63477f20235138d1d21009"}, + {file = "msgpack-1.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:2a5866bdc88d77f6e1370f82f2371c9bc6fc92fe898fa2dec0c5d4f5435a2694"}, + {file = "msgpack-1.0.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:92be4b12de4806d3c36810b0fe2aeedd8d493db39e2eb90742b9c09299eb5759"}, + {file = "msgpack-1.0.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:de6bd7990a2c2dabe926b7e62a92886ccbf809425c347ae7de277067f97c2887"}, + {file = "msgpack-1.0.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5a9ee2540c78659a1dd0b110f73773533ee3108d4e1219b5a15a8d635b7aca0e"}, + {file = "msgpack-1.0.2-cp39-cp39-win32.whl", hash = "sha256:c747c0cc08bd6d72a586310bda6ea72eeb28e7505990f342552315b229a19b33"}, + {file = "msgpack-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:d8167b84af26654c1124857d71650404336f4eb5cc06900667a493fc619ddd9f"}, + {file = "msgpack-1.0.2.tar.gz", hash = "sha256:fae04496f5bc150eefad4e9571d1a76c55d021325dcd484ce45065ebbdd00984"}, +] +multidict = [ + {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, + {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, + {file = 
"multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, + {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, + {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, + {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, + {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, + {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, + {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, + {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, + {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, + {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, + {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, + {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = 
"sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, + {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, + {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, + {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, + {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +nodeenv = [ + {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, + {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.1.0-py3-none-any.whl", hash = "sha256:b2b30ae52404f93e2024e85bba29329b85715d6b2f18ffe90ecd25a5c67553df"}, + {file = "platformdirs-2.1.0.tar.gz", hash = "sha256:1964be5aba107a7ccb7de0e6f1f1bfde0dee51641f0e733028121f8e02e2e16b"}, +] +pre-commit = [ + {file = "pre_commit-2.13.0-py2.py3-none-any.whl", hash = "sha256:b679d0fddd5b9d6d98783ae5f10fd0c4c59954f375b70a58cbe1ce9bcf9809a4"}, + {file = "pre_commit-2.13.0.tar.gz", hash = "sha256:764972c60693dc668ba8e86eb29654ec3144501310f7198742a767bec385a378"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pycparser = [ + {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, + {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, +] +pydantic = [ + {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"}, + {file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"}, + 
{file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"}, + {file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"}, + {file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"}, + {file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"}, + {file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"}, + {file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"}, + {file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"}, + {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pynacl = [ + {file = "PyNaCl-1.4.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff"}, + {file = "PyNaCl-1.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514"}, + {file = "PyNaCl-1.4.0-cp27-cp27m-win32.whl", hash = "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574"}, + {file = "PyNaCl-1.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"}, + {file = "PyNaCl-1.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7"}, + {file = "PyNaCl-1.4.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122"}, + {file = "PyNaCl-1.4.0-cp35-abi3-manylinux1_x86_64.whl", 
hash = "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d"}, + {file = "PyNaCl-1.4.0-cp35-abi3-win32.whl", hash = "sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634"}, + {file = "PyNaCl-1.4.0-cp35-abi3-win_amd64.whl", hash = "sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6"}, + {file = "PyNaCl-1.4.0-cp35-cp35m-win32.whl", hash = "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4"}, + {file = "PyNaCl-1.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25"}, + {file = "PyNaCl-1.4.0-cp36-cp36m-win32.whl", hash = "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4"}, + {file = "PyNaCl-1.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6"}, + {file = "PyNaCl-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f"}, + {file = "PyNaCl-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f"}, + {file = "PyNaCl-1.4.0-cp38-cp38-win32.whl", hash = "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96"}, + {file = "PyNaCl-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420"}, + {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"}, +] +pyyaml = [ + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = 
"sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, +] +regex = [ + {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, + {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, + {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, + {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, + {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, + {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, + {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, + {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, + {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, + {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, + {file 
= "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, + {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, + {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, + {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, +] +semver = [ + {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, + {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sortedcontainers = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] +starlette = [ + {file = "starlette-0.14.2-py3-none-any.whl", hash = "sha256:3c8e48e52736b3161e34c9f0e8153b4f32ec5d8995a3ee1d59410d92f75162ed"}, + {file = "starlette-0.14.2.tar.gz", hash = "sha256:7d49f4a27f8742262ef1470608c59ddbc66baf37c148e938c7038e6bc7a998aa"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-1.1.0-py3-none-any.whl", hash = "sha256:f4a182048010e89cbec0ae4686b21f550a7f2903f665e34a6de58ec15424f919"}, + {file = "tomli-1.1.0.tar.gz", hash = "sha256:33d7984738f8bb699c9b0a816eb646a8178a69eaa792d258486776a5d21b8ca5"}, +] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] +typing-extensions = [ + {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, + {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, + {file = "typing_extensions-3.10.0.0.tar.gz", hash = 
"sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, +] +uvicorn = [ + {file = "uvicorn-0.14.0-py3-none-any.whl", hash = "sha256:2a76bb359171a504b3d1c853409af3adbfa5cef374a4a59e5881945a97a93eae"}, + {file = "uvicorn-0.14.0.tar.gz", hash = "sha256:45ad7dfaaa7d55cab4cd1e85e03f27e9d60bc067ddc59db52a2b0aeca8870292"}, +] +virtualenv = [ + {file = "virtualenv-20.6.0-py2.py3-none-any.whl", hash = "sha256:e4fc84337dce37ba34ef520bf2d4392b392999dbe47df992870dc23230f6b758"}, + {file = "virtualenv-20.6.0.tar.gz", hash = "sha256:51df5d8a2fad5d1b13e088ff38a433475768ff61f202356bb9812c454c20ae45"}, +] +yarl = [ + {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, + {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, + {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, + {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, + {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, + {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, + {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, + {file = "yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, + {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, 
+ {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, + {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, + {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, + {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, + {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, + {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, + {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, + {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, + {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, +] +zipp = [ + {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, + {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, +] diff --git a/int/echo-agent/pyproject.toml b/int/echo-agent/pyproject.toml new file mode 100644 index 00000000..df908bd3 --- /dev/null +++ b/int/echo-agent/pyproject.toml @@ -0,0 +1,22 @@ +[tool.poetry] +name = "echo-agent" +version = "0.1.0" +description = "A \"hollow\" agent for use in testing" +authors = ["Daniel Bluhm "] +license = "Apache-2.0" + +[tool.poetry.dependencies] +python = "^3.6.9" +aries-staticagent = "^0.8.0" +fastapi = "^0.67.0" +pydantic = "^1.8.2" + +[tool.poetry.dev-dependencies] +uvicorn = "^0.14.0" +black = "^21.7b0" +flake8 = "^3.9.2" +pre-commit = "^2.13.0" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" From 24346ee074998524c5fab021a7ad4861fa79567e Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 27 Jul 2021 20:06:43 -0400 Subject: [PATCH 39/66] fix: set operation id for send_basicmessage in backchannel Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- ..._connections_conn_id_send_message.py => send_basicmessage.py} | 0 int/scripts/openapi.yml | 1 + 2 files changed, 1 insertion(+) rename 
int/acapy-backchannel/acapy_backchannel/api/basicmessage/{post_connections_conn_id_send_message.py => send_basicmessage.py} (100%) diff --git a/int/acapy-backchannel/acapy_backchannel/api/basicmessage/post_connections_conn_id_send_message.py b/int/acapy-backchannel/acapy_backchannel/api/basicmessage/send_basicmessage.py similarity index 100% rename from int/acapy-backchannel/acapy_backchannel/api/basicmessage/post_connections_conn_id_send_message.py rename to int/acapy-backchannel/acapy_backchannel/api/basicmessage/send_basicmessage.py diff --git a/int/scripts/openapi.yml b/int/scripts/openapi.yml index 27c74a2b..f89d0a84 100644 --- a/int/scripts/openapi.yml +++ b/int/scripts/openapi.yml @@ -560,6 +560,7 @@ paths: x-codegen-request-body-name: body /connections/{conn_id}/send-message: post: + operationId: send_basicmessage tags: - basicmessage summary: Send a basic message to a connection From e6d0a8fc696286a889996d21273e4301067f34ba Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 27 Jul 2021 20:08:34 -0400 Subject: [PATCH 40/66] feat: implement echo agent core functions and add dockerfile Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/echo-agent/Dockerfile | 3 +- int/echo-agent/echo.py | 180 +++++++++++++++++++++++++++++++++++--- 2 files changed, 169 insertions(+), 14 deletions(-) diff --git a/int/echo-agent/Dockerfile b/int/echo-agent/Dockerfile index 671d03dd..3deb1171 100644 --- a/int/echo-agent/Dockerfile +++ b/int/echo-agent/Dockerfile @@ -11,4 +11,5 @@ COPY ./pyproject.toml ./poetry.lock /app/ RUN poetry install --no-root COPY ./echo.py /app/ -CMD poetry run python -m uvicorn echo:app --host 0.0.0.0 --port 80 +ENTRYPOINT ["/bin/sh", "-c", "poetry run \"$@\"", "--"] +CMD python -m uvicorn echo:app --host 0.0.0.0 --port 80 diff --git a/int/echo-agent/echo.py b/int/echo-agent/echo.py index 511885b2..52aeffc2 100644 --- a/int/echo-agent/echo.py +++ b/int/echo-agent/echo.py @@ -4,25 +4,179 @@ The goal of this agent is to implement an agent that can create new static connections, receive messages, and send messages while minimizing logic and, therefore (hopefully) how much code needs to be maintained. + +Required operations include: +- create static connection +- receive message +- retrieve messages +- send message """ -from pydantic import BaseModel -from fastapi import FastAPI +from asyncio import Queue +import json +import logging +from typing import Dict, Iterable, List +from uuid import uuid4 +from aries_staticagent.static_connection import StaticConnection, Target +from aries_staticagent.message import Message +from pydantic import BaseModel, Field +from fastapi import FastAPI, Body, HTTPException, Request from aries_staticagent import crypto -app = FastAPI() +# Logging +LOGGER = logging.getLogger("uvicorn.error." 
+ __name__) + +# Global state +connections: Dict[str, StaticConnection] = {} +recip_key_to_connection_id: Dict[str, str] = {} +messages: Dict[str, "Queue[Message]"] = {} + + +app = FastAPI(title="Echo Agent", version="0.1.0") + + +class NewConnection(BaseModel): + seed: str = Field(..., example="00000000000000000000000000000000") + endpoint: str + their_vk: str + + +class Connection(BaseModel): + connection_id: str + did: str + verkey: str + their_vk: str + + +@app.post("/connection", response_model=Connection, operation_id="new_connection") +async def new_connection(new_connection: NewConnection): + """Create a new static connection.""" + LOGGER.debug("Creating new connection from request: %s", new_connection) + conn = StaticConnection.from_seed( + seed=new_connection.seed.encode("ascii"), + target=Target( + endpoint=new_connection.endpoint, their_vk=new_connection.their_vk + ), + ) + connection_id = str(uuid4()) + connections[connection_id] = conn + recip_key_to_connection_id[conn.verkey_b58] = connection_id + result = Connection( + connection_id=connection_id, + did=conn.did, + verkey=conn.verkey_b58, + their_vk=new_connection.their_vk, + ) + LOGGER.debug("Returning new connection: %s", result) + return result + + +def _recipients_from_packed_message(message_bytes: bytes) -> Iterable[str]: + """ + Inspect the header of the packed message and extract the recipient key. + """ + try: + wrapper = json.loads(message_bytes) + except Exception as error: + raise ValueError("Invalid packed message") from error + + recips_json = crypto.b64_to_bytes(wrapper["protected"], urlsafe=True).decode( + "ascii" + ) + + try: + recips_outer = json.loads(recips_json) + except Exception as error: + raise ValueError("Invalid packed message recipients") from error + + return [recip["header"]["kid"] for recip in recips_outer["recipients"]] + + +@app.post("/receive") +async def receive_message(request: Request): + """Receive a new agent message and push onto the message queue.""" + message = await request.body() + LOGGER.debug("Message received: %s", message) + handled = False + for recipient in _recipients_from_packed_message(message): + if recipient in recip_key_to_connection_id: + connection_id = recip_key_to_connection_id[recipient] + LOGGER.debug( + "Found connection %s for message recipient %s", connection_id, recipient + ) + conn = connections[connection_id] + unpacked = conn.unpack(message) + LOGGER.debug("Unpacked message: %s", unpacked) + if connection_id not in messages: + messages[connection_id] = Queue() + await messages[connection_id].put(unpacked) + handled = True + if not handled: + LOGGER.warning("Received message that could not be handled: %s", message) + + +@app.get( + "/retrieve/{connection_id}", + response_model=List[Message], + operation_id="retrieve_messages", +) +async def retreive_messages(connection_id: str, poll: bool = False): + """Retrieve all received messages for recipient key.""" + if connection_id not in messages: + messages[connection_id] = Queue() + + if poll: + LOGGER.debug( + "Retrieving messages for connection_id %s with long polling", connection_id + ) + else: + LOGGER.debug( + "Retrieving messages for connection_id %s without long polling", + connection_id, + ) + queue = messages[connection_id] + if not queue.empty(): + to_return = [] + while not queue.empty(): + to_return.append(queue.get_nowait()) + queue.task_done() + LOGGER.debug("Returning messages: %s", to_return) + return to_return + elif poll: + LOGGER.debug( + "Waiting for message on queue for connection with id 
%s...", connection_id + ) + message = await queue.get() + queue.task_done() + LOGGER.debug("Message received, returning: %s", message) + return [message] + else: + return [] + + +@app.post("/send/{connection_id}", operation_id="send_message") +async def send_message(connection_id: str, message: dict = Body(...)): + """Send a message to connection identified by did.""" + LOGGER.debug("Sending message to %s: %s", connection_id, message) + if connection_id not in connections: + raise HTTPException( + status_code=404, detail=f"No connection matching {connection_id} found" + ) + conn = connections[connection_id] + await conn.send_async(message) -class Keypair(BaseModel): - public: str - private: str +class DebugInfo(BaseModel): + connections: Dict[str, str] + recip_key_to_connection_id: Dict[str, str] + messages: Dict[str, str] -@app.get("/") -def read_root() -> Keypair: - """Return root.""" - keypair_bytes = crypto.create_keypair() - return Keypair( - public=crypto.bytes_to_b58(keypair_bytes[0]), - private=crypto.bytes_to_b58(keypair_bytes[1]), +@app.get("/debug", response_model=DebugInfo) +async def debug_info(): + """Return agent state for debugging.""" + return DebugInfo( + connections={k: str(v) for k, v in connections.items()}, + recip_key_to_connection_id=recip_key_to_connection_id, + messages={k: repr(v) for k, v in messages.items()}, ) From d993404f8fb4f5609af01877518b429a8ac7c3dc Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 27 Jul 2021 20:09:56 -0400 Subject: [PATCH 41/66] feat: add echo agent client generation scripts Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- .../scripts/Dockerfile.openapi-generator | 7 +++++++ int/echo-agent/scripts/generate-client.sh | 15 +++++++++++++++ int/echo-agent/scripts/openapi.json | 1 + 3 files changed, 23 insertions(+) create mode 100644 int/echo-agent/scripts/Dockerfile.openapi-generator create mode 100755 int/echo-agent/scripts/generate-client.sh create mode 100644 int/echo-agent/scripts/openapi.json diff --git a/int/echo-agent/scripts/Dockerfile.openapi-generator b/int/echo-agent/scripts/Dockerfile.openapi-generator new file mode 100644 index 00000000..f32be016 --- /dev/null +++ b/int/echo-agent/scripts/Dockerfile.openapi-generator @@ -0,0 +1,7 @@ +FROM python:3.6 + +WORKDIR /usr/src/app + +RUN pip install openapi-python-client + +ENTRYPOINT ["/bin/sh", "-c", "openapi-python-client \"$@\"", "--"] diff --git a/int/echo-agent/scripts/generate-client.sh b/int/echo-agent/scripts/generate-client.sh new file mode 100755 index 00000000..1ef7dc73 --- /dev/null +++ b/int/echo-agent/scripts/generate-client.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +cd "$(dirname "$0")" || exit +if [ -z "$1" ]; then + echo 'Must specify "generate" or "update" as first argument' + exit 1 +fi + +CONTAINER_RUNTIME=${CONTAINER_RUNTIME:-docker} + +${CONTAINER_RUNTIME} build -t openapi-generator -f ./Dockerfile.openapi-generator . 
+ +${CONTAINER_RUNTIME} run --rm \ + -v "$(realpath "$PWD/../"):/usr/src/app:z" \ + openapi-generator "$1" --path ./scripts/openapi.json diff --git a/int/echo-agent/scripts/openapi.json b/int/echo-agent/scripts/openapi.json new file mode 100644 index 00000000..bf32d7d9 --- /dev/null +++ b/int/echo-agent/scripts/openapi.json @@ -0,0 +1 @@ +{"openapi":"3.0.2","info":{"title":"Echo Agent","version":"0.1.0"},"paths":{"/connection":{"post":{"summary":"New Connection","description":"Create a new static connection.","operationId":"new_connection","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/NewConnection"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Connection"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/receive":{"post":{"summary":"Receive Message","description":"Receive a new agent message and push onto the message queue.","operationId":"receive_message_receive_post","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}},"/retrieve/{connection_id}":{"get":{"summary":"Retreive Messages","description":"Retrieve all received messages for recipient key.","operationId":"retrieve_messages","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"},{"required":false,"schema":{"title":"Poll","type":"boolean","default":false},"name":"poll","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"title":"Response Retreive Messages Retrieve Connection Id Get","type":"array","items":{"type":"object"}}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/send/{connection_id}":{"post":{"summary":"Send Message","description":"Send a message to connection identified by did.","operationId":"send_message","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"}],"requestBody":{"content":{"application/json":{"schema":{"title":"Message","type":"object"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/debug":{"get":{"summary":"Debug Info","description":"Return agent state for debugging.","operationId":"debug_info_debug_get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DebugInfo"}}}}}}}},"components":{"schemas":{"Connection":{"title":"Connection","required":["connection_id","did","verkey","their_vk"],"type":"object","properties":{"connection_id":{"title":"Connection Id","type":"string"},"did":{"title":"Did","type":"string"},"verkey":{"title":"Verkey","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"DebugInfo":{"title":"DebugInfo","required":["connections","recip_key_to_connection_id","messages"],"type":"object","properties":{"connections":{"title":"Connections","type":"object","additionalProperties":{"type":"string"}},"recip_key_to_connection_id":{"title":"Recip Key To Connection 
Id","type":"object","additionalProperties":{"type":"string"}},"messages":{"title":"Messages","type":"object","additionalProperties":{"type":"string"}}}},"HTTPValidationError":{"title":"HTTPValidationError","type":"object","properties":{"detail":{"title":"Detail","type":"array","items":{"$ref":"#/components/schemas/ValidationError"}}}},"NewConnection":{"title":"NewConnection","required":["seed","endpoint","their_vk"],"type":"object","properties":{"seed":{"title":"Seed","type":"string","example":"00000000000000000000000000000000"},"endpoint":{"title":"Endpoint","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"ValidationError":{"title":"ValidationError","required":["loc","msg","type"],"type":"object","properties":{"loc":{"title":"Location","type":"array","items":{"type":"string"}},"msg":{"title":"Message","type":"string"},"type":{"title":"Error Type","type":"string"}}}}}} From 4ca035310c270d2b366253cf7121ecf22411f012 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 27 Jul 2021 20:10:29 -0400 Subject: [PATCH 42/66] feat: add generated echo-agent-client Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/echo-agent/echo-agent-client/.gitignore | 23 +++ int/echo-agent/echo-agent-client/README.md | 67 +++++++++ .../echo_agent_client/__init__.py | 2 + .../echo_agent_client/api/__init__.py | 1 + .../echo_agent_client/api/default/__init__.py | 0 .../api/default/debug_info_debug_get.py | 94 +++++++++++++ .../api/default/new_connection.py | 112 +++++++++++++++ .../default/receive_message_receive_post.py | 61 ++++++++ .../api/default/retrieve_messages.py | 132 ++++++++++++++++++ .../api/default/send_message.py | 120 ++++++++++++++++ .../echo_agent_client/client.py | 46 ++++++ .../echo_agent_client/models/__init__.py | 12 ++ .../echo_agent_client/models/connection.py | 72 ++++++++++ .../echo_agent_client/models/debug_info.py | 72 ++++++++++ .../models/debug_info_connections.py | 44 ++++++ .../models/debug_info_messages.py | 44 ++++++ .../debug_info_recip_key_to_connection_id.py | 44 ++++++ .../models/http_validation_error.py | 66 +++++++++ .../models/new_connection.py | 66 +++++++++ .../retrieve_messages_response_200_item.py | 44 ++++++ .../models/send_message_message.py | 44 ++++++ .../models/validation_error.py | 67 +++++++++ .../echo_agent_client/py.typed | 1 + .../echo_agent_client/types.py | 43 ++++++ .../echo-agent-client/pyproject.toml | 41 ++++++ 25 files changed, 1318 insertions(+) create mode 100644 int/echo-agent/echo-agent-client/.gitignore create mode 100644 int/echo-agent/echo-agent-client/README.md create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/__init__.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/__init__.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/__init__.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/debug_info_debug_get.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/new_connection.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/receive_message_receive_post.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/send_message.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/client.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py create mode 100644 
int/echo-agent/echo-agent-client/echo_agent_client/models/connection.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_connections.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_messages.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_recip_key_to_connection_id.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/http_validation_error.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/new_connection.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/retrieve_messages_response_200_item.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/send_message_message.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/validation_error.py create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/py.typed create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/types.py create mode 100644 int/echo-agent/echo-agent-client/pyproject.toml diff --git a/int/echo-agent/echo-agent-client/.gitignore b/int/echo-agent/echo-agent-client/.gitignore new file mode 100644 index 00000000..ed29cb97 --- /dev/null +++ b/int/echo-agent/echo-agent-client/.gitignore @@ -0,0 +1,23 @@ +__pycache__/ +build/ +dist/ +*.egg-info/ +.pytest_cache/ + +# pyenv +.python-version + +# Environments +.env +.venv + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# JetBrains +.idea/ + +/coverage.xml +/.coverage \ No newline at end of file diff --git a/int/echo-agent/echo-agent-client/README.md b/int/echo-agent/echo-agent-client/README.md new file mode 100644 index 00000000..a6ebe643 --- /dev/null +++ b/int/echo-agent/echo-agent-client/README.md @@ -0,0 +1,67 @@ +# echo-agent-client +A client library for accessing Echo Agent + +## Usage +First, create a client: + +```python +from echo_agent_client import Client + +client = Client(base_url="https://api.example.com") +``` + +If the endpoints you're going to hit require authentication, use `AuthenticatedClient` instead: + +```python +from echo_agent_client import AuthenticatedClient + +client = AuthenticatedClient(base_url="https://api.example.com", token="SuperSecretToken") +``` + +Now call your endpoint and use your models: + +```python +from echo_agent_client.models import MyDataModel +from echo_agent_client.api.my_tag import get_my_data_model +from echo_agent_client.types import Response + +my_data: MyDataModel = get_my_data_model.sync(client=client) +# or if you need more info (e.g. status_code) +response: Response[MyDataModel] = get_my_data_model.sync_detailed(client=client) +``` + +Or do the same thing with an async version: + +```python +from echo_agent_client.models import MyDataModel +from echo_agent_client.api.my_tag import get_my_data_model +from echo_agent_client.types import Response + +my_data: MyDataModel = await get_my_data_model.asyncio(client=client) +response: Response[MyDataModel] = await get_my_data_model.asyncio_detailed(client=client) +``` + +Things to know: +1. Every path/method combo becomes a Python module with four functions: + 1. `sync`: Blocking request that returns parsed data (if successful) or `None` + 1. `sync_detailed`: Blocking request that always returns a `Request`, optionally with `parsed` set if the request was successful. + 1. 
`asyncio`: Like `sync` but the async instead of blocking + 1. `asyncio_detailed`: Like `sync_detailed` by async instead of blocking + +1. All path/query params, and bodies become method arguments. +1. If your endpoint had any tags on it, the first tag will be used as a module name for the function (my_tag above) +1. Any endpoint which did not have a tag will be in `echo_agent_client.api.default` + +## Building / publishing this Client +This project uses [Poetry](https://python-poetry.org/) to manage dependencies and packaging. Here are the basics: +1. Update the metadata in pyproject.toml (e.g. authors, version) +1. If you're using a private repository, configure it with Poetry + 1. `poetry config repositories. ` + 1. `poetry config http-basic. ` +1. Publish the client with `poetry publish --build -r ` or, if for public PyPI, just `poetry publish --build` + +If you want to install this client into another project without publishing it (e.g. for development) then: +1. If that project **is using Poetry**, you can simply do `poetry add ` from that project +1. If that project is not using Poetry: + 1. Build a wheel with `poetry build -f wheel` + 1. Install that wheel from the other project `pip install ` \ No newline at end of file diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/__init__.py b/int/echo-agent/echo-agent-client/echo_agent_client/__init__.py new file mode 100644 index 00000000..ded85d58 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/__init__.py @@ -0,0 +1,2 @@ +""" A client library for accessing Echo Agent """ +from .client import AuthenticatedClient, Client diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/__init__.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/__init__.py new file mode 100644 index 00000000..dc035f4c --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/__init__.py @@ -0,0 +1 @@ +""" Contains methods for accessing the API """ diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/__init__.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/debug_info_debug_get.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/debug_info_debug_get.py new file mode 100644 index 00000000..fc39c1e4 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/debug_info_debug_get.py @@ -0,0 +1,94 @@ +from typing import Any, Dict, Optional + +import httpx + +from ...client import Client +from ...models.debug_info import DebugInfo +from ...types import Response + + +def _get_kwargs( + *, + client: Client, +) -> Dict[str, Any]: + url = "{}/debug".format(client.base_url) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + } + + +def _parse_response(*, response: httpx.Response) -> Optional[DebugInfo]: + if response.status_code == 200: + response_200 = DebugInfo.from_dict(response.json()) + + return response_200 + return None + + +def _build_response(*, response: httpx.Response) -> Response[DebugInfo]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, +) -> Response[DebugInfo]: + 
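    # Generated request pattern: assemble httpx keyword arguments from the
    # Client's base_url, headers, cookies, and timeout, issue a blocking GET
    # against /debug, and wrap the raw httpx response in the typed Response
    # container. A hypothetical call from a test, assuming the echo agent is
    # reachable at http://echo:3002:
    #
    #   client = Client(base_url="http://echo:3002")
    #   info = debug_info_debug_get.sync(client=client)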
kwargs = _get_kwargs( + client=client, + ) + + response = httpx.get( + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + client: Client, +) -> Optional[DebugInfo]: + """Return agent state for debugging.""" + + return sync_detailed( + client=client, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, +) -> Response[DebugInfo]: + kwargs = _get_kwargs( + client=client, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.get(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, +) -> Optional[DebugInfo]: + """Return agent state for debugging.""" + + return ( + await asyncio_detailed( + client=client, + ) + ).parsed diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/new_connection.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/new_connection.py new file mode 100644 index 00000000..2e43bfbb --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/new_connection.py @@ -0,0 +1,112 @@ +from typing import Any, Dict, Optional, Union + +import httpx + +from ...client import Client +from ...models.connection import Connection +from ...models.http_validation_error import HTTPValidationError +from ...models.new_connection import NewConnection +from ...types import Response + + +def _get_kwargs( + *, + client: Client, + json_body: NewConnection, +) -> Dict[str, Any]: + url = "{}/connection".format(client.base_url) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + json_json_body = json_body.to_dict() + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "json": json_json_body, + } + + +def _parse_response(*, response: httpx.Response) -> Optional[Union[Connection, HTTPValidationError]]: + if response.status_code == 200: + response_200 = Connection.from_dict(response.json()) + + return response_200 + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + return None + + +def _build_response(*, response: httpx.Response) -> Response[Union[Connection, HTTPValidationError]]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, + json_body: NewConnection, +) -> Response[Union[Connection, HTTPValidationError]]: + kwargs = _get_kwargs( + client=client, + json_body=json_body, + ) + + response = httpx.post( + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + client: Client, + json_body: NewConnection, +) -> Optional[Union[Connection, HTTPValidationError]]: + """Create a new static connection.""" + + return sync_detailed( + client=client, + json_body=json_body, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + json_body: NewConnection, +) -> Response[Union[Connection, HTTPValidationError]]: + kwargs = _get_kwargs( + client=client, + json_body=json_body, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.post(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, + json_body: NewConnection, +) -> Optional[Union[Connection, HTTPValidationError]]: + """Create a new static connection.""" + + return ( + await asyncio_detailed( + client=client, + json_body=json_body, + ) + ).parsed diff 
--git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/receive_message_receive_post.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/receive_message_receive_post.py new file mode 100644 index 00000000..f50f3489 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/receive_message_receive_post.py @@ -0,0 +1,61 @@ +from typing import Any, Dict + +import httpx + +from ...client import Client +from ...types import Response + + +def _get_kwargs( + *, + client: Client, +) -> Dict[str, Any]: + url = "{}/receive".format(client.base_url) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + } + + +def _build_response(*, response: httpx.Response) -> Response[Any]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=None, + ) + + +def sync_detailed( + *, + client: Client, +) -> Response[Any]: + kwargs = _get_kwargs( + client=client, + ) + + response = httpx.post( + **kwargs, + ) + + return _build_response(response=response) + + +async def asyncio_detailed( + *, + client: Client, +) -> Response[Any]: + kwargs = _get_kwargs( + client=client, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.post(**kwargs) + + return _build_response(response=response) diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py new file mode 100644 index 00000000..3cce861c --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py @@ -0,0 +1,132 @@ +from typing import Any, Dict, List, Optional, Union + +import httpx + +from ...client import Client +from ...models.http_validation_error import HTTPValidationError +from ...models.retrieve_messages_response_200_item import RetrieveMessagesResponse200Item +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + client: Client, + connection_id: str, + poll: Union[Unset, bool] = False, +) -> Dict[str, Any]: + url = "{}/retrieve/{connection_id}".format(client.base_url, connection_id=connection_id) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + params: Dict[str, Any] = { + "poll": poll, + } + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "params": params, + } + + +def _parse_response( + *, response: httpx.Response +) -> Optional[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + if response.status_code == 200: + response_200 = [] + _response_200 = response.json() + for response_200_item_data in _response_200: + response_200_item = RetrieveMessagesResponse200Item.from_dict(response_200_item_data) + + response_200.append(response_200_item) + + return response_200 + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + return None + + +def _build_response( + *, response: httpx.Response +) -> Response[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + 
parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, + connection_id: str, + poll: Union[Unset, bool] = False, +) -> Response[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + poll=poll, + ) + + response = httpx.get( + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + client: Client, + connection_id: str, + poll: Union[Unset, bool] = False, +) -> Optional[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + """Retrieve all received messages for recipient key.""" + + return sync_detailed( + client=client, + connection_id=connection_id, + poll=poll, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + connection_id: str, + poll: Union[Unset, bool] = False, +) -> Response[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + poll=poll, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.get(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, + connection_id: str, + poll: Union[Unset, bool] = False, +) -> Optional[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: + """Retrieve all received messages for recipient key.""" + + return ( + await asyncio_detailed( + client=client, + connection_id=connection_id, + poll=poll, + ) + ).parsed diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/send_message.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/send_message.py new file mode 100644 index 00000000..8d1338cb --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/send_message.py @@ -0,0 +1,120 @@ +from typing import Any, Dict, Optional, Union + +import httpx + +from ...client import Client +from ...models.http_validation_error import HTTPValidationError +from ...models.send_message_message import SendMessageMessage +from ...types import Response + + +def _get_kwargs( + *, + client: Client, + connection_id: str, + json_body: SendMessageMessage, +) -> Dict[str, Any]: + url = "{}/send/{connection_id}".format(client.base_url, connection_id=connection_id) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + json_json_body = json_body.to_dict() + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "json": json_json_body, + } + + +def _parse_response(*, response: httpx.Response) -> Optional[Union[Any, HTTPValidationError]]: + if response.status_code == 200: + response_200 = response.json() + + return response_200 + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + return None + + +def _build_response(*, response: httpx.Response) -> Response[Union[Any, HTTPValidationError]]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, + connection_id: str, + json_body: SendMessageMessage, +) -> Response[Union[Any, HTTPValidationError]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + json_body=json_body, + ) + + response = httpx.post( + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + 
client: Client, + connection_id: str, + json_body: SendMessageMessage, +) -> Optional[Union[Any, HTTPValidationError]]: + """Send a message to connection identified by did.""" + + return sync_detailed( + client=client, + connection_id=connection_id, + json_body=json_body, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + connection_id: str, + json_body: SendMessageMessage, +) -> Response[Union[Any, HTTPValidationError]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + json_body=json_body, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.post(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, + connection_id: str, + json_body: SendMessageMessage, +) -> Optional[Union[Any, HTTPValidationError]]: + """Send a message to connection identified by did.""" + + return ( + await asyncio_detailed( + client=client, + connection_id=connection_id, + json_body=json_body, + ) + ).parsed diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/client.py b/int/echo-agent/echo-agent-client/echo_agent_client/client.py new file mode 100644 index 00000000..36fa529e --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/client.py @@ -0,0 +1,46 @@ +from typing import Dict + +import attr + + +@attr.s(auto_attribs=True) +class Client: + """A class for keeping track of data related to the API""" + + base_url: str + cookies: Dict[str, str] = attr.ib(factory=dict, kw_only=True) + headers: Dict[str, str] = attr.ib(factory=dict, kw_only=True) + timeout: float = attr.ib(5.0, kw_only=True) + + def get_headers(self) -> Dict[str, str]: + """Get headers to be used in all endpoints""" + return {**self.headers} + + def with_headers(self, headers: Dict[str, str]) -> "Client": + """Get a new client matching this one with additional headers""" + return attr.evolve(self, headers={**self.headers, **headers}) + + def get_cookies(self) -> Dict[str, str]: + return {**self.cookies} + + def with_cookies(self, cookies: Dict[str, str]) -> "Client": + """Get a new client matching this one with additional cookies""" + return attr.evolve(self, cookies={**self.cookies, **cookies}) + + def get_timeout(self) -> float: + return self.timeout + + def with_timeout(self, timeout: float) -> "Client": + """Get a new client matching this one with a new timeout (in seconds)""" + return attr.evolve(self, timeout=timeout) + + +@attr.s(auto_attribs=True) +class AuthenticatedClient(Client): + """A Client which has been authenticated for use on secured endpoints""" + + token: str + + def get_headers(self) -> Dict[str, str]: + """Get headers to be used in authenticated endpoints""" + return {"Authorization": f"Bearer {self.token}", **self.headers} diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py new file mode 100644 index 00000000..5e0112ab --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py @@ -0,0 +1,12 @@ +""" Contains all the data models used in inputs/outputs """ + +from .connection import Connection +from .debug_info import DebugInfo +from .debug_info_connections import DebugInfoConnections +from .debug_info_messages import DebugInfoMessages +from .debug_info_recip_key_to_connection_id import DebugInfoRecipKeyToConnectionId +from .http_validation_error import HTTPValidationError +from .new_connection import NewConnection +from .retrieve_messages_response_200_item 
import RetrieveMessagesResponse200Item +from .send_message_message import SendMessageMessage +from .validation_error import ValidationError diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/connection.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/connection.py new file mode 100644 index 00000000..ad320f80 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/connection.py @@ -0,0 +1,72 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="Connection") + + +@attr.s(auto_attribs=True) +class Connection: + """ """ + + connection_id: str + did: str + verkey: str + their_vk: str + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + connection_id = self.connection_id + did = self.did + verkey = self.verkey + their_vk = self.their_vk + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "connection_id": connection_id, + "did": did, + "verkey": verkey, + "their_vk": their_vk, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + connection_id = d.pop("connection_id") + + did = d.pop("did") + + verkey = d.pop("verkey") + + their_vk = d.pop("their_vk") + + connection = cls( + connection_id=connection_id, + did=did, + verkey=verkey, + their_vk=their_vk, + ) + + connection.additional_properties = d + return connection + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info.py new file mode 100644 index 00000000..3db92ba7 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info.py @@ -0,0 +1,72 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +from ..models.debug_info_connections import DebugInfoConnections +from ..models.debug_info_messages import DebugInfoMessages +from ..models.debug_info_recip_key_to_connection_id import DebugInfoRecipKeyToConnectionId + +T = TypeVar("T", bound="DebugInfo") + + +@attr.s(auto_attribs=True) +class DebugInfo: + """ """ + + connections: DebugInfoConnections + recip_key_to_connection_id: DebugInfoRecipKeyToConnectionId + messages: DebugInfoMessages + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + connections = self.connections.to_dict() + + recip_key_to_connection_id = self.recip_key_to_connection_id.to_dict() + + messages = self.messages.to_dict() + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "connections": connections, + "recip_key_to_connection_id": recip_key_to_connection_id, + "messages": messages, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + connections = DebugInfoConnections.from_dict(d.pop("connections")) + + recip_key_to_connection_id = 
DebugInfoRecipKeyToConnectionId.from_dict(d.pop("recip_key_to_connection_id")) + + messages = DebugInfoMessages.from_dict(d.pop("messages")) + + debug_info = cls( + connections=connections, + recip_key_to_connection_id=recip_key_to_connection_id, + messages=messages, + ) + + debug_info.additional_properties = d + return debug_info + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_connections.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_connections.py new file mode 100644 index 00000000..8bc57465 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_connections.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="DebugInfoConnections") + + +@attr.s(auto_attribs=True) +class DebugInfoConnections: + """ """ + + additional_properties: Dict[str, str] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + debug_info_connections = cls() + + debug_info_connections.additional_properties = d + return debug_info_connections + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_messages.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_messages.py new file mode 100644 index 00000000..410c73b5 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_messages.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="DebugInfoMessages") + + +@attr.s(auto_attribs=True) +class DebugInfoMessages: + """ """ + + additional_properties: Dict[str, str] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + debug_info_messages = cls() + + debug_info_messages.additional_properties = d + return debug_info_messages + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def 
__delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_recip_key_to_connection_id.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_recip_key_to_connection_id.py new file mode 100644 index 00000000..1178a15f --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/debug_info_recip_key_to_connection_id.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="DebugInfoRecipKeyToConnectionId") + + +@attr.s(auto_attribs=True) +class DebugInfoRecipKeyToConnectionId: + """ """ + + additional_properties: Dict[str, str] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + debug_info_recip_key_to_connection_id = cls() + + debug_info_recip_key_to_connection_id.additional_properties = d + return debug_info_recip_key_to_connection_id + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/http_validation_error.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/http_validation_error.py new file mode 100644 index 00000000..211ef53e --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/http_validation_error.py @@ -0,0 +1,66 @@ +from typing import Any, Dict, List, Type, TypeVar, Union + +import attr + +from ..models.validation_error import ValidationError +from ..types import UNSET, Unset + +T = TypeVar("T", bound="HTTPValidationError") + + +@attr.s(auto_attribs=True) +class HTTPValidationError: + """ """ + + detail: Union[Unset, List[ValidationError]] = UNSET + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + detail: Union[Unset, List[Dict[str, Any]]] = UNSET + if not isinstance(self.detail, Unset): + detail = [] + for detail_item_data in self.detail: + detail_item = detail_item_data.to_dict() + + detail.append(detail_item) + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if detail is not UNSET: + field_dict["detail"] = detail + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + detail = [] + _detail = d.pop("detail", UNSET) + for detail_item_data in _detail or []: + detail_item = ValidationError.from_dict(detail_item_data) + + detail.append(detail_item) + + http_validation_error = cls( + detail=detail, + ) + + http_validation_error.additional_properties = d + return http_validation_error + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return 
self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/new_connection.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/new_connection.py new file mode 100644 index 00000000..a2ed5363 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/new_connection.py @@ -0,0 +1,66 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="NewConnection") + + +@attr.s(auto_attribs=True) +class NewConnection: + """ """ + + seed: str + endpoint: str + their_vk: str + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + seed = self.seed + endpoint = self.endpoint + their_vk = self.their_vk + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "seed": seed, + "endpoint": endpoint, + "their_vk": their_vk, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + seed = d.pop("seed") + + endpoint = d.pop("endpoint") + + their_vk = d.pop("their_vk") + + new_connection = cls( + seed=seed, + endpoint=endpoint, + their_vk=their_vk, + ) + + new_connection.additional_properties = d + return new_connection + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/retrieve_messages_response_200_item.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/retrieve_messages_response_200_item.py new file mode 100644 index 00000000..383b56e6 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/retrieve_messages_response_200_item.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="RetrieveMessagesResponse200Item") + + +@attr.s(auto_attribs=True) +class RetrieveMessagesResponse200Item: + """ """ + + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + retrieve_messages_response_200_item = cls() + + retrieve_messages_response_200_item.additional_properties = d + return retrieve_messages_response_200_item + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: 
str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/send_message_message.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/send_message_message.py new file mode 100644 index 00000000..8c667b42 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/send_message_message.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="SendMessageMessage") + + +@attr.s(auto_attribs=True) +class SendMessageMessage: + """ """ + + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + send_message_message = cls() + + send_message_message.additional_properties = d + return send_message_message + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/validation_error.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/validation_error.py new file mode 100644 index 00000000..5fd056db --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/validation_error.py @@ -0,0 +1,67 @@ +from typing import Any, Dict, List, Type, TypeVar, cast + +import attr + +T = TypeVar("T", bound="ValidationError") + + +@attr.s(auto_attribs=True) +class ValidationError: + """ """ + + loc: List[str] + msg: str + type: str + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + loc = self.loc + + msg = self.msg + type = self.type + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "loc": loc, + "msg": msg, + "type": type, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + loc = cast(List[str], d.pop("loc")) + + msg = d.pop("msg") + + type = d.pop("type") + + validation_error = cls( + loc=loc, + msg=msg, + type=type, + ) + + validation_error.additional_properties = d + return validation_error + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/py.typed b/int/echo-agent/echo-agent-client/echo_agent_client/py.typed new file mode 100644 index 00000000..1aad3271 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 \ No newline at end of file diff --git 
a/int/echo-agent/echo-agent-client/echo_agent_client/types.py b/int/echo-agent/echo-agent-client/echo_agent_client/types.py new file mode 100644 index 00000000..a6f00ece --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/types.py @@ -0,0 +1,43 @@ +""" Contains some shared types for properties """ +from typing import BinaryIO, Generic, MutableMapping, Optional, TextIO, Tuple, TypeVar, Union + +import attr + + +class Unset: + def __bool__(self) -> bool: + return False + + +UNSET: Unset = Unset() + +FileJsonType = Tuple[Optional[str], Union[BinaryIO, TextIO], Optional[str]] + + +@attr.s(auto_attribs=True) +class File: + """Contains information for file uploads""" + + payload: Union[BinaryIO, TextIO] + file_name: Optional[str] = None + mime_type: Optional[str] = None + + def to_tuple(self) -> FileJsonType: + """Return a tuple representation that httpx will accept for multipart/form-data""" + return self.file_name, self.payload, self.mime_type + + +T = TypeVar("T") + + +@attr.s(auto_attribs=True) +class Response(Generic[T]): + """A response from an endpoint""" + + status_code: int + content: bytes + headers: MutableMapping[str, str] + parsed: Optional[T] + + +__all__ = ["File", "Response", "FileJsonType"] diff --git a/int/echo-agent/echo-agent-client/pyproject.toml b/int/echo-agent/echo-agent-client/pyproject.toml new file mode 100644 index 00000000..ce4a62ed --- /dev/null +++ b/int/echo-agent/echo-agent-client/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "echo-agent-client" +version = "0.1.0" +description = "A client library for accessing Echo Agent" + +authors = [] + +readme = "README.md" +packages = [ + {include = "echo_agent_client"}, +] +include = ["CHANGELOG.md", "echo_agent_client/py.typed"] + + +[tool.poetry.dependencies] +python = "^3.6" +httpx = ">=0.15.4,<0.19.0" +attrs = ">=20.1.0,<22.0.0" +python-dateutil = "^2.8.0" + +[tool.black] +line-length = 120 +target_version = ['py36', 'py37', 'py38'] +exclude = ''' +( + /( + | \.git + | \.venv + | \.mypy_cache + )/ +) +''' + +[tool.isort] +line_length = 120 +multi_line_output = 3 +include_trailing_comma = true + +[build-system] +requires = ["poetry>=1.0"] +build-backend = "poetry.masonry.api" \ No newline at end of file From e553b089cafad375eda4e965dbd6d24fd2fffe46 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Tue, 27 Jul 2021 20:13:12 -0400 Subject: [PATCH 43/66] feat: add echo-agent-client to test runner Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/Dockerfile.test.runner | 1 + int/docker-compose.yml | 15 +- int/poetry.lock | 365 +++++++++++++++++++++++-------------- int/pyproject.toml | 1 + 4 files changed, 240 insertions(+), 142 deletions(-) diff --git a/int/Dockerfile.test.runner b/int/Dockerfile.test.runner index 812b1ede..b42fd79a 100644 --- a/int/Dockerfile.test.runner +++ b/int/Dockerfile.test.runner @@ -8,6 +8,7 @@ ADD https://github.com/ufoscout/docker-compose-wait/releases/download/$WAIT_VERS RUN chmod +x /wait COPY acapy-backchannel acapy-backchannel +COPY echo-agent echo-agent COPY pyproject.toml . COPY poetry.lock . 
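# echo-agent (which now contains the generated echo-agent-client package) must
# be copied in ahead of the install step: pyproject.toml picks up
# echo-agent-client as a local path dependency, so `poetry install --no-dev`
# resolves it from echo-agent/echo-agent-client inside the image (see the
# directory-type source recorded for echo-agent-client in poetry.lock below).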
RUN poetry install --no-dev diff --git a/int/docker-compose.yml b/int/docker-compose.yml index 998a00e3..db607825 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -14,9 +14,19 @@ services: build: context: ../ dockerfile: ./docker/Dockerfile + environment: + ACAPY_TOOLBOX_LOG_LEVEL: DEBUG ports: - "3001:3001" - command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --log-level debug --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis --wallet-type indy --wallet-name default --wallet-key "insecure, for use in testing only" --auto-provision --auto-ping-connection + command: start -it http 0.0.0.0 3000 -ot http -e http://acapy_plugin_agent:3000 --admin 0.0.0.0 3001 --admin-insecure-mode --plugin acapy_plugin_toolbox --genesis-url https://raw.githubusercontent.com/Indicio-tech/indicio-network/master/genesis_files/pool_transactions_testnet_genesis --wallet-type indy --wallet-name default --wallet-key "insecure, for use in testing only" --auto-provision --auto-ping-connection + + echo: + image: echo-agent + build: + context: ./echo-agent/ + ports: + - "3002:3002" + command: python -m uvicorn echo:app --host 0.0.0.0 --port 3002 --log-level debug #************************************************************* # tester: drives tests for acapy_plugin_toolbox in a * @@ -38,7 +48,8 @@ services: - AGENT_HOST=acapy_plugin_agent - AGENT_PORT=3000 - AGENT_BACKCHANNEL_PORT=3001 - - SUITE_HOST=tests + - SUITE_ENDPOINT=http://echo:3002/receive + - SUITE_HOST=echo - SUITE_PORT=3002 depends_on: - acapy_plugin_agent diff --git a/int/poetry.lock b/int/poetry.lock index 103cc950..1ac47cac 100644 --- a/int/poetry.lock +++ b/int/poetry.lock @@ -38,14 +38,13 @@ speedups = ["aiodns", "brotlipy", "cchardet"] [[package]] name = "anyio" -version = "3.1.0" +version = "3.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false python-versions = ">=3.6.2" [package.dependencies] -async-generator = {version = "*", markers = "python_version < \"3.7\""} dataclasses = {version = "*", markers = "python_version < \"3.7\""} idna = ">=2.8" sniffio = ">=1.1" @@ -129,6 +128,21 @@ docs = ["furo", "sphinx", "zope.interface"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +[[package]] +name = "backports.entry-points-selectable" +version = "1.1.0" +description = "Compatibility shim providing selectable entry points for older implementations" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] + [[package]] name = "base58" version = "2.1.0" @@ -142,7 +156,7 @@ tests = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "PyHamcrest (>=2.0.2)" [[package]] name = "black" -version = "21.6b0" +version = "21.7b0" description = "The uncompromising code formatter." 
category = "dev" optional = false @@ -155,7 +169,7 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.8.1,<1" regex = ">=2020.1.8" -toml = ">=0.10.1" +tomli = ">=0.2.6,<2.0.0" typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} @@ -175,7 +189,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.14.6" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -200,6 +214,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "charset-normalizer" +version = "2.0.3" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "8.0.1" @@ -258,6 +283,24 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "echo-agent-client" +version = "0.1.0" +description = "A client library for accessing Echo Agent" +category = "main" +optional = false +python-versions = "^3.6" +develop = true + +[package.dependencies] +attrs = ">=20.1.0,<22.0.0" +httpx = ">=0.15.4,<0.19.0" +python-dateutil = "^2.8.0" + +[package.source] +type = "directory" +url = "echo-agent/echo-agent-client" + [[package]] name = "filelock" version = "3.0.12" @@ -290,7 +333,7 @@ python-versions = ">=3.6" [[package]] name = "httpcore" -version = "0.13.4" +version = "0.13.6" description = "A minimal low-level HTTP client." category = "main" optional = false @@ -306,7 +349,7 @@ http2 = ["h2 (>=3,<5)"] [[package]] name = "httpx" -version = "0.18.1" +version = "0.18.2" description = "The next generation HTTP client." 
category = "main" optional = false @@ -315,7 +358,7 @@ python-versions = ">=3.6" [package.dependencies] async-generator = {version = "*", markers = "python_version < \"3.7\""} certifi = "*" -httpcore = ">=0.13.0,<0.14.0" +httpcore = ">=0.13.3,<0.14.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" @@ -325,7 +368,7 @@ http2 = ["h2 (>=3.0.0,<4.0.0)"] [[package]] name = "identify" -version = "2.2.10" +version = "2.2.11" description = "File identification library for Python" category = "dev" optional = false @@ -336,11 +379,11 @@ license = ["editdistance-s"] [[package]] name = "idna" -version = "2.10" +version = "3.2" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "idna-ssl" @@ -366,7 +409,7 @@ test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)"] [[package]] name = "importlib-metadata" -version = "4.5.0" +version = "4.6.1" description = "Read metadata from Python packages" category = "main" optional = false @@ -378,11 +421,12 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +perf = ["ipython"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" -version = "5.1.4" +version = "5.2.0" description = "Read resources from Python packages" category = "dev" optional = false @@ -445,22 +489,34 @@ python-versions = "*" [[package]] name = "packaging" -version = "20.9" +version = "21.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2" [[package]] name = "pathspec" -version = "0.8.1" +version = "0.9.0" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.1.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -604,7 +660,7 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -623,7 +679,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "regex" -version = "2021.4.4" +version = "2021.7.6" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -631,21 +687,21 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.1" +version = "2.26.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rfc3986" @@ -704,6 +760,14 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "1.1.0" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "typed-ast" version = "1.4.3" @@ -722,7 +786,7 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.26.5" +version = "1.26.6" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -735,18 +799,19 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.4.7" +version = "20.6.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -appdirs = ">=1.4.3,<2" +"backports.entry-points-selectable" = ">=1.0.4" distlib = ">=0.3.1,<1" filelock = ">=3.0.0,<4" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} +platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] @@ -768,7 +833,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.4.1" +version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -776,12 +841,12 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" python-versions = "^3.6.9" -content-hash = "5fae2d0da93b3c45b7924a0fc64ffb9c3ef0a9290bf46fe84307e1d39c8dcf87" +content-hash = "5317644905dc6f91087b0a73d7c5e193237617c91c362ffa6d47153f138f400e" [metadata.files] acapy-backchannel = [] @@ -825,8 +890,8 @@ aiohttp = [ {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, ] anyio = [ - {file = "anyio-3.1.0-py3-none-any.whl", hash = "sha256:5e335cef65fbd1a422bbfbb4722e8e9a9fadbd8c06d5afe9cd614d12023f6e5a"}, - {file = "anyio-3.1.0.tar.gz", hash = "sha256:43e20711a9d003d858d694c12356dc44ab82c03ccc5290313c3392fa349dad0e"}, + {file = "anyio-3.3.0-py3-none-any.whl", hash = "sha256:929a6852074397afe1d989002aa96d457e3e1e5441357c60d03e7eea0e65e1b0"}, + {file = "anyio-3.3.0.tar.gz", hash = "sha256:ae57a67583e5ff8b4af47666ff5651c3732d45fd26c929253748e796af860374"}, ] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, @@ -856,56 +921,63 @@ attrs = [ {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] +"backports.entry-points-selectable" = [ + {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, + {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, +] base58 = [ {file = "base58-2.1.0-py3-none-any.whl", hash = "sha256:8225891d501b68c843ffe30b86371f844a21c6ba00da76f52f9b998ba771fb48"}, {file = "base58-2.1.0.tar.gz", hash = "sha256:171a547b4a3c61e1ae3807224a6f7aec75e364c4395e7562649d7335768001a2"}, ] black = [ - {file = "black-21.6b0-py3-none-any.whl", hash = 
"sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, - {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, + {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"}, + {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, ] certifi = [ {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"}, + {file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"}, + {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, + {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash 
= "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"}, + {file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"}, + {file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"}, + {file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"}, + {file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = "sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"}, + {file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = "sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"}, + {file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"}, + {file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"}, + {file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"}, + {file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"}, + {file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"}, + {file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"}, + {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, ] cfgv = [ {file = "cfgv-3.3.0-py2.py3-none-any.whl", hash = "sha256:b449c9c6118fe8cca7fa5e00b9ec60ba08145d281d52164230a69211c5d597a1"}, @@ -915,6 +987,10 @@ chardet = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] +charset-normalizer = [ + {file = "charset-normalizer-2.0.3.tar.gz", hash = "sha256:c46c3ace2d744cfbdebceaa3c19ae691f53ae621b39fd7570f59d14fb7f2fd12"}, + {file = "charset_normalizer-2.0.3-py3-none-any.whl", hash = "sha256:88fce3fa5b1a84fdcb3f603d889f723d1dd89b26059d0123ca435570e848d5e1"}, +] click = [ {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, @@ -988,6 +1064,7 @@ distlib = [ {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, ] +echo-agent-client = [] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, @@ -1001,20 +1078,20 @@ h11 = [ {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, ] httpcore = [ - {file = "httpcore-0.13.4-py3-none-any.whl", hash = "sha256:38e09649bb3906c913a2917c4eb3e3b3e11c83d4edebad8b53b7d757abc49267"}, - {file = "httpcore-0.13.4.tar.gz", hash = "sha256:9fa4c623bb9d2280c009c34658cc6315e4fd425a395145645bee205d827263e4"}, + {file = "httpcore-0.13.6-py3-none-any.whl", hash = "sha256:db4c0dcb8323494d01b8c6d812d80091a31e520033e7b0120883d6f52da649ff"}, + {file = "httpcore-0.13.6.tar.gz", hash = "sha256:b0d16f0012ec88d8cc848f5a55f8a03158405f4bca02ee49bc4ca2c1fda49f3e"}, ] httpx = [ - {file = "httpx-0.18.1-py3-none-any.whl", hash = "sha256:ad2e3db847be736edc4b272c4d5788790a7e5789ef132fc6b5fef8aeb9e9f6e0"}, - {file = "httpx-0.18.1.tar.gz", hash = "sha256:0a2651dd2b9d7662c70d12ada5c290abcf57373b9633515fe4baa9f62566086f"}, + {file = "httpx-0.18.2-py3-none-any.whl", hash = "sha256:979afafecb7d22a1d10340bafb403cf2cb75aff214426ff206521fc79d26408c"}, + {file = "httpx-0.18.2.tar.gz", hash = 
"sha256:9f99c15d33642d38bce8405df088c1c4cfd940284b4290cacbfb02e64f4877c6"}, ] identify = [ - {file = "identify-2.2.10-py2.py3-none-any.whl", hash = "sha256:18d0c531ee3dbc112fa6181f34faa179de3f57ea57ae2899754f16a7e0ff6421"}, - {file = "identify-2.2.10.tar.gz", hash = "sha256:5b41f71471bc738e7b586308c3fca172f78940195cb3bf6734c1e66fdac49306"}, + {file = "identify-2.2.11-py2.py3-none-any.whl", hash = "sha256:7abaecbb414e385752e8ce02d8c494f4fbc780c975074b46172598a28f1ab839"}, + {file = "identify-2.2.11.tar.gz", hash = "sha256:a0e700637abcbd1caae58e0463861250095dfe330a8371733a471af706a4a29a"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, ] idna-ssl = [ {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, @@ -1037,12 +1114,12 @@ immutables = [ {file = "immutables-0.15.tar.gz", hash = "sha256:3713ab1ebbb6946b7ce1387bb9d1d7f5e09c45add58c2a2ee65f963c171e746b"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, - {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, + {file = "importlib_metadata-4.6.1-py3-none-any.whl", hash = "sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e"}, + {file = "importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, ] importlib-resources = [ - {file = "importlib_resources-5.1.4-py3-none-any.whl", hash = "sha256:e962bff7440364183203d179d7ae9ad90cb1f2b74dcb84300e88ecc42dca3351"}, - {file = "importlib_resources-5.1.4.tar.gz", hash = "sha256:54161657e8ffc76596c4ede7080ca68cb02962a2e074a2586b695a93a925d36e"}, + {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = "sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, + {file = "importlib_resources-5.2.0.tar.gz", hash = "sha256:22a2c42d8c6a1d30aa8a0e1f57293725bfd5c013d562585e46aff469e0ff78b3"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1130,12 +1207,16 @@ nodeenv = [ {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, + {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, ] pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = 
"sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.1.0-py3-none-any.whl", hash = "sha256:b2b30ae52404f93e2024e85bba29329b85715d6b2f18ffe90ecd25a5c67553df"}, + {file = "platformdirs-2.1.0.tar.gz", hash = "sha256:1964be5aba107a7ccb7de0e6f1f1bfde0dee51641f0e733028121f8e02e2e16b"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, @@ -1198,8 +1279,8 @@ pytest-cov = [ {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, @@ -1233,51 +1314,51 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = 
"regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = 
"sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, + {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, + {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, + {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, + {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, + {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, + {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, + {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, + {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, + {file = 
"regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, + {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, + {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, + {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, + {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, + {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, + {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, + {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, + {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] rfc3986 = [ {file = 
"rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, @@ -1303,6 +1384,10 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-1.1.0-py3-none-any.whl", hash = "sha256:f4a182048010e89cbec0ae4686b21f550a7f2903f665e34a6de58ec15424f919"}, + {file = "tomli-1.1.0.tar.gz", hash = "sha256:33d7984738f8bb699c9b0a816eb646a8178a69eaa792d258486776a5d21b8ca5"}, +] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, @@ -1341,12 +1426,12 @@ typing-extensions = [ {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, ] urllib3 = [ - {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, - {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, + {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, + {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] virtualenv = [ - {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"}, - {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"}, + {file = "virtualenv-20.6.0-py2.py3-none-any.whl", hash = "sha256:e4fc84337dce37ba34ef520bf2d4392b392999dbe47df992870dc23230f6b758"}, + {file = "virtualenv-20.6.0.tar.gz", hash = "sha256:51df5d8a2fad5d1b13e088ff38a433475768ff61f202356bb9812c454c20ae45"}, ] yarl = [ {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, @@ -1388,6 +1473,6 @@ yarl = [ {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, + {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] diff --git a/int/pyproject.toml b/int/pyproject.toml index a9f9a0cc..cda46ee6 100644 --- a/int/pyproject.toml +++ b/int/pyproject.toml @@ -13,6 +13,7 @@ pytest-cov = "^2.12.1" asynctest = "0.13.0" requests = "^2.25.1" acapy-backchannel = {path = "acapy-backchannel", develop = true} +echo-agent-client = {path = "echo-agent/echo-agent-client", develop = true} aries-staticagent = "^0.8.0" aiohttp = "^3.7.4" From 21c5e901a7950f18a4f028a1c48db4467dc0de94 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 13:33:43 -0400 Subject: [PATCH 44/66] fix: bugs with send_to_admins sending to 
endpoint

Signed-off-by: Daniel Bluhm 
Signed-off-by: Char Howland 
---
 acapy_plugin_toolbox/util.py | 50 ++++++++++++++++++++++++------------
 1 file changed, 34 insertions(+), 16 deletions(-)

diff --git a/acapy_plugin_toolbox/util.py b/acapy_plugin_toolbox/util.py
index 2ade501b..25f78cfc 100644
--- a/acapy_plugin_toolbox/util.py
+++ b/acapy_plugin_toolbox/util.py
@@ -338,16 +338,26 @@ async def handle(self, context: RequestContext, _responder):
 async def admin_connections(session: ProfileSession):
     """Return admin connections."""
     storage = session.inject(BaseStorage)
-    admin_ids = map(
-        lambda record: record.tags["connection_id"],
-        filter(
-            lambda record: json.loads(record.value) == "admin",
-            await storage.find_all_records(
-                ConnRecord.RECORD_TYPE_METADATA, {"key": "group"}
-            ),
-        ),
-    )
-    admins = [await ConnRecord.retrieve_by_id(session, id) for id in admin_ids]
+    admin_metadata_records = [
+        record
+        for record in await storage.find_all_records(
+            ConnRecord.RECORD_TYPE_METADATA, {"key": "group"}
+        )
+        or []
+        if json.loads(record.value) == "admin"
+    ]
+    admins = []
+    for record in admin_metadata_records:
+        try:
+            admin = await ConnRecord.retrieve_by_id(
+                session, record.tags["connection_id"]
+            )
+            admins.append(admin)
+        except StorageNotFoundError:
+            # Clean up dangling metadata records of admins
+            LOGGER.debug("Deleting dangling admin metadata record: %s", record)
+            await storage.delete_record(record)
+    LOGGER.info("Discovered admins: %s", admins)
     return admins
 
 
@@ -370,12 +380,20 @@ async def send_to_admins(
     ]
 
     for target in admin_targets:
-        await responder.send(
-            message,
-            reply_to_verkey=target.recipient_keys[0],
-            reply_from_verkey=target.sender_key,
-            to_session_only=to_session_only,
-        )
+        if not to_session_only:
+            await responder.send(
+                message,
+                reply_to_verkey=target.recipient_keys[0],
+                reply_from_verkey=target.sender_key,
+                target=target,
+            )
+        else:
+            await responder.send(
+                message,
+                reply_to_verkey=target.recipient_keys[0],
+                reply_from_verkey=target.sender_key,
+                to_session_only=to_session_only,
+            )
 
 
 class InvalidConnection(Exception):

From e46720576d249790f8891496d0dc24c9946c0059 Mon Sep 17 00:00:00 2001
From: Daniel Bluhm 
Date: Wed, 28 Jul 2021 13:57:07 -0400
Subject: [PATCH 45/66] feat: add wait_for_message to echo agent client

Signed-off-by: Daniel Bluhm 
Signed-off-by: Char Howland 
---
 .../api/default/retrieve_messages.py | 17 +--
 .../api/default/wait_for_message.py | 139 ++++++++++++++++++
 .../echo_agent_client/models/__init__.py | 3 +
 ..._for_message_wait_for_connection_id_get.py | 44 ++++++
 int/echo-agent/scripts/openapi.json | 2 +-
 5 files changed, 188 insertions(+), 17 deletions(-)
 create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/api/default/wait_for_message.py
 create mode 100644 int/echo-agent/echo-agent-client/echo_agent_client/models/wait_for_message_response_wait_for_message_wait_for_connection_id_get.py

diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py
index 3cce861c..436b3ee9 100644
--- a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py
+++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/retrieve_messages.py
@@ -5,31 +5,24 @@
 from ...client import Client
 from ...models.http_validation_error import HTTPValidationError
 from ...models.retrieve_messages_response_200_item import RetrieveMessagesResponse200Item
-from ...types import UNSET, Response, 
Unset +from ...types import Response def _get_kwargs( *, client: Client, connection_id: str, - poll: Union[Unset, bool] = False, ) -> Dict[str, Any]: url = "{}/retrieve/{connection_id}".format(client.base_url, connection_id=connection_id) headers: Dict[str, Any] = client.get_headers() cookies: Dict[str, Any] = client.get_cookies() - params: Dict[str, Any] = { - "poll": poll, - } - params = {k: v for k, v in params.items() if v is not UNSET and v is not None} - return { "url": url, "headers": headers, "cookies": cookies, "timeout": client.get_timeout(), - "params": params, } @@ -67,12 +60,10 @@ def sync_detailed( *, client: Client, connection_id: str, - poll: Union[Unset, bool] = False, ) -> Response[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: kwargs = _get_kwargs( client=client, connection_id=connection_id, - poll=poll, ) response = httpx.get( @@ -86,14 +77,12 @@ def sync( *, client: Client, connection_id: str, - poll: Union[Unset, bool] = False, ) -> Optional[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: """Retrieve all received messages for recipient key.""" return sync_detailed( client=client, connection_id=connection_id, - poll=poll, ).parsed @@ -101,12 +90,10 @@ async def asyncio_detailed( *, client: Client, connection_id: str, - poll: Union[Unset, bool] = False, ) -> Response[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: kwargs = _get_kwargs( client=client, connection_id=connection_id, - poll=poll, ) async with httpx.AsyncClient() as _client: @@ -119,7 +106,6 @@ async def asyncio( *, client: Client, connection_id: str, - poll: Union[Unset, bool] = False, ) -> Optional[Union[HTTPValidationError, List[RetrieveMessagesResponse200Item]]]: """Retrieve all received messages for recipient key.""" @@ -127,6 +113,5 @@ async def asyncio( await asyncio_detailed( client=client, connection_id=connection_id, - poll=poll, ) ).parsed diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/api/default/wait_for_message.py b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/wait_for_message.py new file mode 100644 index 00000000..6310834e --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/api/default/wait_for_message.py @@ -0,0 +1,139 @@ +from typing import Any, Dict, Optional, Union + +import httpx + +from ...client import Client +from ...models.http_validation_error import HTTPValidationError +from ...models.wait_for_message_response_wait_for_message_wait_for_connection_id_get import ( + WaitForMessageResponseWaitForMessageWaitForConnectionIdGet, +) +from ...types import UNSET, Response, Unset + + +def _get_kwargs( + *, + client: Client, + connection_id: str, + thid: Union[Unset, str] = UNSET, + msg_type: Union[Unset, str] = UNSET, +) -> Dict[str, Any]: + url = "{}/wait-for/{connection_id}".format(client.base_url, connection_id=connection_id) + + headers: Dict[str, Any] = client.get_headers() + cookies: Dict[str, Any] = client.get_cookies() + + params: Dict[str, Any] = { + "thid": thid, + "msg_type": msg_type, + } + params = {k: v for k, v in params.items() if v is not UNSET and v is not None} + + return { + "url": url, + "headers": headers, + "cookies": cookies, + "timeout": client.get_timeout(), + "params": params, + } + + +def _parse_response( + *, response: httpx.Response +) -> Optional[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + if response.status_code == 200: + response_200 = 
WaitForMessageResponseWaitForMessageWaitForConnectionIdGet.from_dict(response.json()) + + return response_200 + if response.status_code == 422: + response_422 = HTTPValidationError.from_dict(response.json()) + + return response_422 + return None + + +def _build_response( + *, response: httpx.Response +) -> Response[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + return Response( + status_code=response.status_code, + content=response.content, + headers=response.headers, + parsed=_parse_response(response=response), + ) + + +def sync_detailed( + *, + client: Client, + connection_id: str, + thid: Union[Unset, str] = UNSET, + msg_type: Union[Unset, str] = UNSET, +) -> Response[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + thid=thid, + msg_type=msg_type, + ) + + response = httpx.get( + **kwargs, + ) + + return _build_response(response=response) + + +def sync( + *, + client: Client, + connection_id: str, + thid: Union[Unset, str] = UNSET, + msg_type: Union[Unset, str] = UNSET, +) -> Optional[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + """Wait for a message matching criteria.""" + + return sync_detailed( + client=client, + connection_id=connection_id, + thid=thid, + msg_type=msg_type, + ).parsed + + +async def asyncio_detailed( + *, + client: Client, + connection_id: str, + thid: Union[Unset, str] = UNSET, + msg_type: Union[Unset, str] = UNSET, +) -> Response[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + kwargs = _get_kwargs( + client=client, + connection_id=connection_id, + thid=thid, + msg_type=msg_type, + ) + + async with httpx.AsyncClient() as _client: + response = await _client.get(**kwargs) + + return _build_response(response=response) + + +async def asyncio( + *, + client: Client, + connection_id: str, + thid: Union[Unset, str] = UNSET, + msg_type: Union[Unset, str] = UNSET, +) -> Optional[Union[HTTPValidationError, WaitForMessageResponseWaitForMessageWaitForConnectionIdGet]]: + """Wait for a message matching criteria.""" + + return ( + await asyncio_detailed( + client=client, + connection_id=connection_id, + thid=thid, + msg_type=msg_type, + ) + ).parsed diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py index 5e0112ab..c6c88ef2 100644 --- a/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/__init__.py @@ -10,3 +10,6 @@ from .retrieve_messages_response_200_item import RetrieveMessagesResponse200Item from .send_message_message import SendMessageMessage from .validation_error import ValidationError +from .wait_for_message_response_wait_for_message_wait_for_connection_id_get import ( + WaitForMessageResponseWaitForMessageWaitForConnectionIdGet, +) diff --git a/int/echo-agent/echo-agent-client/echo_agent_client/models/wait_for_message_response_wait_for_message_wait_for_connection_id_get.py b/int/echo-agent/echo-agent-client/echo_agent_client/models/wait_for_message_response_wait_for_message_wait_for_connection_id_get.py new file mode 100644 index 00000000..49468889 --- /dev/null +++ b/int/echo-agent/echo-agent-client/echo_agent_client/models/wait_for_message_response_wait_for_message_wait_for_connection_id_get.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, 
List, Type, TypeVar + +import attr + +T = TypeVar("T", bound="WaitForMessageResponseWaitForMessageWaitForConnectionIdGet") + + +@attr.s(auto_attribs=True) +class WaitForMessageResponseWaitForMessageWaitForConnectionIdGet: + """ """ + + additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + wait_for_message_response_wait_for_message_wait_for_connection_id_get = cls() + + wait_for_message_response_wait_for_message_wait_for_connection_id_get.additional_properties = d + return wait_for_message_response_wait_for_message_wait_for_connection_id_get + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/int/echo-agent/scripts/openapi.json b/int/echo-agent/scripts/openapi.json index bf32d7d9..45957c2e 100644 --- a/int/echo-agent/scripts/openapi.json +++ b/int/echo-agent/scripts/openapi.json @@ -1 +1 @@ -{"openapi":"3.0.2","info":{"title":"Echo Agent","version":"0.1.0"},"paths":{"/connection":{"post":{"summary":"New Connection","description":"Create a new static connection.","operationId":"new_connection","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/NewConnection"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Connection"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/receive":{"post":{"summary":"Receive Message","description":"Receive a new agent message and push onto the message queue.","operationId":"receive_message_receive_post","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}},"/retrieve/{connection_id}":{"get":{"summary":"Retreive Messages","description":"Retrieve all received messages for recipient key.","operationId":"retrieve_messages","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"},{"required":false,"schema":{"title":"Poll","type":"boolean","default":false},"name":"poll","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"title":"Response Retreive Messages Retrieve Connection Id Get","type":"array","items":{"type":"object"}}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/send/{connection_id}":{"post":{"summary":"Send Message","description":"Send a message to connection identified by did.","operationId":"send_message","parameters":[{"required":true,"schema":{"title":"Connection 
Id","type":"string"},"name":"connection_id","in":"path"}],"requestBody":{"content":{"application/json":{"schema":{"title":"Message","type":"object"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/debug":{"get":{"summary":"Debug Info","description":"Return agent state for debugging.","operationId":"debug_info_debug_get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DebugInfo"}}}}}}}},"components":{"schemas":{"Connection":{"title":"Connection","required":["connection_id","did","verkey","their_vk"],"type":"object","properties":{"connection_id":{"title":"Connection Id","type":"string"},"did":{"title":"Did","type":"string"},"verkey":{"title":"Verkey","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"DebugInfo":{"title":"DebugInfo","required":["connections","recip_key_to_connection_id","messages"],"type":"object","properties":{"connections":{"title":"Connections","type":"object","additionalProperties":{"type":"string"}},"recip_key_to_connection_id":{"title":"Recip Key To Connection Id","type":"object","additionalProperties":{"type":"string"}},"messages":{"title":"Messages","type":"object","additionalProperties":{"type":"string"}}}},"HTTPValidationError":{"title":"HTTPValidationError","type":"object","properties":{"detail":{"title":"Detail","type":"array","items":{"$ref":"#/components/schemas/ValidationError"}}}},"NewConnection":{"title":"NewConnection","required":["seed","endpoint","their_vk"],"type":"object","properties":{"seed":{"title":"Seed","type":"string","example":"00000000000000000000000000000000"},"endpoint":{"title":"Endpoint","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"ValidationError":{"title":"ValidationError","required":["loc","msg","type"],"type":"object","properties":{"loc":{"title":"Location","type":"array","items":{"type":"string"}},"msg":{"title":"Message","type":"string"},"type":{"title":"Error Type","type":"string"}}}}}} +{"openapi":"3.0.2","info":{"title":"Echo Agent","version":"0.1.0"},"paths":{"/connection":{"post":{"summary":"New Connection","description":"Create a new static connection.","operationId":"new_connection","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/NewConnection"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/Connection"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/receive":{"post":{"summary":"Receive Message","description":"Receive a new agent message and push onto the message queue.","operationId":"receive_message_receive_post","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}},"/retrieve/{connection_id}":{"get":{"summary":"Retreive Messages","description":"Retrieve all received messages for recipient key.","operationId":"retrieve_messages","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"title":"Response Retreive Messages Retrieve Connection Id 
Get","type":"array","items":{"type":"object"}}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/wait-for/{connection_id}":{"get":{"summary":"Wait For Message","description":"Wait for a message matching criteria.","operationId":"wait_for_message","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"},{"required":false,"schema":{"title":"Thid","type":"string"},"name":"thid","in":"query"},{"required":false,"schema":{"title":"Msg Type","type":"string"},"name":"msg_type","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"title":"Response Wait For Message Wait For Connection Id Get","type":"object"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/send/{connection_id}":{"post":{"summary":"Send Message","description":"Send a message to connection identified by did.","operationId":"send_message","parameters":[{"required":true,"schema":{"title":"Connection Id","type":"string"},"name":"connection_id","in":"path"}],"requestBody":{"content":{"application/json":{"schema":{"title":"Message","type":"object"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/debug":{"get":{"summary":"Debug Info","description":"Return agent state for debugging.","operationId":"debug_info_debug_get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DebugInfo"}}}}}}}},"components":{"schemas":{"Connection":{"title":"Connection","required":["connection_id","did","verkey","their_vk"],"type":"object","properties":{"connection_id":{"title":"Connection Id","type":"string"},"did":{"title":"Did","type":"string"},"verkey":{"title":"Verkey","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"DebugInfo":{"title":"DebugInfo","required":["connections","recip_key_to_connection_id","messages"],"type":"object","properties":{"connections":{"title":"Connections","type":"object","additionalProperties":{"type":"string"}},"recip_key_to_connection_id":{"title":"Recip Key To Connection Id","type":"object","additionalProperties":{"type":"string"}},"messages":{"title":"Messages","type":"object","additionalProperties":{"type":"string"}}}},"HTTPValidationError":{"title":"HTTPValidationError","type":"object","properties":{"detail":{"title":"Detail","type":"array","items":{"$ref":"#/components/schemas/ValidationError"}}}},"NewConnection":{"title":"NewConnection","required":["seed","endpoint","their_vk"],"type":"object","properties":{"seed":{"title":"Seed","type":"string","example":"00000000000000000000000000000000"},"endpoint":{"title":"Endpoint","type":"string"},"their_vk":{"title":"Their Vk","type":"string"}}},"ValidationError":{"title":"ValidationError","required":["loc","msg","type"],"type":"object","properties":{"loc":{"title":"Location","type":"array","items":{"type":"string"}},"msg":{"title":"Message","type":"string"},"type":{"title":"Error Type","type":"string"}}}}}} From 5deffc67a841c870bfc81b82f61c13149b524913 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 13:57:35 -0400 Subject: [PATCH 46/66] feat: add wait_for_message 
to echo agent Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/echo-agent/echo.py | 72 ++++++++++++++++++++++++++++++------------ 1 file changed, 52 insertions(+), 20 deletions(-) diff --git a/int/echo-agent/echo.py b/int/echo-agent/echo.py index 52aeffc2..d066cd46 100644 --- a/int/echo-agent/echo.py +++ b/int/echo-agent/echo.py @@ -15,7 +15,7 @@ from asyncio import Queue import json import logging -from typing import Dict, Iterable, List +from typing import Dict, Iterable, List, Optional from uuid import uuid4 from aries_staticagent.static_connection import StaticConnection, Target from aries_staticagent.message import Message @@ -120,20 +120,18 @@ async def receive_message(request: Request): response_model=List[Message], operation_id="retrieve_messages", ) -async def retreive_messages(connection_id: str, poll: bool = False): +async def retreive_messages(connection_id: str): """Retrieve all received messages for recipient key.""" if connection_id not in messages: - messages[connection_id] = Queue() - - if poll: - LOGGER.debug( - "Retrieving messages for connection_id %s with long polling", connection_id - ) - else: - LOGGER.debug( - "Retrieving messages for connection_id %s without long polling", - connection_id, + raise HTTPException( + status_code=404, + detail=f"No messages found for connection id {connection_id}", ) + + LOGGER.debug( + "Retrieving messages for connection_id %s", + connection_id, + ) queue = messages[connection_id] if not queue.empty(): to_return = [] @@ -142,18 +140,52 @@ async def retreive_messages(connection_id: str, poll: bool = False): queue.task_done() LOGGER.debug("Returning messages: %s", to_return) return to_return - elif poll: - LOGGER.debug( - "Waiting for message on queue for connection with id %s...", connection_id - ) - message = await queue.get() - queue.task_done() - LOGGER.debug("Message received, returning: %s", message) - return [message] else: return [] +@app.get( + "/wait-for/{connection_id}", response_model=Message, operation_id="wait_for_message" +) +async def wait_for_message( + connection_id: str, thid: Optional[str] = None, msg_type: Optional[str] = None +): + """Wait for a message matching criteria.""" + + def _matcher(message: Message): + """Matcher for messages.""" + thid_match = True if thid is None else message.thread["thid"] == thid + msg_type_match = True if msg_type is None else message.type == msg_type + return thid_match and msg_type_match + + if connection_id not in messages: + if connection_id in connections: + messages[connection_id] = Queue() + else: + raise HTTPException( + status_code=404, detail=f"No connection id matching {connection_id}" + ) + + queue = messages[connection_id] + while not queue.empty(): + message = queue.get_nowait() + queue.task_done() + if _matcher(message): + LOGGER.debug("Found message: %s", message) + return message + else: + LOGGER.info("Dropping message: %s", message) + + while True: + message = await queue.get() + queue.task_done() + if _matcher(message): + LOGGER.debug("Found message: %s", message) + return message + else: + LOGGER.info("Dropping message: %s", message) + + @app.post("/send/{connection_id}", operation_id="send_message") async def send_message(connection_id: str, message: dict = Body(...)): """Send a message to connection identified by did.""" From cc05cf989e86f9f113232ebac2e57a33d06cf428 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 13:58:59 -0400 Subject: [PATCH 47/66] feat: replace asynchronously run agent server with echo in fixtures 
Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/docker-compose.yml | 3 +- int/tests/__init__.py | 60 --------------------- int/tests/conftest.py | 120 ++++++++++++++++++++++++++++++----------- 3 files changed, 91 insertions(+), 92 deletions(-) diff --git a/int/docker-compose.yml b/int/docker-compose.yml index db607825..654ace85 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -41,7 +41,7 @@ services: dockerfile: ./Dockerfile.test.runner environment: - WAIT_BEFORE_HOSTS=3 - - WAIT_HOSTS=acapy_plugin_agent:3000 + - WAIT_HOSTS=acapy_plugin_agent:3000,echo:3002 - WAIT_HOSTS_TIMEOUT=60 - WAIT_SLEEP_INTERVAL=1 - WAIT_HOST_CONNECT_TIMEOUT=30 @@ -53,3 +53,4 @@ services: - SUITE_PORT=3002 depends_on: - acapy_plugin_agent + - echo diff --git a/int/tests/__init__.py b/int/tests/__init__.py index 20430124..e69de29b 100644 --- a/int/tests/__init__.py +++ b/int/tests/__init__.py @@ -1,60 +0,0 @@ -"""Common helpers.""" - -import logging - -from aiohttp import web -from aries_staticagent import StaticConnection, Module - -LOGGER = logging.getLogger(__name__) - - -class BaseAgent: - """Simple Agent class. - Used to start up an agent with statically configured handlers. - """ - - def __init__(self, host: str, port: int, connection: StaticConnection): - """Initialize BaseAgent.""" - self.host = host - self.port = port - self.connection = connection - self._runner = None - - async def handle_web_request(self, request: web.Request): - """Handle HTTP POST.""" - response = [] - with self.connection.session(response.append) as session: - try: - await self.connection.handle(await request.read(), session) - except: - LOGGER.exception("Message handling failed") - - if response: - return web.Response(body=response.pop()) - - raise web.HTTPAccepted() - - async def start_async(self): - """Start the agent listening for HTTP POSTs.""" - app = web.Application() - app.add_routes([web.post("/", self.handle_web_request)]) - self.runner = web.AppRunner(app) - await self.runner.setup() - site = web.TCPSite(self.runner, self.host, self.port) - await site.start() - - async def cleanup(self): - """Clean up async start.""" - await self.runner.cleanup() - - def start(self): - """Start sychronously.""" - app = web.Application() - app.add_routes([web.post("/", self.handle_web_request)]) - - web.run_app(app, port=self.port) - - def register_modules(self, *modules: Module): - """Register modules on connection.""" - for module in modules: - self.connection.route_module(module) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index a38080f4..016e3b4b 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -1,39 +1,41 @@ """Common fixtures for testing.""" import asyncio -from contextlib import suppress +import hashlib +import logging import os -import base64 from typing import Iterator, Optional -from acapy_backchannel.models.conn_record import ConnRecord -from acapy_backchannel.models.did import DID -import pytest -import hashlib -import httpx from acapy_backchannel import Client from acapy_backchannel.api.connection import ( create_static, - set_metadata, delete_connection, -) -from acapy_backchannel.api.wallet import ( - create_did, - set_public_did, + set_metadata, ) from acapy_backchannel.api.ledger import accept_taa, fetch_taa +from acapy_backchannel.api.wallet import create_did, set_public_did from acapy_backchannel.models import ( + ConnectionMetadataSetRequest, ConnectionStaticRequest, ConnectionStaticResult, - ConnectionMetadataSetRequest, TAAAccept, ) - +from 
acapy_backchannel.models.conn_record import ConnRecord +from acapy_backchannel.models.did import DID from aries_staticagent import StaticConnection, Target - -from . import BaseAgent - -import logging +from echo_agent_client import Client as EchoClient +from echo_agent_client.api.default import ( + new_connection, + retrieve_messages, + send_message, + wait_for_message as echo_wait_for_message, +) +from echo_agent_client.models import Connection as EchoConnection +from echo_agent_client.models.new_connection import NewConnection +from echo_agent_client.models.send_message_message import SendMessageMessage +from echo_agent_client.types import UNSET +import httpx +import pytest LOGGER = logging.getLogger(__name__) @@ -94,8 +96,8 @@ def agent_seed(): @pytest.fixture(scope="session") -def suite_endpoint(suite_host, suite_port): - yield "http://{}:{}".format(suite_host, suite_port) +def suite_endpoint(): + yield os.environ.get("SUITE_ENDPOINT", "http://localhost:3000") @pytest.fixture(scope="session") @@ -165,21 +167,77 @@ def connection(agent_connection: ConnectionStaticResult, suite_seed: str): @pytest.fixture(scope="session") -def agent(suite_host, suite_port, connection: StaticConnection): - yield BaseAgent(suite_host, suite_port, connection) +def echo_client(suite_host, suite_port): + yield EchoClient(base_url=f"http://{suite_host}:{suite_port}") -@pytest.fixture(scope="session", autouse=True) -async def http_endpoint(agent: BaseAgent): - """Start up http endpoint for suite.""" - server_task = asyncio.ensure_future(agent.start_async()) +@pytest.fixture(scope="session") +async def echo_connection(echo_client, suite_seed, agent_connection): + yield await new_connection.asyncio( + client=echo_client, + json_body=NewConnection( + seed=suite_seed, + endpoint=agent_connection.my_endpoint, + their_vk=agent_connection.my_verkey, + ), + ) - yield - server_task.cancel() - with suppress(asyncio.CancelledError): - await server_task - await agent.cleanup() +@pytest.fixture(scope="session") +def asynchronously_recevied_messages( + echo_client: EchoClient, echo_connection: EchoConnection +): + """Get asynchronously recevied messages from the echo agent.""" + # Could wipe left over messages here + async def _asynchronously_received_messages(timeout: int = 5): + timed_client = echo_client.with_timeout(timeout) + try: + messages = await retrieve_messages.asyncio( + client=timed_client, connection_id=echo_connection.connection_id + ) + except httpx.ReadTimeout: + raise Exception( + "Retrieving asynchronously recevied messages timed out" + ) from None + + return messages + + yield _asynchronously_received_messages + # Could wipe remaining messages here + + +@pytest.fixture(scope="session") +def wait_for_message(echo_client: EchoClient, echo_connection: EchoConnection): + """Get asynchronously recevied messages from the echo agent.""" + # Could wipe left over messages here + async def _asynchronously_received_messages( + *, thid: Optional[str] = None, msg_type: Optional[str] = None, timeout: int = 5 + ): + timed_client = echo_client.with_timeout(timeout) + try: + return await echo_wait_for_message.asyncio( + client=timed_client, + connection_id=echo_connection.connection_id, + thid=thid or UNSET, + msg_type=msg_type or UNSET, + ) + except httpx.ReadTimeout: + raise Exception("Waiting for message timed out") from None + + yield _asynchronously_received_messages + # Could wipe remaining messages here + + +@pytest.fixture(scope="session") +def send_via_echo(echo_client, echo_connection: EchoConnection): + async 
def _send_via_echo(message: dict): + await send_message.asyncio( + client=echo_client, + connection_id=echo_connection.connection_id, + json_body=SendMessageMessage.from_dict(message), + ) + + yield _send_via_echo @pytest.fixture(scope="session") From 780bb2ac301b683a3e4ddfc4637c33c98b765051 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 13:59:56 -0400 Subject: [PATCH 48/66] fix: minor fixes in basicmessage tests Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/tests/test_basicmessage.py | 172 ++++++++++++++------------------- 1 file changed, 70 insertions(+), 102 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 2cbe826c..5e479dfc 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -1,92 +1,100 @@ """Basic Message Tests""" import asyncio +from acapy_backchannel.models.send_message import SendMessage import pytest from aries_staticagent import StaticConnection, utils +from acapy_backchannel.api.basicmessage import send_basicmessage + + +@pytest.fixture +async def test_messages(connection, connection_id): + for i in range(6): + # This must be done by message or else the messages will not be recorded. + # await send_basicmessage.asyncio( + # client=backchannel, + # conn_id=connection_id, + # json_body=SendMessage(content="Test Message #{}".format(i)) + # ) + + # send_and_await_reply_async used instead of send_async to capture "sent" + # message so it doesn't clog up echo agent's queue + _ = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Test Message #{}".format(i), + }, + ) + + +@pytest.fixture(autouse=True) +async def clear_messages(connection): + yield + await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + }, + timeout=5, + ) @pytest.mark.asyncio -async def test_send(connection: StaticConnection, connection_id: str): +async def test_send( + connection: StaticConnection, connection_id: str, asynchronously_recevied_messages +): """Test send message""" - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Your hovercraft is full of eels.", - }, - return_route="all", - ), - timeout=60, - ) - recip_message = await asyncio.wait_for(future_recip_message, 60) + sent_message = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Your hovercraft is full of eels.", + }, + ) + [recip_message] = await asynchronously_recevied_messages() assert ( - recip_message["@type"] + sent_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" ) - assert recip_message["message"]["content"] == "Your hovercraft is full of eels." 
- # TODO add proper backchannel for clearing messages - await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", - } + assert ( + recip_message["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/basicmessage/1.0/message" ) + assert recip_message["content"] == "Your hovercraft is full of eels." @pytest.mark.asyncio async def test_new(connection: StaticConnection): """Test new message notification""" - new_response = await connection.send_and_await_reply_async( + # "new" message notifications are sent only over sessions. + # This call must be done as a send_and_await_reply_async + new_message = await connection.send_and_await_reply_async( { "@type": "https://didcomm.org/basicmessage/1.0/message", "~l10n": {"locale": "en"}, "sent_time": utils.timestamp(), "content": "Your hovercraft is full of eels.", }, - return_route="all", + timeout=10, ) assert ( - new_response["@type"] + new_message["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" ) - assert new_response["message"]["content"] == "Your hovercraft is full of eels." - # Delete messages to clear the state between tests - await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", - } - ) + assert new_message["message"]["content"] == "Your hovercraft is full of eels." @pytest.mark.asyncio async def test_get(connection: StaticConnection, connection_id: str): """Send multiple messages and verify that the proper count and content appears in messages list""" - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Are you suggesting coconuts migrate?", - }, - return_route="all", - ), - timeout=60, - ) - recip_message = await asyncio.wait_for(future_recip_message, 60) - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "'Tis but a flesh wound.", - }, - return_route="all", - ), - timeout=60, + test_content = ("Are you suggesting coconuts migrate?", "'Tis but a flesh wound.") + for content in test_content: + _ = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": content, + } ) - recip_message = await asyncio.wait_for(future_recip_message, 60) get_messages = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", @@ -97,35 +105,15 @@ async def test_get(connection: StaticConnection, connection_id: str): == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" ) assert get_messages["count"] == 2 - assert ( - get_messages["messages"][1]["content"] == "Are you suggesting coconuts migrate?" - ) - assert get_messages["messages"][0]["content"] == "'Tis but a flesh wound." 
- # Delete messages to clear the state between tests - await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", - } - ) + assert get_messages["messages"][1]["content"] == test_content[0] + assert get_messages["messages"][0]["content"] == test_content[1] @pytest.mark.asyncio -async def test_get_limit_offset(connection: StaticConnection, connection_id: str): +async def test_get_limit_offset( + connection: StaticConnection, connection_id: str, test_messages +): """Send multiple messages and verify that get returns the correct content according to the limit and offset""" - for i in range(6): - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Test Message #{}".format(i), - }, - return_route="all", - ), - timeout=60, - ) - recip_message = await asyncio.wait_for(future_recip_message, 60) get_messages = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", @@ -141,31 +129,11 @@ async def test_get_limit_offset(connection: StaticConnection, connection_id: str assert get_messages["messages"][0]["content"] == "Test Message #3" assert get_messages["messages"][1]["content"] == "Test Message #2" assert get_messages["messages"][2]["content"] == "Test Message #1" - # Delete messages to clear the state between tests - await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", - } - ) @pytest.mark.asyncio -async def test_delete(connection: StaticConnection, connection_id: str): +async def test_delete(connection: StaticConnection, connection_id: str, test_messages): """Send multiple messages, delete them, and verify that the messages count is zero""" - for i in range(6): - with connection.next() as future_recip_message: - sent_message = await asyncio.wait_for( - connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Test Message #{}".format(i), - }, - return_route="all", - ), - timeout=60, - ) - recip_message = await asyncio.wait_for(future_recip_message, 60) delete_message = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", From 8eecb1741b0ccc20c8cc230dde9786626cd1db0d Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 14:00:28 -0400 Subject: [PATCH 49/66] fix: admin connection conflicts in connection tests Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/tests/test_connections.py | 103 ++++++++++++++-------------------- 1 file changed, 41 insertions(+), 62 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 37d988ca..39d11f5f 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -1,9 +1,39 @@ """Connections Tests""" -import asyncio -import pytest from acapy_backchannel import Client -from acapy_backchannel.api.connection import delete_connection, get_connections +from acapy_backchannel.api.connection import ( + create_invitation, + delete_connection, + 
get_connections, + receive_invitation, +) +from acapy_backchannel.models.create_invitation_request import CreateInvitationRequest +from acapy_backchannel.models.receive_invitation_request import ReceiveInvitationRequest from aries_staticagent import Message +import pytest + + +@pytest.fixture +def new_connection(backchannel: Client, wait_for_message): + """Factory for new connections.""" + + async def _new_connection(): + lhs_conn = await create_invitation.asyncio( + client=backchannel, json_body=CreateInvitationRequest(), auto_accept="true" + ) + rhs_conn = await receive_invitation.asyncio( + client=backchannel, + json_body=ReceiveInvitationRequest.from_dict(lhs_conn.invitation.to_dict()), + auto_accept="true", + ) + + print(await get_connections.asyncio(client=backchannel)) + message = await wait_for_message( + msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" + ) + print(message) + return (lhs_conn.connection_id, rhs_conn.connection_id) + + yield _new_connection @pytest.fixture(autouse=True) @@ -26,7 +56,7 @@ async def test_create_connection(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", - "group": "admin", + "group": "default", "auto_accept": True, "multi_use": True, } @@ -34,7 +64,6 @@ async def test_create_connection(connection): invitation = await connection.send_and_await_reply_async( msg_invitation, condition=lambda reply: reply.thread["thid"] == msg_invitation.id, - return_route="all", ) msg_received = Message( { @@ -54,58 +83,10 @@ async def test_create_connection(connection): @pytest.mark.asyncio -async def test_get_list(connection): +async def test_get_list(connection, new_connection): """Create two connections and verify that their connection_ids are in connections list""" - msg_invitation = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "admin", - "auto_accept": True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async( - msg_invitation, - condition=lambda reply: reply.thread["thid"] == msg_invitation.id, - return_route="all", - ) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async( - msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id - ) - msg_invitation2 = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Second invitation I sent to Alice", - "label": "Bob", - "group": "admin", - "auto_accept": True, - "multi_use": True, - } - ) - invitation2 = await connection.send_and_await_reply_async( - msg_invitation2, - condition=lambda reply: reply.thread["thid"] == msg_invitation2.id, - return_route="all", - ) - msg_received2 = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation2["invitation_url"], - "auto_accept": True, - } - ) - received2 = await connection.send_and_await_reply_async( - msg_received2, condition=lambda reply: reply.thread["thid"] == msg_received2.id - ) + conn1 = await 
new_connection() + conn2 = await new_connection() get_list = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" @@ -115,10 +96,10 @@ async def test_get_list(connection): get_list["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" ) - assert received["connection_id"] in [ + assert conn1[0] in [ connection_item["connection_id"] for connection_item in get_list["connections"] ] - assert received2["connection_id"] in [ + assert conn2[0] in [ connection_item["connection_id"] for connection_item in get_list["connections"] ] @@ -131,7 +112,7 @@ async def test_update(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", - "group": "admin", + "group": "default", "auto_accept": True, "multi_use": True, } @@ -139,7 +120,6 @@ async def test_update(connection): invitation = await connection.send_and_await_reply_async( msg_invitation, condition=lambda reply: reply.thread["thid"] == msg_invitation.id, - return_route="all", ) msg_received = Message( { @@ -176,7 +156,7 @@ async def test_delete(connection): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", "alias": "Invitation I sent to Alice", "label": "Bob", - "group": "admin", + "group": "default", "auto_accept": True, "multi_use": True, } @@ -184,7 +164,6 @@ async def test_delete(connection): invitation = await connection.send_and_await_reply_async( invitation_msg, condition=lambda reply: reply.thread["thid"] == invitation_msg.id, - return_route="all", ) msg_received = Message( { From 40b5d94a502582f0b54f49ebb1fc9cb87e4c9ede Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Wed, 28 Jul 2021 14:06:50 -0400 Subject: [PATCH 50/66] feat: github workflow uses run tests Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- .github/workflows/push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index c5fc3030..b99051f1 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -81,7 +81,7 @@ jobs: - uses: actions/checkout@v2 - name: integration tests run: | - docker-compose -f int/docker-compose.yml up -d && docker attach juggernaut + docker-compose -f int/docker-compose.yml run tests - name: teardown run: | docker-compose -f int/docker-compose.yml down From 035e94aa02c93abd2ad9a880959a113f6c75a5af Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Thu, 29 Jul 2021 12:14:30 -0400 Subject: [PATCH 51/66] feat: use ASA-Py 0.9.0 pre-release Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/poetry.lock | 39 ++++++++++++++++++--------------------- int/pyproject.toml | 2 +- 2 files changed, 19 insertions(+), 22 deletions(-) diff --git a/int/poetry.lock b/int/poetry.lock index 1ac47cac..cdf8bf02 100644 --- a/int/poetry.lock +++ b/int/poetry.lock @@ -65,22 +65,19 @@ python-versions = "*" [[package]] name = "aries-staticagent" -version = "0.8.0" -description = "Python Static Agent Library and Examples for Aries" +version = "0.9.0rc2" +description = "Aries Static Agent library for Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.9,<4.0.0" [package.dependencies] -aiohttp = "*" -base58 = "*" -msgpack = "*" -pynacl = "*" -semver = "*" -sortedcontainers = "*" - -[package.extras] -test = ["coverage", 
"flake8", "pytest", "pytest-asyncio"] +aiohttp = ">=3.7.4,<4.0.0" +base58 = ">=2.1.0,<3.0.0" +msgpack = ">=1.0.2,<2.0.0" +PyNaCl = ">=1.4.0,<2.0.0" +semver = ">=2.13.0,<3.0.0" +sortedcontainers = ">=2.4.0,<3.0.0" [[package]] name = "async-generator" @@ -426,7 +423,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [[package]] name = "importlib-resources" -version = "5.2.0" +version = "5.2.1" description = "Read resources from Python packages" category = "dev" optional = false @@ -508,7 +505,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "platformdirs" -version = "2.1.0" +version = "2.2.0" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false @@ -846,7 +843,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6.9" -content-hash = "5317644905dc6f91087b0a73d7c5e193237617c91c362ffa6d47153f138f400e" +content-hash = "1a3210ce508020a4fd36e0572aaee7962acd2f2b40e1ba6fbd9a605bf175051b" [metadata.files] acapy-backchannel = [] @@ -898,8 +895,8 @@ appdirs = [ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] aries-staticagent = [ - {file = "aries-staticagent-0.8.0.tar.gz", hash = "sha256:0672d479ca64fc085f55012cdad68c43871c417f371f7669634d5300646c0ff9"}, - {file = "aries_staticagent-0.8.0-py3-none-any.whl", hash = "sha256:648fec4a630421df27a0db652d3ca3d0ccaca8b2eac06c61ccdfad4b8f19ec78"}, + {file = "aries-staticagent-0.9.0rc2.tar.gz", hash = "sha256:2e108df617c2ece130f5e69ac28f0ed4cef2fc945a99e1377e3ea518990ec359"}, + {file = "aries_staticagent-0.9.0rc2-py3-none-any.whl", hash = "sha256:86a54d040815d19f3396da8a4c040e7b3571c7ffa024ce43354008b21fb1dc55"}, ] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, @@ -1118,8 +1115,8 @@ importlib-metadata = [ {file = "importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, ] importlib-resources = [ - {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = "sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, - {file = "importlib_resources-5.2.0.tar.gz", hash = "sha256:22a2c42d8c6a1d30aa8a0e1f57293725bfd5c013d562585e46aff469e0ff78b3"}, + {file = "importlib_resources-5.2.1-py3-none-any.whl", hash = "sha256:b9a075a844a03e0fb0ab70e5b0ea138c92e9f07f3a21fc11a656cf9492dbf64f"}, + {file = "importlib_resources-5.2.1.tar.gz", hash = "sha256:c3789472eacdf3e20a4d746f7fe7cdf53c122e76f78813fbfb04d618e0152b6a"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1215,8 +1212,8 @@ pathspec = [ {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] platformdirs = [ - {file = "platformdirs-2.1.0-py3-none-any.whl", hash = "sha256:b2b30ae52404f93e2024e85bba29329b85715d6b2f18ffe90ecd25a5c67553df"}, - {file = "platformdirs-2.1.0.tar.gz", hash = "sha256:1964be5aba107a7ccb7de0e6f1f1bfde0dee51641f0e733028121f8e02e2e16b"}, + {file = "platformdirs-2.2.0-py3-none-any.whl", hash = "sha256:4666d822218db6a262bdfdc9c39d21f23b4cfdb08af331a81e92751daf6c866c"}, + {file = "platformdirs-2.2.0.tar.gz", hash = 
"sha256:632daad3ab546bd8e6af0537d09805cec458dce201bccfe23012df73332e181e"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, diff --git a/int/pyproject.toml b/int/pyproject.toml index cda46ee6..67bb1647 100644 --- a/int/pyproject.toml +++ b/int/pyproject.toml @@ -14,7 +14,7 @@ asynctest = "0.13.0" requests = "^2.25.1" acapy-backchannel = {path = "acapy-backchannel", develop = true} echo-agent-client = {path = "echo-agent/echo-agent-client", develop = true} -aries-staticagent = "^0.8.0" +aries-staticagent = ">=0.9.0rc2" aiohttp = "^3.7.4" [tool.poetry.dev-dependencies] From 8968433313e05e09a5ac911b0d87c3eb62492bc5 Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Thu, 29 Jul 2021 12:17:19 -0400 Subject: [PATCH 52/66] feat: use timeouts where appropriate Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- int/tests/conftest.py | 26 +++++++++++++++++-- int/tests/test_basicmessage.py | 2 +- int/tests/test_connections.py | 34 +++++-------------------- int/tests/test_credentialdefinitions.py | 12 ++++++--- int/tests/test_schemas.py | 6 +++-- 5 files changed, 44 insertions(+), 36 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 016e3b4b..9fc13966 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -4,7 +4,7 @@ import hashlib import logging import os -from typing import Iterator, Optional +from typing import Iterator, Optional, Union from acapy_backchannel import Client from acapy_backchannel.api.connection import ( @@ -23,6 +23,8 @@ from acapy_backchannel.models.conn_record import ConnRecord from acapy_backchannel.models.did import DID from aries_staticagent import StaticConnection, Target +from aries_staticagent.message import Message +from aries_staticagent.utils import http_send from echo_agent_client import Client as EchoClient from echo_agent_client.api.default import ( new_connection, @@ -154,11 +156,31 @@ def connection_id(conn_record: ConnRecord): yield conn_record.connection_id +class IntegrationTestConnection(StaticConnection): + async def send_and_await_reply_async( + self, + msg: Union[dict, Message], + *, + return_route: str = "all", + plaintext: bool = False, + anoncrypt: bool = False, + timeout: int = 1, + ) -> Message: + return await super().send_and_await_reply_async( + msg, + return_route=return_route, + plaintext=plaintext, + anoncrypt=anoncrypt, + timeout=timeout, + ) + + @pytest.fixture(scope="session") def connection(agent_connection: ConnectionStaticResult, suite_seed: str): """Yield static connection to agent under test.""" + # Create and yield static connection - yield StaticConnection.from_seed( + yield IntegrationTestConnection.from_seed( seed=suite_seed.encode("ascii"), target=Target( endpoint=agent_connection.my_endpoint, their_vk=agent_connection.my_verkey diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 5e479dfc..aecb59a7 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -67,7 +67,7 @@ async def test_new(connection: StaticConnection): """Test new message notification""" # "new" message notifications are sent only over sessions. 
# This call must be done as a send_and_await_reply_async - new_message = await connection.send_and_await_reply_async( + new_message = await connection.send_and_await_returned_async( { "@type": "https://didcomm.org/basicmessage/1.0/message", "~l10n": {"locale": "en"}, diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 39d11f5f..c02b5f6b 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -61,10 +61,7 @@ async def test_create_connection(connection): "multi_use": True, } ) - invitation = await connection.send_and_await_reply_async( - msg_invitation, - condition=lambda reply: reply.thread["thid"] == msg_invitation.id, - ) + invitation = await connection.send_and_await_reply_async(msg_invitation) msg_received = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", @@ -72,9 +69,7 @@ async def test_create_connection(connection): "auto_accept": True, } ) - received = await connection.send_and_await_reply_async( - msg_received, condition=lambda reply: reply.thread["thid"] == msg_received.id - ) + received = await connection.send_and_await_reply_async(msg_received) assert ( received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" @@ -117,10 +112,7 @@ async def test_update(connection): "multi_use": True, } ) - invitation = await connection.send_and_await_reply_async( - msg_invitation, - condition=lambda reply: reply.thread["thid"] == msg_invitation.id, - ) + invitation = await connection.send_and_await_reply_async(msg_invitation) msg_received = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", @@ -128,10 +120,7 @@ async def test_update(connection): "auto_accept": True, } ) - received = await connection.send_and_await_reply_async( - msg_received, - condition=lambda reply: reply.thread["thid"] == msg_received.id, - ) + received = await connection.send_and_await_reply_async(msg_received) msg_update = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", @@ -140,10 +129,7 @@ async def test_update(connection): "role": "Updated role", } ) - update = await connection.send_and_await_reply_async( - msg_update, - condition=lambda reply: reply.thread["thid"] == msg_update.id, - ) + update = await connection.send_and_await_reply_async(msg_update) assert update["label"] == "Updated label" @@ -161,10 +147,7 @@ async def test_delete(connection): "multi_use": True, } ) - invitation = await connection.send_and_await_reply_async( - invitation_msg, - condition=lambda reply: reply.thread["thid"] == invitation_msg.id, - ) + invitation = await connection.send_and_await_reply_async(invitation_msg) msg_received = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", @@ -172,10 +155,7 @@ async def test_delete(connection): "auto_accept": True, } ) - received = await connection.send_and_await_reply_async( - msg_received, - condition=lambda reply: reply.thread["thid"] == msg_received.id, - ) + received = await connection.send_and_await_reply_async(msg_received) delete_connection = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", diff --git a/int/tests/test_credentialdefinitions.py b/int/tests/test_credentialdefinitions.py index 
86d9f307..a9a1f029 100644 --- a/int/tests/test_credentialdefinitions.py +++ b/int/tests/test_credentialdefinitions.py @@ -33,7 +33,8 @@ async def test_send_cred_def(connection, endorser_did, create_schema): "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, - } + }, + timeout=30 ) assert ( send_cred_def["@type"] @@ -50,7 +51,8 @@ async def test_cred_def_get(connection, endorser_did, create_schema): "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, - } + }, + timeout=30 ) cred_def_get = await connection.send_and_await_reply_async( { @@ -76,7 +78,8 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", "schema_id": schema1_2.sent.schema_id, "~transport": {"return_route": "all"}, - } + }, + timeout=30 ) schema1_3 = await create_schema(version="1.3") send_schema1_3 = await connection.send_and_await_reply_async( @@ -84,7 +87,8 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-credential-definitions/0.1/send-credential-definition", "schema_id": schema1_3.sent.schema_id, "~transport": {"return_route": "all"}, - } + }, + timeout=30 ) cred_def_get_list = await connection.send_and_await_reply_async( { diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index 0b0f4aa2..b9e8491d 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -32,7 +32,8 @@ async def test_schema_get(connection, endorser_did): "schema_version": "2.0", "attributes": ["attr_2_0", "attr_2_1", "attr_2_2"], "return_route": "all", - } + }, + timeout=30 ) schema_get = await connection.send_and_await_reply_async( { @@ -61,7 +62,8 @@ async def test_schema_get_list(connection, endorser_did): "schema_version": "3.0", "attributes": ["attr_3_0", "attr_3_1", "attr_3_2"], "return_route": "all", - } + }, + timeout=30 ) schema_get_list = await connection.send_and_await_reply_async( { From a0f886a68ea0d9cc3ed633c5db7412cf2b361dfb Mon Sep 17 00:00:00 2001 From: Daniel Bluhm Date: Thu, 29 Jul 2021 12:18:08 -0400 Subject: [PATCH 53/66] fix: rearrange logic in holder test and use echo agent Signed-off-by: Daniel Bluhm Signed-off-by: Char Howland --- .../holder/v0_1/messages/cred_offer_accept.py | 1 + int/tests/test_holder.py | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/acapy_plugin_toolbox/holder/v0_1/messages/cred_offer_accept.py b/acapy_plugin_toolbox/holder/v0_1/messages/cred_offer_accept.py index d50d988e..5de9edd3 100644 --- a/acapy_plugin_toolbox/holder/v0_1/messages/cred_offer_accept.py +++ b/acapy_plugin_toolbox/holder/v0_1/messages/cred_offer_accept.py @@ -70,6 +70,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) sent = CredRequestSent(record=cred_ex_record) + sent.assign_thread_from(self) await responder.send(credential_request_message, connection_id=connection_id) await responder.send_reply(sent) diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index daa4861f..adafe464 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -94,6 +94,7 @@ async def test_holder_credential_exchange( endorser_did, create_schema, create_cred_def, + wait_for_message 
): connected = issuer_holder_connection cred_def = await create_cred_def(version="1.0") @@ -116,17 +117,21 @@ async def test_holder_credential_exchange( ), timeout=60, ) + credential_offer_received = await wait_for_message( + msg_type="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" + ) issue_result = cast(V10CredentialExchange, issue_result) credential_offer_accept = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-accept", - "credential_exchange_id": issue_result.credential_exchange_id, + "credential_exchange_id": credential_offer_received["credential_exchange_id"], } ) assert ( credential_offer_accept["@type"] - == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-request-sent" ) + # TODO verify holder "agent" has a credential matching the accepted one # @pytest.mark.asyncio From 52231458e496ed961570cf33ac721650d88fbd50 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Fri, 30 Jul 2021 17:06:57 -0700 Subject: [PATCH 54/66] chore: add assign_thread_from() to fix cred-get-list handler error Signed-off-by: Char Howland --- acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py | 1 + 1 file changed, 1 insertion(+) diff --git a/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py b/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py index 610219f4..0f00964e 100644 --- a/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py +++ b/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py @@ -71,4 +71,5 @@ async def handle(self, context: RequestContext, responder: BaseResponder): cred_list = CredList( results=[credential.serialize() for credential in credentials], page=page ) + cred_list.assign_thread_from(self) await responder.send_reply(cred_list) From fad416031cf2b76d2b6488e086cba8d2cf59b31b Mon Sep 17 00:00:00 2001 From: Char Howland Date: Fri, 30 Jul 2021 17:10:21 -0700 Subject: [PATCH 55/66] fix: asynchronously_received_messages() spelling Signed-off-by: Char Howland --- int/tests/conftest.py | 2 +- int/tests/test_basicmessage.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 9fc13966..50b2ea5b 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -206,7 +206,7 @@ async def echo_connection(echo_client, suite_seed, agent_connection): @pytest.fixture(scope="session") -def asynchronously_recevied_messages( +def asynchronously_received_messages( echo_client: EchoClient, echo_connection: EchoConnection ): """Get asynchronously recevied messages from the echo agent.""" diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index aecb59a7..2399e4da 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -40,7 +40,7 @@ async def clear_messages(connection): @pytest.mark.asyncio async def test_send( - connection: StaticConnection, connection_id: str, asynchronously_recevied_messages + connection: StaticConnection, connection_id: str, asynchronously_received_messages ): """Test send message""" sent_message = await connection.send_and_await_reply_async( @@ -50,7 +50,7 @@ async def test_send( "content": "Your hovercraft is full of eels.", }, ) - [recip_message] = await asynchronously_recevied_messages() + [recip_message] = await asynchronously_received_messages() assert ( sent_message["@type"] == 
"https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" From 537e0b2204cb72d7b8942758a308f01b44f19c4c Mon Sep 17 00:00:00 2001 From: Char Howland Date: Fri, 30 Jul 2021 17:14:00 -0700 Subject: [PATCH 56/66] fix: add asynchronously_received_messages() fixture to clear pending messages Signed-off-by: Char Howland --- int/tests/test_connections.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index c02b5f6b..8433d2df 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -13,10 +13,13 @@ @pytest.fixture -def new_connection(backchannel: Client, wait_for_message): +def new_connection( + backchannel: Client, wait_for_message, asynchronously_received_messages +): """Factory for new connections.""" async def _new_connection(): + await asynchronously_received_messages() lhs_conn = await create_invitation.asyncio( client=backchannel, json_body=CreateInvitationRequest(), auto_accept="true" ) From 98ebc2ea0e59df00104a59c6ee090be85888ae97 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Mon, 2 Aug 2021 12:31:57 -0700 Subject: [PATCH 57/66] chore: verify credential_exchange_id present in credential records Signed-off-by: Char Howland --- .../holder/v0_1/messages/cred_get_list.py | 2 +- int/tests/test_holder.py | 56 ++++++++++++++----- 2 files changed, 42 insertions(+), 16 deletions(-) diff --git a/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py b/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py index 0f00964e..c9704d64 100644 --- a/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py +++ b/acapy_plugin_toolbox/holder/v0_1/messages/cred_get_list.py @@ -71,5 +71,5 @@ async def handle(self, context: RequestContext, responder: BaseResponder): cred_list = CredList( results=[credential.serialize() for credential in credentials], page=page ) - cred_list.assign_thread_from(self) + cred_list.assign_thread_from(context.message) # self await responder.send_reply(cred_list) diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index adafe464..865b1f7b 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -25,6 +25,7 @@ from acapy_backchannel.api.schema import publish_schema from acapy_backchannel.api.credential_definition import publish_cred_def from acapy_backchannel.api.issue_credential_v10 import issue_credential_automated +from acapy_backchannel.api.issue_credential_v10 import get_issue_credential_records @pytest.fixture(scope="module") @@ -94,7 +95,7 @@ async def test_holder_credential_exchange( endorser_did, create_schema, create_cred_def, - wait_for_message + wait_for_message, ): connected = issuer_holder_connection cred_def = await create_cred_def(version="1.0") @@ -105,9 +106,9 @@ async def test_holder_credential_exchange( connection_id=connected[1].connection_id, credential_proposal=CredentialPreview( [ - CredAttrSpec(name="attr_1_0", value="test_0"), - CredAttrSpec(name="attr_1_1", value="test_1"), - CredAttrSpec(name="attr_1_2", value="test_2"), + CredAttrSpec(name="attr_1_0", value="Test 1"), + CredAttrSpec(name="attr_1_1", value="Test 2"), + CredAttrSpec(name="attr_1_2", value="Test 3"), ] ), cred_def_id=cred_def.additional_properties["sent"][ @@ -120,25 +121,50 @@ async def test_holder_credential_exchange( credential_offer_received = await wait_for_message( msg_type="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-received" ) - issue_result = cast(V10CredentialExchange, 
issue_result) + issue_result = cast(V10CredentialExchange(auto_issue="true"), issue_result) credential_offer_accept = await connection.send_and_await_reply_async( { "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-offer-accept", - "credential_exchange_id": credential_offer_received["credential_exchange_id"], + "credential_exchange_id": credential_offer_received[ + "credential_exchange_id" + ], } ) assert ( credential_offer_accept["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-request-sent" ) - # TODO verify holder "agent" has a credential matching the accepted one + credential_received = await wait_for_message( + msg_type="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-received" + ) + records = await asyncio.wait_for( + get_issue_credential_records.asyncio( + client=backchannel, + ), + timeout=20, + ) + assert credential_received["credential_exchange_id"] in [ + record.credential_exchange_id for record in records.results + ] -# @pytest.mark.asyncio -# async def test_credentials_get_list(connection, endorser_did, create_schema, create_cred_def): -# credentials_get_list = await connection.send_and_await_reply_async( -# { -# "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-get-list", -# } -# ) -# assert credentials_get_list["@type"] == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-list" +@pytest.mark.asyncio +async def test_credentials_get_list( + backchannel: Client, + connection, + issuer_holder_connection, + endorser_did, + create_schema, + create_cred_def, + wait_for_message, +): + credentials_get_list = await connection.send_and_await_reply_async( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-get-list", + } + ) + assert ( + credentials_get_list["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-list" + ) + # TODO create fixture for credential issuance to retrieve credentials from get-list protocol From 3cb5be9f9f36f3cde7574e2e04122ec58f151571 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:00:36 -0500 Subject: [PATCH 58/66] style: reformatting with black Signed-off-by: Char Howland --- acapy_plugin_toolbox/basicmessage.py | 11 ++-------- acapy_plugin_toolbox/connections.py | 21 ++++--------------- .../credential_definitions.py | 7 +------ .../holder/v0_1/messages/pres_delete.py | 4 +--- acapy_plugin_toolbox/issuer.py | 7 ++----- acapy_plugin_toolbox/payments.py | 8 ++----- acapy_plugin_toolbox/routing.py | 10 ++------- acapy_plugin_toolbox/static_connections.py | 6 ++---- acapy_plugin_toolbox/taa.py | 6 ++---- int/tests/conftest.py | 11 ++-------- int/tests/test_basicmessage.py | 12 +++++------ int/tests/test_credentialdefinitions.py | 8 +++---- int/tests/test_holder.py | 15 ++++--------- int/tests/test_schemas.py | 4 ++-- tests/holder/conftest.py | 4 +--- tests/holder/test_pres_approve.py | 3 +-- tests/holder/test_send_pres_proposal.py | 5 +---- 17 files changed, 39 insertions(+), 103 deletions(-) diff --git a/acapy_plugin_toolbox/basicmessage.py b/acapy_plugin_toolbox/basicmessage.py index eda63eae..e67da76d 100644 --- a/acapy_plugin_toolbox/basicmessage.py +++ b/acapy_plugin_toolbox/basicmessage.py @@ -119,12 +119,7 @@ def record_value(self) -> dict: """Get record value.""" return { prop: getattr(self, prop) - for prop in ( - "content", - "locale", - "sent_time", - "state", - ) + for prop in ("content", "locale", "sent_time", "state") } @property @@ -214,9 +209,7 @@ async def 
handle(self, context: RequestContext, responder: BaseResponder): tag_filter = dict( filter( lambda item: item[1] is not None, - { - "connection_id": context.message.connection_id, - }.items(), + {"connection_id": context.message.connection_id}.items(), ) ) msgs = sorted( diff --git a/acapy_plugin_toolbox/connections.py b/acapy_plugin_toolbox/connections.py index a581c8a0..c1b93d51 100644 --- a/acapy_plugin_toolbox/connections.py +++ b/acapy_plugin_toolbox/connections.py @@ -75,9 +75,7 @@ async def connections_event_handler(profile: Profile, event: Event): responder = profile.inject(BaseResponder) async with profile.session() as session: await send_to_admins( - session, - Connected(**conn_record_to_message_repr(record)), - responder, + session, Connected(**conn_record_to_message_repr(record)), responder ) @@ -138,14 +136,7 @@ def _state_map(state: str) -> str: schema={ "my_did": fields.Str(required=False), "state": fields.Str( - validate=validate.OneOf( - [ - "pending", - "active", - "error", - ] - ), - required=False, + validate=validate.OneOf(["pending", "active", "error"]), required=False ), "their_did": fields.Str(required=False), }, @@ -234,18 +225,14 @@ async def handle(self, context: RequestContext, responder: BaseResponder): name="Delete", handler="acapy_plugin_toolbox.connections.DeleteHandler", msg_type=DELETE, - schema={ - "connection_id": fields.Str(required=True), - }, + schema={"connection_id": fields.Str(required=True)}, ) Deleted, DeletedSchema = generate_model_schema( name="Deleted", handler="acapy_plugin_toolbox.util.PassHandler", msg_type=DELETED, - schema={ - "connection_id": fields.Str(required=True), - }, + schema={"connection_id": fields.Str(required=True)}, ) diff --git a/acapy_plugin_toolbox/credential_definitions.py b/acapy_plugin_toolbox/credential_definitions.py index d4095bb2..ad12f08b 100644 --- a/acapy_plugin_toolbox/credential_definitions.py +++ b/acapy_plugin_toolbox/credential_definitions.py @@ -102,12 +102,7 @@ def record_tags(self) -> dict: """Get tags for record.""" return { prop: getattr(self, prop) - for prop in ( - "cred_def_id", - "schema_id", - "state", - "author", - ) + for prop in ("cred_def_id", "schema_id", "state", "author") } @classmethod diff --git a/acapy_plugin_toolbox/holder/v0_1/messages/pres_delete.py b/acapy_plugin_toolbox/holder/v0_1/messages/pres_delete.py index 4f102b0b..58ba3dae 100644 --- a/acapy_plugin_toolbox/holder/v0_1/messages/pres_delete.py +++ b/acapy_plugin_toolbox/holder/v0_1/messages/pres_delete.py @@ -50,8 +50,6 @@ async def handle(self, context: RequestContext, responder: BaseResponder): await pres_ex_record.delete_record(session) - message = PresDeleted( - presentation_exchange_id=self.presentation_exchange_id, - ) + message = PresDeleted(presentation_exchange_id=self.presentation_exchange_id) message.assign_thread_from(self) await responder.send_reply(message) diff --git a/acapy_plugin_toolbox/issuer.py b/acapy_plugin_toolbox/issuer.py index a4a1610f..c8cac67b 100644 --- a/acapy_plugin_toolbox/issuer.py +++ b/acapy_plugin_toolbox/issuer.py @@ -185,7 +185,7 @@ def __init__( connection_id: UUID, proof_request: IndyProofRequest, comment: str = None, - **kwargs + **kwargs, ): """Initialize message.""" super().__init__(**kwargs) @@ -270,10 +270,7 @@ class CredList(AdminIssuerMessage): class Fields: results = fields.List( - fields.Dict(), - required=True, - description="List of credentials", - example=[], + fields.Dict(), required=True, description="List of credentials", example=[] ) diff --git 
a/acapy_plugin_toolbox/payments.py b/acapy_plugin_toolbox/payments.py index f5c2b395..f126891c 100644 --- a/acapy_plugin_toolbox/payments.py +++ b/acapy_plugin_toolbox/payments.py @@ -290,9 +290,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): return address = Address( - address=address_str, - method=SOV_METHOD, - balance=sovatoms_to_tokens(balance), + address=address_str, method=SOV_METHOD, balance=sovatoms_to_tokens(balance) ) address.assign_thread_from(context.message) await responder.send_reply(address) @@ -341,9 +339,7 @@ async def fetch_transfer_auth(ledger: BaseLedger): ) if ledger.cache: await ledger.cache.set( - ["admin-payments::xfer_auth"], - xfer_auth_fee, - ledger.cache_duration, + ["admin-payments::xfer_auth"], xfer_auth_fee, ledger.cache_duration ) return xfer_auth_fee diff --git a/acapy_plugin_toolbox/routing.py b/acapy_plugin_toolbox/routing.py index 60a6c182..cc304b83 100644 --- a/acapy_plugin_toolbox/routing.py +++ b/acapy_plugin_toolbox/routing.py @@ -143,10 +143,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): _record, request = await manager.prepare_request(connection.connection_id) # Send mediation request - await responder.send( - request, - connection_id=connection.connection_id, - ) + await responder.send(request, connection_id=connection.connection_id) # Send notification of mediation request sent sent = MediationRequestSent(connection_id=connection.connection_id) @@ -195,10 +192,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): context.message.verkey, context.message.connection_id ) - await responder.send( - update, - connection_id=context.message.connection_id, - ) + await responder.send(update, connection_id=context.message.connection_id) sent = KeylistUpdateSent( connection_id=context.message.connection_id, diff --git a/acapy_plugin_toolbox/static_connections.py b/acapy_plugin_toolbox/static_connections.py index ba0a3e06..fdc33a32 100644 --- a/acapy_plugin_toolbox/static_connections.py +++ b/acapy_plugin_toolbox/static_connections.py @@ -94,8 +94,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): # Make our info for the connection my_info = await wallet.create_local_did( - method=DIDMethod.SOV, - key_type=KeyType.ED25519, + method=DIDMethod.SOV, key_type=KeyType.ED25519 ) # Create connection record @@ -150,8 +149,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): msg_type=STATIC_CONNECTION_GET_LIST, schema={ "initiator": fields.Str( - validate=validate.OneOf(["self", "external"]), - required=False, + validate=validate.OneOf(["self", "external"]), required=False ), "invitation_key": fields.Str(required=False), "my_did": fields.Str(required=False), diff --git a/acapy_plugin_toolbox/taa.py b/acapy_plugin_toolbox/taa.py index 40bd986b..39ee5153 100644 --- a/acapy_plugin_toolbox/taa.py +++ b/acapy_plugin_toolbox/taa.py @@ -52,8 +52,7 @@ async def setup(session: ProfileSession, protocol_registry: ProblemReport = None msg_type=TAA, schema={ "version": fields.Str( - required=True, - description="Version of Transaction Author Agreement", + required=True, description="Version of Transaction Author Agreement" ), "text": fields.Str( required=True, description="Transaction Author Agreement text" @@ -185,8 +184,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): "version": fields.Str(required=False, description="Version of accepted TAA."), "time": fields.Str(required=False, description="Time of 
acceptance."), "mechanism": fields.Str( - required=False, - description="The mechanism used to accept the TAA.", + required=False, description="The mechanism used to accept the TAA." ), }, ) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 50b2ea5b..ef8fd0d9 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -295,11 +295,7 @@ async def endorser_did(make_did, backchannel, accepted_taa): LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( url="https://selfserve.indiciotech.io/nym", - json={ - "network": "testnet", - "did": did.did, - "verkey": did.verkey, - }, + json={"network": "testnet", "did": did.did, "verkey": did.verkey}, timeout=15, ) if response.is_error: @@ -307,9 +303,6 @@ async def endorser_did(make_did, backchannel, accepted_taa): LOGGER.info("DID Published") backchannel.timeout = 15 - result = await set_public_did.asyncio_detailed( - client=backchannel, - did=did.did, - ) + result = await set_public_did.asyncio_detailed(client=backchannel, did=did.did) assert result.status_code == 200 yield did diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 2399e4da..98032955 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -23,7 +23,7 @@ async def test_messages(connection, connection_id): "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, "content": "Test Message #{}".format(i), - }, + } ) @@ -32,7 +32,7 @@ async def clear_messages(connection): yield await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" }, timeout=5, ) @@ -48,7 +48,7 @@ async def test_send( "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", "connection_id": connection_id, "content": "Your hovercraft is full of eels.", - }, + } ) [recip_message] = await asynchronously_received_messages() assert ( @@ -97,7 +97,7 @@ async def test_get(connection: StaticConnection, connection_id: str): ) get_messages = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" } ) assert ( @@ -136,12 +136,12 @@ async def test_delete(connection: StaticConnection, connection_id: str, test_mes """Send multiple messages, delete them, and verify that the messages count is zero""" delete_message = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" } ) get_messages = await connection.send_and_await_reply_async( { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" } ) assert ( diff --git a/int/tests/test_credentialdefinitions.py b/int/tests/test_credentialdefinitions.py index a9a1f029..da5c75c4 100644 --- a/int/tests/test_credentialdefinitions.py +++ b/int/tests/test_credentialdefinitions.py @@ -34,7 +34,7 @@ async def 
test_send_cred_def(connection, endorser_did, create_schema): "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, }, - timeout=30 + timeout=30, ) assert ( send_cred_def["@type"] @@ -52,7 +52,7 @@ async def test_cred_def_get(connection, endorser_did, create_schema): "schema_id": schema.sent.schema_id, "~transport": {"return_route": "all"}, }, - timeout=30 + timeout=30, ) cred_def_get = await connection.send_and_await_reply_async( { @@ -79,7 +79,7 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): "schema_id": schema1_2.sent.schema_id, "~transport": {"return_route": "all"}, }, - timeout=30 + timeout=30, ) schema1_3 = await create_schema(version="1.3") send_schema1_3 = await connection.send_and_await_reply_async( @@ -88,7 +88,7 @@ async def test_cred_def_get_list(connection, endorser_did, create_schema): "schema_id": schema1_3.sent.schema_id, "~transport": {"return_route": "all"}, }, - timeout=30 + timeout=30, ) cred_def_get_list = await connection.send_and_await_reply_async( { diff --git a/int/tests/test_holder.py b/int/tests/test_holder.py index 865b1f7b..88a89cf1 100644 --- a/int/tests/test_holder.py +++ b/int/tests/test_holder.py @@ -32,9 +32,7 @@ async def issuer_holder_connection(backchannel: Client, connection): """Invitation creation fixture""" invitation_created = await create_invitation.asyncio( - client=backchannel, - json_body=CreateInvitationRequest(), - auto_accept="true", + client=backchannel, json_body=CreateInvitationRequest(), auto_accept="true" ) connection_created = await receive_invitation.asyncio( client=backchannel, @@ -79,9 +77,7 @@ async def _create_cred_def(version): backchannel.timeout = 30 return await publish_cred_def.asyncio( client=backchannel, - json_body=CredentialDefinitionSendRequest( - schema_id=schema.sent.schema_id, - ), + json_body=CredentialDefinitionSendRequest(schema_id=schema.sent.schema_id), ) yield _create_cred_def @@ -138,10 +134,7 @@ async def test_holder_credential_exchange( msg_type="did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credential-received" ) records = await asyncio.wait_for( - get_issue_credential_records.asyncio( - client=backchannel, - ), - timeout=20, + get_issue_credential_records.asyncio(client=backchannel), timeout=20 ) assert credential_received["credential_exchange_id"] in [ record.credential_exchange_id for record in records.results @@ -160,7 +153,7 @@ async def test_credentials_get_list( ): credentials_get_list = await connection.send_and_await_reply_async( { - "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-get-list", + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/admin-holder/0.1/credentials-get-list" } ) assert ( diff --git a/int/tests/test_schemas.py b/int/tests/test_schemas.py index b9e8491d..a9b9c8fd 100644 --- a/int/tests/test_schemas.py +++ b/int/tests/test_schemas.py @@ -33,7 +33,7 @@ async def test_schema_get(connection, endorser_did): "attributes": ["attr_2_0", "attr_2_1", "attr_2_2"], "return_route": "all", }, - timeout=30 + timeout=30, ) schema_get = await connection.send_and_await_reply_async( { @@ -63,7 +63,7 @@ async def test_schema_get_list(connection, endorser_did): "attributes": ["attr_3_0", "attr_3_1", "attr_3_2"], "return_route": "all", }, - timeout=30 + timeout=30, ) schema_get_list = await connection.send_and_await_reply_async( { diff --git a/tests/holder/conftest.py b/tests/holder/conftest.py index 8a0fb748..c7080c6e 100644 --- a/tests/holder/conftest.py +++ b/tests/holder/conftest.py @@ -18,9 +18,7 @@ def 
_mock_record_query(obj, result=None, spec=None): with mock.patch.object( obj, "query", - mock.CoroutineMock( - return_value=result or mock.MagicMock(spec=spec), - ), + mock.CoroutineMock(return_value=result or mock.MagicMock(spec=spec)), ) as record_query: yield record_query diff --git a/tests/holder/test_pres_approve.py b/tests/holder/test_pres_approve.py index c3dbeb1d..f60e6716 100644 --- a/tests/holder/test_pres_approve.py +++ b/tests/holder/test_pres_approve.py @@ -43,8 +43,7 @@ def context(context, message): @pytest.fixture def record(): yield PresExRecord( - presentation_exchange_id=TEST_PRES_EX_ID, - connection_id=TEST_CONN_ID, + presentation_exchange_id=TEST_PRES_EX_ID, connection_id=TEST_CONN_ID ) diff --git a/tests/holder/test_send_pres_proposal.py b/tests/holder/test_send_pres_proposal.py index 03fe7cc8..b9c1ec9c 100644 --- a/tests/holder/test_send_pres_proposal.py +++ b/tests/holder/test_send_pres_proposal.py @@ -12,10 +12,7 @@ TEST_CONN_ID = "test-connection-id" TEST_PROPOSAL = IndyPresPreview( - attributes=[ - IndyPresAttrSpec(name="test-proposal"), - ], - predicates=[], + attributes=[IndyPresAttrSpec(name="test-proposal")], predicates=[] ) TEST_COMMENT = "test-comment" From 3840b8a9f0462e6aa603f7da8aa7887bc517b173 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:13:57 -0500 Subject: [PATCH 59/66] fix: accept_taa timed client Co-authored-by: Daniel Bluhm Signed-off-by: Char Howland --- int/tests/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index ef8fd0d9..0be1e18c 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -275,8 +275,7 @@ async def _make_did(): @pytest.fixture(scope="session") async def accepted_taa(backchannel): - backchannel.timeout = 15 - result = (await fetch_taa.asyncio(client=backchannel)).result + result = (await fetch_taa.asyncio(client=backchannel.with_timeout(15))).result result = await accept_taa.asyncio( client=backchannel, json_body=TAAAccept( From e4566dd423b7304699ce4a97339cb0e5cb5ace06 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:22:19 -0500 Subject: [PATCH 60/66] fix: poetry updates Signed-off-by: Char Howland --- int/poetry.lock | 176 ++++++++++++++++++++++++++---------------------- 1 file changed, 94 insertions(+), 82 deletions(-) diff --git a/int/poetry.lock b/int/poetry.lock index cdf8bf02..efc17d3a 100644 --- a/int/poetry.lock +++ b/int/poetry.lock @@ -213,7 +213,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "charset-normalizer" -version = "2.0.3" +version = "2.0.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -365,7 +365,7 @@ http2 = ["h2 (>=3.0.0,<4.0.0)"] [[package]] name = "identify" -version = "2.2.11" +version = "2.2.13" description = "File identification library for Python" category = "dev" optional = false @@ -395,18 +395,21 @@ idna = ">=2.0" [[package]] name = "immutables" -version = "0.15" +version = "0.16" description = "Immutable Collections" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [package.extras] -test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)"] +test = ["flake8 (>=3.8.4,<3.9.0)", "pycodestyle (>=2.6.0,<2.7.0)", "mypy (>=0.910)", "pytest (>=6.2.4,<6.3.0)"] [[package]] name = "importlib-metadata" -version = "4.6.1" +version = "4.6.4" description = "Read metadata from Python packages" category = "main" optional = false @@ -423,7 +426,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [[package]] name = "importlib-resources" -version = "5.2.1" +version = "5.2.2" description = "Read resources from Python packages" category = "dev" optional = false @@ -531,7 +534,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "pre-commit" -version = "2.13.0" +version = "2.14.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -676,7 +679,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "regex" -version = "2021.7.6" +version = "2021.8.3" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -759,7 +762,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tomli" -version = "1.1.0" +version = "1.2.1" description = "A lil' TOML parser" category = "dev" optional = false @@ -796,7 +799,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.6.0" +version = "20.7.2" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -813,7 +816,7 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "yarl" @@ -942,6 +945,11 @@ cffi = [ {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, + {file = "cffi-1.14.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5"}, + {file = "cffi-1.14.6-cp35-cp35m-win32.whl", hash = "sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca"}, + {file = "cffi-1.14.6-cp35-cp35m-win_amd64.whl", hash = "sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218"}, {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, {file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, @@ -985,8 +993,8 @@ chardet = [ {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.3.tar.gz", hash = "sha256:c46c3ace2d744cfbdebceaa3c19ae691f53ae621b39fd7570f59d14fb7f2fd12"}, - {file = "charset_normalizer-2.0.3-py3-none-any.whl", hash = "sha256:88fce3fa5b1a84fdcb3f603d889f723d1dd89b26059d0123ca435570e848d5e1"}, + {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, + {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, ] click = [ {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, @@ -1083,8 +1091,8 @@ httpx = [ {file = "httpx-0.18.2.tar.gz", hash = "sha256:9f99c15d33642d38bce8405df088c1c4cfd940284b4290cacbfb02e64f4877c6"}, ] identify = [ - {file = "identify-2.2.11-py2.py3-none-any.whl", hash = "sha256:7abaecbb414e385752e8ce02d8c494f4fbc780c975074b46172598a28f1ab839"}, - {file = "identify-2.2.11.tar.gz", hash = "sha256:a0e700637abcbd1caae58e0463861250095dfe330a8371733a471af706a4a29a"}, + {file = "identify-2.2.13-py2.py3-none-any.whl", hash = "sha256:7199679b5be13a6b40e6e19ea473e789b11b4e3b60986499b1f589ffb03c217c"}, + {file = "identify-2.2.13.tar.gz", hash = "sha256:7bc6e829392bd017236531963d2d937d66fc27cadc643ac0aba2ce9f26157c79"}, ] idna = [ {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, @@ -1094,29 +1102,41 @@ idna-ssl = [ {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, ] immutables = [ - {file = "immutables-0.15-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:6728f4392e3e8e64b593a5a0cd910a1278f07f879795517e09f308daed138631"}, - {file = "immutables-0.15-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f0836cd3bdc37c8a77b192bbe5f41dbcc3ce654db048ebbba89bdfe6db7a1c7a"}, - {file = "immutables-0.15-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:8703d8abfd8687932f2a05f38e7de270c3a6ca3bd1c1efb3c938656b3f2f985a"}, - {file = "immutables-0.15-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b8ad986f9b532c026f19585289384b0769188fcb68b37c7f0bd0df9092a6ca54"}, - {file = "immutables-0.15-cp36-cp36m-win_amd64.whl", hash = "sha256:6f117d9206165b9dab8fd81c5129db757d1a044953f438654236ed9a7a4224ae"}, - {file = "immutables-0.15-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:b75ade826920c4e490b1bb14cf967ac14e61eb7c5562161c5d7337d61962c226"}, - {file = "immutables-0.15-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b7e13c061785e34f73c4f659861f1b3e4a5fd918e4395c84b21c4e3d449ebe27"}, - {file = "immutables-0.15-cp37-cp37m-win_amd64.whl", hash = "sha256:3035849accee4f4e510ed7c94366a40e0f5fef9069fbe04a35f4787b13610a4a"}, - {file = "immutables-0.15-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b04fa69174e0c8f815f9c55f2a43fc9e5a68452fab459a08e904a74e8471639f"}, - {file = "immutables-0.15-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:141c2e9ea515a3a815007a429f0b47a578ebeb42c831edaec882a245a35fffca"}, - {file = "immutables-0.15-cp38-cp38-win_amd64.whl", hash = "sha256:cbe8c64640637faa5535d539421b293327f119c31507c33ca880bd4f16035eb6"}, - {file = "immutables-0.15-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a0a4e4417d5ef4812d7f99470cd39347b58cb927365dd2b8da9161040d260db0"}, - {file = "immutables-0.15-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3b15c08c71c59e5b7c2470ef949d49ff9f4263bb77f488422eaa157da84d6999"}, - {file = "immutables-0.15-cp39-cp39-win_amd64.whl", hash = "sha256:2283a93c151566e6830aee0e5bee55fc273455503b43aa004356b50f9182092b"}, - {file = "immutables-0.15.tar.gz", hash = "sha256:3713ab1ebbb6946b7ce1387bb9d1d7f5e09c45add58c2a2ee65f963c171e746b"}, + {file = "immutables-0.16-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:acbfa79d44228d96296279068441f980dc63dbed52522d9227ff9f4d96c6627e"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c9ed003eacb92e630ef200e31f47236c2139b39476894f7963b32bd39bafa3"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a396314b9024fa55bf83a27813fd76cf9f27dce51f53b0f19b51de035146251"}, + {file = "immutables-0.16-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4a2a71678348fb95b13ca108d447f559a754c41b47bd1e7e4fb23974e735682d"}, + {file = "immutables-0.16-cp36-cp36m-win32.whl", hash = "sha256:064001638ab5d36f6aa05b6101446f4a5793fb71e522bc81b8fc65a1894266ff"}, + {file = "immutables-0.16-cp36-cp36m-win_amd64.whl", hash = "sha256:1de393f1b188740ca7b38f946f2bbc7edf3910d2048f03bbb8d01f17a038d67c"}, + {file = "immutables-0.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fcf678a3074613119385a02a07c469ec5130559f5ea843c85a0840c80b5b71c6"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a307eb0984eb43e815dcacea3ac50c11d00a936ecf694c46991cd5a23bcb0ec0"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a58825ff2254e2612c5a932174398a4ea8fbddd8a64a02c880cc32ee28b8820"}, + {file = "immutables-0.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:798b095381eb42cf40db6876339e7bed84093e5868018a9e73d8e1f7ab4bb21e"}, + {file = "immutables-0.16-cp37-cp37m-win32.whl", hash = "sha256:19bdede174847c2ef1292df0f23868ab3918b560febb09fcac6eec621bd4812b"}, + {file = "immutables-0.16-cp37-cp37m-win_amd64.whl", hash = "sha256:9ccf4c0e3e2e3237012b516c74c49de8872ccdf9129739f7a0b9d7444a8c4862"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d59beef203a3765db72b1d0943547425c8318ecf7d64c451fd1e130b653c2fbb"}, + {file = "immutables-0.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0020aaa4010b136056c20a46ce53204e1407a9e4464246cb2cf95b90808d9161"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:edd9f67671555af1eb99ad3c7550238487dd7ac0ac5205b40204ed61c9a922ac"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:298a301f85f307b4c056a0825eb30f060e64d73605e783289f3df37dd762bab8"}, + {file = "immutables-0.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b779617f5b94486bfd0f22162cd72eb5f2beb0214a14b75fdafb7b2c908ed0cb"}, + {file = "immutables-0.16-cp38-cp38-win32.whl", hash = "sha256:511c93d8b1bbbf103ff3f1f120c5a68a9866ce03dea6ac406537f93ca9b19139"}, + {file = "immutables-0.16-cp38-cp38-win_amd64.whl", hash = "sha256:b651b61c1af6cda2ee201450f2ffe048a5959bc88e43e6c312f4c93e69c9e929"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aa7bf572ae1e006104c584be70dc634849cf0dc62f42f4ee194774f97e7fd17d"}, + {file = "immutables-0.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50793a44ba0d228ed8cad4d0925e00dfd62ea32f44ddee8854f8066447272d05"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799621dcdcdcbb2516546a40123b87bf88de75fe7459f7bd8144f079ace6ec3e"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7bcf52aeb983bd803b7c6106eae1b2d9a0c7ab1241bc6b45e2174ba2b7283031"}, + {file = "immutables-0.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:734c269e82e5f307fb6e17945953b67659d1731e65309787b8f7ba267d1468f2"}, + {file = "immutables-0.16-cp39-cp39-win32.whl", hash = "sha256:a454d5d3fee4b7cc627345791eb2ca4b27fa3bbb062ccf362ecaaa51679a07ed"}, + {file = "immutables-0.16-cp39-cp39-win_amd64.whl", hash = "sha256:2505d93395d3f8ae4223e21465994c3bc6952015a38dc4f03cb3e07a2b8d8325"}, + {file = "immutables-0.16.tar.gz", hash = "sha256:d67e86859598eed0d926562da33325dac7767b7b1eff84e232c22abea19f4360"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.6.1-py3-none-any.whl", hash = "sha256:9f55f560e116f8643ecf2922d9cd3e1c7e8d52e683178fecd9d08f6aa357e11e"}, - {file = "importlib_metadata-4.6.1.tar.gz", hash = "sha256:079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac"}, + {file = "importlib_metadata-4.6.4-py3-none-any.whl", hash = "sha256:ed5157fef23a4bc4594615a0dd8eba94b2bb36bf2a343fa3d8bb2fa0a62a99d5"}, + {file = "importlib_metadata-4.6.4.tar.gz", hash = "sha256:7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f"}, ] importlib-resources = [ - {file = "importlib_resources-5.2.1-py3-none-any.whl", hash = "sha256:b9a075a844a03e0fb0ab70e5b0ea138c92e9f07f3a21fc11a656cf9492dbf64f"}, - {file = "importlib_resources-5.2.1.tar.gz", hash = "sha256:c3789472eacdf3e20a4d746f7fe7cdf53c122e76f78813fbfb04d618e0152b6a"}, + {file = "importlib_resources-5.2.2-py3-none-any.whl", hash = "sha256:2480d8e07d1890056cb53c96e3de44fead9c62f2ba949b0f2e4c4345f4afa977"}, + {file = "importlib_resources-5.2.2.tar.gz", hash = "sha256:a65882a4d0fe5fbf702273456ba2ce74fe44892c25e42e057aca526b702a6d4b"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1220,8 +1240,8 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.13.0-py2.py3-none-any.whl", hash = "sha256:b679d0fddd5b9d6d98783ae5f10fd0c4c59954f375b70a58cbe1ce9bcf9809a4"}, - {file = "pre_commit-2.13.0.tar.gz", hash = "sha256:764972c60693dc668ba8e86eb29654ec3144501310f7198742a767bec385a378"}, + {file = 
"pre_commit-2.14.0-py2.py3-none-any.whl", hash = "sha256:ec3045ae62e1aa2eecfb8e86fa3025c2e3698f77394ef8d2011ce0aedd85b2d4"}, + {file = "pre_commit-2.14.0.tar.gz", hash = "sha256:2386eeb4cf6633712c7cc9ede83684d53c8cafca6b59f79c738098b51c6d206c"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, @@ -1311,47 +1331,39 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] regex = [ - {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, - {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, - {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, - {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, - {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, - {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, - {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, - {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, - {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, - {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, - {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, - {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, - {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, + {file = "regex-2021.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8764a78c5464ac6bde91a8c87dd718c27c1cabb7ed2b4beaf36d3e8e390567f9"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4551728b767f35f86b8e5ec19a363df87450c7376d7419c3cac5b9ceb4bce576"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:577737ec3d4c195c4aef01b757905779a9e9aee608fa1cf0aec16b5576c893d3"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:c856ec9b42e5af4fe2d8e75970fcc3a2c15925cbcc6e7a9bcb44583b10b95e80"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3835de96524a7b6869a6c710b26c90e94558c31006e96ca3cf6af6751b27dca1"}, + {file = "regex-2021.8.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cea56288eeda8b7511d507bbe7790d89ae7049daa5f51ae31a35ae3c05408531"}, + {file = "regex-2021.8.3-cp36-cp36m-win32.whl", hash = "sha256:a4eddbe2a715b2dd3849afbdeacf1cc283160b24e09baf64fa5675f51940419d"}, + {file = "regex-2021.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:57fece29f7cc55d882fe282d9de52f2f522bb85290555b49394102f3621751ee"}, + {file = "regex-2021.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a5c6dbe09aff091adfa8c7cfc1a0e83fdb8021ddb2c183512775a14f1435fe16"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff4a8ad9638b7ca52313d8732f37ecd5fd3c8e3aff10a8ccb93176fd5b3812f6"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b63e3571b24a7959017573b6455e05b675050bbbea69408f35f3cb984ec54363"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fbc20975eee093efa2071de80df7f972b7b35e560b213aafabcec7c0bd00bd8c"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14caacd1853e40103f59571f169704367e79fb78fac3d6d09ac84d9197cadd16"}, + {file = "regex-2021.8.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb350eb1060591d8e89d6bac4713d41006cd4d479f5e11db334a48ff8999512f"}, + {file = "regex-2021.8.3-cp37-cp37m-win32.whl", hash = "sha256:18fdc51458abc0a974822333bd3a932d4e06ba2a3243e9a1da305668bd62ec6d"}, + {file = "regex-2021.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:026beb631097a4a3def7299aa5825e05e057de3c6d72b139c37813bfa351274b"}, + {file = "regex-2021.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16d9eaa8c7e91537516c20da37db975f09ac2e7772a0694b245076c6d68f85da"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3905c86cc4ab6d71635d6419a6f8d972cab7c634539bba6053c47354fd04452c"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937b20955806381e08e54bd9d71f83276d1f883264808521b70b33d98e4dec5d"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28e8af338240b6f39713a34e337c3813047896ace09d51593d6907c66c0708ba"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c09d88a07483231119f5017904db8f60ad67906efac3f1baa31b9b7f7cca281"}, + {file = "regex-2021.8.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85f568892422a0e96235eb8ea6c5a41c8ccbf55576a2260c0160800dbd7c4f20"}, + {file = "regex-2021.8.3-cp38-cp38-win32.whl", hash = "sha256:bf6d987edd4a44dd2fa2723fca2790f9442ae4de2c8438e53fcb1befdf5d823a"}, + {file = "regex-2021.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:8fe58d9f6e3d1abf690174fd75800fda9bdc23d2a287e77758dc0e8567e38ce6"}, + {file = "regex-2021.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:7976d410e42be9ae7458c1816a416218364e06e162b82e42f7060737e711d9ce"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9569da9e78f0947b249370cb8fadf1015a193c359e7e442ac9ecc585d937f08d"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bbe342c5b2dec5c5223e7c363f291558bc27982ef39ffd6569e8c082bdc83"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f421e3cdd3a273bace013751c345f4ebeef08f05e8c10757533ada360b51a39"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea212df6e5d3f60341aef46401d32fcfded85593af1d82b8b4a7a68cd67fdd6b"}, + {file = "regex-2021.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a3b73390511edd2db2d34ff09aa0b2c08be974c71b4c0505b4a048d5dc128c2b"}, + {file = "regex-2021.8.3-cp39-cp39-win32.whl", hash = "sha256:f35567470ee6dbfb946f069ed5f5615b40edcbb5f1e6e1d3d2b114468d505fc6"}, + {file = "regex-2021.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:bfa6a679410b394600eafd16336b2ce8de43e9b13f7fb9247d84ef5ad2b45e91"}, + {file = "regex-2021.8.3.tar.gz", hash = "sha256:8935937dad2c9b369c3d932b0edbc52a62647c2afb2fafc0c280f14a8bf56a6a"}, ] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, @@ -1382,8 +1394,8 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.1.0-py3-none-any.whl", hash = "sha256:f4a182048010e89cbec0ae4686b21f550a7f2903f665e34a6de58ec15424f919"}, - {file = "tomli-1.1.0.tar.gz", hash = "sha256:33d7984738f8bb699c9b0a816eb646a8178a69eaa792d258486776a5d21b8ca5"}, + {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, + {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, ] typed-ast = [ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, @@ -1427,8 +1439,8 @@ urllib3 = [ {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] virtualenv = [ - {file = "virtualenv-20.6.0-py2.py3-none-any.whl", hash = "sha256:e4fc84337dce37ba34ef520bf2d4392b392999dbe47df992870dc23230f6b758"}, - {file = "virtualenv-20.6.0.tar.gz", hash = "sha256:51df5d8a2fad5d1b13e088ff38a433475768ff61f202356bb9812c454c20ae45"}, + {file = "virtualenv-20.7.2-py2.py3-none-any.whl", hash = "sha256:e4670891b3a03eb071748c569a87cceaefbf643c5bac46d996c5a45c34aa0f06"}, + {file = "virtualenv-20.7.2.tar.gz", hash = "sha256:9ef4e8ee4710826e98ff3075c9a4739e2cb1040de6a2a8d35db0055840dc96a0"}, ] yarl = [ {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, From fe256ae5be155c02a832cd7244f1037bce97fcb9 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:24:14 -0500 Subject: [PATCH 61/66] feat: use Client.with_timeout() method Signed-off-by: Char Howland --- int/tests/conftest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/int/tests/conftest.py b/int/tests/conftest.py index 
0be1e18c..238a77d8 100644 --- a/int/tests/conftest.py +++ b/int/tests/conftest.py @@ -267,7 +267,7 @@ async def make_did(backchannel): """DID factory fixture""" async def _make_did(): - return (await create_did.asyncio(client=backchannel)).result + return (await create_did.asyncio(client=backchannel.with_timeout(15))).result yield _make_did # TODO create DID deletion method @@ -289,7 +289,6 @@ async def accepted_taa(backchannel): @pytest.fixture(scope="session") async def endorser_did(make_did, backchannel, accepted_taa): """Endorser DID factory fixture""" - backchannel.timeout = 15 did: DID = await make_did() LOGGER.info("Publishing DID through https://selfserve.indiciotech.io") response = httpx.post( @@ -301,7 +300,8 @@ async def endorser_did(make_did, backchannel, accepted_taa): raise Exception("Failed to publish DID:", response.text) LOGGER.info("DID Published") - backchannel.timeout = 15 - result = await set_public_did.asyncio_detailed(client=backchannel, did=did.did) + result = await set_public_did.asyncio_detailed( + client=backchannel.with_timeout(15), did=did.did + ) assert result.status_code == 200 yield did From af3c9078f54865d9d11551e7c47bf1debbad02d6 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:25:08 -0500 Subject: [PATCH 62/66] chore: remove unnecessary print statements Signed-off-by: Char Howland --- int/tests/test_connections.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 8433d2df..05c1ae72 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -29,11 +29,9 @@ async def _new_connection(): auto_accept="true", ) - print(await get_connections.asyncio(client=backchannel)) message = await wait_for_message( msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" ) - print(message) return (lhs_conn.connection_id, rhs_conn.connection_id) yield _new_connection From 6ef27a21cbb3707f745fb72b9d23ed8ef8c7c9e3 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 13:38:15 -0500 Subject: [PATCH 63/66] Merge branch 'test/int/holder' of github.com:cjhowland/aries-acapy-plugin-toolbox into test/int/holder Signed-off-by: Char Howland --- int/tests/test_basicmessage.py | 302 +++++++++++++-------------- int/tests/test_connections.py | 362 ++++++++++++++++----------------- 2 files changed, 332 insertions(+), 332 deletions(-) diff --git a/int/tests/test_basicmessage.py b/int/tests/test_basicmessage.py index 98032955..8c2bf5d2 100644 --- a/int/tests/test_basicmessage.py +++ b/int/tests/test_basicmessage.py @@ -1,151 +1,151 @@ -"""Basic Message Tests""" -import asyncio -from acapy_backchannel.models.send_message import SendMessage -import pytest -from aries_staticagent import StaticConnection, utils -from acapy_backchannel.api.basicmessage import send_basicmessage - - -@pytest.fixture -async def test_messages(connection, connection_id): - for i in range(6): - # This must be done by message or else the messages will not be recorded. 
- # await send_basicmessage.asyncio( - # client=backchannel, - # conn_id=connection_id, - # json_body=SendMessage(content="Test Message #{}".format(i)) - # ) - - # send_and_await_reply_async used instead of send_async to capture "sent" - # message so it doesn't clog up echo agent's queue - _ = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Test Message #{}".format(i), - } - ) - - -@pytest.fixture(autouse=True) -async def clear_messages(connection): - yield - await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" - }, - timeout=5, - ) - - -@pytest.mark.asyncio -async def test_send( - connection: StaticConnection, connection_id: str, asynchronously_received_messages -): - """Test send message""" - sent_message = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": "Your hovercraft is full of eels.", - } - ) - [recip_message] = await asynchronously_received_messages() - assert ( - sent_message["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" - ) - assert ( - recip_message["@type"] - == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/basicmessage/1.0/message" - ) - assert recip_message["content"] == "Your hovercraft is full of eels." - - -@pytest.mark.asyncio -async def test_new(connection: StaticConnection): - """Test new message notification""" - # "new" message notifications are sent only over sessions. - # This call must be done as a send_and_await_reply_async - new_message = await connection.send_and_await_returned_async( - { - "@type": "https://didcomm.org/basicmessage/1.0/message", - "~l10n": {"locale": "en"}, - "sent_time": utils.timestamp(), - "content": "Your hovercraft is full of eels.", - }, - timeout=10, - ) - assert ( - new_message["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" - ) - assert new_message["message"]["content"] == "Your hovercraft is full of eels." 
- - -@pytest.mark.asyncio -async def test_get(connection: StaticConnection, connection_id: str): - """Send multiple messages and verify that the proper count and content appears in messages list""" - test_content = ("Are you suggesting coconuts migrate?", "'Tis but a flesh wound.") - for content in test_content: - _ = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", - "connection_id": connection_id, - "content": content, - } - ) - get_messages = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" - } - ) - assert ( - get_messages["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" - ) - assert get_messages["count"] == 2 - assert get_messages["messages"][1]["content"] == test_content[0] - assert get_messages["messages"][0]["content"] == test_content[1] - - -@pytest.mark.asyncio -async def test_get_limit_offset( - connection: StaticConnection, connection_id: str, test_messages -): - """Send multiple messages and verify that get returns the correct content according to the limit and offset""" - get_messages = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", - "limit": 3, - "offset": 2, - } - ) - assert ( - get_messages["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" - ) - assert get_messages["count"] == 3 - assert get_messages["messages"][0]["content"] == "Test Message #3" - assert get_messages["messages"][1]["content"] == "Test Message #2" - assert get_messages["messages"][2]["content"] == "Test Message #1" - - -@pytest.mark.asyncio -async def test_delete(connection: StaticConnection, connection_id: str, test_messages): - """Send multiple messages, delete them, and verify that the messages count is zero""" - delete_message = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" - } - ) - get_messages = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" - } - ) - assert ( - delete_message["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" - ) - assert get_messages["count"] == 0 +"""Basic Message Tests""" +import asyncio +from acapy_backchannel.models.send_message import SendMessage +import pytest +from aries_staticagent import StaticConnection, utils +from acapy_backchannel.api.basicmessage import send_basicmessage + + +@pytest.fixture +async def test_messages(connection, connection_id): + for i in range(6): + # This must be done by message or else the messages will not be recorded. 
+ # await send_basicmessage.asyncio( + # client=backchannel, + # conn_id=connection_id, + # json_body=SendMessage(content="Test Message #{}".format(i)) + # ) + + # send_and_await_reply_async used instead of send_async to capture "sent" + # message so it doesn't clog up echo agent's queue + _ = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Test Message #{}".format(i), + } + ) + + +@pytest.fixture(autouse=True) +async def clear_messages(connection): + yield + await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" + }, + timeout=5, + ) + + +@pytest.mark.asyncio +async def test_send( + connection: StaticConnection, connection_id: str, asynchronously_received_messages +): + """Test send message""" + sent_message = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": "Your hovercraft is full of eels.", + } + ) + [recip_message] = await asynchronously_received_messages() + assert ( + sent_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/sent" + ) + assert ( + recip_message["@type"] + == "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/basicmessage/1.0/message" + ) + assert recip_message["content"] == "Your hovercraft is full of eels." + + +@pytest.mark.asyncio +async def test_new(connection: StaticConnection): + """Test new message notification""" + # "new" message notifications are sent only over sessions. + # This call must be done as a send_and_await_reply_async + new_message = await connection.send_and_await_returned_async( + { + "@type": "https://didcomm.org/basicmessage/1.0/message", + "~l10n": {"locale": "en"}, + "sent_time": utils.timestamp(), + "content": "Your hovercraft is full of eels.", + }, + timeout=10, + ) + assert ( + new_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/new" + ) + assert new_message["message"]["content"] == "Your hovercraft is full of eels." 
+ + +@pytest.mark.asyncio +async def test_get(connection: StaticConnection, connection_id: str): + """Send multiple messages and verify that the proper count and content appears in messages list""" + test_content = ("Are you suggesting coconuts migrate?", "'Tis but a flesh wound.") + for content in test_content: + _ = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/send", + "connection_id": connection_id, + "content": content, + } + ) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" + } + ) + assert ( + get_messages["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + ) + assert get_messages["count"] == 2 + assert get_messages["messages"][1]["content"] == test_content[0] + assert get_messages["messages"][0]["content"] == test_content[1] + + +@pytest.mark.asyncio +async def test_get_limit_offset( + connection: StaticConnection, connection_id: str, test_messages +): + """Send multiple messages and verify that get returns the correct content according to the limit and offset""" + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get", + "limit": 3, + "offset": 2, + } + ) + assert ( + get_messages["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/messages" + ) + assert get_messages["count"] == 3 + assert get_messages["messages"][0]["content"] == "Test Message #3" + assert get_messages["messages"][1]["content"] == "Test Message #2" + assert get_messages["messages"][2]["content"] == "Test Message #1" + + +@pytest.mark.asyncio +async def test_delete(connection: StaticConnection, connection_id: str, test_messages): + """Send multiple messages, delete them, and verify that the messages count is zero""" + delete_message = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/delete" + } + ) + get_messages = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/get" + } + ) + assert ( + delete_message["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-basicmessage/0.1/deleted" + ) + assert get_messages["count"] == 0 diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 05c1ae72..925d979f 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -1,181 +1,181 @@ -"""Connections Tests""" -from acapy_backchannel import Client -from acapy_backchannel.api.connection import ( - create_invitation, - delete_connection, - get_connections, - receive_invitation, -) -from acapy_backchannel.models.create_invitation_request import CreateInvitationRequest -from acapy_backchannel.models.receive_invitation_request import ReceiveInvitationRequest -from aries_staticagent import Message -import pytest - - -@pytest.fixture -def new_connection( - backchannel: Client, wait_for_message, asynchronously_received_messages -): - """Factory for new connections.""" - - async def _new_connection(): - await asynchronously_received_messages() - lhs_conn = await create_invitation.asyncio( - client=backchannel, 
json_body=CreateInvitationRequest(), auto_accept="true" - ) - rhs_conn = await receive_invitation.asyncio( - client=backchannel, - json_body=ReceiveInvitationRequest.from_dict(lhs_conn.invitation.to_dict()), - auto_accept="true", - ) - - message = await wait_for_message( - msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" - ) - return (lhs_conn.connection_id, rhs_conn.connection_id) - - yield _new_connection - - -@pytest.fixture(autouse=True) -async def clear_connection_state(backchannel: Client, connection_id: str): - """Clear connections after each test.""" - yield - connections = await get_connections.asyncio(client=backchannel) - for connection in connections.results: - if connection.connection_id != connection_id: - await delete_connection.asyncio( - client=backchannel, conn_id=connection.connection_id - ) - - -@pytest.mark.asyncio -async def test_create_connection(connection): - """Send an invitation and receive it to create a new connection""" - msg_invitation = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "default", - "auto_accept": True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async(msg_invitation) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async(msg_received) - assert ( - received["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" - ) - assert received["label"] == msg_invitation["label"] - - -@pytest.mark.asyncio -async def test_get_list(connection, new_connection): - """Create two connections and verify that their connection_ids are in connections list""" - conn1 = await new_connection() - conn2 = await new_connection() - get_list = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - assert ( - get_list["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" - ) - assert conn1[0] in [ - connection_item["connection_id"] for connection_item in get_list["connections"] - ] - assert conn2[0] in [ - connection_item["connection_id"] for connection_item in get_list["connections"] - ] - - -@pytest.mark.asyncio -async def test_update(connection): - """Test update of connection attribute""" - msg_invitation = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "default", - "auto_accept": True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async(msg_invitation) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async(msg_received) - msg_update = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", - "connection_id": received["connection_id"], - "label": 
"Updated label", - "role": "Updated role", - } - ) - update = await connection.send_and_await_reply_async(msg_update) - assert update["label"] == "Updated label" - - -@pytest.mark.asyncio -async def test_delete(connection): - """Create an invitation, delete it, and verify that its label and connectio_id - is no longer in the connections list""" - invitation_msg = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "default", - "auto_accept": True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async(invitation_msg) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async(msg_received) - delete_connection = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", - "connection_id": received["connection_id"], - } - ) - assert ( - delete_connection["@type"] - == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" - ) - assert delete_connection["connection_id"] == received["connection_id"] - get_list = await connection.send_and_await_reply_async( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" - } - ) - assert invitation_msg["label"] not in [ - connection_item["label"] for connection_item in get_list["connections"] - ] - assert received["connection_id"] not in [ - connection_item["connection_id"] for connection_item in get_list["connections"] - ] +"""Connections Tests""" +from acapy_backchannel import Client +from acapy_backchannel.api.connection import ( + create_invitation, + delete_connection, + get_connections, + receive_invitation, +) +from acapy_backchannel.models.create_invitation_request import CreateInvitationRequest +from acapy_backchannel.models.receive_invitation_request import ReceiveInvitationRequest +from aries_staticagent import Message +import pytest + + +@pytest.fixture +def new_connection( + backchannel: Client, wait_for_message, asynchronously_received_messages +): + """Factory for new connections.""" + + async def _new_connection(): + await asynchronously_received_messages() + lhs_conn = await create_invitation.asyncio( + client=backchannel, json_body=CreateInvitationRequest(), auto_accept="true" + ) + rhs_conn = await receive_invitation.asyncio( + client=backchannel, + json_body=ReceiveInvitationRequest.from_dict(lhs_conn.invitation.to_dict()), + auto_accept="true", + ) + + message = await wait_for_message( + msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" + ) + return (lhs_conn.connection_id, rhs_conn.connection_id) + + yield _new_connection + + +@pytest.fixture(autouse=True) +async def clear_connection_state(backchannel: Client, connection_id: str): + """Clear connections after each test.""" + yield + connections = await get_connections.asyncio(client=backchannel) + for connection in connections.results: + if connection.connection_id != connection_id: + await delete_connection.asyncio( + client=backchannel, conn_id=connection.connection_id + ) + + +@pytest.mark.asyncio +async def test_create_connection(connection): + """Send an 
invitation and receive it to create a new connection""" + msg_invitation = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "default", + "auto_accept": True, + "multi_use": True, + } + ) + invitation = await connection.send_and_await_reply_async(msg_invitation) + msg_received = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + received = await connection.send_and_await_reply_async(msg_received) + assert ( + received["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" + ) + assert received["label"] == msg_invitation["label"] + + +@pytest.mark.asyncio +async def test_get_list(connection, new_connection): + """Create two connections and verify that their connection_ids are in connections list""" + conn1 = await new_connection() + conn2 = await new_connection() + get_list = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + assert ( + get_list["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/list" + ) + assert conn1[0] in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] + assert conn2[0] in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] + + +@pytest.mark.asyncio +async def test_update(connection): + """Test update of connection attribute""" + msg_invitation = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "default", + "auto_accept": True, + "multi_use": True, + } + ) + invitation = await connection.send_and_await_reply_async(msg_invitation) + msg_received = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + received = await connection.send_and_await_reply_async(msg_received) + msg_update = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", + "connection_id": received["connection_id"], + "label": "Updated label", + "role": "Updated role", + } + ) + update = await connection.send_and_await_reply_async(msg_update) + assert update["label"] == "Updated label" + + +@pytest.mark.asyncio +async def test_delete(connection): + """Create an invitation, delete it, and verify that its label and connectio_id + is no longer in the connections list""" + invitation_msg = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", + "alias": "Invitation I sent to Alice", + "label": "Bob", + "group": "default", + "auto_accept": True, + "multi_use": True, + } + ) + invitation = await connection.send_and_await_reply_async(invitation_msg) + msg_received = Message( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", + "invitation": invitation["invitation_url"], + "auto_accept": True, + } + ) + received = await 
connection.send_and_await_reply_async(msg_received) + delete_connection = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", + "connection_id": received["connection_id"], + } + ) + assert ( + delete_connection["@type"] + == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" + ) + assert delete_connection["connection_id"] == received["connection_id"] + get_list = await connection.send_and_await_reply_async( + { + "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" + } + ) + assert invitation_msg["label"] not in [ + connection_item["label"] for connection_item in get_list["connections"] + ] + assert received["connection_id"] not in [ + connection_item["connection_id"] for connection_item in get_list["connections"] + ] From 22536840b82f730c3e87be182a7f5ef31964be4f Mon Sep 17 00:00:00 2001 From: Char Howland Date: Tue, 17 Aug 2021 14:06:50 -0500 Subject: [PATCH 64/66] chore: remove command to run specific test Signed-off-by: Char Howland --- int/docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/int/docker-compose.yml b/int/docker-compose.yml index 654ace85..b0de7ba9 100644 --- a/int/docker-compose.yml +++ b/int/docker-compose.yml @@ -35,7 +35,6 @@ services: tests: container_name: juggernaut - command: -k test_holder_credential_exchange build: context: . dockerfile: ./Dockerfile.test.runner From 0009afa55e8bbced263ff8d3ae316ec78447cd76 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Wed, 18 Aug 2021 12:01:21 -0500 Subject: [PATCH 65/66] chore: use new_connection fixture in test_update and test_delete Signed-off-by: Char Howland --- int/tests/test_connections.py | 63 ++++++++--------------------------- 1 file changed, 13 insertions(+), 50 deletions(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 925d979f..0b1140b6 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -28,7 +28,6 @@ async def _new_connection(): json_body=ReceiveInvitationRequest.from_dict(lhs_conn.invitation.to_dict()), auto_accept="true", ) - message = await wait_for_message( msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" ) @@ -50,7 +49,7 @@ async def clear_connection_state(backchannel: Client, connection_id: str): @pytest.mark.asyncio -async def test_create_connection(connection): +async def test_create_connection(connection, wait_for_message): """Send an invitation and receive it to create a new connection""" msg_invitation = Message( { @@ -71,6 +70,9 @@ async def test_create_connection(connection): } ) received = await connection.send_and_await_reply_async(msg_received) + message = await wait_for_message( + msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" + ) assert ( received["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connection" @@ -101,31 +103,13 @@ async def test_get_list(connection, new_connection): @pytest.mark.asyncio -async def test_update(connection): +async def test_update(connection, new_connection): """Test update of connection attribute""" - msg_invitation = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "default", - "auto_accept": 
True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async(msg_invitation) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async(msg_received) + conn = await new_connection() msg_update = Message( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/update", - "connection_id": received["connection_id"], + "connection_id": conn[0], "label": "Updated label", "role": "Updated role", } @@ -135,47 +119,26 @@ async def test_update(connection): @pytest.mark.asyncio -async def test_delete(connection): - """Create an invitation, delete it, and verify that its label and connectio_id +async def test_delete(connection, new_connection): + """Create an invitation, delete it, and verify that its label and connection_id is no longer in the connections list""" - invitation_msg = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-invitations/0.1/create", - "alias": "Invitation I sent to Alice", - "label": "Bob", - "group": "default", - "auto_accept": True, - "multi_use": True, - } - ) - invitation = await connection.send_and_await_reply_async(invitation_msg) - msg_received = Message( - { - "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/receive-invitation", - "invitation": invitation["invitation_url"], - "auto_accept": True, - } - ) - received = await connection.send_and_await_reply_async(msg_received) + conn = await new_connection() delete_connection = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/delete", - "connection_id": received["connection_id"], + "connection_id": conn[0], } ) assert ( delete_connection["@type"] == "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/deleted" ) - assert delete_connection["connection_id"] == received["connection_id"] + assert delete_connection["connection_id"] == conn[0] get_list = await connection.send_and_await_reply_async( { "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list" } ) - assert invitation_msg["label"] not in [ - connection_item["label"] for connection_item in get_list["connections"] - ] - assert received["connection_id"] not in [ + assert conn[0] not in [ connection_item["connection_id"] for connection_item in get_list["connections"] ] From 6e2ca1336382098695875ce899b4b47d4a2aa1a8 Mon Sep 17 00:00:00 2001 From: Char Howland Date: Wed, 18 Aug 2021 12:29:44 -0500 Subject: [PATCH 66/66] fix: await both connected messages in new_connection fixture Signed-off-by: Char Howland --- int/tests/test_connections.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/int/tests/test_connections.py b/int/tests/test_connections.py index 0b1140b6..71846797 100644 --- a/int/tests/test_connections.py +++ b/int/tests/test_connections.py @@ -28,7 +28,10 @@ async def _new_connection(): json_body=ReceiveInvitationRequest.from_dict(lhs_conn.invitation.to_dict()), auto_accept="true", ) - message = await wait_for_message( + first_connected = await wait_for_message( + msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" + ) + second_connected = await 
wait_for_message( msg_type="https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/connected" ) return (lhs_conn.connection_id, rhs_conn.connection_id)
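# With the change above, new_connection blocks until a "connected" notification
# has been observed for each side of the pairing before handing back the two
# connection ids. A hypothetical consumer (not part of this patch; it reuses the
# module's existing imports and fixtures) would use it the same way the list
# test does:
@pytest.mark.asyncio
async def test_new_connection_listed(connection, new_connection):
    """Sketch: a freshly created connection should appear in the connections list."""
    lhs_id, _rhs_id = await new_connection()
    get_list = await connection.send_and_await_reply_async(
        {
            "@type": "https://github.com/hyperledger/aries-toolbox/tree/master/docs/admin-connections/0.1/get-list"
        }
    )
    assert lhs_id in [
        connection_item["connection_id"] for connection_item in get_list["connections"]
    ]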