diff --git a/aws-proxy/AGENTS.md b/aws-proxy/AGENTS.md index f963adb..d852308 100644 --- a/aws-proxy/AGENTS.md +++ b/aws-proxy/AGENTS.md @@ -15,6 +15,7 @@ You are an AI agent tasked with adding additional functionality or test coverage * You can call different `make` targets (e.g., `make test`) in this repo (no need to prompt for confirmation) * For each new file created or existing file modified, add a header comment to the file, something like `# Note/disclosure: This file has been (partially or fully) generated by an AI agent.` * The proxy tests are executed against real AWS and may incur some costs, so rather than executing the entire test suite or entire modules, focus the testing on individual test functions within a module only. +* Before claiming success, always double-check against real AWS (via `aws` CLI commands) that everything has been cleaned up and there are no leftover resources from the proxy tests. * Never add any `print(..)` statements to the code - use a logger to report any status to the user, if required. * To format/lint the codebase you can run `make format` and `make lint`. diff --git a/aws-proxy/tests/proxy/test_appsync.py b/aws-proxy/tests/proxy/test_appsync.py new file mode 100644 index 0000000..08eafb5 --- /dev/null +++ b/aws-proxy/tests/proxy/test_appsync.py @@ -0,0 +1,398 @@ +# Note/disclosure: This file has been (partially or fully) generated by an AI agent. 
+import logging + +import boto3 +import pytest +from botocore.exceptions import ClientError +from localstack.aws.connect import connect_to +from localstack.utils.strings import short_uid +from localstack.utils.sync import retry + +from aws_proxy.shared.models import ProxyConfig + +LOG = logging.getLogger(__name__) + + +@pytest.fixture +def appsync_client_aws(): + """Create a boto3 AppSync client for direct AWS access.""" + return boto3.client("appsync") + + +@pytest.fixture +def create_graphql_api_aws(appsync_client_aws): + """ + Fixture to create GraphQL APIs in AWS with automatic cleanup. + Returns a factory function that creates APIs and tracks them for cleanup. + """ + created_apis = [] + + def _create_api(name: str = None, authentication_type: str = "API_KEY", **kwargs): + if name is None: + name = f"test-api-{short_uid()}" + response = appsync_client_aws.create_graphql_api( + name=name, authenticationType=authentication_type, **kwargs + ) + api_id = response["graphqlApi"]["apiId"] + created_apis.append(api_id) + return response["graphqlApi"] + + yield _create_api + + # Cleanup all created APIs + for api_id in created_apis: + try: + appsync_client_aws.delete_graphql_api(apiId=api_id) + except ClientError as e: + # Ignore if already deleted + if e.response["Error"]["Code"] != "NotFoundException": + LOG.warning("Failed to delete GraphQL API %s: %s", api_id, e) + + +def _compare_api_lists(apis_aws: list, apis_proxied: list): + """Helper to compare GraphQL API lists, ignoring some dynamic attributes.""" + + def normalize_api(api): + normalized = api.copy() + # Remove dynamic fields that may differ + for field in [ + "uris", + "tags", + "userPoolConfig", + "additionalAuthenticationProviders", + ]: + normalized.pop(field, None) + return normalized + + normalized_aws = sorted( + [normalize_api(api) for api in apis_aws], key=lambda x: x["apiId"] + ) + normalized_proxied = sorted( + [normalize_api(api) for api in apis_proxied], key=lambda x: x["apiId"] + ) + + assert 
normalized_proxied == normalized_aws + + +def test_appsync_graphql_api_operations( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test basic AppSync GraphQL API operations with proxy.""" + api_name = f"test-api-{short_uid()}" + + # Start proxy for appsync service + config = ProxyConfig(services={"appsync": {"resources": ".*"}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # List APIs initially to establish baseline + apis_proxied_initial = appsync_client.list_graphql_apis().get("graphqlApis", []) + apis_aws_initial = appsync_client_aws.list_graphql_apis().get("graphqlApis", []) + _compare_api_lists(apis_aws_initial, apis_proxied_initial) + + # Create a GraphQL API directly in AWS via fixture (will be cleaned up by fixture) + api = create_graphql_api_aws(name=api_name) + api_id = api["apiId"] + + # Verify the API exists in AWS + api_aws = appsync_client_aws.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_aws["name"] == api_name + assert api_aws["apiId"] == api_id + + # Get API via proxied client and compare + api_proxied = appsync_client.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_proxied["name"] == api_aws["name"] + assert api_proxied["apiId"] == api_aws["apiId"] + assert api_proxied["authenticationType"] == api_aws["authenticationType"] + + # List APIs and verify the new API appears in both + apis_proxied_after = appsync_client.list_graphql_apis().get("graphqlApis", []) + apis_aws_after = appsync_client_aws.list_graphql_apis().get("graphqlApis", []) + _compare_api_lists(apis_aws_after, apis_proxied_after) + + # Verify our API is in the list + api_ids_proxied = [api["apiId"] for api in apis_proxied_after] + assert api_id in api_ids_proxied + + # Update the API + updated_name = f"updated-api-{short_uid()}" + appsync_client.update_graphql_api( + apiId=api_id, name=updated_name, authenticationType="API_KEY" + ) + + # Verify update in AWS + api_aws_updated = 
appsync_client_aws.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_aws_updated["name"] == updated_name + + +def test_appsync_graphql_api_delete( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test AppSync GraphQL API deletion with proxy.""" + api_name = f"test-api-delete-{short_uid()}" + + # Start proxy for appsync service + config = ProxyConfig(services={"appsync": {"resources": ".*"}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create a GraphQL API via fixture + api = create_graphql_api_aws(name=api_name) + api_id = api["apiId"] + + # Verify it exists + api_aws = appsync_client_aws.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_aws["apiId"] == api_id + + # Delete the API via proxied client + appsync_client.delete_graphql_api(apiId=api_id) + + # Verify deletion + def _assert_api_deleted(): + with pytest.raises(ClientError) as exc: + appsync_client_aws.get_graphql_api(apiId=api_id) + assert exc.value.response["Error"]["Code"] == "NotFoundException" + + retry(_assert_api_deleted, retries=5, sleep=2) + + +def test_appsync_api_key_operations( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test AppSync API key operations with proxy.""" + # Start proxy for appsync service + config = ProxyConfig(services={"appsync": {"resources": ".*"}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create a GraphQL API via fixture + api = create_graphql_api_aws() + api_id = api["apiId"] + + # Create an API key via proxied client + api_key_response = appsync_client.create_api_key(apiId=api_id) + api_key_id = api_key_response["apiKey"]["id"] + + # List API keys via both clients + keys_aws = appsync_client_aws.list_api_keys(apiId=api_id).get("apiKeys", []) + keys_proxied = appsync_client.list_api_keys(apiId=api_id).get("apiKeys", []) + + assert len(keys_aws) == len(keys_proxied) + assert keys_aws[0]["id"] == 
keys_proxied[0]["id"] + + # Delete API key via proxied client + appsync_client.delete_api_key(apiId=api_id, id=api_key_id) + + # Verify deletion + keys_aws_after = appsync_client_aws.list_api_keys(apiId=api_id).get("apiKeys", []) + assert api_key_id not in [k["id"] for k in keys_aws_after] + + +def test_appsync_readonly_mode( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test AppSync operations in read-only proxy mode.""" + api_name = f"test-readonly-api-{short_uid()}" + + # Start proxy in read-only mode + config = ProxyConfig(services={"appsync": {"resources": ".*", "read_only": True}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create a GraphQL API in AWS directly via fixture + api = create_graphql_api_aws(name=api_name) + api_id = api["apiId"] + + # Read operations should work via proxy + api_proxied = appsync_client.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_proxied["apiId"] == api_id + assert api_proxied["name"] == api_name + + # List APIs should work via proxy + apis_proxied = appsync_client.list_graphql_apis().get("graphqlApis", []) + api_ids_proxied = [api["apiId"] for api in apis_proxied] + assert api_id in api_ids_proxied + + # Write operations should NOT be proxied - create a new API locally + local_api_name = f"local-only-api-{short_uid()}" + local_response = appsync_client.create_graphql_api( + name=local_api_name, authenticationType="API_KEY" + ) + local_api_id = local_response["graphqlApi"]["apiId"] + + # Verify this API does NOT exist in AWS + apis_aws = appsync_client_aws.list_graphql_apis().get("graphqlApis", []) + aws_api_ids = [api["apiId"] for api in apis_aws] + assert local_api_id not in aws_api_ids + + +def test_appsync_operations_filtering( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test that proxy forwards only specified operations.""" + api_name = f"test-ops-filter-api-{short_uid()}" + + # Start proxy - only forwarding 
GetGraphqlApi and ListGraphqlApis operations + config = ProxyConfig( + services={ + "appsync": { + "resources": ".*", + "operations": ["GetGraphqlApi", "ListGraphqlApis"], + } + } + ) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create API directly in AWS via fixture (not via proxy since CreateGraphqlApi is not proxied) + api = create_graphql_api_aws(name=api_name) + api_id = api["apiId"] + + # GetGraphqlApi should be proxied - should return AWS data + api_proxied = appsync_client.get_graphql_api(apiId=api_id)["graphqlApi"] + assert api_proxied["apiId"] == api_id + assert api_proxied["name"] == api_name + + # ListGraphqlApis should be proxied - should include the AWS API + apis_proxied = appsync_client.list_graphql_apis().get("graphqlApis", []) + api_ids_proxied = [api["apiId"] for api in apis_proxied] + assert api_id in api_ids_proxied + + # CreateGraphqlApi is NOT proxied - creating via local client should stay local + local_api_name = f"local-only-api-{short_uid()}" + local_response = appsync_client.create_graphql_api( + name=local_api_name, authenticationType="API_KEY" + ) + local_api_id = local_response["graphqlApi"]["apiId"] + + # Verify local API does NOT exist in AWS + apis_aws = appsync_client_aws.list_graphql_apis().get("graphqlApis", []) + aws_api_ids = [api["apiId"] for api in apis_aws] + assert local_api_id not in aws_api_ids + + +def test_appsync_schema_operations( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test AppSync schema operations with proxy.""" + # Start proxy for appsync service + config = ProxyConfig(services={"appsync": {"resources": ".*"}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create a GraphQL API via fixture + api = create_graphql_api_aws() + api_id = api["apiId"] + + # Define a simple schema + schema_definition = b""" + type Query { + getItem(id: ID!): Item + listItems: [Item] + } + + type Item { + id: ID! 
+ name: String + description: String + } + + type Mutation { + createItem(name: String!, description: String): Item + deleteItem(id: ID!): Item + } + """ + + # Start schema creation via proxied client + appsync_client.start_schema_creation(apiId=api_id, definition=schema_definition) + + # Wait for schema creation to complete + def _assert_schema_active(): + status = appsync_client.get_schema_creation_status(apiId=api_id) + assert status["status"] == "SUCCESS", f"Schema status: {status['status']}" + + retry(_assert_schema_active, retries=10, sleep=2) + + # Get introspection schema via both clients and compare + schema_aws = appsync_client_aws.get_introspection_schema(apiId=api_id, format="SDL") + schema_proxied = appsync_client.get_introspection_schema(apiId=api_id, format="SDL") + + # Schema content should be the same + assert schema_aws["schema"].read() == schema_proxied["schema"].read() + + +def test_appsync_data_source_operations( + start_aws_proxy, appsync_client_aws, create_graphql_api_aws +): + """Test AppSync data source operations with proxy.""" + # Data source names must be alphanumeric only + data_source_name = f"testdatasource{short_uid().replace('-', '')}" + + # Start proxy for appsync service + config = ProxyConfig(services={"appsync": {"resources": ".*"}}) + start_aws_proxy(config) + + # Create clients + appsync_client = connect_to().appsync + + # Create a GraphQL API via fixture + api = create_graphql_api_aws() + api_id = api["apiId"] + + # Create a NONE data source via proxied client + ds_response = appsync_client.create_data_source( + apiId=api_id, name=data_source_name, type="NONE" + ) + assert ds_response["dataSource"]["name"] == data_source_name + + # List data sources via both clients + ds_aws = appsync_client_aws.list_data_sources(apiId=api_id).get("dataSources", []) + ds_proxied = appsync_client.list_data_sources(apiId=api_id).get("dataSources", []) + + assert len(ds_aws) == len(ds_proxied) + assert ds_aws[0]["name"] == ds_proxied[0]["name"] 
+ + # Get data source via both clients + ds_aws_get = appsync_client_aws.get_data_source( + apiId=api_id, name=data_source_name + )["dataSource"] + ds_proxied_get = appsync_client.get_data_source( + apiId=api_id, name=data_source_name + )["dataSource"] + + assert ds_aws_get["name"] == ds_proxied_get["name"] + assert ds_aws_get["type"] == ds_proxied_get["type"] + + # Update data source via proxied client + appsync_client.update_data_source( + apiId=api_id, + name=data_source_name, + type="NONE", + description="Updated description", + ) + + # Verify update + ds_aws_updated = appsync_client_aws.get_data_source( + apiId=api_id, name=data_source_name + )["dataSource"] + assert ds_aws_updated["description"] == "Updated description" + + # Delete data source via proxied client + appsync_client.delete_data_source(apiId=api_id, name=data_source_name) + + # Verify deletion + with pytest.raises(ClientError) as exc: + appsync_client_aws.get_data_source(apiId=api_id, name=data_source_name) + assert exc.value.response["Error"]["Code"] == "NotFoundException" diff --git a/aws-proxy/tests/proxy/test_kinesis.py b/aws-proxy/tests/proxy/test_kinesis.py index acc46fe..bc54dbb 100644 --- a/aws-proxy/tests/proxy/test_kinesis.py +++ b/aws-proxy/tests/proxy/test_kinesis.py @@ -158,9 +158,13 @@ def _wait_for_stream_active(): assert stream_name in streams_aws # Put record to AWS stream using direct AWS client - kinesis_client_aws.put_record( - StreamName=stream_name, Data=b"test data aws", PartitionKey="partition-1" - ) + # Use retry as stream may need a moment after becoming ACTIVE to accept writes + def _put_record(): + kinesis_client_aws.put_record( + StreamName=stream_name, Data=b"test data aws", PartitionKey="partition-1" + ) + + retry(_put_record, retries=5, sleep=2) # Get shard iterator and verify data can be read through proxy shards = kinesis_client.describe_stream(StreamName=stream_name)[