diff --git a/.artifactignore b/.artifactignore index 827a68ebf..850e600d6 100644 --- a/.artifactignore +++ b/.artifactignore @@ -1,7 +1 @@ -_manifest\** -bcde-output\** -Experiment_PipReport_** -GovCompDisc_Log_** -GovCompDisc_Manifest_** -GovCompDisc_Metadata_** -ScanTelemetry_** \ No newline at end of file +_manifest\** \ No newline at end of file diff --git a/.ci/e2e_integration_test/start-e2e.ps1 b/.ci/e2e_integration_test/start-e2e.ps1 deleted file mode 100644 index 653ed0bc4..000000000 --- a/.ci/e2e_integration_test/start-e2e.ps1 +++ /dev/null @@ -1,102 +0,0 @@ -# -# Copyright (c) Microsoft. All rights reserved. -# Licensed under the MIT license. See LICENSE file in the project root for full license information. -# - -# Python worker E2E integration test -# The E2E integration test will test the worker against a prerelease version of core tools -$FUNC_RUNTIME_VERSION = '4' -$FUNC_RUNTIME_LANGUAGE = 'python' -$AZURE_FUNCTIONS_ENVIRONMENT = "development" -$PYAZURE_WEBHOST_DEBUG = "true" -$PYAZURE_INTEGRATION_TEST = "true" - -# Speed up Invoke-RestMethod by turning off progress bar -$ProgressPreference = 'SilentlyContinue' - -function get_architecture() { - # Return "x64" or "x86" - return [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture.ToString().ToLowerInvariant(); -} - -function get_os() { - # Return either "win", "linux", "osx", or "unknown" - if ($IsWindows) { - return "win" - } elseif ($IsLinux) { - return "linux" - } elseif ($IsMacOS) { - return "osx" - } - return "unknown" -} - -function get_core_tool_download_url() { - $os = get_os - $arch = get_architecture - return "https://functionsintegclibuilds.blob.core.windows.net/builds/$FUNC_RUNTIME_VERSION/latest/Azure.Functions.Cli.$os-$arch.zip" -} - -function get_core_tools_version_url() { - return "https://functionsintegclibuilds.blob.core.windows.net/builds/$FUNC_RUNTIME_VERSION/latest/version.txt" -} - -function get_func_execuable_path($path) { - $exe_name = "func" - if ($IsWindows) { - $exe_name = "func.exe" - } - return Join-Path $path $exe_name -} - -$FUNC_CLI_DIRECTORY = Join-Path $PSScriptRoot 'Azure.Functions.Cli' -$FUNC_CLI_DIRECTORY_EXIST = Test-Path -Path $FUNC_CLI_DIRECTORY -PathType Container -if ($FUNC_CLI_DIRECTORY_EXIST) { - Write-Host 'Deleting Functions Core Tools...' - Remove-Item -Force "$FUNC_CLI_DIRECTORY.zip" -ErrorAction Ignore - Remove-Item -Recurse -Force $FUNC_CLI_DIRECTORY -ErrorAction Ignore -} - -$version = Invoke-RestMethod -Uri "$(get_core_tools_version_url)" -Write-Host "Downloading Functions Core Tools $version..." - -$output = "$FUNC_CLI_DIRECTORY.zip" -Invoke-RestMethod -Uri "$(get_core_tool_download_url)" -OutFile $output - -Write-Host 'Extracting Functions Core Tools...' -Expand-Archive $output -DestinationPath $FUNC_CLI_DIRECTORY -InformationAction SilentlyContinue - -Write-Host "Starting Functions Host..." 
-$env:FUNCTIONS_WORKER_RUNTIME = $FUNC_RUNTIME_LANGUAGE -$env:FUNCTIONS_WORKER_RUNTIME_VERSION = $env:PythonVersion -$env:AZURE_FUNCTIONS_ENVIRONMENT = $AZURE_FUNCTIONS_ENVIRONMENT -$env:PYAZURE_WEBHOST_DEBUG = $PYAZURE_WEBHOST_DEBUG -$env:PYAZURE_INTEGRATION_TEST = $PYAZURE_INTEGRATION_TEST - -$env:Path = "$env:Path$([System.IO.Path]::PathSeparator)$FUNC_CLI_DIRECTORY" -$funcExePath = $(get_func_execuable_path $FUNC_CLI_DIRECTORY) - -if ($IsMacOS -or $IsLinux) { - chmod -R 755 $FUNC_CLI_DIRECTORY -} -Write-Host "Function Exe Path: $funcExePath" - -Set-Location $env:BUILD_SOURCESDIRECTORY -Write-Host "Set-Location: $env:BUILD_SOURCESDIRECTORY" - -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -Write-Host "Preparing E2E integration tests..." -ForegroundColor Green -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -python -m pip install -U pip -python -m pip install -U -e .[dev] -cd tests -python -m invoke -c test_setup build-protos -python -m invoke -c test_setup extensions -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -Write-Host "Running E2E integration tests..." -ForegroundColor Green -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green -$env:CORE_TOOLS_EXE_PATH = "$funcExePath" -python -m pytest --junitxml=e2e-integration-test-report.xml --reruns 4 tests/endtoend -Write-Host "-----------------------------------------------------------------------------`n" -ForegroundColor Green diff --git a/.coveragerc b/.coveragerc index a3dbea58b..04ff8b376 100644 --- a/.coveragerc +++ b/.coveragerc @@ -49,4 +49,4 @@ omit = # Removing the imported libraries that might show up in this. */azure/functions/* */azure/* - */azure_functions_worker/_thirdparty/* + */azure_functions_worker_v2/_thirdparty/* diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index b2c13c80b..000000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/python-3/.devcontainer/base.Dockerfile - -# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster -ARG VARIANT="3.9" -FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} - -# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 -ARG NODE_VERSION="none" -RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi - -# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image. -# COPY requirements.txt /tmp/pip-tmp/ -# RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ -# && rm -rf /tmp/pip-tmp - -# [Optional] Uncomment this section to install additional OS packages. 
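(For reference: the deleted start-e2e.ps1 above derives the Core Tools download URL from the runtime major version plus the host OS and architecture. A minimal Python sketch of the same URL construction follows; the blob container layout is taken from the script itself, while the function names and the architecture mapping here are illustrative.)

```python
import platform
import sys

FUNC_RUNTIME_VERSION = "4"
BUILDS_BASE_URL = "https://functionsintegclibuilds.blob.core.windows.net/builds"

def get_os() -> str:
    # Mirrors the script's win/linux/osx detection.
    if sys.platform.startswith("win"):
        return "win"
    if sys.platform.startswith("linux"):
        return "linux"
    if sys.platform == "darwin":
        return "osx"
    return "unknown"

def get_architecture() -> str:
    # The script lowercases the OS architecture, yielding e.g. "x64" or "x86".
    machine = platform.machine().lower()
    return {"amd64": "x64", "x86_64": "x64", "i386": "x86", "i686": "x86"}.get(machine, machine)

def core_tools_download_url() -> str:
    name = f"Azure.Functions.Cli.{get_os()}-{get_architecture()}.zip"
    return f"{BUILDS_BASE_URL}/{FUNC_RUNTIME_VERSION}/latest/{name}"

def core_tools_version_url() -> str:
    return f"{BUILDS_BASE_URL}/{FUNC_RUNTIME_VERSION}/latest/version.txt"

if __name__ == "__main__":
    print(core_tools_version_url())
    print(core_tools_download_url())
```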
-# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ -# && apt-get -y install --no-install-recommends - -# [Optional] Uncomment this line to install global node packages. -# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json deleted file mode 100644 index 03ff47808..000000000 --- a/.devcontainer/devcontainer.json +++ /dev/null @@ -1,57 +0,0 @@ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: -// https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/python-3 -{ - "name": "Python 3", - "build": { - "dockerfile": "Dockerfile", - "context": "..", - "args": { - // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7 - // Append -bullseye or -buster to pin to an OS version. - // Use -bullseye variants on local on arm64/Apple Silicon. - "VARIANT": "3.9", - // Options - "NODE_VERSION": "none" - } - }, - - // Set *default* container specific settings.json values on container create. - "settings": { - "python.defaultInterpreterPath": "/usr/local/bin/python", - "python.linting.enabled": true, - "python.linting.pylintEnabled": true, - "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", - "python.formatting.blackPath": "/usr/local/py-utils/bin/black", - "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", - "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", - "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", - "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", - "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", - "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", - "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint", - "python.testing.pytestArgs": [ - "tests/unittests" - ], - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true - }, - - // Add the IDs of extensions you want installed when the container is created. - "extensions": [ - "ms-python.python", - "ms-python.vscode-pylance" - ], - - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], - - // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "sudo python -m pip install -U pip && sudo python -m pip install -U -e .[dev] && cd tests && sudo python -m invoke -c test_setup webhost", - - // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 
- "remoteUser": "vscode", - - "features": { - "dotnet": "latest" - } -} diff --git a/.flake8 b/.flake8 index 94a2f1926..8fa776784 100644 --- a/.flake8 +++ b/.flake8 @@ -6,10 +6,11 @@ ignore = W503,E402,E731 exclude = .git, __pycache__, build, dist, .eggs, .github, .local, docs/, - Samples, azure_functions_worker/protos/, - azure_functions_worker/_thirdparty/typing_inspect.py, + Samples, azure_functions_worker_v2_v2/protos/, + azure_functions_worker/utils/typing_inspect.py, tests/unittests/test_typing_inspect.py, tests/unittests/broken_functions/syntax_error/main.py, + tests/protos/, .env*, .vscode, venv*, *.venv* max-line-length = 88 diff --git a/.github/linters/tox.ini b/.github/linters/tox.ini index e024de18f..43dd07423 100644 --- a/.github/linters/tox.ini +++ b/.github/linters/tox.ini @@ -23,8 +23,8 @@ exclude = docs, Samples, __pycache__, - azure_functions_worker/protos/, - azure_functions_worker/_thirdparty/typing_inspect.py, + azure_functions_worker_v2/protos/, + azure_functions_worker_v2/_thirdparty/typing_inspect.py, tests/unittests/test_typing_inspect.py, .venv*, .env*, diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 91c0ee6d5..d17ba9914 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -52,7 +52,7 @@ jobs: # VALIDATE_PYTHON_PYLINT: false # disable pylint, as we have not configure it # VALIDATE_PYTHON_BLACK: false # same as above PYTHON_FLAKE8_CONFIG_FILE: tox.ini - FILTER_REGEX_INCLUDE: azure_functions_worker/.* + FILTER_REGEX_INCLUDE: azure_functions_worker_v2/.* FILTER_REGEX_EXCLUDE: tests/.* DEFAULT_BRANCH: dev GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index e8d9736a3..ca875958b 100644 --- a/.gitignore +++ b/.gitignore @@ -131,3 +131,6 @@ prof/ tests/**/host.json tests/**/bin tests/**/extensions.csproj + +# Protos +*pb2* diff --git a/README.md b/README.md index 08baeb6d1..a889aef3d 100644 --- a/README.md +++ b/README.md @@ -1,34 +1,35 @@ # Functions Header Image - Lightning Logo Azure Functions Python Worker -| Branch | Build Status | CodeCov | Test Status | -|--------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| dev | [![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/658/dev)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=658&branchName=dev) | [![codecov](https://codecov.io/gh/Azure/azure-functions-python-worker/branch/dev/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-worker) | [![Test Status](https://img.shields.io/azure-devops/build/azfunc/public/658/dev)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=658&branchName=dev) | +| Branch | Status | CodeCov | Unittests | E2E tests | 
+|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------| +| main | [![Build Status](https://azfunc.visualstudio.com/Azure%20Functions/_apis/build/status/Azure.azure-functions-python-worker?branchName=main)](https://azfunc.visualstudio.com/Azure%20Functions/_build/latest?definitionId=57&branchName=main) | [![codecov](https://codecov.io/gh/Azure/azure-functions-python-worker/branch/main/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-worker) | ![CI Unit tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20Unit%20tests/badge.svg?branch=main) | ![CI E2E tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20E2E%20tests/badge.svg?branch=main) | +| dev | [![Build Status](https://azfunc.visualstudio.com/Azure%20Functions/_apis/build/status/Azure.azure-functions-python-worker?branchName=dev)](https://azfunc.visualstudio.com/Azure%20Functions/_build/latest?definitionId=57&branchName=dev) | [![codecov](https://codecov.io/gh/Azure/azure-functions-python-worker/branch/dev/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-worker) | ![CI Unit tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20Unit%20tests/badge.svg?branch=dev) | ![CI E2E tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20E2E%20tests/badge.svg?branch=dev) | -Python support for Azure Functions is based on Python 3.8, 3.9, 3.10, 3.11, and 3.12 serverless hosting on Linux and the Functions 4.0 runtime. +Python support for Azure Functions is based on Python 3.6, 3.7, 3.8, 3.9, and 3.10 serverless hosting on Linux and the Functions 2.0, 3.0 and 4.0 runtime. Here is the current status of Python in Azure Functions: What are the supported Python versions? -| Azure Functions Runtime | Python 3.8 | Python 3.9 | Python 3.10 | Python 3.11 | Python 3.12 | -|----------------------------------|------------|------------|-------------|-------------|-------------| -| Azure Functions 3.0 (deprecated) | ✔ | ✔ | - | - | - | -| Azure Functions 4.0 | ✔ | ✔ | ✔ | ✔ | ✔ | +| Azure Functions Runtime | Python 3.6 | Python 3.7 | Python 3.8 | Python 3.9 | Python 3.10 | Python 3.11 | +|----------------------------------|------------|------------|------------|------------|-------------|-------------| +| Azure Functions 2.0 (deprecated) | ✔ | ✔ | - | - | - | - | +| Azure Functions 3.0 (deprecated) | ✔ | ✔ | ✔ | ✔ | - | - | +| Azure Functions 4.0 | - | - | ✔ | ✔ | ✔ | ✔ | For information about Azure Functions Runtime, please refer to [Azure Functions runtime versions overview](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) page. ### What's available? -- Build, test, debug, and publish using Azure Functions Core Tools (CLI) or Visual Studio Code -- Deploy Python Function project onto consumption, dedicated, elastic premium, or flex consumption plan. 
-- Deploy Python Function project in a custom docker image onto dedicated or elastic premium plan. -- Triggers / Bindings : Blob, Cosmos DB, Event Grid, Event Hub, HTTP, Kafka, MySQL, Queue, ServiceBus, SQL, Timer, and Warmup +- Build, test, debug and publish using Azure Functions Core Tools (CLI) or Visual Studio Code +- Deploy Python Function project onto consumption, dedicated, or elastic premium plan. +- Deploy Python Function project in a custom docker image onto dedicated, or elastic premium plan. +- Triggers / Bindings : HTTP, Blob, Queue, Timer, Cosmos DB, Event Grid, Event Hubs and Service Bus - Triggers / Bindings : Custom binding support -### What's new? +### What's coming? -- [SDK Type Bindings for Blob](https://techcommunity.microsoft.com/t5/azure-compute-blog/azure-functions-sdk-type-bindings-for-azure-blob-storage-with/ba-p/4146744) -- [HTTP Streaming](https://techcommunity.microsoft.com/t5/azure-compute-blog/azure-functions-support-for-http-streams-in-python-is-now-in/ba-p/4146697) +- [Durable Functions For Python](https://github.com/Azure/azure-functions-durable-python) ### Get Started @@ -71,4 +72,4 @@ provided by the bot. You will only need to do this once across all repos using o This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or -contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. \ No newline at end of file +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. diff --git a/azure_functions_worker/__main__.py b/azure_functions_worker/__main__.py deleted file mode 100644 index 4197fda14..000000000 --- a/azure_functions_worker/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from azure_functions_worker import main - -if __name__ == '__main__': - main.main() diff --git a/azure_functions_worker/bindings/__init__.py b/azure_functions_worker/bindings/__init__.py deleted file mode 100644 index e64ba3bd6..000000000 --- a/azure_functions_worker/bindings/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from .retrycontext import RetryContext # isort: skip -from .tracecontext import TraceContext # isort: skip -from .context import Context -from .meta import ( - check_deferred_bindings_enabled, - check_input_type_annotation, - check_output_type_annotation, - from_incoming_proto, - get_deferred_raw_bindings, - has_implicit_output, - is_trigger_binding, - load_binding_registry, - to_outgoing_param_binding, - to_outgoing_proto, -) -from .out import Out - -__all__ = ( - 'Out', 'Context', - 'is_trigger_binding', - 'load_binding_registry', - 'check_input_type_annotation', 'check_output_type_annotation', - 'has_implicit_output', - 'from_incoming_proto', 'to_outgoing_proto', 'TraceContext', 'RetryContext', - 'to_outgoing_param_binding', 'check_deferred_bindings_enabled', - 'get_deferred_raw_bindings' -) diff --git a/azure_functions_worker/bindings/rpcexception.py b/azure_functions_worker/bindings/rpcexception.py deleted file mode 100644 index d51c517c8..000000000 --- a/azure_functions_worker/bindings/rpcexception.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. - - -class RpcException: - - def __init__(self, - source: str, - stack_trace: str, - message: str) -> None: - self.__source = source - self.__stack_trace = stack_trace - self.__message = message - - @property - def source(self) -> str: - return self.__source - - @property - def stack_trace(self) -> str: - return self.__stack_trace - - @property - def message(self) -> str: - return self.__message diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/__init__.py b/azure_functions_worker/bindings/shared_memory_data_transfer/__init__.py deleted file mode 100644 index a68b5ec4e..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -""" -This module provides functionality for accessing shared memory maps. -These are used for transferring data between functions host and the worker -process. -The initial set of corresponding changes to enable shared memory maps in the -functions host can be found in the following Pull Request: -https://github.com/Azure/azure-functions-host/pull/6836 -The issue tracking shared memory transfer related changes is: -https://github.com/Azure/azure-functions-host/issues/6791 -""" - -from .file_accessor import FileAccessor -from .file_accessor_factory import FileAccessorFactory -from .shared_memory_constants import SharedMemoryConstants -from .shared_memory_exception import SharedMemoryException -from .shared_memory_manager import SharedMemoryManager -from .shared_memory_map import SharedMemoryMap - -__all__ = ( - 'FileAccessorFactory', 'FileAccessor', 'SharedMemoryConstants', - 'SharedMemoryException', 'SharedMemoryMap', 'SharedMemoryManager' -) diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py deleted file mode 100644 index 3838bcdaa..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import mmap -from abc import ABCMeta, abstractmethod -from typing import Optional - -from .shared_memory_constants import SharedMemoryConstants as consts - - -class FileAccessor(metaclass=ABCMeta): - """ - For accessing memory maps. - This is an interface that must be implemented by sub-classes to provide - platform-specific support for accessing memory maps. - Currently the following two sub-classes are implemented: - 1) FileAccessorWindows - 2) FileAccessorUnix - Note: Platform specific details of mmap can be found in the official docs: - https://docs.python.org/3/library/mmap.html - """ - @abstractmethod - def open_mem_map( - self, - mem_map_name: str, - mem_map_size: int, - access: int = mmap.ACCESS_READ) -> Optional[mmap.mmap]: - """ - Opens an existing memory map. - Returns the opened mmap if successful, None otherwise. - """ - raise NotImplementedError - - @abstractmethod - def create_mem_map(self, mem_map_name: str, mem_map_size: int) \ - -> Optional[mmap.mmap]: - """ - Creates a new memory map. - Returns the created mmap if successful, None otherwise.
- """ - raise NotImplementedError - - @abstractmethod - def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: - """ - Deletes the memory map and any backing resources associated with it. - If there is no memory map with the given name, then no action is - performed. - Returns True if the memory map was successfully deleted, False - otherwise. - """ - raise NotImplementedError - - def _is_mem_map_initialized(self, mem_map: mmap.mmap) -> bool: - """ - Checks if the dirty bit of the memory map has been set or not. - This is used to check if a new memory map was created successfully and - we don't end up using an existing one. - """ - original_pos = mem_map.tell() - # The dirty bit is the first byte of the header so seek to the beginning - mem_map.seek(0) - # Read the first byte - byte_read = mem_map.read(1) - # Check if the dirty bit was set or not - if byte_read == consts.HeaderFlags.Initialized: - is_set = True - else: - is_set = False - # Seek back the memory map to the original position - mem_map.seek(original_pos) - return is_set - - def _set_mem_map_initialized(self, mem_map: mmap.mmap): - """ - Sets the dirty bit in the header of the memory map to indicate that this - memory map is not new anymore. - """ - original_pos = mem_map.tell() - # The dirty bit is the first byte of the header so seek to the beginning - mem_map.seek(0) - # Set the dirty bit - mem_map.write(consts.HeaderFlags.Initialized) - # Seek back the memory map to the original position - mem_map.seek(original_pos) - - -class DummyFileAccessor(FileAccessor): - def open_mem_map(self, mem_map_name: str, mem_map_size: int, - access: int = mmap.ACCESS_READ) -> Optional[mmap.mmap]: - pass - - def create_mem_map(self, mem_map_name: str, - mem_map_size: int) -> Optional[mmap.mmap]: - pass - - def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: - pass diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py deleted file mode 100644 index eb97f0f54..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import os -import sys - -from ...constants import FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED -from ...utils.common import is_envvar_true -from .file_accessor import DummyFileAccessor -from .file_accessor_unix import FileAccessorUnix -from .file_accessor_windows import FileAccessorWindows - - -class FileAccessorFactory: - """ - For creating the platform-appropriate instance of FileAccessor to perform - memory map related operations. - """ - @staticmethod - def create_file_accessor(): - if sys.platform == "darwin" and not is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED): - return DummyFileAccessor() - elif os.name == 'nt': - return FileAccessorWindows() - return FileAccessorUnix() diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py deleted file mode 100644 index ae4f6206c..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py +++ /dev/null @@ -1,200 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import mmap -import os -from io import BufferedRandom -from typing import List, Optional - -from azure_functions_worker import constants - -from ...logging import logger -from ...utils.common import get_app_setting -from .file_accessor import FileAccessor -from .shared_memory_constants import SharedMemoryConstants as consts -from .shared_memory_exception import SharedMemoryException - - -class FileAccessorUnix(FileAccessor): - """ - For accessing memory maps. - This implements the FileAccessor interface for Unix platforms. - """ - def __init__(self): - # From the list of configured directories where memory maps can be - # stored, get the list of directories which are valid (either existed - # already or have been created successfully for use). - self.valid_dirs = self._get_valid_mem_map_dirs() - - def __del__(self): - del self.valid_dirs - - def open_mem_map( - self, - mem_map_name: str, - mem_map_size: int, - access: int = mmap.ACCESS_READ) -> Optional[mmap.mmap]: - """ - Note: mem_map_size = 0 means open the entire mmap. - """ - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot open memory map. Invalid name {mem_map_name}') - if mem_map_size < 0: - raise SharedMemoryException( - f'Cannot open memory map. Invalid size {mem_map_size}') - fd = self._open_mem_map_file(mem_map_name) - if fd is None: - logger.warning('Cannot open file: %s', mem_map_name) - return None - mem_map = mmap.mmap(fd.fileno(), mem_map_size, access=access) - return mem_map - - def create_mem_map(self, mem_map_name: str, mem_map_size: int) \ - -> Optional[mmap.mmap]: - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot create memory map. Invalid name {mem_map_name}') - if mem_map_size <= 0: - raise SharedMemoryException( - f'Cannot create memory map. Invalid size {mem_map_size}') - file = self._create_mem_map_file(mem_map_name, mem_map_size) - if file is None: - logger.warning('Cannot create file: %s', mem_map_name) - return None - mem_map = mmap.mmap(file.fileno(), mem_map_size, mmap.MAP_SHARED, - mmap.PROT_WRITE) - if self._is_mem_map_initialized(mem_map): - raise SharedMemoryException(f'Memory map {mem_map_name} ' - 'already exists') - self._set_mem_map_initialized(mem_map) - return mem_map - - def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot delete memory map. Invalid name {mem_map_name}') - try: - fd = self._open_mem_map_file(mem_map_name) - os.remove(fd.name) - except Exception as e: - # In this case, we don't want to fail right away but log that - # deletion was unsuccessful. - # These logs can help identify if we may be leaking memory and not - # cleaning up the created memory maps. - logger.error('Cannot delete memory map %s - %s', mem_map_name, e, - exc_info=True) - return False - mem_map.close() - return True - - def _get_allowed_mem_map_dirs(self) -> List[str]: - """ - Get the list of directories where memory maps can be created. - If specified in AppSetting, that list will be used. - Otherwise, the default value will be used. 
- """ - setting = constants.UNIX_SHARED_MEMORY_DIRECTORIES - allowed_mem_map_dirs_str = get_app_setting(setting) - if allowed_mem_map_dirs_str is None: - allowed_mem_map_dirs = consts.UNIX_TEMP_DIRS - logger.info( - 'Using allowed directories for shared memory: %s from App ' - 'Setting: %s', - allowed_mem_map_dirs, setting) - else: - allowed_mem_map_dirs = allowed_mem_map_dirs_str.split(',') - logger.info( - 'Using default allowed directories for shared memory: %s', - allowed_mem_map_dirs) - return allowed_mem_map_dirs - - def _get_valid_mem_map_dirs(self) -> List[str]: - """ - From the configured list of allowed directories where memory maps can be - stored, return all those that either already existed or were created - successfully for use. - Returns list of directories, in decreasing order of preference, where - memory maps can be created. - """ - allowed_dirs = self._get_allowed_mem_map_dirs() - # Iterate over all the possible directories where the memory map could - # be created and try to create each of them if they don't exist already. - valid_dirs = [] - for temp_dir in allowed_dirs: - dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - if os.path.exists(dir_path): - # A valid directory already exists - valid_dirs.append(dir_path) - logger.debug('Found directory %s to store memory maps', - dir_path) - else: - try: - os.makedirs(dir_path) - valid_dirs.append(dir_path) - except Exception as e: - # We keep trying to check/create others - logger.warning('Cannot create directory %s to ' - 'store memory maps - %s', dir_path, e, - exc_info=True) - if len(valid_dirs) == 0: - logger.error('No valid directory for memory maps in %s', - allowed_dirs) - return valid_dirs - - def _open_mem_map_file(self, mem_map_name: str) -> Optional[BufferedRandom]: - """ - Get the file descriptor of an existing memory map. - Returns the BufferedRandom stream to the file. - """ - # Iterate over all the possible directories where the memory map could - # be present and try to open it. - for temp_dir in self.valid_dirs: - file_path = os.path.join(temp_dir, mem_map_name) - if os.path.exists(file_path): - try: - fd = open(file_path, 'r+b') - return fd - except Exception as e: - logger.error('Cannot open file %s - %s', file_path, e, - exc_info=True) - # The memory map was not found in any of the known directories - logger.error( - 'Cannot open memory map %s in any of the following directories: ' - '%s', - mem_map_name, self.valid_dirs) - return None - - def _create_mem_map_file(self, mem_map_name: str, mem_map_size: int) \ - -> Optional[BufferedRandom]: - """ - Create the file descriptor for a new memory map. - Returns the BufferedRandom stream to the file. - """ - # Ensure that the file does not already exist - for temp_dir in self.valid_dirs: - file_path = os.path.join(temp_dir, mem_map_name) - if os.path.exists(file_path): - raise SharedMemoryException( - f'File {file_path} for memory map {mem_map_name} ' - f'already exists') - # Create the file - for temp_dir in self.valid_dirs: - file_path = os.path.join(temp_dir, mem_map_name) - try: - file = open(file_path, 'wb+') - file.truncate(mem_map_size) - return file - except Exception as e: - # If the memory map could not be created in this directory, we - # keep trying in other applicable directories. - logger.warning('Cannot create memory map in %s - %s.' - ' Trying other directories.', file_path, e, - exc_info=True) - # Could not create the memory map in any of the applicable directory - # paths so we fail. 
- logger.error( - 'Cannot create memory map %s with size %s in any of the ' - 'following directories: %s', - mem_map_name, mem_map_size, self.valid_dirs) - return None diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py deleted file mode 100644 index 1b45056c7..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import mmap -from typing import Optional - -from ...logging import logger -from .file_accessor import FileAccessor -from .shared_memory_exception import SharedMemoryException - - -class FileAccessorWindows(FileAccessor): - """ - For accessing memory maps. - This implements the FileAccessor interface for Windows. - """ - def open_mem_map( - self, - mem_map_name: str, - mem_map_size: int, - access: int = mmap.ACCESS_READ) -> Optional[mmap.mmap]: - """ - Note: mem_map_size = 0 means open the entire mmap. - Note: On Windows, an mmap is created if one does not exist even when - attempting to open it. - """ - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot open memory map. Invalid name {mem_map_name}') - if mem_map_size < 0: - raise SharedMemoryException( - f'Cannot open memory map. Invalid size {mem_map_size}') - try: - mem_map = mmap.mmap(-1, mem_map_size, mem_map_name, access=access) - return mem_map - except Exception as e: - logger.warning( - 'Cannot open memory map %s with size %s - %s', mem_map_name, - mem_map_size, e) - return None - - def create_mem_map(self, mem_map_name: str, mem_map_size: int) \ - -> Optional[mmap.mmap]: - # Windows also creates the mmap when trying to open it, if it does not - # already exist. - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot create memory map. Invalid name {mem_map_name}') - if mem_map_size <= 0: - raise SharedMemoryException( - f'Cannot create memory map. Invalid size {mem_map_size}') - mem_map = self.open_mem_map(mem_map_name, mem_map_size, - mmap.ACCESS_WRITE) - if mem_map is None: - return None - if self._is_mem_map_initialized(mem_map): - raise SharedMemoryException( - f'Cannot create memory map {mem_map_name} as it ' - f'already exists') - self._set_mem_map_initialized(mem_map) - return mem_map - - def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: - """ - In Windows, an mmap is not backed by a file so no file needs to be - deleted. - """ - mem_map.close() - return True diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_constants.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_constants.py deleted file mode 100644 index ac25170b3..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_constants.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -class SharedMemoryConstants: - class HeaderFlags: - """ - Flags that are present in the header region of the memory maps. - """ - Initialized = b'\x01' - """ - Indicates that the memory map has been initialized, may be in use and - is not new. - This represents a boolean value of True. 
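(The split between FileAccessorUnix and FileAccessorWindows above comes down to how Python's mmap is named on each platform: Windows accepts a tagname and creates the mapping on open — which is why the dirty-bit check exists — while Unix maps are backed by files, by default under /dev/shm. A rough sketch of that distinction, assuming a /dev/shm path on Unix and mirroring the `create_mem_map` calls above; the helper name is illustrative.)

```python
import mmap
import os

def open_named_mem_map(name: str, size: int) -> mmap.mmap:
    """Create/open a writable named shared memory map on either platform."""
    if os.name == "nt":
        # Windows: the tagname names the mapping, and opening a tagname that
        # does not exist yet creates it (hence the initialized-flag check).
        return mmap.mmap(-1, size, name, access=mmap.ACCESS_WRITE)
    # Unix: maps are file-backed; create a file of the right size under the
    # preferred shared memory directory and map it, as create_mem_map does.
    path = os.path.join("/dev/shm", name)
    with open(path, "wb+") as f:
        f.truncate(size)
        return mmap.mmap(f.fileno(), size, mmap.MAP_SHARED, mmap.PROT_WRITE)
```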
- """ - - MEM_MAP_INITIALIZED_FLAG_NUM_BYTES = 1 - """ - The length of a bool which is the length of the part of the header flag - specifying if the memory map is already created and used. - This is to distinguish between new memory maps and ones that were - previously created and may be in use already. - Header flags are defined in the class SharedMemoryConstants.HeaderFlags. - """ - - CONTENT_LENGTH_NUM_BYTES = 8 - """ - The length of a long which is the length of the part of the header - specifying content length in the memory map. - """ - - CONTENT_HEADER_TOTAL_BYTES = MEM_MAP_INITIALIZED_FLAG_NUM_BYTES + \ - CONTENT_LENGTH_NUM_BYTES - """ - The total length of the header - """ - - MIN_BYTES_FOR_SHARED_MEM_TRANSFER = 1024 * 1024 # 1 MB - """ - Minimum size (in number of bytes) an object must be in order for it to be - transferred over shared memory. - If the object is smaller than this, gRPC is used. - Note: This needs to be consistent among the host and workers. - e.g. in the host, it is defined in SharedMemoryConstants.cs - """ - - MAX_BYTES_FOR_SHARED_MEM_TRANSFER = 2 * 1024 * 1024 * 1024 # 2 GB - """ - Maximum size (in number of bytes) an object must be in order for it to be - transferred over shared memory. - This limit is imposed because initializing objects like greater than 2GB - is not allowed in DotNet. - Ref: https://stackoverflow.com/a/3944336/3132415 - Note: This needs to be consistent among the host and workers. - e.g. in the host, it is defined in SharedMemoryConstants.cs - """ - - SIZE_OF_CHAR_BYTES = 2 - """ - This is what the size of a character is in DotNet. Can be verified by - doing "sizeof(char)". - To keep the limits consistent, when determining if a string can be - transferred over shared memory, we multiply the number of characters - by this constant. - Corresponding logic in the host can be found in SharedMemoryManager.cs - """ - - UNIX_TEMP_DIRS = ["/dev/shm"] - """ - Default directories in Unix where the memory maps can be found. - These list is in order of preference, starting with the highest preference - directory. - A user can override this by using the AppSetting: - UNIX_SHARED_MEMORY_DIRECTORIES. - """ - - UNIX_TEMP_DIR_SUFFIX = "AzureFunctions" - """ - Suffix for the temp directories containing memory maps in Unix - """ diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_exception.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_exception.py deleted file mode 100644 index cf802d336..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_exception.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -class SharedMemoryException(Exception): - """ - Exception raised when using shared memory. - """ - def __init__(self, msg: str) -> None: - super().__init__(msg) diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py deleted file mode 100644 index ec1a1a7cb..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import uuid -from typing import Dict, Optional - -from ...constants import FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED -from ...logging import logger -from ...utils.common import is_envvar_true -from ..datumdef import Datum -from .file_accessor_factory import FileAccessorFactory -from .shared_memory_constants import SharedMemoryConstants as consts -from .shared_memory_map import SharedMemoryMap -from .shared_memory_metadata import SharedMemoryMetadata - - -class SharedMemoryManager: - """ - Performs all operations related to reading/writing data from/to shared - memory. - This is used for transferring input/output data of the function from/to the - functions host over shared memory as opposed to RPC to improve the rate of - data transfer and the function's end-to-end latency. - """ - def __init__(self): - # The allocated memory maps are tracked here so that a reference to them - # is kept open until they have been used (e.g. if they contain a - # function's output, it is read by the functions host). - # Having a mapping of the name and the memory map is then later used to - # close a given memory map by its name, after it has been used. - # key: mem_map_name, val: SharedMemoryMap - self._allocated_mem_maps: Dict[str, SharedMemoryMap] = {} - self._file_accessor = FileAccessorFactory.create_file_accessor() - - def __del__(self): - del self._file_accessor - del self._allocated_mem_maps - - @property - def allocated_mem_maps(self): - """ - List of allocated shared memory maps. - """ - return self._allocated_mem_maps - - @property - def file_accessor(self): - """ - FileAccessor instance for accessing memory maps. - """ - return self._file_accessor - - def is_enabled(self) -> bool: - """ - Whether supported types should be transferred between functions host and - the worker using shared memory. - """ - return is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - - def is_supported(self, datum: Datum) -> bool: - """ - Whether the given Datum object can be transferred to the functions host - using shared memory. - This logic is kept consistent with the host's which can be found in - SharedMemoryManager.cs - """ - if datum.type == 'bytes': - num_bytes = len(datum.value) - if num_bytes >= consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER and \ - num_bytes <= consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER: - return True - elif datum.type == 'string': - num_bytes = len(datum.value) * consts.SIZE_OF_CHAR_BYTES - if num_bytes >= consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER and \ - num_bytes <= consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER: - return True - return False - - def put_bytes(self, content: bytes) -> Optional[SharedMemoryMetadata]: - """ - Writes the given bytes into shared memory. - Returns metadata about the shared memory region to which the content was - written if successful, None otherwise. 
- """ - if content is None: - return None - mem_map_name = str(uuid.uuid4()) - content_length = len(content) - shared_mem_map = self._create(mem_map_name, content_length) - if shared_mem_map is None: - return None - try: - num_bytes_written = shared_mem_map.put_bytes(content) - except Exception as e: - logger.warning('Cannot write %s bytes into shared memory %s - %s', - content_length, mem_map_name, e) - shared_mem_map.dispose() - return None - if num_bytes_written != content_length: - logger.error( - 'Cannot write data into shared memory %s (%s != %s)', - mem_map_name, num_bytes_written, content_length) - shared_mem_map.dispose() - return None - self.allocated_mem_maps[mem_map_name] = shared_mem_map - return SharedMemoryMetadata(mem_map_name, content_length) - - def put_string(self, content: str) -> Optional[SharedMemoryMetadata]: - """ - Writes the given string into shared memory. - Returns the name of the memory map into which the data was written if - succesful, None otherwise. - Note: The encoding used here must be consistent with what is used by the - host in SharedMemoryManager.cs (GetStringAsync/PutStringAsync). - """ - if content is None: - return None - content_bytes = content.encode('utf-8') - return self.put_bytes(content_bytes) - - def get_bytes(self, mem_map_name: str, offset: int, count: int) \ - -> Optional[bytes]: - """ - Reads data from the given memory map with the provided name, starting at - the provided offset and reading a total of count bytes. - Returns the data read from shared memory as bytes if successful, None - otherwise. - """ - if offset != 0: - logger.error( - 'Cannot read bytes. Non-zero offset (%s) not supported.', - offset) - return None - shared_mem_map = self._open(mem_map_name, count) - if shared_mem_map is None: - return None - try: - content = shared_mem_map.get_bytes(content_offset=0, - bytes_to_read=count) - finally: - shared_mem_map.dispose(is_delete_file=False) - return content - - def get_string(self, mem_map_name: str, offset: int, count: int) \ - -> Optional[str]: - """ - Reads data from the given memory map with the provided name, starting at - the provided offset and reading a total of count bytes. - Returns the data read from shared memory as a string if successful, None - otherwise. - Note: The encoding used here must be consistent with what is used by the - host in SharedMemoryManager.cs (GetStringAsync/PutStringAsync). - """ - content_bytes = self.get_bytes(mem_map_name, offset, count) - if content_bytes is None: - return None - content_str = content_bytes.decode('utf-8') - return content_str - - def free_mem_map(self, mem_map_name: str, - to_delete_backing_resources: bool = True) -> bool: - """ - Frees the memory map and, if specified, any backing resources (e.g. - file in the case of Unix) associated with it. - If there is no memory map with the given name being tracked, then no - action is performed. - Returns True if the memory map was freed successfully, False otherwise. - """ - if mem_map_name not in self.allocated_mem_maps: - logger.error( - 'Cannot find memory map in list of allocations %s', - mem_map_name) - return False - shared_mem_map = self.allocated_mem_maps[mem_map_name] - success = shared_mem_map.dispose(to_delete_backing_resources) - del self.allocated_mem_maps[mem_map_name] - return success - - def _create(self, mem_map_name: str, content_length: int) \ - -> Optional[SharedMemoryMap]: - """ - Creates a new SharedMemoryMap with the given name and content length. 
- Returns the SharedMemoryMap object if successful, None otherwise. - """ - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_length - mem_map = self.file_accessor.create_mem_map(mem_map_name, mem_map_size) - if mem_map is None: - return None - return SharedMemoryMap(self.file_accessor, mem_map_name, mem_map) - - def _open(self, mem_map_name: str, content_length: int) \ - -> Optional[SharedMemoryMap]: - """ - Opens an existing SharedMemoryMap with the given name and content - length. - Returns the SharedMemoryMap object if successful, None otherwise. - """ - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_length - mem_map = self.file_accessor.open_mem_map(mem_map_name, mem_map_size) - if mem_map is None: - return None - return SharedMemoryMap(self.file_accessor, mem_map_name, mem_map) diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py deleted file mode 100644 index d84eb81c4..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import mmap -import os -import struct -import sys -from typing import Optional - -from ...logging import logger -from .file_accessor import FileAccessor -from .shared_memory_constants import SharedMemoryConstants as consts -from .shared_memory_exception import SharedMemoryException - - -class SharedMemoryMap: - """ - Shared memory region to read/write data from. - """ - def __init__( - self, - file_accessor: FileAccessor, - mem_map_name: str, - mem_map: mmap.mmap): - if mem_map is None: - raise SharedMemoryException( - 'Cannot initialize SharedMemoryMap. Invalid memory map ' - 'provided') - if mem_map_name is None or mem_map_name == '': - raise SharedMemoryException( - f'Cannot initialize SharedMemoryMap. Invalid name ' - f'{mem_map_name}') - self.file_accessor = file_accessor - self.mem_map_name = mem_map_name - self.mem_map = mem_map - - def put_bytes(self, content: bytes) -> Optional[int]: - """ - Writes the given content bytes into this SharedMemoryMap. - The number of bytes written must be less than or equal to the size of - the SharedMemoryMap. - Returns the number of bytes of content written. - """ - if content is None: - return None - content_length = len(content) - # Seek past the MemoryMapInitialized flag section of the header - self.mem_map.seek(consts.MEM_MAP_INITIALIZED_FLAG_NUM_BYTES) - # Write the content length into the header - content_length_bytes = content_length.to_bytes( - consts.CONTENT_LENGTH_NUM_BYTES, byteorder=sys.byteorder) - num_content_length_bytes = len(content_length_bytes) - num_content_length_bytes_written = self.mem_map.write( - content_length_bytes) - if num_content_length_bytes_written != num_content_length_bytes: - logger.error( - 'Cannot write content size to memory map %s (%s != %s)', - self.mem_map_name, num_content_length_bytes_written, - num_content_length_bytes) - return 0 - # Write the content - num_content_bytes_written = self.mem_map.write(content) - self.mem_map.flush() - return num_content_bytes_written - - def get_bytes(self, content_offset: int = 0, bytes_to_read: int = 0) \ - -> Optional[bytes]: - """ - Read content from this SharedMemoryMap with the given name and starting - at the given offset. - content_offset = 0 means read from the beginning of the content. 
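(Taken together with the header constants, `put_bytes` above lays each region out as one flag byte, an eight-byte length, then the payload, and flushes the map. A compact round-trip sketch against an anonymous map follows; it is illustrative only — the real class operates on named, file-backed maps and checks for partial writes. On the little-endian platforms the worker runs on, `sys.byteorder` matches the `"<q"` decode used on the read side.)

```python
import mmap
import struct
import sys

FLAG_NUM_BYTES = 1    # initialization flag
LENGTH_NUM_BYTES = 8  # content length, written in native byte order

payload = b"hello shared memory"
mm = mmap.mmap(-1, FLAG_NUM_BYTES + LENGTH_NUM_BYTES + len(payload))

# Write path, as in SharedMemoryMap.put_bytes: skip the flag, write the
# length header, then the content, and flush.
mm.seek(FLAG_NUM_BYTES)
mm.write(len(payload).to_bytes(LENGTH_NUM_BYTES, byteorder=sys.byteorder))
mm.write(payload)
mm.flush()

# Read path: the length header is decoded as a signed 64-bit long ("<q"),
# matching the format the functions host (C#) uses.
mm.seek(FLAG_NUM_BYTES)
(content_length,) = struct.unpack("<q", mm.read(LENGTH_NUM_BYTES))
assert mm.read(content_length) == payload
mm.close()
```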
- bytes_to_read = 0 means read the entire content. - Returns the content as bytes if successful, None otherwise. - """ - content_length = self._get_content_length() - if content_length is None: - return None - # Seek past the header and get to the content - self.mem_map.seek(consts.CONTENT_HEADER_TOTAL_BYTES) - if content_offset > 0: - self.mem_map.seek(content_offset, os.SEEK_CUR) - if bytes_to_read > 0: - # Read up to the specified number of bytes to read - content = self.mem_map.read(bytes_to_read) - else: - # Read the entire content - content = self.mem_map.read() - return content - - def dispose(self, is_delete_file: bool = True) -> bool: - """ - Close the underlying memory map. - Returns True if the resources were disposed, False otherwise. - """ - success = True - if is_delete_file: - success = self.file_accessor.delete_mem_map(self.mem_map_name, - self.mem_map) - self.mem_map.close() - return success - - def _bytes_to_long(self, input_bytes) -> int: - """ - Decode a set of bytes representing a long. - This uses the format that the functions host (i.e. C#) uses. - """ - return struct.unpack("<q", input_bytes)[0] - - def _get_content_length(self) -> Optional[int]: - """ - Read the header of the memory map to determine the length of content - contained in that memory map. - Returns the content length as a non-negative integer if successful, - None otherwise. - """ - self.mem_map.seek(consts.MEM_MAP_INITIALIZED_FLAG_NUM_BYTES) - header_bytes = self.mem_map.read(consts.CONTENT_LENGTH_NUM_BYTES) - content_length = self._bytes_to_long(header_bytes) - return content_length diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_metadata.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_metadata.py deleted file mode 100644 index ee5c50e07..000000000 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_metadata.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -class SharedMemoryMetadata: - """ - Information about a shared memory region. - """ - def __init__(self, mem_map_name, count_bytes): - # Name of the memory map - self.mem_map_name = mem_map_name - # Number of bytes of content in the memory map - self.count_bytes = count_bytes diff --git a/azure_functions_worker/dispatcher.py b/azure_functions_worker/dispatcher.py deleted file mode 100644 index 897a3499a..000000000 --- a/azure_functions_worker/dispatcher.py +++ /dev/null @@ -1,1115 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -"""GRPC client. - -Implements loading and execution of Python workers. -""" - -import asyncio -import concurrent.futures -import logging -import os -import platform -import queue -import sys -import threading -from asyncio import BaseEventLoop -from datetime import datetime -from logging import LogRecord -from typing import List, Optional - -import grpc - -from . import bindings, constants, functions, loader, protos
-from .bindings.shared_memory_data_transfer import SharedMemoryManager -from .constants import ( - APPLICATIONINSIGHTS_CONNECTION_STRING, - HTTP_URI, - METADATA_PROPERTIES_WORKER_INDEXED, - PYTHON_AZURE_MONITOR_LOGGER_NAME, - PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT, - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_OPENTELEMETRY, - PYTHON_ENABLE_OPENTELEMETRY_DEFAULT, - PYTHON_LANGUAGE_RUNTIME, - PYTHON_ROLLBACK_CWD_PATH, - PYTHON_SCRIPT_FILE_NAME, - PYTHON_SCRIPT_FILE_NAME_DEFAULT, - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT, - PYTHON_THREADPOOL_THREAD_COUNT_MAX_37, - PYTHON_THREADPOOL_THREAD_COUNT_MIN, - REQUIRES_ROUTE_PARAMETERS -) -from .extension import ExtensionManager -from .http_v2 import ( - HttpServerInitError, - HttpV2Registry, - http_coordinator, - initialize_http_server, - sync_http_request, -) -from .logging import ( - CONSOLE_LOG_PREFIX, - disable_console_logging, - enable_console_logging, - error_logger, - format_exception, - is_system_log_category, - logger, -) -from .utils.app_setting_manager import get_python_appsetting_state -from .utils.common import get_app_setting, is_envvar_true, validate_script_file_name -from .utils.dependency import DependencyManager -from .utils.tracing import marshall_exception_trace -from .utils.wrappers import disable_feature_by -from .version import VERSION - -_TRUE = "true" -_TRACEPARENT = "traceparent" -_TRACESTATE = "tracestate" - - -class DispatcherMeta(type): - __current_dispatcher__ = None - - @property - def current(mcls): - disp = mcls.__current_dispatcher__ - if disp is None: - raise RuntimeError('no currently running Dispatcher is found') - return disp - - -class Dispatcher(metaclass=DispatcherMeta): - _GRPC_STOP_RESPONSE = object() - - def __init__(self, loop: BaseEventLoop, host: str, port: int, - worker_id: str, request_id: str, - grpc_connect_timeout: float, - grpc_max_msg_len: int = -1) -> None: - self._loop = loop - self._host = host - self._port = port - self._request_id = request_id - self._worker_id = worker_id - self._function_data_cache_enabled = False - self._functions = functions.Registry() - self._shmem_mgr = SharedMemoryManager() - self._old_task_factory = None - - # Used to store metadata returns - self._function_metadata_result = None - self._function_metadata_exception = None - - # Used for checking if open telemetry is enabled - self._azure_monitor_available = False - self._context_api = None - self._trace_context_propagator = None - - # We allow the customer to change synchronous thread pool max worker - # count by setting the PYTHON_THREADPOOL_THREAD_COUNT app setting. - # For 3.[6|7|8], the default value is 1. - # For 3.9, we don't set this value by default but we honor the - # incoming app setting. - self._sync_call_tp: concurrent.futures.Executor = ( - self._create_sync_call_tp(self._get_sync_tp_max_workers()) - ) - - self._grpc_connect_timeout: float = grpc_connect_timeout - # This is set to -1 by default to remove the limitation on msg size - self._grpc_max_msg_len: int = grpc_max_msg_len - self._grpc_resp_queue: queue.Queue = queue.Queue() - self._grpc_connected_fut = loop.create_future() - self._grpc_thread: threading.Thread = threading.Thread( - name='grpc-thread', target=self.__poll_grpc) - - @staticmethod - def get_worker_metadata(): - return protos.WorkerMetadata( - runtime_name=PYTHON_LANGUAGE_RUNTIME, - runtime_version=f"{sys.version_info.major}."
- f"{sys.version_info.minor}", - worker_version=VERSION, - worker_bitness=platform.machine(), - custom_properties={}) - - def get_sync_tp_workers_set(self): - """We don't know the exact value of the threadcount set for the Python - 3.9 scenarios (as we'll start passing only None by default), and we - need to get that information. - - Ref: concurrent.futures.thread.ThreadPoolExecutor.__init__._max_workers - """ - return self._sync_call_tp._max_workers - - @classmethod - async def connect(cls, host: str, port: int, worker_id: str, - request_id: str, connect_timeout: float): - loop = asyncio.events.get_event_loop() - disp = cls(loop, host, port, worker_id, request_id, connect_timeout) - disp._grpc_thread.start() - await disp._grpc_connected_fut - logger.info('Successfully opened gRPC channel to %s:%s ', host, port) - return disp - - async def dispatch_forever(self): # sourcery skip: swap-if-expression - if DispatcherMeta.__current_dispatcher__ is not None: - raise RuntimeError('there can be only one running dispatcher per ' - 'process') - - self._old_task_factory = self._loop.get_task_factory() - - loader.install() - - DispatcherMeta.__current_dispatcher__ = self - try: - forever = self._loop.create_future() - - self._grpc_resp_queue.put_nowait( - protos.StreamingMessage( - request_id=self.request_id, - start_stream=protos.StartStream( - worker_id=self.worker_id))) - - # In Python 3.11+, constructing a task has an optional context - # parameter. Allow for this param to be passed to ContextEnabledTask - self._loop.set_task_factory( - lambda loop, coro, context=None: ContextEnabledTask( - coro, loop=loop, context=context)) - - # Detach console logging before enabling GRPC channel logging - logger.info('Detaching console logging.') - disable_console_logging() - - # Attach gRPC logging to the root logger. 
Since gRPC channel is - # established, should use it for system and user logs - logging_handler = AsyncLoggingHandler() - root_logger = logging.getLogger() - - log_level = logging.INFO if not is_envvar_true( - PYTHON_ENABLE_DEBUG_LOGGING) else logging.DEBUG - - root_logger.setLevel(log_level) - root_logger.addHandler(logging_handler) - logger.info('Switched to gRPC logging.') - logging_handler.flush() - - try: - await forever - finally: - logger.warning('Detaching gRPC logging due to exception.') - logging_handler.flush() - root_logger.removeHandler(logging_handler) - - # Reenable console logging when there's an exception - enable_console_logging() - logger.warning('Switched to console logging due to exception.') - finally: - DispatcherMeta.__current_dispatcher__ = None - - loader.uninstall() - - self._loop.set_task_factory(self._old_task_factory) - self.stop() - - def stop(self) -> None: - if self._grpc_thread is not None: - self._grpc_resp_queue.put_nowait(self._GRPC_STOP_RESPONSE) - self._grpc_thread.join() - self._grpc_thread = None - - self._stop_sync_call_tp() - - def on_logging(self, record: logging.LogRecord, - formatted_msg: str) -> None: - if record.levelno >= logging.CRITICAL: - log_level = protos.RpcLog.Critical - elif record.levelno >= logging.ERROR: - log_level = protos.RpcLog.Error - elif record.levelno >= logging.WARNING: - log_level = protos.RpcLog.Warning - elif record.levelno >= logging.INFO: - log_level = protos.RpcLog.Information - elif record.levelno >= logging.DEBUG: - log_level = protos.RpcLog.Debug - else: - log_level = getattr(protos.RpcLog, 'None') - - if is_system_log_category(record.name): - log_category = protos.RpcLog.RpcLogCategory.Value('System') - else: # customers using logging will yield 'root' in record.name - log_category = protos.RpcLog.RpcLogCategory.Value('User') - - log = dict( - level=log_level, - message=formatted_msg, - category=record.name, - log_category=log_category - ) - - invocation_id = get_current_invocation_id() - if invocation_id is not None: - log['invocation_id'] = invocation_id - - self._grpc_resp_queue.put_nowait( - protos.StreamingMessage( - request_id=self.request_id, - rpc_log=protos.RpcLog(**log))) - - @property - def request_id(self) -> str: - return self._request_id - - @property - def worker_id(self) -> str: - return self._worker_id - - # noinspection PyBroadException - @staticmethod - def _serialize_exception(exc: Exception): - try: - message = f'{type(exc).__name__}: {exc}' - except Exception: - message = ('Unhandled exception in function. ' - 'Could not serialize original exception message.') - - try: - stack_trace = marshall_exception_trace(exc) - except Exception: - stack_trace = '' - - return protos.RpcException(message=message, stack_trace=stack_trace) - - async def _dispatch_grpc_request(self, request): - content_type = request.WhichOneof('content') - request_handler = getattr(self, f'_handle__{content_type}', None) - if request_handler is None: - # Don't crash on unknown messages. Some of them can be ignored; - # and if something goes really wrong the host can always just - # kill the worker's process. 
- logger.error('unknown StreamingMessage content type %s', - content_type) - return - - resp = await request_handler(request) - self._grpc_resp_queue.put_nowait(resp) - - def initialize_azure_monitor(self): - """Initializes OpenTelemetry and the Azure monitor distro. - """ - self.update_opentelemetry_status() - try: - from azure.monitor.opentelemetry import configure_azure_monitor - - # Set the functions resource detector manually until it is - # officially included in the Azure monitor distro - os.environ.setdefault( - "OTEL_EXPERIMENTAL_RESOURCE_DETECTORS", - "azure_functions", - ) - - configure_azure_monitor( - # The connection string can be explicitly specified in an app - # setting. If not set, it defaults to the env var - # APPLICATIONINSIGHTS_CONNECTION_STRING - connection_string=get_app_setting( - setting=APPLICATIONINSIGHTS_CONNECTION_STRING - ), - logger_name=get_app_setting( - setting=PYTHON_AZURE_MONITOR_LOGGER_NAME, - default_value=PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT - ), - ) - self._azure_monitor_available = True - - logger.info("Successfully configured Azure monitor distro.") - except ImportError: - logger.exception( - "Cannot import Azure Monitor distro." - ) - self._azure_monitor_available = False - except Exception: - logger.exception( - "Error initializing Azure monitor distro." - ) - self._azure_monitor_available = False - - def update_opentelemetry_status(self): - """Check for OpenTelemetry library availability and - update the status attribute.""" - try: - from opentelemetry import context as context_api - from opentelemetry.trace.propagation.tracecontext import ( - TraceContextTextMapPropagator, - ) - - self._context_api = context_api - self._trace_context_propagator = TraceContextTextMapPropagator() - - except ImportError: - logger.exception( - "Cannot import OpenTelemetry libraries." - ) - - async def _handle__worker_init_request(self, request): - logger.info('Received WorkerInitRequest, ' - 'python version %s, ' - 'worker version %s, ' - 'request ID %s. ' - 'App Settings state: %s. 
' - 'To enable debug level logging, please refer to ' - 'https://aka.ms/python-enable-debug-logging', - sys.version, - VERSION, - self.request_id, - get_python_appsetting_state() - ) - - worker_init_request = request.worker_init_request - host_capabilities = worker_init_request.capabilities - if constants.FUNCTION_DATA_CACHE in host_capabilities: - val = host_capabilities[constants.FUNCTION_DATA_CACHE] - self._function_data_cache_enabled = val == _TRUE - - capabilities = { - constants.RAW_HTTP_BODY_BYTES: _TRUE, - constants.TYPED_DATA_COLLECTION: _TRUE, - constants.RPC_HTTP_BODY_ONLY: _TRUE, - constants.WORKER_STATUS: _TRUE, - constants.RPC_HTTP_TRIGGER_METADATA_REMOVED: _TRUE, - constants.SHARED_MEMORY_DATA_TRANSFER: _TRUE, - } - if get_app_setting(setting=PYTHON_ENABLE_OPENTELEMETRY, - default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): - self.initialize_azure_monitor() - - if self._azure_monitor_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = _TRUE - - if DependencyManager.should_load_cx_dependencies(): - DependencyManager.prioritize_customer_dependencies() - - if DependencyManager.is_in_linux_consumption(): - import azure.functions # NoQA - - # loading bindings registry and saving results to a static - # dictionary which will be later used in the invocation request - bindings.load_binding_registry() - - if is_envvar_true(PYTHON_ENABLE_INIT_INDEXING): - try: - self.load_function_metadata( - worker_init_request.function_app_directory, - caller_info="worker_init_request") - - if HttpV2Registry.http_v2_enabled(): - capabilities[HTTP_URI] = \ - initialize_http_server(self._host) - capabilities[REQUIRES_ROUTE_PARAMETERS] = _TRUE - - except HttpServerInitError: - raise - except Exception as ex: - self._function_metadata_exception = ex - - return protos.StreamingMessage( - request_id=self.request_id, - worker_init_response=protos.WorkerInitResponse( - capabilities=capabilities, - worker_metadata=self.get_worker_metadata(), - result=protos.StatusResult( - status=protos.StatusResult.Success))) - - async def _handle__worker_status_request(self, request): - # Logging is not necessary in this request since the response is used - # for host to judge scale decisions of out-of-proc languages. - # Having log here will reduce the responsiveness of the worker. - return protos.StreamingMessage( - request_id=request.request_id, - worker_status_response=protos.WorkerStatusResponse()) - - def load_function_metadata(self, function_app_directory, caller_info): - """ - This method is called to index the functions in the function app - directory and save the results in function_metadata_result or - function_metadata_exception in case of an exception. - """ - script_file_name = get_app_setting( - setting=PYTHON_SCRIPT_FILE_NAME, - default_value=f'{PYTHON_SCRIPT_FILE_NAME_DEFAULT}') - - logger.debug( - 'Received load metadata request from %s, request ID %s, ' - 'script_file_name: %s', - caller_info, self.request_id, script_file_name) - - validate_script_file_name(script_file_name) - function_path = os.path.join(function_app_directory, - script_file_name) - - # For V1, the function path will not exist and - # return None. 
- self._function_metadata_result = ( - self.index_functions(function_path, function_app_directory)) \ - if os.path.exists(function_path) else None - - async def _handle__functions_metadata_request(self, request): - metadata_request = request.functions_metadata_request - function_app_directory = metadata_request.function_app_directory - - script_file_name = get_app_setting( - setting=PYTHON_SCRIPT_FILE_NAME, - default_value=f'{PYTHON_SCRIPT_FILE_NAME_DEFAULT}') - function_path = os.path.join(function_app_directory, - script_file_name) - - logger.info( - 'Received WorkerMetadataRequest, request ID %s, ' - 'function_path: %s', - self.request_id, function_path) - - if not is_envvar_true(PYTHON_ENABLE_INIT_INDEXING): - try: - self.load_function_metadata( - function_app_directory, - caller_info="functions_metadata_request") - except Exception as ex: - self._function_metadata_exception = ex - - if self._function_metadata_exception: - return protos.StreamingMessage( - request_id=request.request_id, - function_metadata_response=protos.FunctionMetadataResponse( - result=protos.StatusResult( - status=protos.StatusResult.Failure, - exception=self._serialize_exception( - self._function_metadata_exception)))) - else: - metadata_result = self._function_metadata_result - - return protos.StreamingMessage( - request_id=request.request_id, - function_metadata_response=protos.FunctionMetadataResponse( - use_default_metadata_indexing=False if metadata_result else - True, - function_metadata_results=metadata_result, - result=protos.StatusResult( - status=protos.StatusResult.Success))) - - async def _handle__function_load_request(self, request): - func_request = request.function_load_request - function_id = func_request.function_id - function_metadata = func_request.metadata - function_name = function_metadata.name - function_app_directory = function_metadata.directory - - logger.info( - 'Received WorkerLoadRequest, request ID %s, function_id: %s,' - 'function_name: %s, function_app_directory : %s', - self.request_id, function_id, function_name, - function_app_directory) - - programming_model = "V2" - try: - if not self._functions.get_function(function_id): - - if function_metadata.properties.get( - METADATA_PROPERTIES_WORKER_INDEXED, False): - # This is for the second worker and above where the worker - # indexing is enabled and load request is called without - # calling the metadata request. 
In this case we index the - # function and update the worker's registry - - try: - self.load_function_metadata( - function_app_directory, - caller_info="functions_load_request") - except Exception as ex: - self._function_metadata_exception = ex - - # For the second worker, if there was an exception in - # indexing, we raise it here - if self._function_metadata_exception: - raise Exception(self._function_metadata_exception) - - else: - # legacy function - programming_model = "V1" - - func = loader.load_function( - function_name, - function_app_directory, - func_request.metadata.script_file, - func_request.metadata.entry_point) - - self._functions.add_function( - function_id, func, func_request.metadata) - - try: - ExtensionManager.function_load_extension( - function_name, - func_request.metadata.directory - ) - except Exception as ex: - logger.error("Failed to load extensions: %s", ex) - raise - - logger.info('Successfully processed FunctionLoadRequest, ' - 'request ID: %s, ' - 'function ID: %s, ' - 'function name: %s, ' - 'programming model: %s', - self.request_id, - function_id, - function_name, - programming_model) - - return protos.StreamingMessage( - request_id=self.request_id, - function_load_response=protos.FunctionLoadResponse( - function_id=function_id, - result=protos.StatusResult( - status=protos.StatusResult.Success))) - - except Exception as ex: - return protos.StreamingMessage( - request_id=self.request_id, - function_load_response=protos.FunctionLoadResponse( - function_id=function_id, - result=protos.StatusResult( - status=protos.StatusResult.Failure, - exception=self._serialize_exception(ex)))) - - async def _handle__invocation_request(self, request): - invocation_time = datetime.utcnow() - invoc_request = request.invocation_request - invocation_id = invoc_request.invocation_id - function_id = invoc_request.function_id - http_v2_enabled = False - - # Set the current `invocation_id` to the current task so - # that our logging handler can find it. 
- current_task = asyncio.current_task(self._loop) - assert isinstance(current_task, ContextEnabledTask) - current_task.set_azure_invocation_id(invocation_id) - - try: - fi: functions.FunctionInfo = self._functions.get_function( - function_id) - assert fi is not None - - function_invocation_logs: List[str] = [ - 'Received FunctionInvocationRequest', - f'request ID: {self.request_id}', - f'function ID: {function_id}', - f'function name: {fi.name}', - f'invocation ID: {invocation_id}', - f'function type: {"async" if fi.is_async else "sync"}', - f'timestamp (UTC): {invocation_time}' - ] - if not fi.is_async: - function_invocation_logs.append( - f'sync threadpool max workers: ' - f'{self.get_sync_tp_workers_set()}' - ) - logger.info(', '.join(function_invocation_logs)) - - args = {} - - http_v2_enabled = self._functions.get_function(function_id) \ - .is_http_func and \ - HttpV2Registry.http_v2_enabled() - - for pb in invoc_request.input_data: - pb_type_info = fi.input_types[pb.name] - if bindings.is_trigger_binding(pb_type_info.binding_name): - trigger_metadata = invoc_request.trigger_metadata - else: - trigger_metadata = None - - args[pb.name] = bindings.from_incoming_proto( - pb_type_info.binding_name, - pb, - trigger_metadata=trigger_metadata, - pytype=pb_type_info.pytype, - shmem_mgr=self._shmem_mgr, - function_name=self._functions.get_function( - function_id).name, - is_deferred_binding=pb_type_info.deferred_bindings_enabled) - - if http_v2_enabled: - http_request = await http_coordinator.get_http_request_async( - invocation_id) - - trigger_arg_name = fi.trigger_metadata.get('param_name') - func_http_request = args[trigger_arg_name] - await sync_http_request(http_request, func_http_request) - args[trigger_arg_name] = http_request - - fi_context = self._get_context(invoc_request, fi.name, - fi.directory) - - # Use thread-local storage to store the invocation ID - # for a customer's threads - fi_context.thread_local_storage.invocation_id = invocation_id - if fi.requires_context: - args['context'] = fi_context - - if fi.output_types: - for name in fi.output_types: - args[name] = bindings.Out() - - if fi.is_async: - if self._azure_monitor_available: - self.configure_opentelemetry(fi_context) - - call_result = \ - await self._run_async_func(fi_context, fi.func, args) - else: - call_result = await self._loop.run_in_executor( - self._sync_call_tp, - self._run_sync_func, - invocation_id, fi_context, fi.func, args) - - if call_result is not None and not fi.has_return: - raise RuntimeError( - f'function {fi.name!r} without a $return binding ' - 'returned a non-None value') - - if http_v2_enabled: - http_coordinator.set_http_response(invocation_id, call_result) - - output_data = [] - cache_enabled = self._function_data_cache_enabled - if fi.output_types: - for out_name, out_type_info in fi.output_types.items(): - val = args[out_name].get() - if val is None: - # TODO: is the "Out" parameter optional? - # Can "None" be marshaled into protos.TypedData? 
- continue - - param_binding = bindings.to_outgoing_param_binding( - out_type_info.binding_name, val, - pytype=out_type_info.pytype, - out_name=out_name, shmem_mgr=self._shmem_mgr, - is_function_data_cache_enabled=cache_enabled) - output_data.append(param_binding) - - return_value = None - if fi.return_type is not None and not http_v2_enabled: - return_value = bindings.to_outgoing_proto( - fi.return_type.binding_name, - call_result, - pytype=fi.return_type.pytype, - ) - - # Actively flush customer print() function to console - sys.stdout.flush() - - return protos.StreamingMessage( - request_id=self.request_id, - invocation_response=protos.InvocationResponse( - invocation_id=invocation_id, - return_value=return_value, - result=protos.StatusResult( - status=protos.StatusResult.Success), - output_data=output_data)) - - except Exception as ex: - if http_v2_enabled: - http_coordinator.set_http_response(invocation_id, ex) - - return protos.StreamingMessage( - request_id=self.request_id, - invocation_response=protos.InvocationResponse( - invocation_id=invocation_id, - result=protos.StatusResult( - status=protos.StatusResult.Failure, - exception=self._serialize_exception(ex)))) - - async def _handle__function_environment_reload_request(self, request): - """Only runs on Linux Consumption placeholder specialization. - This is called only when placeholder mode is true. On worker restarts - worker init request will be called directly. - """ - try: - logger.info('Received FunctionEnvironmentReloadRequest, ' - 'request ID: %s, ' - 'App Settings state: %s. ' - 'To enable debug level logging, please refer to ' - 'https://aka.ms/python-enable-debug-logging', - self.request_id, - get_python_appsetting_state()) - - func_env_reload_request = \ - request.function_environment_reload_request - directory = func_env_reload_request.function_app_directory - - # Append function project root to module finding sys.path - if func_env_reload_request.function_app_directory: - sys.path.append(func_env_reload_request.function_app_directory) - - # Clear sys.path import cache, reload all module from new sys.path - sys.path_importer_cache.clear() - - # Reload environment variables - os.environ.clear() - env_vars = func_env_reload_request.environment_variables - for var in env_vars: - os.environ[var] = env_vars[var] - - # Apply PYTHON_THREADPOOL_THREAD_COUNT - self._stop_sync_call_tp() - self._sync_call_tp = ( - self._create_sync_call_tp(self._get_sync_tp_max_workers()) - ) - - if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING): - root_logger = logging.getLogger() - root_logger.setLevel(logging.DEBUG) - - # Reload azure google namespaces - DependencyManager.reload_customer_libraries(directory) - - # calling load_binding_registry again since the - # reload_customer_libraries call clears the registry - bindings.load_binding_registry() - - capabilities = {} - if get_app_setting( - setting=PYTHON_ENABLE_OPENTELEMETRY, - default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): - self.initialize_azure_monitor() - - if self._azure_monitor_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = ( - _TRUE) - - if is_envvar_true(PYTHON_ENABLE_INIT_INDEXING): - try: - self.load_function_metadata( - directory, - caller_info="environment_reload_request") - - if HttpV2Registry.http_v2_enabled(): - capabilities[HTTP_URI] = \ - initialize_http_server(self._host) - capabilities[REQUIRES_ROUTE_PARAMETERS] = _TRUE - except HttpServerInitError: - raise - except Exception as ex: - self._function_metadata_exception = ex - - # Change function app 
directory - if getattr(func_env_reload_request, - 'function_app_directory', None): - self._change_cwd( - func_env_reload_request.function_app_directory) - - success_response = protos.FunctionEnvironmentReloadResponse( - capabilities=capabilities, - worker_metadata=self.get_worker_metadata(), - result=protos.StatusResult( - status=protos.StatusResult.Success)) - - return protos.StreamingMessage( - request_id=self.request_id, - function_environment_reload_response=success_response) - - except Exception as ex: - failure_response = protos.FunctionEnvironmentReloadResponse( - result=protos.StatusResult( - status=protos.StatusResult.Failure, - exception=self._serialize_exception(ex))) - - return protos.StreamingMessage( - request_id=self.request_id, - function_environment_reload_response=failure_response) - - def index_functions(self, function_path: str, function_dir: str): - indexed_functions = loader.index_function_app(function_path) - logger.info( - "Indexed function app and found %s functions", - len(indexed_functions) - ) - - if indexed_functions: - fx_metadata_results, fx_bindings_logs = ( - loader.process_indexed_function( - self._functions, - indexed_functions, - function_dir)) - - indexed_function_logs: List[str] = [] - indexed_function_bindings_logs = [] - for func in indexed_functions: - func_binding_logs = fx_bindings_logs.get(func) - for binding in func.get_bindings(): - deferred_binding_info = func_binding_logs.get( - binding.name)\ - if func_binding_logs.get(binding.name) else "" - indexed_function_bindings_logs.append(( - binding.type, binding.name, deferred_binding_info)) - - function_log = "Function Name: {}, Function Binding: {}" \ - .format(func.get_function_name(), - indexed_function_bindings_logs) - indexed_function_logs.append(function_log) - - logger.info( - 'Successfully processed FunctionMetadataRequest for ' - 'functions: %s. Deferred bindings enabled: %s.', " ".join( - indexed_function_logs), - self._functions.deferred_bindings_enabled()) - - return fx_metadata_results - - async def _handle__close_shared_memory_resources_request(self, request): - """ - Frees any memory maps that were produced as output for a given - invocation. - This is called after the functions host is done reading the output from - the worker and wants the worker to free up those resources. - If the cache is enabled, let the host decide when to delete the - resources. Just drop the reference from the worker. - If the cache is not enabled, the worker should free the resources as at - this point the host has read the memory maps and does not need them. - """ - close_request = request.close_shared_memory_resources_request - map_names = close_request.map_names - # Assign default value of False to all result values. - # If we are successfully able to close a memory map, its result will be - # set to True. 
- results = {mem_map_name: False for mem_map_name in map_names} - - try: - for map_name in map_names: - try: - to_delete_resources = not self._function_data_cache_enabled - success = self._shmem_mgr.free_mem_map(map_name, - to_delete_resources) - results[map_name] = success - except Exception as e: - logger.error('Cannot free memory map %s - %s', map_name, e, - exc_info=True) - finally: - response = protos.CloseSharedMemoryResourcesResponse( - close_map_results=results) - return protos.StreamingMessage( - request_id=self.request_id, - close_shared_memory_resources_response=response) - - def configure_opentelemetry(self, invocation_context): - carrier = {_TRACEPARENT: invocation_context.trace_context.trace_parent, - _TRACESTATE: invocation_context.trace_context.trace_state} - ctx = self._trace_context_propagator.extract(carrier) - self._context_api.attach(ctx) - - @staticmethod - def _get_context(invoc_request: protos.InvocationRequest, name: str, - directory: str) -> bindings.Context: - """ For more information refer: - https://aka.ms/azfunc-invocation-context - """ - trace_context = bindings.TraceContext( - invoc_request.trace_context.trace_parent, - invoc_request.trace_context.trace_state, - invoc_request.trace_context.attributes) - - retry_context = bindings.RetryContext( - invoc_request.retry_context.retry_count, - invoc_request.retry_context.max_retry_count, - invoc_request.retry_context.exception) - - return bindings.Context( - name, directory, invoc_request.invocation_id, - _invocation_id_local, trace_context, retry_context) - - @disable_feature_by(PYTHON_ROLLBACK_CWD_PATH) - def _change_cwd(self, new_cwd: str): - if os.path.exists(new_cwd): - os.chdir(new_cwd) - logger.info('Changing current working directory to %s', new_cwd) - else: - logger.warning('Directory %s is not found when reloading', new_cwd) - - def _stop_sync_call_tp(self): - """Deallocate the current synchronous thread pool and assign - self._sync_call_tp to None. If the thread pool does not exist, - this will be a no op. - """ - if getattr(self, '_sync_call_tp', None): - self._sync_call_tp.shutdown() - self._sync_call_tp = None - - @staticmethod - def _get_sync_tp_max_workers() -> Optional[int]: - def tp_max_workers_validator(value: str) -> bool: - try: - int_value = int(value) - except ValueError: - logger.warning('%s must be an integer', - PYTHON_THREADPOOL_THREAD_COUNT) - return False - - if int_value < PYTHON_THREADPOOL_THREAD_COUNT_MIN: - logger.warning( - '%s must be set to a value between %s and sys.maxint. ' - 'Reverting to default value for max_workers', - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_THREADPOOL_THREAD_COUNT_MIN) - return False - return True - - # Starting Python 3.9, worker won't be putting a limit on the - # max_workers count in the created threadpool. - default_value = None if sys.version_info.minor >= 9 \ - else f'{PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT}' - - max_workers = get_app_setting(setting=PYTHON_THREADPOOL_THREAD_COUNT, - default_value=default_value, - validator=tp_max_workers_validator) - - if sys.version_info.minor <= 7: - max_workers = min(int(max_workers), - PYTHON_THREADPOOL_THREAD_COUNT_MAX_37) - - # We can box the app setting as int for earlier python versions. - return int(max_workers) if max_workers else None - - def _create_sync_call_tp( - self, max_worker: Optional[int]) -> concurrent.futures.Executor: - """Create a thread pool executor with max_worker. This is a wrapper - over ThreadPoolExecutor constructor. 
Consider calling this method after - _stop_sync_call_tp() to ensure only 1 synchronous thread pool is - running. - """ - return concurrent.futures.ThreadPoolExecutor( - max_workers=max_worker - ) - - def _run_sync_func(self, invocation_id, context, func, params): - # This helper exists because we need to access the current - # invocation_id from ThreadPoolExecutor's threads. - context.thread_local_storage.invocation_id = invocation_id - try: - if self._azure_monitor_available: - self.configure_opentelemetry(context) - return ExtensionManager.get_sync_invocation_wrapper(context, - func)(params) - finally: - context.thread_local_storage.invocation_id = None - - async def _run_async_func(self, context, func, params): - return await ExtensionManager.get_async_invocation_wrapper( - context, func, params - ) - - def __poll_grpc(self): - options = [] - if self._grpc_max_msg_len: - options.append(('grpc.max_receive_message_length', - self._grpc_max_msg_len)) - options.append(('grpc.max_send_message_length', - self._grpc_max_msg_len)) - - channel = grpc.insecure_channel( - f'{self._host}:{self._port}', options) - - try: - grpc.channel_ready_future(channel).result( - timeout=self._grpc_connect_timeout) - except Exception as ex: - self._loop.call_soon_threadsafe( - self._grpc_connected_fut.set_exception, ex) - return - else: - self._loop.call_soon_threadsafe( - self._grpc_connected_fut.set_result, True) - - stub = protos.FunctionRpcStub(channel) - - def gen(resp_queue): - while True: - msg = resp_queue.get() - if msg is self._GRPC_STOP_RESPONSE: - grpc_req_stream.cancel() - return - yield msg - - grpc_req_stream = stub.EventStream(gen(self._grpc_resp_queue)) - try: - for req in grpc_req_stream: - self._loop.call_soon_threadsafe( - self._loop.create_task, self._dispatch_grpc_request(req)) - except Exception as ex: - if ex is grpc_req_stream: - # Yes, this is how the grpc_req_stream iterator exits. - return - error_logger.exception( - 'unhandled error in gRPC thread. Exception: {0}'.format( - format_exception(ex))) - raise - - -class AsyncLoggingHandler(logging.Handler): - def emit(self, record: LogRecord) -> None: - # Since we disable console logging after the gRPC channel is - # initiated, we should redirect all messages into the dispatcher. - - # When the dispatcher receives an exception, it should switch back - # to console logging. However, it is possible that - # __current_dispatcher__ is set to None while there are still - # messages buffered in this handler that have not been emitted yet. - msg = self.format(record) - try: - Dispatcher.current.on_logging(record, msg) - except RuntimeError as runtime_error: - # This will cause a 'Dispatcher not found' failure. - # Logging such an issue would cause an infinite loop of gRPC - # logging. To mitigate, we suppress the second-level error - # logging here and use the print function to report the - # exception instead. - print(f'{CONSOLE_LOG_PREFIX} ERROR: {str(runtime_error)}', - file=sys.stderr, flush=True) - - -class ContextEnabledTask(asyncio.Task): - AZURE_INVOCATION_ID = '__azure_function_invocation_id__' - - def __init__(self, coro, loop, context=None): - # The context param is only available for 3.11+. If - # not, it can't be sent in the init() call. 
- if sys.version_info.minor >= 11: - super().__init__(coro, loop=loop, context=context) - else: - super().__init__(coro, loop=loop) - - current_task = asyncio.current_task(loop) - if current_task is not None: - invocation_id = getattr( - current_task, self.AZURE_INVOCATION_ID, None) - if invocation_id is not None: - self.set_azure_invocation_id(invocation_id) - - def set_azure_invocation_id(self, invocation_id: str) -> None: - setattr(self, self.AZURE_INVOCATION_ID, invocation_id) - - -def get_current_invocation_id() -> Optional[str]: - loop = asyncio._get_running_loop() - if loop is not None: - current_task = asyncio.current_task(loop) - if current_task is not None: - task_invocation_id = getattr(current_task, - ContextEnabledTask.AZURE_INVOCATION_ID, - None) - if task_invocation_id is not None: - return task_invocation_id - - return getattr(_invocation_id_local, 'invocation_id', None) - - -_invocation_id_local = threading.local() diff --git a/azure_functions_worker/extension.py b/azure_functions_worker/extension.py deleted file mode 100644 index fcf8602c0..000000000 --- a/azure_functions_worker/extension.py +++ /dev/null @@ -1,254 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import functools -import logging -from types import ModuleType -from typing import Any, Callable, List, Optional - -from .constants import ( - PYTHON_ENABLE_WORKER_EXTENSIONS, - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT, - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39, - PYTHON_ISOLATE_WORKER_DEPENDENCIES, -) -from .logging import SYSTEM_LOG_PREFIX, logger -from .utils.common import get_sdk_from_sys_path, get_sdk_version, is_python_version -from .utils.wrappers import enable_feature_by - -# Extension Hooks -FUNC_EXT_POST_FUNCTION_LOAD = "post_function_load" -FUNC_EXT_PRE_INVOCATION = "pre_invocation" -FUNC_EXT_POST_INVOCATION = "post_invocation" -APP_EXT_POST_FUNCTION_LOAD = "post_function_load_app_level" -APP_EXT_PRE_INVOCATION = "pre_invocation_app_level" -APP_EXT_POST_INVOCATION = "post_invocation_app_level" - - -class ExtensionManager: - _is_sdk_detected: bool = False - """This marks if the ExtensionManager has already proceeded a detection, - if so, the sdk will be cached in ._extension_enabled_sdk - """ - - _extension_enabled_sdk: Optional[ModuleType] = None - """This is a cache of azure.functions module that supports extension - interfaces. If this is None, that mean the sdk does not support extension. - """ - - @classmethod - @enable_feature_by( - flag=PYTHON_ENABLE_WORKER_EXTENSIONS, - flag_default=( - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39 if - is_python_version('3.9') else - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT - ) - ) - def function_load_extension(cls, func_name, func_directory): - """Helper to execute function load extensions. If one of the extension - fails in the extension chain, the rest of them will continue, emitting - an error log of an exception trace for failed extension. - - Parameters - ---------- - func_name: str - The name of the trigger (e.g. HttpTrigger) - func_directory: str - The folder path of the trigger - (e.g. /home/site/wwwroot/HttpTrigger). 
- """ - sdk = cls._try_get_sdk_with_extension_enabled() - if sdk is None: - return - - # Reports application & function extensions installed on customer's app - cls._info_discover_extension_list(func_name, sdk) - - # Get function hooks from azure.functions.extension.ExtensionMeta - # The return type is FuncExtensionHooks - funcs = sdk.ExtensionMeta.get_function_hooks(func_name) - - # Invoke function hooks - cls._safe_execute_function_load_hooks( - funcs, FUNC_EXT_POST_FUNCTION_LOAD, func_name, func_directory - ) - - # Get application hooks from azure.functions.extension.ExtensionMeta - # The return type is AppExtensionHooks - apps = sdk.ExtensionMeta.get_application_hooks() - - # Invoke application hooks - cls._safe_execute_function_load_hooks( - apps, APP_EXT_POST_FUNCTION_LOAD, func_name, func_directory - ) - - @classmethod - @enable_feature_by( - flag=PYTHON_ENABLE_WORKER_EXTENSIONS, - flag_default=( - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39 if - is_python_version('3.9') else - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT - ) - ) - def _invocation_extension(cls, ctx, hook_name, func_args, func_ret=None): - """Helper to execute extensions. If one of the extensions fails in the - extension chain, the rest of them will continue, emitting an error log - with the exception trace for the failed extension. - - Parameters - ---------- - ctx: azure.functions.Context - Azure Functions context to be passed onto the extension - hook_name: str - The extension hook name to be executed (e.g. pre_invocation). - These are defined in azure.functions.FuncExtensionHooks. - """ - sdk = cls._try_get_sdk_with_extension_enabled() - if sdk is None: - return - - # Get function hooks from azure.functions.extension.ExtensionMeta - # The return type is FuncExtensionHooks - funcs = sdk.ExtensionMeta.get_function_hooks(ctx.function_name) - - # Invoke function hooks - cls._safe_execute_invocation_hooks( - funcs, hook_name, ctx, func_args, func_ret - ) - - # Get application hooks from azure.functions.extension.ExtensionMeta - # The return type is AppExtensionHooks - apps = sdk.ExtensionMeta.get_application_hooks() - - # Invoke application hooks - cls._safe_execute_invocation_hooks( - apps, hook_name, ctx, func_args, func_ret - ) - - @classmethod - def get_sync_invocation_wrapper(cls, ctx, func) -> Callable[[List], Any]: - """Get a synchronous callable of the extension-wrapped function which - takes the function parameters - """ - return functools.partial(cls._raw_invocation_wrapper, ctx, func) - - @classmethod - async def get_async_invocation_wrapper(cls, ctx, function, args) -> Any: - """An asynchronous coroutine for executing a function with extensions - """ - cls._invocation_extension(ctx, APP_EXT_PRE_INVOCATION, args) - cls._invocation_extension(ctx, FUNC_EXT_PRE_INVOCATION, args) - result = await function(**args) - cls._invocation_extension(ctx, FUNC_EXT_POST_INVOCATION, args, result) - cls._invocation_extension(ctx, APP_EXT_POST_INVOCATION, args, result) - return result - - @staticmethod - def _is_extension_enabled_in_sdk(module: ModuleType) -> bool: - """Check if the extension feature is enabled in a particular - azure.functions package. 
- - Parameters - ---------- - module: ModuleType - The azure.functions SDK module - - Returns - ------- - bool - True on azure.functions SDK supports extension registration - """ - return getattr(module, 'ExtensionMeta', None) is not None - - @classmethod - def _is_pre_invocation_hook(cls, name) -> bool: - return name in (FUNC_EXT_PRE_INVOCATION, APP_EXT_PRE_INVOCATION) - - @classmethod - def _is_post_invocation_hook(cls, name) -> bool: - return name in (FUNC_EXT_POST_INVOCATION, APP_EXT_POST_INVOCATION) - - @classmethod - def _safe_execute_invocation_hooks(cls, hooks, hook_name, ctx, fargs, fret): - # hooks from azure.functions.ExtensionMeta.get_function_hooks() or - # azure.functions.ExtensionMeta.get_application_hooks() - if hooks: - # Invoke extension implementation from ..ext_impl - for hook_meta in getattr(hooks, hook_name, []): - # Register a system logger with prefix azure_functions_worker - ext_logger = logging.getLogger( - f'{SYSTEM_LOG_PREFIX}.extension.{hook_meta.ext_name}' - ) - try: - if cls._is_pre_invocation_hook(hook_name): - hook_meta.ext_impl(ext_logger, ctx, fargs) - elif cls._is_post_invocation_hook(hook_name): - hook_meta.ext_impl(ext_logger, ctx, fargs, fret) - except Exception as e: - ext_logger.error(e, exc_info=True) - - @classmethod - def _safe_execute_function_load_hooks(cls, hooks, hook_name, fname, fdir): - # hooks from azure.functions.ExtensionMeta.get_function_hooks() or - # azure.functions.ExtensionMeta.get_application_hooks() - if hooks: - # Invoke extension implementation from ..ext_impl - for hook_meta in getattr(hooks, hook_name, []): - try: - hook_meta.ext_impl(fname, fdir) - except Exception as e: - logger.error(e, exc_info=True) - - @classmethod - def _raw_invocation_wrapper(cls, ctx, function, args) -> Any: - """Calls pre_invocation and post_invocation extensions additional - to function invocation - """ - cls._invocation_extension(ctx, APP_EXT_PRE_INVOCATION, args) - cls._invocation_extension(ctx, FUNC_EXT_PRE_INVOCATION, args) - result = function(**args) - cls._invocation_extension(ctx, FUNC_EXT_POST_INVOCATION, args, result) - cls._invocation_extension(ctx, APP_EXT_POST_INVOCATION, args, result) - return result - - @classmethod - def _try_get_sdk_with_extension_enabled(cls) -> Optional[ModuleType]: - if cls._is_sdk_detected: - return cls._extension_enabled_sdk - - sdk = get_sdk_from_sys_path() - if cls._is_extension_enabled_in_sdk(sdk): - cls._info_extension_is_enabled(sdk) - cls._extension_enabled_sdk = sdk - else: - cls._warn_sdk_not_support_extension(sdk) - cls._extension_enabled_sdk = None - - cls._is_sdk_detected = True - return cls._extension_enabled_sdk - - @classmethod - def _info_extension_is_enabled(cls, sdk): - logger.info( - 'Python Worker Extension is enabled in azure.functions (%s). ' - 'Sdk path: %s', get_sdk_version(sdk), sdk.__file__) - - @classmethod - def _info_discover_extension_list(cls, function_name, sdk): - logger.info( - 'Python Worker Extension Manager is loading %s, current ' - 'registered extensions: %s', - function_name, sdk.ExtensionMeta.get_registered_extensions_json() - ) - - @classmethod - def _warn_sdk_not_support_extension(cls, sdk): - logger.warning( - 'The azure.functions (%s) does not support Python worker ' - 'extensions. 
If you believe extensions are correctly installed, ' - 'please set the %s and %s to "true"', - get_sdk_version(sdk), PYTHON_ISOLATE_WORKER_DEPENDENCIES, - PYTHON_ENABLE_WORKER_EXTENSIONS - ) diff --git a/azure_functions_worker/logging.py b/azure_functions_worker/logging.py deleted file mode 100644 index adb5ff294..000000000 --- a/azure_functions_worker/logging.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging -import logging.handlers -import sys -import traceback -from typing import Optional - -# Logging Prefixes -CONSOLE_LOG_PREFIX = "LanguageWorkerConsoleLog" -SYSTEM_LOG_PREFIX = "azure_functions_worker" -SDK_LOG_PREFIX = "azure.functions" -SYSTEM_ERROR_LOG_PREFIX = "azure_functions_worker_errors" - - -logger: logging.Logger = logging.getLogger(SYSTEM_LOG_PREFIX) -error_logger: logging.Logger = ( - logging.getLogger(SYSTEM_ERROR_LOG_PREFIX)) - -handler: Optional[logging.Handler] = None -error_handler: Optional[logging.Handler] = None - - -def format_exception(exception: Exception) -> str: - msg = str(exception) + "\n" - if (sys.version_info.major, sys.version_info.minor) < (3, 10): - msg += ''.join(traceback.format_exception( - etype=type(exception), - tb=exception.__traceback__, - value=exception)) - elif (sys.version_info.major, sys.version_info.minor) >= (3, 10): - msg += ''.join(traceback.format_exception(exception)) - else: - msg = str(exception) - return msg - - -def setup(log_level, log_destination): - # Since handler and error_handler are moved to the global scope, - # before assigning to these handlers, we should define 'global' keyword - global handler - global error_handler - - if log_level == 'TRACE': - log_level = 'DEBUG' - - formatter = logging.Formatter(f'{CONSOLE_LOG_PREFIX}' - ' %(levelname)s: %(message)s') - - if log_destination is None: - # With no explicit log destination we do split logging, - # errors go into stderr, everything else -- to stdout. - error_handler = logging.StreamHandler(sys.stderr) - error_handler.setFormatter(formatter) - error_handler.setLevel(getattr(logging, log_level)) - - handler = logging.StreamHandler(sys.stdout) - - elif log_destination in ('stdout', 'stderr'): - handler = logging.StreamHandler(getattr(sys, log_destination)) - - elif log_destination == 'syslog': - handler = logging.handlers.SysLogHandler() - - else: - handler = logging.FileHandler(log_destination) - - if error_handler is None: - error_handler = handler - - handler.setFormatter(formatter) - handler.setLevel(getattr(logging, log_level)) - - logger.addHandler(handler) - logger.setLevel(getattr(logging, log_level)) - - error_logger.addHandler(error_handler) - error_logger.setLevel(getattr(logging, log_level)) - - -def disable_console_logging() -> None: - # We should only remove the sys.stdout stream, as error_logger is used for - # unexpected critical error logs handling. - if logger and handler: - handler.flush() - logger.removeHandler(handler) - - -def enable_console_logging() -> None: - if logger and handler: - logger.addHandler(handler) - - -def is_system_log_category(ctg: str) -> bool: - """Check if the logging namespace belongs to system logs. Category starts - with the following name will be treated as system logs. - 1. 'azure_functions_worker' (Worker Info) - 2. 'azure_functions_worker_errors' (Worker Error) - 3. 
'azure.functions' (SDK) - - Expected behaviors for system logs and customer logs are listed below: - local_console customer_app_insight functions_kusto_table - system_log false false true - customer_log true true false - """ - return ctg.startswith(SYSTEM_LOG_PREFIX) or ctg.startswith(SDK_LOG_PREFIX) diff --git a/azure_functions_worker/main.py b/azure_functions_worker/main.py deleted file mode 100644 index 130e0e9ea..000000000 --- a/azure_functions_worker/main.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -"""Main entrypoint.""" - -import argparse - - -def parse_args(): - parser = argparse.ArgumentParser( - description='Python Azure Functions Worker') - parser.add_argument('--host', - help="host address") - parser.add_argument('--port', type=int, - help='port number') - parser.add_argument('--workerId', dest='worker_id', - help='id for the worker') - parser.add_argument('--requestId', dest='request_id', - help='id of the request') - parser.add_argument('--log-level', type=str, default='INFO', - choices=['TRACE', 'INFO', 'WARNING', 'ERROR'], - help="log level: 'TRACE', 'INFO', 'WARNING', " - "or 'ERROR'") - parser.add_argument('--log-to', type=str, default=None, - help='log destination: stdout, stderr, ' - 'syslog, or a file path') - parser.add_argument('--grpcMaxMessageLength', type=int, - dest='grpc_max_msg_len') - parser.add_argument('--functions-uri', dest='functions_uri', type=str, - help='URI with IP Address and Port used to' - ' connect to the Host via gRPC.') - parser.add_argument('--functions-request-id', dest='functions_request_id', - type=str, help='Request ID used for gRPC communication ' - 'with the Host.') - parser.add_argument('--functions-worker-id', - dest='functions_worker_id', type=str, - help='Worker ID assigned to this language worker.') - parser.add_argument('--functions-grpc-max-message-length', type=int, - dest='functions_grpc_max_msg_len', - help='Max grpc message length for Functions') - return parser.parse_args() - - -def main(): - from .utils.dependency import DependencyManager - DependencyManager.initialize() - DependencyManager.use_worker_dependencies() - - import asyncio - - from . import logging - from .logging import error_logger, format_exception, logger - - args = parse_args() - logging.setup(log_level=args.log_level, log_destination=args.log_to) - - logger.info('Starting Azure Functions Python Worker.') - logger.info('Worker ID: %s, Request ID: %s, Host Address: %s:%s', - args.worker_id, args.request_id, args.host, args.port) - - try: - return asyncio.run(start_async( - args.host, args.port, args.worker_id, args.request_id)) - except Exception as ex: - error_logger.exception( - 'unhandled error in functions worker: {0}'.format( - format_exception(ex))) - raise - - -async def start_async(host, port, worker_id, request_id): - from . 
import dispatcher - - disp = await dispatcher.Dispatcher.connect(host=host, port=port, - worker_id=worker_id, - request_id=request_id, - connect_timeout=5.0) - - await disp.dispatch_forever() diff --git a/azure_functions_worker/protos/.gitignore b/azure_functions_worker/protos/.gitignore deleted file mode 100644 index f43e6c214..000000000 --- a/azure_functions_worker/protos/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/_src -*_pb2.py -*_pb2_grpc.py diff --git a/azure_functions_worker/protos/_src/.gitignore b/azure_functions_worker/protos/_src/.gitignore deleted file mode 100644 index 940794e60..000000000 --- a/azure_functions_worker/protos/_src/.gitignore +++ /dev/null @@ -1,288 +0,0 @@ -## Ignore Visual Studio temporary files, build results, and -## files generated by popular Visual Studio add-ons. -## -## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore - -# User-specific files -*.suo -*.user -*.userosscache -*.sln.docstates - -# User-specific files (MonoDevelop/Xamarin Studio) -*.userprefs - -# Build results -[Dd]ebug/ -[Dd]ebugPublic/ -[Rr]elease/ -[Rr]eleases/ -x64/ -x86/ -bld/ -[Bb]in/ -[Oo]bj/ -[Ll]og/ - -# Visual Studio 2015 cache/options directory -.vs/ -# Uncomment if you have tasks that create the project's static files in wwwroot -#wwwroot/ - -# MSTest test Results -[Tt]est[Rr]esult*/ -[Bb]uild[Ll]og.* - -# NUNIT -*.VisualState.xml -TestResult.xml - -# Build Results of an ATL Project -[Dd]ebugPS/ -[Rr]eleasePS/ -dlldata.c - -# .NET Core -project.lock.json -project.fragment.lock.json -artifacts/ -**/Properties/launchSettings.json - -*_i.c -*_p.c -*_i.h -*.ilk -*.meta -*.obj -*.pch -*.pdb -*.pgc -*.pgd -*.rsp -*.sbr -*.tlb -*.tli -*.tlh -*.tmp -*.tmp_proj -*.log -*.vspscc -*.vssscc -.builds -*.pidb -*.svclog -*.scc - -# Chutzpah Test files -_Chutzpah* - -# Visual C++ cache files -ipch/ -*.aps -*.ncb -*.opendb -*.opensdf -*.sdf -*.cachefile -*.VC.db -*.VC.VC.opendb - -# Visual Studio profiler -*.psess -*.vsp -*.vspx -*.sap - -# TFS 2012 Local Workspace -$tf/ - -# Guidance Automation Toolkit -*.gpState - -# ReSharper is a .NET coding add-in -_ReSharper*/ -*.[Rr]e[Ss]harper -*.DotSettings.user - -# JustCode is a .NET coding add-in -.JustCode - -# TeamCity is a build add-in -_TeamCity* - -# DotCover is a Code Coverage Tool -*.dotCover - -# Visual Studio code coverage results -*.coverage -*.coveragexml - -# NCrunch -_NCrunch_* -.*crunch*.local.xml -nCrunchTemp_* - -# MightyMoose -*.mm.* -AutoTest.Net/ - -# Web workbench (sass) -.sass-cache/ - -# Installshield output folder -[Ee]xpress/ - -# DocProject is a documentation generator add-in -DocProject/buildhelp/ -DocProject/Help/*.HxT -DocProject/Help/*.HxC -DocProject/Help/*.hhc -DocProject/Help/*.hhk -DocProject/Help/*.hhp -DocProject/Help/Html2 -DocProject/Help/html - -# Click-Once directory -publish/ - -# Publish Web Output -*.[Pp]ublish.xml -*.azurePubxml -# TODO: Comment the next line if you want to checkin your web deploy settings -# but database connection strings (with potential passwords) will be unencrypted -*.pubxml -*.publishproj - -# Microsoft Azure Web App publish settings. Comment the next line if you want to -# checkin your Azure Web App publish settings, but sensitive information contained -# in these scripts will be unencrypted -PublishScripts/ - -# NuGet Packages -*.nupkg -# The packages folder can be ignored because of Package Restore -**/packages/* -# except build/, which is used as an MSBuild target. 
-!**/packages/build/ -# Uncomment if necessary however generally it will be regenerated when needed -#!**/packages/repositories.config -# NuGet v3's project.json files produces more ignorable files -*.nuget.props -*.nuget.targets - -# Microsoft Azure Build Output -csx/ -*.build.csdef - -# Microsoft Azure Emulator -ecf/ -rcf/ - -# Windows Store app package directories and files -AppPackages/ -BundleArtifacts/ -Package.StoreAssociation.xml -_pkginfo.txt - -# Visual Studio cache files -# files ending in .cache can be ignored -*.[Cc]ache -# but keep track of directories ending in .cache -!*.[Cc]ache/ - -# Others -ClientBin/ -~$* -*~ -*.dbmdl -*.dbproj.schemaview -*.jfm -*.pfx -*.publishsettings -orleans.codegen.cs - -# Since there are multiple workflows, uncomment next line to ignore bower_components -# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) -#bower_components/ - -# RIA/Silverlight projects -Generated_Code/ - -# Backup & report files from converting an old project file -# to a newer Visual Studio version. Backup files are not needed, -# because we have git ;-) -_UpgradeReport_Files/ -Backup*/ -UpgradeLog*.XML -UpgradeLog*.htm - -# SQL Server files -*.mdf -*.ldf -*.ndf - -# Business Intelligence projects -*.rdl.data -*.bim.layout -*.bim_*.settings - -# Microsoft Fakes -FakesAssemblies/ - -# GhostDoc plugin setting file -*.GhostDoc.xml - -# Node.js Tools for Visual Studio -.ntvs_analysis.dat -node_modules/ - -# Typescript v1 declaration files -typings/ - -# Visual Studio 6 build log -*.plg - -# Visual Studio 6 workspace options file -*.opt - -# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) -*.vbw - -# Visual Studio LightSwitch build output -**/*.HTMLClient/GeneratedArtifacts -**/*.DesktopClient/GeneratedArtifacts -**/*.DesktopClient/ModelManifest.xml -**/*.Server/GeneratedArtifacts -**/*.Server/ModelManifest.xml -_Pvt_Extensions - -# Paket dependency manager -.paket/paket.exe -paket-files/ - -# FAKE - F# Make -.fake/ - -# JetBrains Rider -.idea/ -*.sln.iml - -# CodeRush -.cr/ - -# Python Tools for Visual Studio (PTVS) -__pycache__/ -*.pyc - -# Cake - Uncomment if you are using it -# tools/** -# !tools/packages.config - -# Telerik's JustMock configuration file -*.jmconfig - -# BizTalk build output -*.btp.cs -*.btm.cs -*.odx.cs -*.xsd.cs diff --git a/azure_functions_worker/protos/_src/LICENSE b/azure_functions_worker/protos/_src/LICENSE deleted file mode 100644 index 21071075c..000000000 --- a/azure_functions_worker/protos/_src/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/azure_functions_worker/protos/_src/README.md b/azure_functions_worker/protos/_src/README.md deleted file mode 100644 index b22f0bb4b..000000000 --- a/azure_functions_worker/protos/_src/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# Azure Functions Language Worker Protobuf - -This repository contains the protobuf definition file which defines the gRPC service which is used between the [Azure Functions Host](https://github.com/Azure/azure-functions-host) and the Azure Functions language workers. This repo is shared across many repos in many languages (for each worker) by using git commands. - -To use this repo in Azure Functions language workers, follow the steps below to add this repo as a subtree (*Adding This Repo*). If this repo is already embedded in a language worker repo, follow the steps to update the consumed file (*Pulling Updates*). - -Learn more about Azure Functions projects on the [meta](https://github.com/azure/azure-functions) repo. - -## Adding This Repo - -From within the Azure Functions language worker repo: -1. Define remote branch for cleaner git commands - - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` - - `git fetch proto-file` -2. Index contents of azure-functions-worker-protobuf to language worker repo - - `git read-tree --prefix=<path in language worker repo> -u proto-file/<branch>` -3. Add new path in language worker repo to .gitignore file - - In .gitignore, add path in language worker repo -4. Finalize with commit - - `git commit -m "Added subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Branch: <branch>. Commit: <commit>"` - - `git push` - -## Pulling Updates - -From within the Azure Functions language worker repo: -1. Define remote branch for cleaner git commands - - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` - - `git fetch proto-file` -2. Pull a specific release tag - - `git fetch proto-file refs/tags/<tag>` - - Example: `git fetch proto-file refs/tags/v1.1.0-protofile` -3. Merge updates - - Merge with an explicit path to subtree: `git merge -X subtree=<path to subtree> --squash <tag> --allow-unrelated-histories --strategy-option theirs` - - Example: `git merge -X subtree=src/WebJobs.Script.Grpc/azure-functions-language-worker-protobuf --squash v1.1.0-protofile --allow-unrelated-histories --strategy-option theirs` -4. Finalize with commit - - `git commit -m "Updated subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Tag: <tag>. Commit: <commit>"` - - `git push` - -## Releasing a Language Worker Protobuf version - -1. Draft a release in the GitHub UI - - Be sure to include details of the release -2. Create a release version, following semantic versioning guidelines ([semver.org](https://semver.org/)) -3. Tag the version with the pattern: `v<major>.<minor>.<patch>-protofile` (example: `v1.1.0-protofile`) -4. Merge `dev` to `master` - -## Consuming FunctionRPC.proto -*Note: Update versionNumber before running the following commands* - -## CSharp -``` -set NUGET_PATH="%UserProfile%\.nuget\packages" -set GRPC_TOOLS_PATH=%NUGET_PATH%\grpc.tools\<versionNumber>\tools\windows_x86 -set PROTO_PATH=.\azure-functions-language-worker-protobuf\src\proto -set PROTO=.\azure-functions-language-worker-protobuf\src\proto\FunctionRpc.proto -set PROTOBUF_TOOLS=%NUGET_PATH%\google.protobuf.tools\<versionNumber>\tools -set MSGDIR=.\Messages - -if exist %MSGDIR% rmdir /s /q %MSGDIR% -mkdir %MSGDIR% - -set OUTDIR=%MSGDIR%\DotNet -mkdir %OUTDIR% -%GRPC_TOOLS_PATH%\protoc.exe %PROTO% --csharp_out %OUTDIR% --grpc_out=%OUTDIR% --plugin=protoc-gen-grpc=%GRPC_TOOLS_PATH%\grpc_csharp_plugin.exe --proto_path=%PROTO_PATH% --proto_path=%PROTOBUF_TOOLS% -``` -## JavaScript -In package.json, add the following commands to the build script to build the .js and .ts files. Use and install npm package `protobufjs`. - -Generate JavaScript files: -``` -pbjs -t json-module -w commonjs -o azure-functions-language-worker-protobuf/src/rpc.js azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto -``` -Generate TypeScript files: -``` -pbjs -t static-module azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto -o azure-functions-language-worker-protobuf/src/rpc_static.js && pbts -o azure-functions-language-worker-protobuf/src/rpc.d.ts azure-functions-language-worker-protobuf/src/rpc_static.js -``` - -## Java -Maven plugin: [protobuf-maven-plugin](https://www.xolstice.org/protobuf-maven-plugin/) -In pom.xml, add the following under the configuration for this plugin: -`<protoSourceRoot>${basedir}/<path to this repo>/azure-functions-language-worker-protobuf/src/proto</protoSourceRoot>` - -## Python ---TODO - -## Contributing - -This project welcomes contributions and suggestions. Most contributions require you to agree to a -Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us -the rights to use your contribution. For details, visit https://cla.microsoft.com. - -When you submit a pull request, a CLA-bot will automatically determine whether you need to provide -a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions -provided by the bot. You will only need to do this once across all repos using our CLA. - -This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). -For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or -contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
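The Python section of the README above is left as a TODO. As a hedged sketch only of what it could contain (this is not a documented build step of this repo; it assumes `grpcio-tools` is installed, and it borrows the proto layout from the README and the `*_pb2.py`/`*_pb2_grpc.py` output package from the deleted `protos/.gitignore`), the Python stubs could be generated like this:

```python
# Sketch: generate Python gRPC stubs from FunctionRpc.proto.
# Assumptions: `pip install grpcio-tools`; paths follow the layout in this diff.
import os

import pkg_resources
from grpc_tools import protoc

PROTO_ROOT = "azure-functions-language-worker-protobuf/src/proto"
OUT_DIR = "azure_functions_worker/protos"
# grpc_tools bundles the well-known types (e.g. google/protobuf/duration.proto).
WELL_KNOWN = pkg_resources.resource_filename("grpc_tools", "_proto")

os.makedirs(OUT_DIR, exist_ok=True)

# FunctionRpc.proto imports identity/ClaimsIdentityRpc.proto and
# shared/NullableTypes.proto, so all three files are compiled.
for proto in ("FunctionRpc.proto",
              "identity/ClaimsIdentityRpc.proto",
              "shared/NullableTypes.proto"):
    exit_code = protoc.main([
        "protoc",
        f"--proto_path={PROTO_ROOT}",
        f"--proto_path={WELL_KNOWN}",
        f"--python_out={OUT_DIR}",
        f"--grpc_python_out={OUT_DIR}",
        f"{PROTO_ROOT}/{proto}",
    ])
    if exit_code != 0:
        raise RuntimeError(f"protoc failed for {proto}")
```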
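For orientation before the next hunk: FunctionRpc.proto defines the entire host-worker contract as a single bidirectional `EventStream` RPC. The following is a minimal worker-side sketch of that handshake, condensed from the deleted dispatcher's `__poll_grpc` loop; `protos` is the generated-stub package this diff removes, and the address and IDs are placeholders, not real values:

```python
# Sketch: open the duplex EventStream and send the initial StartStream.
import queue

import grpc

from azure_functions_worker import protos  # generated stubs (removed above)

outgoing: queue.Queue = queue.Queue()
channel = grpc.insecure_channel("127.0.0.1:50051")  # placeholder host:port
stub = protos.FunctionRpcStub(channel)

# The worker speaks first: StartStream identifies this worker to the host.
outgoing.put(protos.StreamingMessage(
    request_id="<request-id>",
    start_stream=protos.StartStream(worker_id="<worker-id>")))


def gen():
    # Everything placed on the queue is streamed to the host.
    while True:
        yield outgoing.get()


# Each inbound message carries exactly one `content` field; the dispatcher
# above routes on WhichOneof('content') to a matching _handle__* coroutine.
for msg in stub.EventStream(gen()):
    print("host sent:", msg.WhichOneof("content"))
```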
diff --git a/azure_functions_worker/protos/_src/src/proto/FunctionRpc.proto b/azure_functions_worker/protos/_src/src/proto/FunctionRpc.proto deleted file mode 100644 index f48bc7bbe..000000000 --- a/azure_functions_worker/protos/_src/src/proto/FunctionRpc.proto +++ /dev/null @@ -1,730 +0,0 @@ -syntax = "proto3"; -// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 - -option java_multiple_files = true; -option java_package = "com.microsoft.azure.functions.rpc.messages"; -option java_outer_classname = "FunctionProto"; -option csharp_namespace = "Microsoft.Azure.WebJobs.Script.Grpc.Messages"; -option go_package ="github.com/Azure/azure-functions-go-worker/internal/rpc"; - -package AzureFunctionsRpcMessages; - -import "google/protobuf/duration.proto"; -import "identity/ClaimsIdentityRpc.proto"; -import "shared/NullableTypes.proto"; - -// Interface exported by the server. -service FunctionRpc { - rpc EventStream (stream StreamingMessage) returns (stream StreamingMessage) {} -} - -message StreamingMessage { - // Used to identify message between host and worker - string request_id = 1; - - // Payload of the message - oneof content { - - // Worker initiates stream - StartStream start_stream = 20; - - // Host sends capabilities/init data to worker - WorkerInitRequest worker_init_request = 17; - // Worker responds after initializing with its capabilities & status - WorkerInitResponse worker_init_response = 16; - - // MESSAGE NOT USED - // Worker periodically sends empty heartbeat message to host - WorkerHeartbeat worker_heartbeat = 15; - - // Host sends terminate message to worker. - // Worker terminates if it can, otherwise host terminates after a grace period - WorkerTerminate worker_terminate = 14; - - // Host periodically sends status request to the worker - WorkerStatusRequest worker_status_request = 12; - WorkerStatusResponse worker_status_response = 13; - - // On file change event, host sends notification to worker - FileChangeEventRequest file_change_event_request = 6; - - // Worker requests a desired action (restart worker, reload function) - WorkerActionResponse worker_action_response = 7; - - // Host sends required metadata to worker to load function - FunctionLoadRequest function_load_request = 8; - // Worker responds after loading with the load result - FunctionLoadResponse function_load_response = 9; - - // Host requests a given invocation - InvocationRequest invocation_request = 4; - - // Worker responds to a given invocation - InvocationResponse invocation_response = 5; - - // Host sends cancel message to attempt to cancel an invocation. - // If an invocation is cancelled, host will receive an invocation response with status cancelled. 
- InvocationCancel invocation_cancel = 21; - - // Worker logs a message back to the host - RpcLog rpc_log = 2; - - FunctionEnvironmentReloadRequest function_environment_reload_request = 25; - - FunctionEnvironmentReloadResponse function_environment_reload_response = 26; - - // Ask the worker to close any open shared memory resources for a given invocation - CloseSharedMemoryResourcesRequest close_shared_memory_resources_request = 27; - CloseSharedMemoryResourcesResponse close_shared_memory_resources_response = 28; - - // Worker indexing message types - FunctionsMetadataRequest functions_metadata_request = 29; - FunctionMetadataResponse function_metadata_response = 30; - - // Host sends required metadata to worker to load functions - FunctionLoadRequestCollection function_load_request_collection = 31; - - // Host gets the list of function load responses - FunctionLoadResponseCollection function_load_response_collection = 32; - - // Host sends required metadata to worker to warmup the worker - WorkerWarmupRequest worker_warmup_request = 33; - - // Worker responds after warming up with the warmup result - WorkerWarmupResponse worker_warmup_response = 34; - - } -} - -// Process.Start required info - // connection details - // protocol type - // protocol version - -// Worker sends the host information identifying itself -message StartStream { - // id of the worker - string worker_id = 2; -} - -// Host requests the worker to initialize itself -message WorkerInitRequest { - // version of the host sending init request - string host_version = 1; - - // A map of host supported features/capabilities - map<string, string> capabilities = 2; - - // inform worker of supported categories and their levels - // i.e. Worker = Verbose, Function.MyFunc = None - map<string, string> log_categories = 3; - - // Full path of worker.config.json location - string worker_directory = 4; - - // base directory for function app - string function_app_directory = 5; -} - -// Worker responds with the result of initializing itself -message WorkerInitResponse { - // PROPERTY NOT USED - // TODO: Remove from protobuf during next breaking change release - string worker_version = 1; - - // A map of worker supported features/capabilities - map<string, string> capabilities = 2; - - // Status of the response - StatusResult result = 3; - - // Worker metadata captured for telemetry purposes - WorkerMetadata worker_metadata = 4; -} - -message WorkerMetadata { - // The runtime/stack name - string runtime_name = 1; - - // The version of the runtime/stack - string runtime_version = 2; - - // The version of the worker - string worker_version = 3; - - // The worker bitness/architecture - string worker_bitness = 4; - - // Optional additional custom properties - map<string, string> custom_properties = 5; -} - -// Used by the host to determine success/failure/cancellation -message StatusResult { - // Indicates Failure/Success/Cancelled - enum Status { - Failure = 0; - Success = 1; - Cancelled = 2; - } - - // Status for the given result - Status status = 4; - - // Specific message about the result - string result = 1; - - // Exception message (if exists) for the status - RpcException exception = 2; - - // Captured logs or relevant details can use the logs property - repeated RpcLog logs = 3; -} - -// MESSAGE NOT USED - // TODO: Remove from protobuf during next breaking change release -message WorkerHeartbeat {} - -// Warning before killing the process after grace_period - // Worker self terminates ..no response on this -message WorkerTerminate { - google.protobuf.Duration grace_period = 1; -} - -// Host notifies
worker of file content change -message FileChangeEventRequest { - // Types of File change operations (See link for more info: https://msdn.microsoft.com/en-us/library/t6xf43e0(v=vs.110).aspx) - enum Type { - Unknown = 0; - Created = 1; - Deleted = 2; - Changed = 4; - Renamed = 8; - All = 15; - } - - // type for this event - Type type = 1; - - // full file path for the file change notification - string full_path = 2; - - // Name of the function affected - string name = 3; -} - -// Indicates whether worker reloaded successfully or needs a restart -message WorkerActionResponse { - // indicates whether a restart is needed, or the reload succeeded - enum Action { - Restart = 0; - Reload = 1; - } - - // action for this response - Action action = 1; - - // text reason for the response - string reason = 2; -} - -// Used by the host to determine worker health -message WorkerStatusRequest { -} - -// Worker responds with status message - // TODO: Add any worker relevant status to response -message WorkerStatusResponse { -} - -message FunctionEnvironmentReloadRequest { - // Environment variables from the current process - map<string, string> environment_variables = 1; - // Current directory of function app - string function_app_directory = 2; -} - -message FunctionEnvironmentReloadResponse { - // After specialization, worker sends capabilities & metadata. - // Worker metadata captured for telemetry purposes - WorkerMetadata worker_metadata = 1; - - // A map of worker supported features/capabilities - map<string, string> capabilities = 2; - - // Status of the response - StatusResult result = 3; -} - -// Tell the out-of-proc worker to close any shared memory maps it allocated for given invocation -message CloseSharedMemoryResourcesRequest { - repeated string map_names = 1; -} - -// Response from the worker indicating which of the shared memory maps have been successfully closed and which have not been closed - // The key (string) is the map name and the value (bool) is true if it was closed, false if not -message CloseSharedMemoryResourcesResponse { - map<string, bool> close_map_results = 1; -} - -// Host tells the worker to load a list of Functions -message FunctionLoadRequestCollection { - repeated FunctionLoadRequest function_load_requests = 1; -} - -// Host gets the list of function load responses -message FunctionLoadResponseCollection { - repeated FunctionLoadResponse function_load_responses = 1; -} - -// Load request of a single Function -message FunctionLoadRequest { - // unique function identifier (avoid name collisions, facilitate reload case) - string function_id = 1; - - // Metadata for the request - RpcFunctionMetadata metadata = 2; - - // A flag indicating if managed dependency is enabled or not - bool managed_dependency_enabled = 3; -} - -// Worker tells host result of reload -message FunctionLoadResponse { - // unique function identifier - string function_id = 1; - - // Result of load operation - StatusResult result = 2; - // TODO: return type expected?
- - // Result of load operation - bool is_dependency_downloaded = 3; -} - -// Information on how a Function should be loaded and its bindings -message RpcFunctionMetadata { - // TODO: do we want the host's name - the language worker might do a better job of assignment than the host - string name = 4; - - // base directory for the Function - string directory = 1; - - // Script file specified - string script_file = 2; - - // Entry point specified - string entry_point = 3; - - // Bindings info - map<string, BindingInfo> bindings = 6; - - // Is set to true for proxy - bool is_proxy = 7; - - // Function indexing status - StatusResult status = 8; - - // Function language - string language = 9; - - // Raw binding info - repeated string raw_bindings = 10; - - // unique function identifier (avoid name collisions, facilitate reload case) - string function_id = 13; - - // A flag indicating if managed dependency is enabled or not - bool managed_dependency_enabled = 14; - - // The optional function execution retry strategy to use on invocation failures. - RpcRetryOptions retry_options = 15; - - // Properties for function metadata - // They're usually specific to a worker and largely passed along to the controller API for use - // outside the host - map<string, string> properties = 16; -} - -// Host tells worker it is ready to receive metadata -message FunctionsMetadataRequest { - // base directory for function app - string function_app_directory = 1; -} - -// Worker sends function metadata back to host -message FunctionMetadataResponse { - // list of function indexing responses - repeated RpcFunctionMetadata function_metadata_results = 1; - - // status of overall metadata request - StatusResult result = 2; - - // if set to true then host will perform indexing - bool use_default_metadata_indexing = 3; -} - -// Host requests worker to invoke a Function -message InvocationRequest { - // Unique id for each invocation - string invocation_id = 1; - - // Unique id for each Function - string function_id = 2; - - // Input bindings (include trigger) - repeated ParameterBinding input_data = 3; - - // binding metadata from trigger - map<string, TypedData> trigger_metadata = 4; - - // Populates activityId, tracestate and tags from host - RpcTraceContext trace_context = 5; - - // Current retry context - RetryContext retry_context = 6; -} - -// Host sends ActivityId, traceStateString and Tags from host -message RpcTraceContext { - // This corresponds to Activity.Current?.Id - string trace_parent = 1; - - // This corresponds to Activity.Current?.TraceStateString - string trace_state = 2; - - // This corresponds to Activity.Current?.Tags - map<string, string> attributes = 3; -} - -// Host sends retry context for a function invocation -message RetryContext { - // Current retry count - int32 retry_count = 1; - - // Max retry count - int32 max_retry_count = 2; - - // Exception that caused the retry - RpcException exception = 3; -} - -// Host requests worker to cancel invocation -message InvocationCancel { - // Unique id for invocation - string invocation_id = 2; - - // PROPERTY NOT USED - google.protobuf.Duration grace_period = 1; -} - -// Worker responds with status of Invocation -message InvocationResponse { - // Unique id for invocation - string invocation_id = 1; - - // Output binding data - repeated ParameterBinding output_data = 2; - - // data returned from Function (for $return and triggers with return support) - TypedData return_value = 4; - - // Status of the invocation (success/failure/canceled) - StatusResult result = 3; -} - -message WorkerWarmupRequest { - // Full path of
worker.config.json location - string worker_directory = 1; -} - -message WorkerWarmupResponse { - StatusResult result = 1; -} - -// Used to encapsulate data which could be a variety of types -message TypedData { - oneof data { - string string = 1; - string json = 2; - bytes bytes = 3; - bytes stream = 4; - RpcHttp http = 5; - sint64 int = 6; - double double = 7; - CollectionBytes collection_bytes = 8; - CollectionString collection_string = 9; - CollectionDouble collection_double = 10; - CollectionSInt64 collection_sint64 = 11; - ModelBindingData model_binding_data = 12; - CollectionModelBindingData collection_model_binding_data = 13; - } -} - -// Specify which type of data is contained in the shared memory region being read -enum RpcDataType { - unknown = 0; - string = 1; - json = 2; - bytes = 3; - stream = 4; - http = 5; - int = 6; - double = 7; - collection_bytes = 8; - collection_string = 9; - collection_double = 10; - collection_sint64 = 11; -} - -// Used to provide metadata about shared memory region to read data from -message RpcSharedMemory { - // Name of the shared memory map containing data - string name = 1; - // Offset in the shared memory map to start reading data from - int64 offset = 2; - // Number of bytes to read (starting from the offset) - int64 count = 3; - // Final type to which the read data (in bytes) is to be interpreted as - RpcDataType type = 4; -} - -// Used to encapsulate collection string -message CollectionString { - repeated string string = 1; -} - -// Used to encapsulate collection bytes -message CollectionBytes { - repeated bytes bytes = 1; -} - -// Used to encapsulate collection double -message CollectionDouble { - repeated double double = 1; -} - -// Used to encapsulate collection sint64 -message CollectionSInt64 { - repeated sint64 sint64 = 1; -} - -// Used to describe a given binding on invocation -message ParameterBinding { - // Name for the binding - string name = 1; - - oneof rpc_data { - // Data for the binding - TypedData data = 2; - - // Metadata about the shared memory region to read data from - RpcSharedMemory rpc_shared_memory = 3; - } -} - -// Used to describe a given binding on load -message BindingInfo { - // Indicates whether it is an input or output binding (or a fancy inout binding) - enum Direction { - in = 0; - out = 1; - inout = 2; - } - - // Indicates the type of the data for the binding - enum DataType { - undefined = 0; - string = 1; - binary = 2; - stream = 3; - } - - // Type of binding (e.g. HttpTrigger) - string type = 2; - - // Direction of the given binding - Direction direction = 3; - - DataType data_type = 4; - - // Properties for binding metadata - map<string, string> properties = 5; -} - -// Used to send logs back to the Host -message RpcLog { - // Matching ILogger semantics - // https://github.com/aspnet/Logging/blob/9506ccc3f3491488fe88010ef8b9eb64594abf95/src/Microsoft.Extensions.Logging/Logger.cs - // Level for the Log - enum Level { - Trace = 0; - Debug = 1; - Information = 2; - Warning = 3; - Error = 4; - Critical = 5; - None = 6; - } - - // Category of the log. Defaults to User if not specified. - enum RpcLogCategory { - User = 0; - System = 1; - CustomMetric = 2; - } - - // Unique id for invocation (if exists) - string invocation_id = 1; - - // TODO: This should be an enum - // Category for the log (startup, load, invocation, etc.)
- string category = 2; - - // Level for the given log message - Level level = 3; - - // Message for the given log - string message = 4; - - // Id for the event associated with this log (if exists) - string event_id = 5; - - // Exception (if exists) - RpcException exception = 6; - - // json serialized property bag - string properties = 7; - - // Category of the log. Either user(default), system, or custom metric. - RpcLogCategory log_category = 8; - - // strongly-typed (ish) property bag - map<string, TypedData> propertiesMap = 9; -} - -// Encapsulates an Exception -message RpcException { - // Source of the exception - string source = 3; - - // Stack trace for the exception - string stack_trace = 1; - - // Textual message describing the exception - string message = 2; - - // Worker specifies whether exception is a user exception, - // for purpose of application insights logging. Defaults to false. - bool is_user_exception = 4; - - // Type of exception. If it's a user exception, the type is passed along to app insights. - // Otherwise, it's ignored for now. - string type = 5; -} - -// Http cookie type. Note that only name and value are used for Http requests -message RpcHttpCookie { - // Enum that lets servers require that a cookie shouldn't be sent with cross-site requests - enum SameSite { - None = 0; - Lax = 1; - Strict = 2; - ExplicitNone = 3; - } - - // Cookie name - string name = 1; - - // Cookie value - string value = 2; - - // Specifies allowed hosts to receive the cookie - NullableString domain = 3; - - // Specifies URL path that must exist in the requested URL - NullableString path = 4; - - // Sets the cookie to expire at a specific date instead of when the client closes. - // It is generally recommended that you use "Max-Age" over "Expires". - NullableTimestamp expires = 5; - - // Sets the cookie to only be sent with an encrypted request - NullableBool secure = 6; - - // Sets the cookie to be inaccessible to JavaScript's Document.cookie API - NullableBool http_only = 7; - - // Allows servers to assert that a cookie ought not to be sent along with cross-site requests - SameSite same_site = 8; - - // Number of seconds until the cookie expires. A zero or negative number will expire the cookie immediately. - NullableDouble max_age = 9; -} - -// TODO - solidify this or remove it -message RpcHttp { - string method = 1; - string url = 2; - map<string, string> headers = 3; - TypedData body = 4; - map<string, string> params = 10; - string status_code = 12; - map<string, string> query = 15; - bool enable_content_negotiation = 16; - TypedData rawBody = 17; - repeated RpcClaimsIdentity identities = 18; - repeated RpcHttpCookie cookies = 19; - map<string, NullableString> nullable_headers = 20; - map<string, NullableString> nullable_params = 21; - map<string, NullableString> nullable_query = 22; -} - -// Message representing Microsoft.Azure.WebJobs.ParameterBindingData - // Used for hydrating SDK-type bindings in out-of-proc workers -message ModelBindingData -{ - // The version of the binding data content - string version = 1; - - // The extension source of the binding data - string source = 2; - - // The content type of the binding data content - string content_type = 3; - - // The binding data content - bytes content = 4; -} - -// Used to encapsulate collection model_binding_data -message CollectionModelBindingData { - repeated ModelBindingData model_binding_data = 1; -} - -// Retry policy which the worker sends the host when the worker indexes - // a function. -message RpcRetryOptions -{ - // The retry strategy to use. Valid values are fixed delay or exponential backoff.
- enum RetryStrategy - { - exponential_backoff = 0; - fixed_delay = 1; - } - - // The maximum number of retries allowed per function execution. - // -1 means to retry indefinitely. - int32 max_retry_count = 2; - - // The delay that's used between retries when you're using a fixed delay strategy. - google.protobuf.Duration delay_interval = 3; - - // The minimum retry delay when you're using an exponential backoff strategy - google.protobuf.Duration minimum_interval = 4; - - // The maximum retry delay when you're using an exponential backoff strategy - google.protobuf.Duration maximum_interval = 5; - - RetryStrategy retry_strategy = 6; -} \ No newline at end of file diff --git a/azure_functions_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto b/azure_functions_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto deleted file mode 100644 index c3945bb8a..000000000 --- a/azure_functions_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto +++ /dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; -// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 - -option java_package = "com.microsoft.azure.functions.rpc.messages"; - -import "shared/NullableTypes.proto"; - -// Light-weight representation of a .NET System.Security.Claims.ClaimsIdentity object. -// This is the same serialization as found in EasyAuth, and needs to be kept in sync with -// its ClaimsIdentitySlim definition, as seen in the WebJobs extension: -// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimsIdentitySlim.cs -message RpcClaimsIdentity { - NullableString authentication_type = 1; - NullableString name_claim_type = 2; - NullableString role_claim_type = 3; - repeated RpcClaim claims = 4; -} - -// Light-weight representation of a .NET System.Security.Claims.Claim object. -// This is the same serialization as found in EasyAuth, and needs to be kept in sync with -// its ClaimSlim definition, as seen in the WebJobs extension: -// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimSlim.cs -message RpcClaim { - string value = 1; - string type = 2; -} diff --git a/azure_functions_worker/protos/_src/src/proto/shared/NullableTypes.proto b/azure_functions_worker/protos/_src/src/proto/shared/NullableTypes.proto deleted file mode 100644 index 4fb476502..000000000 --- a/azure_functions_worker/protos/_src/src/proto/shared/NullableTypes.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; -// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 - -option java_package = "com.microsoft.azure.functions.rpc.messages"; - -import "google/protobuf/timestamp.proto"; - -message NullableString { - oneof string { - string value = 1; - } -} - -message NullableDouble { - oneof double { - double value = 1; - } -} - -message NullableBool { - oneof bool { - bool value = 1; - } -} - -message NullableTimestamp { - oneof timestamp { - google.protobuf.Timestamp value = 1; - } -} diff --git a/azure_functions_worker/protos/shared/__init__.py b/azure_functions_worker/protos/shared/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/azure_functions_worker/utils/__init__.py b/azure_functions_worker/utils/__init__.py deleted file mode 100644 index 5b7f7a925..000000000 --- a/azure_functions_worker/utils/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. diff --git a/azure_functions_worker/utils/app_setting_manager.py b/azure_functions_worker/utils/app_setting_manager.py deleted file mode 100644 index 3d8ccbb45..000000000 --- a/azure_functions_worker/utils/app_setting_manager.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os -import sys - -from ..constants import ( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_OPENTELEMETRY, - PYTHON_ENABLE_WORKER_EXTENSIONS, - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT, - PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39, - PYTHON_ISOLATE_WORKER_DEPENDENCIES, - PYTHON_ROLLBACK_CWD_PATH, - PYTHON_SCRIPT_FILE_NAME, - PYTHON_THREADPOOL_THREAD_COUNT, -) - - -def get_python_appsetting_state(): - current_vars = os.environ.copy() - python_specific_settings = \ - [PYTHON_ROLLBACK_CWD_PATH, - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_ISOLATE_WORKER_DEPENDENCIES, - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_WORKER_EXTENSIONS, - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, - PYTHON_SCRIPT_FILE_NAME, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_OPENTELEMETRY] - - app_setting_states = "".join( - f"{app_setting}: {current_vars[app_setting]} | " - for app_setting in python_specific_settings - if app_setting in current_vars - ) - - # Special case for extensions - if 'PYTHON_ENABLE_WORKER_EXTENSIONS' not in current_vars: - if sys.version_info.minor == 9: - app_setting_states += \ - (f"{PYTHON_ENABLE_WORKER_EXTENSIONS}: " - f"{str(PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39)}") - else: - app_setting_states += \ - (f"{PYTHON_ENABLE_WORKER_EXTENSIONS}: " - f"{str(PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT)}") - - return app_setting_states diff --git a/azure_functions_worker/utils/common.py b/azure_functions_worker/utils/common.py deleted file mode 100644 index 963cd3c1c..000000000 --- a/azure_functions_worker/utils/common.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import importlib -import os -import re -import sys -from types import ModuleType -from typing import Callable, Optional - -from azure_functions_worker.constants import ( - CUSTOMER_PACKAGES_PATH, - PYTHON_EXTENSIONS_RELOAD_FUNCTIONS, -) - - -def is_true_like(setting: str) -> bool: - if setting is None: - return False - - return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'} - - -def is_false_like(setting: str) -> bool: - if setting is None: - return False - - return setting.lower().strip() in {'0', 'false', 'f', 'no', 'n'} - - -def is_envvar_true(env_key: str) -> bool: - if os.getenv(env_key) is None: - return False - - return is_true_like(os.environ[env_key]) - - -def is_envvar_false(env_key: str) -> bool: - if os.getenv(env_key) is None: - return False - - return is_false_like(os.environ[env_key]) - - -def is_python_version(version: str) -> bool: - current_version = f'{sys.version_info.major}.{sys.version_info.minor}' - return current_version == version - - -def get_app_setting( - setting: str, - default_value: Optional[str] = None, - validator: Optional[Callable[[str], bool]] = None -) -> Optional[str]: - """Returns the application setting from environment variable. - - Parameters - ---------- - setting: str - The name of the application setting (e.g. 
FUNCTIONS_RUNTIME_VERSION) - - default_value: Optional[str] - The expected return value when the application setting is not found, - or the app setting does not pass the validator. - - validator: Optional[Callable[[str], bool]] - A function that accepts the app setting value and returns True when - the app setting value is acceptable. - - Returns - ------- - Optional[str] - A string value that is set in the application setting - """ - app_setting_value = os.getenv(setting) - - # If an app setting is not configured, we return the default value - if app_setting_value is None: - return default_value - - # If there's no validator, return the app setting value directly - if validator is None: - return app_setting_value - - # If a validator is set: on True, return the app setting value; - # on False, return the default value - if validator(app_setting_value): - return app_setting_value - return default_value - - -def get_sdk_version(module: ModuleType) -> str: - """Check the version of the azure.functions SDK. - - Parameters - ---------- - module: ModuleType - The azure.functions SDK module - - Returns - ------- - str - The SDK version that our customer has installed. - """ - - return getattr(module, '__version__', 'undefined') - - -def get_sdk_from_sys_path() -> ModuleType: - """Get the azure.functions SDK from the latest sys.path. - This ensures the extension is loaded from the SDK in the customer's - site-packages. - - Returns - ------- - ModuleType - The azure.functions module loaded from the first sys.path entry - """ - - if is_envvar_true(PYTHON_EXTENSIONS_RELOAD_FUNCTIONS): - backup_azure_functions = None - backup_azure = None - - if 'azure.functions' in sys.modules: - backup_azure_functions = sys.modules.pop('azure.functions') - if 'azure' in sys.modules: - backup_azure = sys.modules.pop('azure') - - module = importlib.import_module('azure.functions') - - if backup_azure: - sys.modules['azure'] = backup_azure - if backup_azure_functions: - sys.modules['azure.functions'] = backup_azure_functions - - return module - - if CUSTOMER_PACKAGES_PATH not in sys.path: - sys.path.insert(0, CUSTOMER_PACKAGES_PATH) - - return importlib.import_module('azure.functions') - - -class InvalidFileNameError(Exception): - - def __init__(self, file_name: str) -> None: - super().__init__( - f'Invalid file name: {file_name}') - - -def validate_script_file_name(file_name: str): - # First character can be a letter, number, or underscore - # Following characters can be a letter, number, underscore, or hyphen - # Ending must be .py - pattern = re.compile(r'^[a-zA-Z0-9_][a-zA-Z0-9_\-]*\.py$') - if not pattern.match(file_name): - raise InvalidFileNameError(file_name) diff --git a/azure_functions_worker/utils/dependency.py b/azure_functions_worker/utils/dependency.py deleted file mode 100644 index 76d4259be..000000000 --- a/azure_functions_worker/utils/dependency.py +++ /dev/null @@ -1,411 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
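Before moving on to dependency.py below, a quick usage sketch of the `get_app_setting` helper shown above; the validator is hypothetical, while `PYTHON_THREADPOOL_THREAD_COUNT` is one of the real setting names imported in app_setting_manager.py:

```python
# Hypothetical usage of get_app_setting: read a thread-count app setting,
# falling back to '1' unless the configured value is a positive integer.
def _is_positive_int(value: str) -> bool:
    return value.isdigit() and int(value) > 0

thread_count = get_app_setting(
    setting='PYTHON_THREADPOOL_THREAD_COUNT',
    default_value='1',
    validator=_is_positive_int,
)
```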
-import importlib.util -import inspect -import os -import re -import sys -from types import ModuleType -from typing import List, Optional - -from azure_functions_worker.utils.common import is_envvar_true, is_true_like - -from ..constants import ( - AZURE_WEBJOBS_SCRIPT_ROOT, - CONTAINER_NAME, - PYTHON_ISOLATE_WORKER_DEPENDENCIES, - PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT, - PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT_310, -) -from ..logging import logger -from ..utils.common import is_python_version -from ..utils.wrappers import enable_feature_by - - -class DependencyManager: - """The dependency manager controls the source of Python packages, preventing - the worker's packages from interfering with the customer's code. - - It has two modes. In worker mode, the Python packages are loaded from the worker - path (e.g. workers/python/<version>/<os>/<arch>/). In customer mode, - the packages are loaded from the customer's .python_packages/ folder or from - their virtual environment. - - Azure Functions has three different sys.path orderings: - - Linux Consumption sys.path: [ - "/tmp/functions\\standby\\wwwroot", # Placeholder folder - "/home/site/wwwroot/.python_packages/lib/site-packages", # CX's deps - "/azure-functions-host/workers/python/3.11/LINUX/X64", # Worker's deps - "/home/site/wwwroot" # CX's Working Directory - ] - - Linux Dedicated/Premium sys.path: [ - "/home/site/wwwroot", # CX's Working Directory - "/home/site/wwwroot/.python_packages/lib/site-packages", # CX's deps - "/azure-functions-host/workers/python/3.11/LINUX/X64", # Worker's deps - ] - - Core Tools sys.path: [ - "%appdata%\\azure-functions-core-tools\\bin\\workers\\" - "python\\3.11\\WINDOWS\\X64", # Worker's deps - "C:\\Users\\user\\Project\\.venv311\\lib\\site-packages", # CX's deps - "C:\\Users\\user\\Project", # CX's Working Directory - ] - - When the Python worker first starts up, it should load only from the - worker's deps and create the module namespaces (e.g. the google.protobuf variable). - - Once the worker receives the worker init request, we clear out sys.path, the - worker's sys.modules cache and sys.path_importer_cache so the libraries - will only get loaded from CX's deps path. - """ - - cx_deps_path: str = '' - cx_working_dir: str = '' - worker_deps_path: str = '' - - @classmethod - def initialize(cls): - cls.cx_deps_path = cls._get_cx_deps_path() - cls.cx_working_dir = cls._get_cx_working_dir() - cls.worker_deps_path = cls._get_worker_deps_path() - - @classmethod - def is_in_linux_consumption(cls): - return CONTAINER_NAME in os.environ - - @classmethod - def should_load_cx_dependencies(cls): - """ - Customer dependencies should be loaded when dependency - isolation is enabled and - 1) App is a dedicated app - 2) App is Linux Consumption but not in placeholder mode. - This can happen when the worker restarts for any reason - (OOM, timeouts, etc.) and the env reload request is not called. - """ - return not (DependencyManager.is_in_linux_consumption() - and is_envvar_true("WEBSITE_PLACEHOLDER_MODE")) - - @classmethod - @enable_feature_by( - flag=PYTHON_ISOLATE_WORKER_DEPENDENCIES, - flag_default=PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT - ) - def use_worker_dependencies(cls): - """Switch the sys.path and ensure the worker imports are loaded from - the worker's dependencies. - - This will not affect already imported namespaces, but will clear out - the module cache and ensure the upcoming modules are loaded from - the worker's dependency path. - """ - - # The following log line will not show up in core tools but should - # work in Kusto since core tools only collects gRPC logs.
This function - # is executed even before the gRPC logging channel is ready. - logger.info('Applying use_worker_dependencies:' - ' worker_dependencies: %s,' - ' customer_dependencies: %s,' - ' working_directory: %s', cls.worker_deps_path, - cls.cx_deps_path, cls.cx_working_dir) - - cls._remove_from_sys_path(cls.cx_deps_path) - cls._remove_from_sys_path(cls.cx_working_dir) - cls._add_to_sys_path(cls.worker_deps_path, True) - logger.info('Start using worker dependencies %s', cls.worker_deps_path) - - @classmethod - @enable_feature_by( - flag=PYTHON_ISOLATE_WORKER_DEPENDENCIES, - flag_default=PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT - ) - def prioritize_customer_dependencies(cls, cx_working_dir=None): - """Switch the sys.path and ensure the customer's code imports are loaded - from CX's dependencies. - - This will not affect already imported namespaces, but will clear out - the module cache and ensure the upcoming modules are loaded from - the customer's dependency path. - - As for Linux Consumption, this will only remove worker_deps_path, - but the customer's path will be loaded in function_environment_reload. - - The search order of a module name in customer's paths is: - 1. cx_deps_path - 2. worker_deps_path - 3. cx_working_dir - """ - # Try to get the latest customer's working directory - # cx_working_dir => cls.cx_working_dir => AzureWebJobsScriptRoot - working_directory: str = '' - if cx_working_dir: - working_directory: str = os.path.abspath(cx_working_dir) - if not working_directory: - working_directory = cls.cx_working_dir - if not working_directory: - working_directory = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT, '') - - # Try to get the latest customer's dependency path - cx_deps_path: str = cls._get_cx_deps_path() - if not cx_deps_path: - cx_deps_path = cls.cx_deps_path - - logger.info( - 'Applying prioritize_customer_dependencies: ' - 'worker_dependencies_path: %s, customer_dependencies_path: %s, ' - 'working_directory: %s, Linux Consumption: %s, Placeholder: %s', - cls.worker_deps_path, cx_deps_path, working_directory, - DependencyManager.is_in_linux_consumption(), - is_envvar_true("WEBSITE_PLACEHOLDER_MODE")) - - cls._remove_from_sys_path(cls.worker_deps_path) - cls._add_to_sys_path(cls.cx_deps_path, True) - - # Deprioritize worker dependencies but don't completely remove them. - # Otherwise, it will break some really old function apps that - # don't have the azure-functions module in .python_packages - # https://github.com/Azure/azure-functions-core-tools/pull/1498 - cls._add_to_sys_path(cls.worker_deps_path, False) - - # The modules defined in the customer's working directory should have the - # lowest priority since we use the new folder structure. - # Please check the "Message to customer" section in the following PR: - # https://github.com/Azure/azure-functions-python-worker/pull/726 - cls._add_to_sys_path(working_directory, False) - - logger.info('Finished prioritize_customer_dependencies') - - @classmethod - def reload_customer_libraries(cls, cx_working_dir: str): - """Reload the azure and google namespaces, including any modules in - these namespaces, such as azure-functions, grpcio, grpcio-tools, etc. - - Depending on PYTHON_ISOLATE_WORKER_DEPENDENCIES, the actual behavior - differs. - - This is called only when placeholder mode is true. In the case of a - worker restart, this will not be called. - - Parameters - ---------- - cx_working_dir: str - The path that contains the customer's project files (e.g. wwwroot).
- """ - use_new_env = os.getenv(PYTHON_ISOLATE_WORKER_DEPENDENCIES) - if use_new_env is None: - use_new = ( - PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT_310 if - is_python_version('3.10') else - PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT - ) - else: - use_new = is_true_like(use_new_env) - - if use_new: - cls.prioritize_customer_dependencies(cx_working_dir) - else: - cls.reload_azure_google_namespace_from_worker_deps() - - @classmethod - def reload_azure_google_namespace_from_worker_deps(cls): - """This is the old implementation of reloading azure and google - namespace in Python worker directory. It is not actually re-importing - the module but only reloads the module scripts from the worker path. - - It is not doing what it is intended, but due to it is already released - on Linux Consumption production, we don't want to introduce regression - on existing customers. - - Only intended to be used in Linux Consumption scenario. - """ - # Reload package namespaces for customer's libraries - packages_to_reload = ['azure', 'google'] - packages_reloaded = [] - for p in packages_to_reload: - try: - importlib.reload(sys.modules[p]) - packages_reloaded.append(p) - except Exception as ex: - logger.warning('Unable to reload %s: \n%s', p, ex) - - logger.info(f'Reloaded modules: {",".join(packages_reloaded)}') - - # Reload azure.functions to give user package precedence - try: - importlib.reload(sys.modules['azure.functions']) - logger.info('Reloaded azure.functions module now at %s', - inspect.getfile(sys.modules['azure.functions'])) - except Exception as ex: - logger.warning( - 'Unable to reload azure.functions. Using default. ' - 'Exception:\n%s', ex) - - @classmethod - def _add_to_sys_path(cls, path: str, add_to_first: bool): - """This will ensure no duplicated path are added into sys.path and - clear importer cache. No action if path already exists in sys.path. - - Parameters - ---------- - path: str - The path needs to be added into sys.path. - If the path is an empty string, no action will be taken. - add_to_first: bool - Should the path added to the first entry (highest priority) - """ - if path and path not in sys.path: - if add_to_first: - sys.path.insert(0, path) - else: - sys.path.append(path) - - # Only clear path importer and sys.modules cache if path is not - # defined in sys.path - cls._clear_path_importer_cache_and_modules(path) - - @classmethod - def _remove_from_sys_path(cls, path: str): - """This will remove path from sys.path and clear importer cache. - No action if the path does not exist in sys.path. - - Parameters - ---------- - path: str - The path to be removed from sys.path. - If the path is an empty string, no action will be taken. - """ - if path and path in sys.path: - # Remove all occurances in sys.path - sys.path = list(filter(lambda p: p != path, sys.path)) - - # In case if any part of worker initialization do sys.path.pop() - # Always do a cache clear in path importer and sys.modules - cls._clear_path_importer_cache_and_modules(path) - - @classmethod - def _clear_path_importer_cache_and_modules(cls, path: str): - """Removes path from sys.path_importer_cache and clear related - sys.modules cache. No action if the path is empty or no entries - in sys.path_importer_cache or sys.modules. - - Parameters - ---------- - path: str - The path to be removed from sys.path_importer_cache. All related - modules will be cleared out from sys.modules cache. - If the path is an empty string, no action will be taken. 
- """ - if path and path in sys.path_importer_cache: - sys.path_importer_cache.pop(path) - - if path: - cls._remove_module_cache(path) - - @staticmethod - def _get_cx_deps_path() -> str: - """Get the directory storing the customer's third-party libraries. - - Returns - ------- - str - Core Tools: path to customer's site pacakges - Linux Dedicated/Premium: path to customer's site pacakges - Linux Consumption: empty string - """ - prefix: Optional[str] = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) - cx_paths: List[str] = [ - p for p in sys.path - if prefix and p.startswith(prefix) and ('site-packages' in p) - ] - # Return first or default of customer path - return (cx_paths or [''])[0] - - @staticmethod - def _get_cx_working_dir() -> str: - """Get the customer's working directory. - - Returns - ------- - str - Core Tools: AzureWebJobsScriptRoot env variable - Linux Dedicated/Premium: AzureWebJobsScriptRoot env variable - Linux Consumption: empty string - """ - return os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT, '') - - @staticmethod - def _get_worker_deps_path() -> str: - """Get the worker dependency sys.path. This will always available - even in all skus. - - Returns - ------- - str - The worker packages path - """ - # 1. Try to parse the absolute path python/3.8/LINUX/X64 in sys.path - r = re.compile(r'.*python(\/|\\)\d+\.\d+(\/|\\)(WINDOWS|LINUX|OSX).*') - worker_deps_paths: List[str] = [p for p in sys.path if r.match(p)] - if worker_deps_paths: - return worker_deps_paths[0] - - # 2. Try to find module spec of azure.functions without actually - # importing it (e.g. lib/site-packages/azure/functions/__init__.py) - try: - azf_spec = importlib.util.find_spec('azure.functions') - if azf_spec and azf_spec.origin: - return os.path.abspath( - os.path.join(os.path.dirname(azf_spec.origin), '..', '..') - ) - except ModuleNotFoundError: - logger.warning('Cannot locate built-in azure.functions module') - - # 3. If it fails to find one, try to find one from the parent path - # This is used for handling the CI/localdev environment - return os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', '..') - ) - - @staticmethod - def _remove_module_cache(path: str): - """Remove module cache if the module is imported from specific path. - This will not impact builtin modules - - Parameters - ---------- - path: str - The module cache to be removed if it is imported from this path. - """ - if not path: - return - - not_builtin = set(sys.modules.keys()) - set(sys.builtin_module_names) - - # Don't reload azure_functions_worker - to_be_cleared_from_cache = set([ - module_name for module_name in not_builtin - if not module_name.startswith('azure_functions_worker') - ]) - - for module_name in to_be_cleared_from_cache: - module = sys.modules.get(module_name) - if not isinstance(module, ModuleType): - continue - - # Module path can be actual file path or a pure namespace path. - # Both of these has the module path placed in __path__ property - # The property .__path__ can be None or does not exist in module - try: - module_paths = set(getattr(module, '__path__', None) or []) - if hasattr(module, '__file__') and module.__file__: - module_paths.add(module.__file__) - - if any([p for p in module_paths if p.startswith(path)]): - sys.modules.pop(module_name) - except Exception as e: - logger.warning( - 'Attempt to remove module cache for %s but failed with ' - '%s. 
Using the original module cache.', - module_name, e) diff --git a/azure_functions_worker_v2/__init__.py b/azure_functions_worker_v2/__init__.py new file mode 100644 index 000000000..ac3640937 --- /dev/null +++ b/azure_functions_worker_v2/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +from .handle_event import (worker_init_request, + functions_metadata_request, + function_environment_reload_request, + invocation_request, + function_load_request) + +__all__ = ('worker_init_request', + 'functions_metadata_request', + 'function_environment_reload_request', + 'invocation_request', + 'function_load_request') diff --git a/azure_functions_worker/bindings/context.py b/azure_functions_worker_v2/bindings/context.py similarity index 56% rename from azure_functions_worker/bindings/context.py rename to azure_functions_worker_v2/bindings/context.py index 7effbf76d..18149abbc 100644 --- a/azure_functions_worker/bindings/context.py +++ b/azure_functions_worker_v2/bindings/context.py @@ -1,18 +1,18 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. + import threading -from typing import Type -from . import RetryContext, TraceContext +from .retrycontext import RetryContext +from .tracecontext import TraceContext class Context: - def __init__(self, func_name: str, func_dir: str, invocation_id: str, - thread_local_storage: Type[threading.local], + thread_local_storage: threading.local, trace_context: TraceContext, retry_context: RetryContext) -> None: self.__func_name = func_name @@ -27,7 +27,7 @@ def invocation_id(self) -> str: return self.__invocation_id @property - def thread_local_storage(self) -> Type[threading.local]: + def thread_local_storage(self) -> threading.local: return self.__thread_local_storage @property @@ -45,3 +45,23 @@ def trace_context(self) -> TraceContext: @property def retry_context(self) -> RetryContext: return self.__retry_context + + +def get_context(invoc_request, name: str, + directory: str) -> Context: + """ For more information refer: + https://aka.ms/azfunc-invocation-context + """ + trace_context = TraceContext( + invoc_request.trace_context.trace_parent, + invoc_request.trace_context.trace_state, + invoc_request.trace_context.attributes) + + retry_context = RetryContext( + invoc_request.retry_context.retry_count, + invoc_request.retry_context.max_retry_count, + invoc_request.retry_context.exception) + + return Context( + name, directory, invoc_request.invocation_id, + threading.local(), trace_context, retry_context) diff --git a/azure_functions_worker/bindings/datumdef.py b/azure_functions_worker_v2/bindings/datumdef.py similarity index 62% rename from azure_functions_worker/bindings/datumdef.py rename to azure_functions_worker_v2/bindings/datumdef.py index 34fb9b0af..9cc4eb6cf 100644 --- a/azure_functions_worker/bindings/datumdef.py +++ b/azure_functions_worker_v2/bindings/datumdef.py @@ -1,19 +1,11 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. + import json import logging -from typing import Any, List, Optional - -from .. 
import protos -from ..logging import logger - -try: - from http.cookies import SimpleCookie -except ImportError: - from Cookie import SimpleCookie -from dateutil import parser -from dateutil.parser import ParserError +from datetime import datetime +from typing import Any, List, Optional from .nullable_converters import ( to_nullable_bool, @@ -22,6 +14,11 @@ to_nullable_timestamp, ) +try: + from http.cookies import SimpleCookie +except ImportError: + from Cookie import SimpleCookie # type: ignore + class Datum: def __init__(self, value, type): @@ -64,10 +61,16 @@ def __repr__(self): val_repr = repr(self.value) if len(val_repr) > 10: val_repr = val_repr[:10] + '...' - return ''.format(self.type, val_repr) + return '' @classmethod - def from_typed_data(cls, td: protos.TypedData): + def from_typed_data(cls, protos): + try: + td = protos.TypedData + except Exception: + # Todo: better catch for Datum.from_typed_data(http.body) + # if the data being sent in is already protos.TypedData + td = protos tt = td.WhichOneof('data') if tt == 'http': http = td.http @@ -106,89 +109,13 @@ def from_typed_data(cls, td: protos.TypedData): return None else: raise NotImplementedError( - 'unsupported TypeData kind: {!r}'.format(tt) + 'unsupported TypeData kind: %s' % tt ) return cls(val, tt) - @classmethod - def from_rpc_shared_memory( - cls, - shmem: protos.RpcSharedMemory, - shmem_mgr) -> Optional['Datum']: - """ - Reads the specified shared memory region and converts the read data - into a datum object of the corresponding type. - """ - if shmem is None: - logger.warning('Cannot read from shared memory. ' - 'RpcSharedMemory is None.') - return None - - mem_map_name = shmem.name - offset = shmem.offset - count = shmem.count - data_type = shmem.type - ret_val = None - - if data_type == protos.RpcDataType.bytes: - val = shmem_mgr.get_bytes(mem_map_name, offset, count) - if val is not None: - ret_val = cls(val, 'bytes') - elif data_type == protos.RpcDataType.string: - val = shmem_mgr.get_string(mem_map_name, offset, count) - if val is not None: - ret_val = cls(val, 'string') - - if ret_val is not None: - logger.info( - 'Read %s bytes from memory map %s for data type %s', count, - mem_map_name, data_type) - return ret_val - return None - - @classmethod - def to_rpc_shared_memory( - cls, - datum: 'Datum', - shmem_mgr) -> Optional[protos.RpcSharedMemory]: - """ - Writes the given value to shared memory and returns the corresponding - RpcSharedMemory object which can be sent back to the functions host over - RPC. 
- """ - if datum.type == 'bytes': - value = datum.value - shared_mem_meta = shmem_mgr.put_bytes(value) - data_type = protos.RpcDataType.bytes - elif datum.type == 'string': - value = datum.value - shared_mem_meta = shmem_mgr.put_string(value) - data_type = protos.RpcDataType.string - else: - raise NotImplementedError( - f'Unsupported datum type ({datum.type}) for shared memory' - ) - - if shared_mem_meta is None: - logger.warning('Cannot write to shared memory for type: %s', - datum.type) - return None - - shmem = protos.RpcSharedMemory( - name=shared_mem_meta.mem_map_name, - offset=0, - count=shared_mem_meta.count_bytes, - type=data_type) - - logger.info( - 'Wrote %s bytes to memory map %s for data type %s', - shared_mem_meta.count_bytes, shared_mem_meta.mem_map_name, - data_type) - return shmem - -def datum_as_proto(datum: Datum) -> protos.TypedData: +def datum_as_proto(datum: Datum, protos): if datum.type == 'string': return protos.TypedData(string=datum.value) elif datum.type == 'bytes': @@ -202,9 +129,9 @@ def datum_as_proto(datum: Datum) -> protos.TypedData: k: v.value for k, v in datum.value['headers'].items() }, - cookies=parse_to_rpc_http_cookie_list(datum.value.get('cookies')), + cookies=parse_to_rpc_http_cookie_list(datum.value.get('cookies'), protos), enable_content_negotiation=False, - body=datum_as_proto(datum.value['body']), + body=datum_as_proto(datum.value['body'], protos), )) elif datum.type is None: return None @@ -223,11 +150,11 @@ def datum_as_proto(datum: Datum) -> protos.TypedData: return protos.TypedData(int=int(datum.value)) else: raise NotImplementedError( - 'unexpected Datum type: {!r}'.format(datum.type) + 'unexpected Datum type: %s' % datum.type ) -def parse_to_rpc_http_cookie_list(cookies: Optional[List[SimpleCookie]]): +def parse_to_rpc_http_cookie_list(cookies: Optional[List[SimpleCookie]], protos): if cookies is None: return cookies @@ -240,23 +167,30 @@ def parse_to_rpc_http_cookie_list(cookies: Optional[List[SimpleCookie]]): value=cookie_entity.value, domain=to_nullable_string( cookie_entity['domain'], - 'cookie.domain'), + 'cookie.domain', + protos), path=to_nullable_string( - cookie_entity['path'], 'cookie.path'), + cookie_entity['path'], + 'cookie.path', + protos), expires=to_nullable_timestamp( parse_cookie_attr_expires( - cookie_entity), 'cookie.expires'), + cookie_entity), 'cookie.expires', + protos), secure=to_nullable_bool( bool(cookie_entity['secure']), - 'cookie.secure'), + 'cookie.secure', + protos), http_only=to_nullable_bool( bool(cookie_entity['httponly']), - 'cookie.httpOnly'), + 'cookie.httpOnly', + protos), same_site=parse_cookie_attr_same_site( - cookie_entity), + cookie_entity, protos), max_age=to_nullable_double( cookie_entity['max-age'], - 'cookie.maxAge'))) + 'cookie.maxAge', + protos))) return rpc_http_cookies @@ -266,23 +200,23 @@ def parse_cookie_attr_expires(cookie_entity): if expires is not None and len(expires) != 0: try: - return parser.parse(expires) - except ParserError: + return datetime.strptime(expires, "%a, %d %b %Y %H:%M:%S GMT") + except ValueError: logging.error( - f"Can not parse value {expires} of expires in the cookie " - f"due to invalid format.") + "Can not parse value %s of expires in the cookie " + "due to invalid format.", expires) raise except OverflowError: logging.error( - f"Can not parse value {expires} of expires in the cookie " - f"because the parsed date exceeds the largest valid C " - f"integer on your system.") + "Can not parse value %s of expires in the cookie " + "because the parsed date exceeds the 
largest valid C " + "integer on your system.", expires) raise return None -def parse_cookie_attr_same_site(cookie_entity): +def parse_cookie_attr_same_site(cookie_entity, protos): same_site = getattr(protos.RpcHttpCookie.SameSite, "None") try: raw_same_site_str = cookie_entity['samesite'].lower() diff --git a/azure_functions_worker/bindings/generic.py b/azure_functions_worker_v2/bindings/generic.py similarity index 68% rename from azure_functions_worker/bindings/generic.py rename to azure_functions_worker_v2/bindings/generic.py index d5a0f8ab7..e0087f13d 100644 --- a/azure_functions_worker/bindings/generic.py +++ b/azure_functions_worker_v2/bindings/generic.py @@ -3,7 +3,7 @@ import typing from typing import Any, Optional -from . import datumdef +from .datumdef import Datum class GenericBinding: @@ -22,29 +22,29 @@ def check_output_type_annotation(cls, pytype: type) -> bool: @classmethod def encode(cls, obj: Any, *, - expected_type: Optional[type]) -> datumdef.Datum: + expected_type: Optional[type]) -> Datum: if isinstance(obj, str): - return datumdef.Datum(type='string', value=obj) + return Datum(type='string', value=obj) elif isinstance(obj, (bytes, bytearray)): - return datumdef.Datum(type='bytes', value=bytes(obj)) + return Datum(type='bytes', value=bytes(obj)) elif obj is None: - return datumdef.Datum(type=None, value=obj) + return Datum(type=None, value=obj) elif isinstance(obj, dict): - return datumdef.Datum(type='dict', value=obj) + return Datum(type='dict', value=obj) elif isinstance(obj, list): - return datumdef.Datum(type='list', value=obj) + return Datum(type='list', value=obj) elif isinstance(obj, int): - return datumdef.Datum(type='int', value=obj) + return Datum(type='int', value=obj) elif isinstance(obj, float): - return datumdef.Datum(type='double', value=obj) + return Datum(type='double', value=obj) elif isinstance(obj, bool): - return datumdef.Datum(type='bool', value=obj) + return Datum(type='bool', value=obj) else: raise NotImplementedError @classmethod - def decode(cls, data: datumdef.Datum, *, trigger_metadata) -> typing.Any: + def decode(cls, data: Datum, *, trigger_metadata) -> typing.Any: # Enabling support for Dapr bindings # https://github.com/Azure/azure-functions-python-worker/issues/1316 if data is None: @@ -61,8 +61,7 @@ def decode(cls, data: datumdef.Datum, *, trigger_metadata) -> typing.Any: result = None else: raise ValueError( - f'unexpected type of data received for the "generic" binding ' - f': {data_type!r}' + 'unexpected type of data received for the "generic" binding: %s' % repr(data_type) ) return result diff --git a/azure_functions_worker/bindings/meta.py b/azure_functions_worker_v2/bindings/meta.py similarity index 62% rename from azure_functions_worker/bindings/meta.py rename to azure_functions_worker_v2/bindings/meta.py index ae40ce398..23a2afb6d 100644 --- a/azure_functions_worker/bindings/meta.py +++ b/azure_functions_worker_v2/bindings/meta.py @@ -1,20 +1,23 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. +# mypy: disable-error-code="attr-defined" import os import sys -import typing -from ..
import protos -from ..constants import ( +from typing import Any, Dict, Optional, Union + +from .datumdef import Datum, datum_as_proto +from .generic import GenericBinding + +from ..http_v2 import HttpV2Registry +from ..logging import logger +from ..utils.constants import ( BASE_EXT_SUPPORTED_PY_MINOR_VERSION, CUSTOMER_PACKAGES_PATH, HTTP, HTTP_TRIGGER, ) -from ..http_v2 import HttpV2Registry -from ..logging import logger -from . import datumdef, generic -from .shared_memory_data_transfer import SharedMemoryManager + PB_TYPE = 'rpc_data' PB_TYPE_DATA = 'data' @@ -22,16 +25,18 @@ BINDING_REGISTRY = None DEFERRED_BINDING_REGISTRY = None -deferred_bindings_cache = {} +deferred_bindings_cache: Dict[Any, Any] = {} def _check_http_input_type_annotation(bind_name: str, pytype: type, is_deferred_binding: bool) -> bool: + logger.info("VICTORIA --- http v2 enabled %s", (HttpV2Registry.http_v2_enabled())) if HttpV2Registry.http_v2_enabled(): return HttpV2Registry.ext_base().RequestTrackerMeta \ .check_type(pytype) binding = get_binding(bind_name, is_deferred_binding) + logger.info("VICTORIA -- inside _check_http_input_type_annotation. Bind name: %s, binding: %s, pytype: %s, check: %s", bind_name, binding, pytype, binding.check_input_type_annotation(pytype)) return binding.check_input_type_annotation(pytype) @@ -40,6 +45,7 @@ def _check_http_output_type_annotation(bind_name: str, pytype: type) -> bool: return HttpV2Registry.ext_base().ResponseTrackerMeta.check_type(pytype) binding = get_binding(bind_name) + logger.info("VICTORIA -- inside _check_http_output_type_annotation. Bind name: %s, binding: %s, pytype: %s, check: %s", bind_name, binding, pytype, binding.check_output_type_annotation(pytype)) return binding.check_output_type_annotation(pytype) @@ -65,9 +71,10 @@ def load_binding_registry() -> None: if func is None: import azure.functions as func + logger.info("VICTORIA ---- azure-functions import succeeded: %s", func.__file__) global BINDING_REGISTRY - BINDING_REGISTRY = func.get_binding_registry() + BINDING_REGISTRY = func.get_binding_registry() # type: ignore if BINDING_REGISTRY is None: raise AttributeError('BINDING_REGISTRY is None. azure-functions ' @@ -91,7 +98,7 @@ def load_binding_registry() -> None: def get_binding(bind_name: str, - is_deferred_binding: typing.Optional[bool] = False)\ + is_deferred_binding: Optional[bool] = False)\ -> object: """ First checks if the binding is a non-deferred binding. This is @@ -101,11 +108,11 @@ def get_binding(bind_name: str, """ binding = None if binding is None and not is_deferred_binding: - binding = BINDING_REGISTRY.get(bind_name) + binding = BINDING_REGISTRY.get(bind_name) # type: ignore if binding is None and is_deferred_binding: - binding = DEFERRED_BINDING_REGISTRY.get(bind_name) + binding = DEFERRED_BINDING_REGISTRY.get(bind_name) # type: ignore if binding is None: - binding = generic.GenericBinding + binding = GenericBinding return binding @@ -117,12 +124,15 @@ def is_trigger_binding(bind_name: str) -> bool: def check_input_type_annotation(bind_name: str, pytype: type, is_deferred_binding: bool) -> bool: + logger.info("VICTORIA --- Inside check_input_type_annotation. 
global INPUT_TYPE_CHECK_OVERRIDE_MAP + logger.debug("bind_name in INPUT_TYPE_CHECK_OVERRIDE_MAP: %s", bind_name in INPUT_TYPE_CHECK_OVERRIDE_MAP) if bind_name in INPUT_TYPE_CHECK_OVERRIDE_MAP: return INPUT_TYPE_CHECK_OVERRIDE_MAP[bind_name](bind_name, pytype, is_deferred_binding) binding = get_binding(bind_name, is_deferred_binding) + logger.debug("check_input_type_annotation: bind_name: %s, binding: %s, pytype: %s", bind_name, binding, pytype) return binding.check_input_type_annotation(pytype) @@ -140,9 +150,9 @@ def has_implicit_output(bind_name: str) -> bool: binding = get_binding(bind_name) # Need to pass in bind_name to exempt Durable Functions - if binding is generic.GenericBinding: + if binding is GenericBinding: return (getattr(binding, 'has_implicit_output', lambda: False) - (bind_name)) + (bind_name)) # type: ignore else: # If the binding does not have metaclass of meta.InConverter @@ -152,16 +162,15 @@ def from_incoming_proto( binding: str, - pb: protos.ParameterBinding, *, - pytype: typing.Optional[type], - trigger_metadata: typing.Optional[typing.Dict[str, protos.TypedData]], - shmem_mgr: SharedMemoryManager, + pb, *, + pytype: Optional[type], + trigger_metadata: Optional[Dict[str, Any]], function_name: str, - is_deferred_binding: typing.Optional[bool] = False) -> typing.Any: - binding = get_binding(binding, is_deferred_binding) + is_deferred_binding: Optional[bool] = False) -> Any: + binding_obj = get_binding(binding, is_deferred_binding) if trigger_metadata: metadata = { - k: datumdef.Datum.from_typed_data(v) + k: Datum.from_typed_data(v) for k, v in trigger_metadata.items() } else: @@ -170,120 +179,76 @@ def from_incoming_proto( pb_type = pb.WhichOneof(PB_TYPE) if pb_type == PB_TYPE_DATA: val = pb.data - datum = datumdef.Datum.from_typed_data(val) - elif pb_type == PB_TYPE_RPC_SHARED_MEMORY: - # Data was sent over shared memory, attempt to read - datum = datumdef.Datum.from_rpc_shared_memory(pb.rpc_shared_memory, - shmem_mgr) + datum = Datum.from_typed_data(val) else: - raise TypeError(f'Unknown ParameterBindingType: {pb_type}') + raise TypeError('Unknown ParameterBindingType: %s' % pb_type) try: # if the binding is an sdk type binding if is_deferred_binding: - return deferred_bindings_decode(binding=binding, + return deferred_bindings_decode(binding=binding_obj, pb=pb, pytype=pytype, datum=datum, metadata=metadata, function_name=function_name) - return binding.decode(datum, trigger_metadata=metadata) + return binding_obj.decode(datum, trigger_metadata=metadata) except NotImplementedError: # Binding does not support the data. dt = val.WhichOneof('data') raise TypeError( - f'unable to decode incoming TypedData: ' - f'unsupported combination of TypedData field {dt!r} ' - f'and expected binding type {binding}') + 'unable to decode incoming TypedData: ' 'unsupported combination of TypedData field %r ' 'and expected binding type %s' % (dt, binding_obj)) -def get_datum(binding: str, obj: typing.Any, - pytype: typing.Optional[type]) -> datumdef.Datum: +def get_datum(binding: str, obj: Any, + pytype: Optional[type]) -> Union[Datum, None]: """ Convert an object to a datum with the specified type.
""" - binding = get_binding(binding) + binding_obj = get_binding(binding) try: - datum = binding.encode(obj, expected_type=pytype) + datum = binding_obj.encode(obj, expected_type=pytype) except NotImplementedError: # Binding does not support the data. raise TypeError( - f'unable to encode outgoing TypedData: ' - f'unsupported type "{binding}" for ' - f'Python type "{type(obj).__name__}"') + 'unable to encode outgoing TypedData: ' + 'unsupported type "%s" for ' + 'Python type "%s"', binding, type(obj).__name__) return datum -def _does_datatype_support_caching(datum: datumdef.Datum): +def _does_datatype_support_caching(datum: Datum): supported_datatypes = ('bytes', 'string') return datum.type in supported_datatypes -def _can_transfer_over_shmem(shmem_mgr: SharedMemoryManager, - is_function_data_cache_enabled: bool, - datum: datumdef.Datum): - """ - If shared memory is enabled and supported for the given datum, try to - transfer to host over shared memory as a default. - If caching is enabled, then also check if this type is supported - if so, - transfer over shared memory. - In case of caching, some conditions like object size may not be - applicable since even small objects are also allowed to be cached. - """ - if not shmem_mgr.is_enabled(): - # If shared memory usage is not enabled, no further checks required - return False - if shmem_mgr.is_supported(datum): - # If transferring this object over shared memory is supported, do so. - return True - if is_function_data_cache_enabled and _does_datatype_support_caching(datum): - # If caching is enabled and this object can be cached, transfer over - # shared memory (since the cache uses shared memory). - # In this case, some requirements (like object size) for using shared - # memory may be ignored since we want to support caching of small - # objects (those that have sizes smaller that the minimum we transfer - # over shared memory when the cache is not enabled) as well. 
- return True - return False - - -def to_outgoing_proto(binding: str, obj: typing.Any, *, - pytype: typing.Optional[type]) -> protos.TypedData: +def to_outgoing_proto(binding: str, obj: Any, *, + pytype: Optional[type], + protos): datum = get_datum(binding, obj, pytype) - return datumdef.datum_as_proto(datum) + return datum_as_proto(datum, protos) # type: ignore -def to_outgoing_param_binding(binding: str, obj: typing.Any, *, - pytype: typing.Optional[type], +def to_outgoing_param_binding(binding: str, obj: Any, *, + pytype: Optional[type], out_name: str, - shmem_mgr: SharedMemoryManager, - is_function_data_cache_enabled: bool) \ - -> protos.ParameterBinding: + protos): datum = get_datum(binding, obj, pytype) - shared_mem_value = None - if _can_transfer_over_shmem(shmem_mgr, is_function_data_cache_enabled, - datum): - shared_mem_value = datumdef.Datum.to_rpc_shared_memory(datum, shmem_mgr) - # Check if data was written into shared memory - if shared_mem_value is not None: - # If it was, then use the rpc_shared_memory field in response message - return protos.ParameterBinding( - name=out_name, - rpc_shared_memory=shared_mem_value) - else: - # If not, send it as part of the response message over RPC - # rpc_val can be None here as we now support a None return type - rpc_val = datumdef.datum_as_proto(datum) - return protos.ParameterBinding( - name=out_name, - data=rpc_val) - - -def deferred_bindings_decode(binding: typing.Any, - pb: protos.ParameterBinding, *, - pytype: typing.Optional[type], - datum: typing.Any, - metadata: typing.Any, + # If not, send it as part of the response message over RPC + # rpc_val can be None here as we now support a None return type + rpc_val = datum_as_proto(datum, protos) # type: ignore + return protos.ParameterBinding( + name=out_name, + data=rpc_val) + + +def deferred_bindings_decode(binding: Any, + pb: Any, *, + pytype: Optional[type], + datum: Any, + metadata: Any, function_name: str): """ This cache holds deferred binding types (ie. BlobClient, ContainerClient) @@ -320,9 +285,8 @@ def deferred_bindings_decode(binding: typing.Any, return deferred_binding_type -def check_deferred_bindings_enabled(param_anno: type, - deferred_bindings_enabled: bool) -> (bool, - bool): +def check_deferred_bindings_enabled(param_anno: Union[type, None], + deferred_bindings_enabled: bool) -> Any: """ Checks if deferred bindings is enabled at fx and single binding level diff --git a/azure_functions_worker/bindings/nullable_converters.py b/azure_functions_worker_v2/bindings/nullable_converters.py similarity index 69% rename from azure_functions_worker/bindings/nullable_converters.py rename to azure_functions_worker_v2/bindings/nullable_converters.py index e1c75aecc..e33255153 100644 --- a/azure_functions_worker/bindings/nullable_converters.py +++ b/azure_functions_worker_v2/bindings/nullable_converters.py @@ -1,13 +1,11 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + from datetime import datetime from typing import Optional, Union -from google.protobuf.timestamp_pb2 import Timestamp - -from azure_functions_worker import protos - -def to_nullable_string(nullable: Optional[str], property_name: str) -> \ - Optional[protos.NullableString]: +def to_nullable_string(nullable: Optional[str], property_name: str, protos): """Converts string input to an 'NullableString' to be sent through the RPC layer. Input that is not a string but is also not null or undefined logs a function app level warning. 
@@ -16,20 +14,20 @@ def to_nullable_string(nullable: Optional[str], property_name: str) -> \ valid string :param property_name The name of the property that the caller will assign the output to. Used for debugging. + :param protos The protos object used for returning the appropriate value """ if isinstance(nullable, str): return protos.NullableString(value=nullable) if nullable is not None: raise TypeError( - f"A 'str' type was expected instead of a '{type(nullable)}' " - f"type. Cannot parse value {nullable} of '{property_name}'.") + "A 'str' type was expected instead of a '%s' " "type. Cannot parse value %s of '%s'." % (type(nullable), nullable, property_name)) return None -def to_nullable_bool(nullable: Optional[bool], property_name: str) -> \ - Optional[protos.NullableBool]: +def to_nullable_bool(nullable: Optional[bool], property_name: str, protos): """Converts boolean input to an 'NullableBool' to be sent through the RPC layer. Input that is not a boolean but is also not null or undefined logs a function app level warning. @@ -38,21 +36,21 @@ def to_nullable_bool(nullable: Optional[bool], property_name: str) -> \ valid boolean :param property_name The name of the property that the caller will assign the output to. Used for debugging. + :param protos The protos object used for returning the appropriate value """ if isinstance(nullable, bool): return protos.NullableBool(value=nullable) if nullable is not None: raise TypeError( - f"A 'bool' type was expected instead of a '{type(nullable)}' " - f"type. Cannot parse value {nullable} of '{property_name}'.") + "A 'bool' type was expected instead of a '%s' " "type. Cannot parse value %s of '%s'." % (type(nullable), nullable, property_name)) return None def to_nullable_double(nullable: Optional[Union[str, int, float]], - property_name: str) -> \ - Optional[protos.NullableDouble]: + property_name: str, protos): """Converts int or float or str that parses to a number to an 'NullableDouble' to be sent through the RPC layer. Input that is not a valid number but is also not null or undefined logs a function app level @@ -61,6 +59,7 @@ def to_nullable_double(nullable: Optional[Union[str, int, float]], valid number :param property_name The name of the property that the caller will assign the output to. Used for debugging. + :param protos The protos object used for returning the appropriate value """ if isinstance(nullable, int) or isinstance(nullable, float): return protos.NullableDouble(value=nullable) @@ -72,20 +71,20 @@ def to_nullable_double(nullable: Optional[Union[str, int, float]], return protos.NullableDouble(value=float(nullable)) except Exception: raise TypeError( - f"Cannot parse value {nullable} of '{property_name}' to " - f"float.") + "Cannot parse value %s of '%s' to " "float." % (nullable, property_name)) if nullable is not None: raise TypeError( - f"A 'int' or 'float'" - f" type was expected instead of a '{type(nullable)}' " - f"type. Cannot parse value {nullable} of '{property_name}'.") + "An 'int' or 'float'" " type was expected instead of a '%s' " "type. Cannot parse value %s of '%s'." % (type(nullable), nullable, property_name)) return None def to_nullable_timestamp(date_time: Optional[Union[datetime, int]], - property_name: str) -> protos.NullableTimestamp: + property_name: str, protos): """Converts Date or number input to an 'NullableTimestamp' to be sent through the RPC layer. Input that is not a Date or number but is also not null or undefined logs a function app level warning.
@@ -94,6 +93,7 @@ def to_nullable_timestamp(date_time: Optional[Union[datetime, int]], valid input :param property_name The name of the property that the caller will assign the output to. Used for debugging. + :param protos The protos object used for returning the appropriate value """ if date_time is not None: try: @@ -102,10 +102,10 @@ def to_nullable_timestamp(date_time: Optional[Union[datetime, int]], date_time.timestamp() return protos.NullableTimestamp( - value=Timestamp(seconds=int(time_in_seconds))) + value=protos.Timestamp(seconds=int(time_in_seconds))) except Exception: raise TypeError( - f"A 'datetime' or 'int'" - f" type was expected instead of a '{type(date_time)}' " - f"type. Cannot parse value {date_time} of '{property_name}'.") + "A 'datetime' or 'int'" " type was expected instead of a '%s' " "type. Cannot parse value %s of '%s'." % (type(date_time), date_time, property_name)) return None diff --git a/azure_functions_worker/bindings/out.py b/azure_functions_worker_v2/bindings/out.py similarity index 79% rename from azure_functions_worker/bindings/out.py rename to azure_functions_worker_v2/bindings/out.py index 53ac0199d..3e2dc0d4b 100644 --- a/azure_functions_worker/bindings/out.py +++ b/azure_functions_worker_v2/bindings/out.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. +from typing import Optional + class Out: @@ -10,5 +12,5 @@ def __init__(self) -> None: def set(self, val): self.__value = val - def get(self) -> str: + def get(self) -> Optional[str]: return self.__value diff --git a/azure_functions_worker/bindings/retrycontext.py b/azure_functions_worker_v2/bindings/retrycontext.py similarity index 54% rename from azure_functions_worker/bindings/retrycontext.py rename to azure_functions_worker_v2/bindings/retrycontext.py index 8c2166385..d68b21ddf 100644 --- a/azure_functions_worker/bindings/retrycontext.py +++ b/azure_functions_worker_v2/bindings/retrycontext.py @@ -4,12 +4,31 @@ from dataclasses import dataclass from enum import Enum -from .
import rpcexception + +class RpcException: + def __init__(self, + source: str, + stack_trace: str, + message: str) -> None: + self.__source = source + self.__stack_trace = stack_trace + self.__message = message + + @property + def source(self) -> str: + return self.__source + + @property + def stack_trace(self) -> str: + return self.__stack_trace + + @property + def message(self) -> str: + return self.__message class RetryPolicy(Enum): """Retry policy for the function invocation""" - MAX_RETRY_COUNT = "max_retry_count" STRATEGY = "strategy" DELAY_INTERVAL = "delay_interval" @@ -21,8 +40,6 @@ class RetryPolicy(Enum): class RetryContext: """Gets the current retry count from retry-context""" retry_count: int - """Gets the max retry count from retry-context""" max_retry_count: int - - rpc_exception: rpcexception.RpcException + rpc_exception: RpcException diff --git a/azure_functions_worker/bindings/tracecontext.py b/azure_functions_worker_v2/bindings/tracecontext.py similarity index 100% rename from azure_functions_worker/bindings/tracecontext.py rename to azure_functions_worker_v2/bindings/tracecontext.py diff --git a/azure_functions_worker/functions.py b/azure_functions_worker_v2/functions.py similarity index 69% rename from azure_functions_worker/functions.py rename to azure_functions_worker_v2/functions.py index 292fe4857..ca1498ff6 100644 --- a/azure_functions_worker/functions.py +++ b/azure_functions_worker_v2/functions.py @@ -5,12 +5,14 @@ import pathlib import typing import uuid +from .logging import logger -from . import bindings as bindings_utils -from . import protos -from ._thirdparty import typing_inspect -from .constants import HTTP_TRIGGER -from .protos import BindingInfo +from .bindings.meta import (has_implicit_output, + check_deferred_bindings_enabled, + check_output_type_annotation, + check_input_type_annotation) +from .utils.constants import HTTP_TRIGGER +from .utils.typing_inspect import is_generic_type, get_origin, get_args # type: ignore class ParamTypeInfo(typing.NamedTuple): @@ -33,7 +35,7 @@ class FunctionInfo(typing.NamedTuple): input_types: typing.Mapping[str, ParamTypeInfo] output_types: typing.Mapping[str, ParamTypeInfo] - return_type: typing.Optional[ParamTypeInfo] + return_type: typing.Optional[typing.Union[str, ParamTypeInfo]] trigger_metadata: typing.Optional[typing.Dict[str, typing.Any]] @@ -42,7 +44,7 @@ class FunctionLoadError(RuntimeError): def __init__(self, function_name: str, msg: str) -> None: super().__init__( - f'cannot load the {function_name} function: {msg}') + "cannot load the " + function_name + " function: " + msg) class Registry: @@ -52,7 +54,7 @@ class Registry: def __init__(self) -> None: self._functions = {} - def get_function(self, function_id: str) -> FunctionInfo: + def get_function(self, function_id: str) -> typing.Union[FunctionInfo, None]: if function_id in self._functions: return self._functions[function_id] @@ -63,15 +65,14 @@ def deferred_bindings_enabled(self) -> bool: @staticmethod def get_explicit_and_implicit_return(binding_name: str, - binding: BindingInfo, + binding, explicit_return: bool, implicit_return: bool, bound_params: dict) -> \ typing.Tuple[bool, bool]: if binding_name == '$return': explicit_return = True - elif bindings_utils.has_implicit_output( - binding.type): + elif has_implicit_output(binding.type): implicit_return = True bound_params[binding_name] = binding else: @@ -91,7 +92,7 @@ def get_return_binding(binding_name: str, return_binding_name = binding_type assert return_binding_name is not None 
explicit_return_val_set = True - elif bindings_utils.has_implicit_output(binding_type): + elif has_implicit_output(binding_type): return_binding_name = binding_type return return_binding_name, explicit_return_val_set @@ -99,7 +100,8 @@ @staticmethod def validate_binding_direction(binding_name: str, binding_direction: str, - func_name: str): + func_name: str, + protos): if binding_direction == protos.BindingInfo.inout: raise FunctionLoadError( func_name, @@ -126,24 +128,26 @@ def is_context_required(params, bound_params: dict, raise FunctionLoadError( func_name, 'the "context" parameter is expected to be of ' - 'type azure.functions.Context, got ' - f'{ctx_anno!r}') + 'type azure.functions.Context, got %r' % ctx_anno) return requires_context @staticmethod def validate_function_params(params: dict, bound_params: dict, - annotations: dict, func_name: str): + annotations: dict, func_name: str, + protos): + logger.debug("validate_function_params: params: %s, bound_params: %s, annotations: %s, func_name: %s", + params, bound_params, annotations, func_name) if set(params) - set(bound_params): raise FunctionLoadError( func_name, 'the following parameters are declared in Python but ' - f'not in function.json: {set(params) - set(bound_params)!r}') + 'not in function.json: %r' % (set(params) - set(bound_params))) if set(bound_params) - set(params): raise FunctionLoadError( func_name, - f'the following parameters are declared in function.json but ' - f'not in Python: {set(bound_params) - set(params)!r}') + 'the following parameters are declared in function.json but ' + 'not in Python: %r' % (set(bound_params) - set(params))) input_types: typing.Dict[str, ParamTypeInfo] = {} output_types: typing.Dict[str, ParamTypeInfo] = {} @@ -151,19 +155,21 @@ def validate_function_params(params: dict, bound_params: dict, for param in params.values(): binding = bound_params[param.name] + logger.debug("validate_function_params: param: %s, binding: %s", param, binding) param_has_anno = param.name in annotations param_anno = annotations.get(param.name) + logger.debug("validate_function_params: param_has_anno: %s, param_anno: %s", param_has_anno, param_anno) # Check if deferred bindings is enabled fx_deferred_bindings_enabled, is_deferred_binding = ( - bindings_utils.check_deferred_bindings_enabled( + check_deferred_bindings_enabled( param_anno, fx_deferred_bindings_enabled)) if param_has_anno: - if typing_inspect.is_generic_type(param_anno): - param_anno_origin = typing_inspect.get_origin(param_anno) + if is_generic_type(param_anno): + param_anno_origin = get_origin(param_anno) if param_anno_origin is not None: is_param_out = ( isinstance(param_anno_origin, type) @@ -185,74 +191,79 @@ def validate_function_params(params: dict, bound_params: dict, is_binding_out = binding.direction == protos.BindingInfo.out if is_param_out: - param_anno_args = typing_inspect.get_args(param_anno) + param_anno_args = get_args(param_anno) if len(param_anno_args) != 1: raise FunctionLoadError( func_name, - f'binding {param.name} has invalid Out annotation ' - f'{param_anno!r}') + 'binding %s has invalid Out annotation %r' % (param.name, param_anno)) param_py_type = param_anno_args[0] # typing_inspect.get_args() returns a flat list, # so if the annotation was func.Out[typing.List[foo]], # we need to reconstruct it.
if (isinstance(param_py_type, tuple) - and typing_inspect.is_generic_type(param_py_type[0])): + and is_generic_type(param_py_type[0])): param_py_type = operator.getitem( param_py_type[0], *param_py_type[1:]) else: param_py_type = param_anno + logger.debug("validate_function_params: param_py_type: %s", param_py_type) + if (param_has_anno and not isinstance(param_py_type, type) - and not typing_inspect.is_generic_type(param_py_type)): + and not is_generic_type(param_py_type)): raise FunctionLoadError( func_name, - f'binding {param.name} has invalid non-type annotation ' - f'{param_anno!r}') + 'binding %s has invalid non-type annotation %r' % (param.name, param_anno)) if is_binding_out and param_has_anno and not is_param_out: raise FunctionLoadError( func_name, - f'binding {param.name} is declared to have the "out" ' + 'binding %s is declared to have the "out" ' 'direction, but its annotation in Python is not ' - 'a subclass of azure.functions.Out') + 'a subclass of azure.functions.Out' % param.name) if not is_binding_out and is_param_out: raise FunctionLoadError( func_name, - f'binding {param.name} is declared to have the "in" ' + 'binding %s is declared to have the "in" ' 'direction in function.json, but its annotation ' - 'is azure.functions.Out in Python') + 'is azure.functions.Out in Python' % param.name) if param_has_anno and param_py_type in (str, bytes) and ( - not bindings_utils.has_implicit_output(binding.type)): + not has_implicit_output(binding.type)): param_bind_type = 'generic' else: param_bind_type = binding.type + logger.debug("validate_function_params: param_bind_type: %s", param_bind_type) + if param_has_anno: if is_param_out: - checks_out = bindings_utils.check_output_type_annotation( + checks_out = check_output_type_annotation( param_bind_type, param_py_type) else: - checks_out = bindings_utils.check_input_type_annotation( + checks_out = check_input_type_annotation( param_bind_type, param_py_type, is_deferred_binding) + logger.debug("validate_function_params: checks_out: %s", + checks_out) + if not checks_out: if binding.data_type is not protos.BindingInfo.undefined: raise FunctionLoadError( func_name, - f'{param.name!r} binding type "{binding.type}" ' - f'and dataType "{binding.data_type}" in ' - f'function.json do not match the corresponding ' - f'function parameter\'s Python type ' - f'annotation "{param_py_type.__name__}"') + '%r binding type "%s" ' 'and dataType "%s" in ' 'function.json do not match the corresponding ' 'function parameter\'s Python type ' 'annotation "%s"' % (param.name, binding.type, binding.data_type, param_py_type.__name__)) else: raise FunctionLoadError( func_name, - f'type of {param.name} binding in function.json ' - f'"{binding.type}" does not match its Python ' - f'annotation "{param_py_type.__name__}"') + 'type of %s binding in function.json ' '"%s" does not match its Python ' 'annotation "%s"' % (param.name, binding.type, param_py_type.__name__)) param_type_info = ParamTypeInfo(param_bind_type, param_py_type, @@ -270,8 +281,8 @@ def get_function_return_type(annotations: dict, has_explicit_return: bool, return_pytype = None if has_explicit_return and 'return' in annotations: return_anno = annotations.get('return') - if typing_inspect.is_generic_type( - return_anno) and typing_inspect.get_origin( + if is_generic_type( + return_anno) and get_origin( return_anno).__name__ == 'Out': raise FunctionLoadError( func_name, @@ -281,18 +292,18 @@ def get_function_return_type(annotations: dict, has_explicit_return: bool, if not isinstance(return_pytype, type): raise FunctionLoadError(
func_name, - f'has invalid non-type return ' - f'annotation {return_pytype!r}') + 'has invalid non-type return ' 'annotation %r' % return_pytype) if return_pytype is (str, bytes): binding_name = 'generic' - if not bindings_utils.check_output_type_annotation( + if not check_output_type_annotation( binding_name, return_pytype): raise FunctionLoadError( func_name, - f'Python return annotation "{return_pytype.__name__}" ' - f'does not match binding type "{binding_name}"') + 'Python return annotation "%s" ' 'does not match binding type "%s"' % (return_pytype.__name__, binding_name)) if has_implicit_return and 'return' in annotations: return_pytype = annotations.get('return') @@ -357,60 +368,7 @@ def _get_http_trigger_param_name(self, input_types): ) return http_trigger_param_name - def add_function(self, function_id: str, - func: typing.Callable, - metadata: protos.RpcFunctionMetadata): - func_name = metadata.name - sig = inspect.signature(func) - params = dict(sig.parameters) - annotations = typing.get_type_hints(func) - return_binding_name: typing.Optional[str] = None - explicit_return_val_set = False - has_explicit_return = False - has_implicit_return = False - - bound_params = {} - for binding_name, binding_info in metadata.bindings.items(): - self.validate_binding_direction(binding_name, - binding_info.direction, func_name) - - has_explicit_return, has_implicit_return = \ - self.get_explicit_and_implicit_return( - binding_name, binding_info, has_explicit_return, - has_implicit_return, bound_params) - - return_binding_name, explicit_return_val_set = \ - self.get_return_binding(binding_name, - binding_info.type, - return_binding_name, - explicit_return_val_set) - - requires_context = self.is_context_required(params, bound_params, - annotations, - func_name) - - input_types, output_types, _ = self.validate_function_params( - params, bound_params, annotations, func_name) - - return_type = \ - self.get_function_return_type(annotations, - has_explicit_return, - has_implicit_return, - return_binding_name, - func_name) - - self.add_func_to_registry_and_return_funcinfo(func, func_name, - function_id, - metadata.directory, - requires_context, - has_explicit_return, - has_implicit_return, - _, - input_types, - output_types, - return_type) - - def add_indexed_function(self, function): + def add_indexed_function(self, function, protos): func = function.get_user_function() func_name = function.get_function_name() function_id = str(uuid.uuid5(namespace=uuid.NAMESPACE_OID, @@ -429,7 +387,7 @@ def add_indexed_function(self, function): for binding in function.get_bindings(): self.validate_binding_direction(binding.name, binding.direction, - func_name) + func_name, protos) has_explicit_return, has_implicit_return = \ self.get_explicit_and_implicit_return( @@ -451,7 +409,8 @@ def add_indexed_function(self, function): params, bound_params, annotations, - func_name) + func_name, + protos) return_type = \ self.get_function_return_type(annotations, diff --git a/azure_functions_worker_v2/handle_event.py b/azure_functions_worker_v2/handle_event.py new file mode 100644 index 000000000..edae60753 --- /dev/null +++ b/azure_functions_worker_v2/handle_event.py @@ -0,0 +1,423 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
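For orientation, the registration path above runs once per indexed function; a minimal sketch of the call shape, mirroring the `process_indexed_function` helper that appears later in this diff (the `app` object and the `protos` module are supplied by the surrounding worker code, so names here are illustrative):

    # Sketch only: registry and protos come from the proxy worker.
    registry = Registry()
    for indexed_function in app.get_functions():
        function_info = registry.add_indexed_function(
            function=indexed_function,   # decorated azure.functions function
            protos=protos)               # proto module now threaded through explicitly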
+ +import logging +import os +import sys + +from typing import List, Optional + +from .functions import FunctionInfo, Registry +from .http_v2 import ( + HttpServerInitError, + HttpV2Registry, + http_coordinator, + initialize_http_server, + sync_http_request, +) +from .loader import index_function_app, process_indexed_function +from .logging import logger +from .otel import otel_manager, initialize_azure_monitor, configure_opentelemetry +from .version import VERSION + +from .bindings.context import get_context +from .bindings.meta import (load_binding_registry, is_trigger_binding, + from_incoming_proto, to_outgoing_param_binding, + to_outgoing_proto) +from .bindings.out import Out +from .utils.constants import (FUNCTION_DATA_CACHE, + RAW_HTTP_BODY_BYTES, + TYPED_DATA_COLLECTION, + RPC_HTTP_BODY_ONLY, + WORKER_STATUS, + RPC_HTTP_TRIGGER_METADATA_REMOVED, + SHARED_MEMORY_DATA_TRANSFER, + TRUE, + PYTHON_ENABLE_OPENTELEMETRY, + PYTHON_ENABLE_OPENTELEMETRY_DEFAULT, + WORKER_OPEN_TELEMETRY_ENABLED, + PYTHON_ENABLE_INIT_INDEXING, + HTTP_URI, + REQUIRES_ROUTE_PARAMETERS, + PYTHON_SCRIPT_FILE_NAME, + PYTHON_SCRIPT_FILE_NAME_DEFAULT, + PYTHON_ENABLE_DEBUG_LOGGING) +from .utils.current import get_current_loop, execute_async, run_sync_func +from .utils.env_state import get_app_setting, is_envvar_true +from .utils.helpers import change_cwd, get_worker_metadata +from .utils.tracing import serialize_exception +from .utils.validators import validate_script_file_name + +metadata_result: Optional[List] = None +metadata_exception: Optional[Exception] = None +_functions = Registry() +_function_data_cache_enabled: bool = False +_host: str = "" +protos = None + + +async def worker_init_request(request): + logger.info("V2 Library Worker: received WorkerInitRequest, " + "Version %s", VERSION) + global _host, protos, _function_data_cache_enabled, metadata_exception + init_request = request.request.worker_init_request + host_capabilities = init_request.capabilities + _host = request.properties.get("host") + protos = request.properties.get("protos") + if FUNCTION_DATA_CACHE in host_capabilities: + val = host_capabilities[FUNCTION_DATA_CACHE] + _function_data_cache_enabled = val == TRUE + + capabilities = { + RAW_HTTP_BODY_BYTES: TRUE, + TYPED_DATA_COLLECTION: TRUE, + RPC_HTTP_BODY_ONLY: TRUE, + WORKER_STATUS: TRUE, + RPC_HTTP_TRIGGER_METADATA_REMOVED: TRUE, + SHARED_MEMORY_DATA_TRANSFER: TRUE, + } + if get_app_setting(setting=PYTHON_ENABLE_OPENTELEMETRY, + default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): + initialize_azure_monitor() + + if otel_manager.get_azure_monitor_available(): + capabilities[WORKER_OPEN_TELEMETRY_ENABLED] = TRUE + + # loading bindings registry and saving results to a static + # dictionary which will be later used in the invocation request + load_binding_registry() + + # Index in init by default + try: + load_function_metadata( + init_request.function_app_directory, + caller_info="worker_init_request") + + if HttpV2Registry.http_v2_enabled(): + logger.debug("HTTP v2 enabled; setting streaming capabilities") + capabilities[HTTP_URI] = \ + initialize_http_server(_host) + capabilities[REQUIRES_ROUTE_PARAMETERS] = TRUE + logger.debug("Completed HTTP v2 streaming setup") + + except HttpServerInitError as ex: + logger.error("HTTP server initialization failed: %s", ex) + metadata_exception = ex + except Exception as ex: + # This is catching an exception that happens during indexing while the init + # request is still in progress.
The proxy worker will do nothing with this, + # but metadata will fail + metadata_exception = ex + logger.error("Error during worker initialization: %s", ex) + + logger.debug("Successfully processed WorkerInitRequest") + return protos.WorkerInitResponse( + capabilities=capabilities, + worker_metadata=get_worker_metadata(protos), + result=protos.StatusResult(status=protos.StatusResult.Success) + ) + + +# worker_status_request can be done in the proxy worker + +async def functions_metadata_request(request): + logger.info("V2 Library Worker: received WorkerMetadataRequest") + global protos, metadata_result, metadata_exception + logger.debug("functions_metadata_request: metadata_result: %s, metadata_exception: %s", metadata_result, metadata_exception) + + if metadata_exception: + logger.error("Error during metadata indexing: %s", metadata_exception) + return protos.FunctionMetadataResponse( + result=protos.StatusResult( + status=protos.StatusResult.Failure, + exception=serialize_exception( + metadata_exception, protos))) + + else: + logger.debug("Metadata indexing completed without exceptions") + return protos.FunctionMetadataResponse( + use_default_metadata_indexing=False, + function_metadata_results=metadata_result, + result=protos.StatusResult( + status=protos.StatusResult.Success)) + + +async def function_load_request(request): + logger.info("V2 Library Worker: received WorkerLoadRequest") + global protos + func_request = request.request.function_load_request + function_id = func_request.function_id + + return protos.FunctionLoadResponse( + function_id=function_id, + result=protos.StatusResult( + status=protos.StatusResult.Success)) + + +async def invocation_request(request): + logger.info("V2 Library Worker: received WorkerInvocationRequest") + global protos + invoc_request = request.request.invocation_request + logger.debug("Invocation request: %s", invoc_request) + invocation_id = invoc_request.invocation_id + function_id = invoc_request.function_id + http_v2_enabled = False + threadpool = request.properties.get("threadpool") + + try: + fi: FunctionInfo = _functions.get_function( + function_id) + assert fi is not None + + args = {} + + http_v2_enabled = _functions.get_function( + function_id).is_http_func and \ + HttpV2Registry.http_v2_enabled() + logger.debug("invocation_request: http_v2_enabled: %s", http_v2_enabled) + + for pb in invoc_request.input_data: + logger.debug("invocation_request: input binding: %s", pb) + pb_type_info = fi.input_types[pb.name] + if is_trigger_binding(pb_type_info.binding_name): + trigger_metadata = invoc_request.trigger_metadata + else: + trigger_metadata = None + + args[pb.name] = from_incoming_proto( + pb_type_info.binding_name, + pb, + trigger_metadata=trigger_metadata, + pytype=pb_type_info.pytype, + function_name=_functions.get_function( + function_id).name, + is_deferred_binding=pb_type_info.deferred_bindings_enabled) + + logger.debug("invocation_request: args[%s]: %s", pb.name, args[pb.name]) + + if http_v2_enabled: + http_request = await http_coordinator.get_http_request_async( + invocation_id) + + trigger_arg_name = fi.trigger_metadata.get('param_name') + func_http_request = args[trigger_arg_name] + await sync_http_request(http_request, func_http_request) + args[trigger_arg_name] = http_request + + fi_context = get_context(invoc_request, fi.name, + fi.directory) + + # Use local thread storage to store the invocation ID + # for a customer's threads + fi_context.thread_local_storage.invocation_id = invocation_id +
if fi.requires_context: + args['context'] = fi_context + + if fi.output_types: + for name in fi.output_types: + args[name] = Out() + + if fi.is_async: + if otel_manager.get_azure_monitor_available(): + configure_opentelemetry(fi_context) + + call_result = await execute_async(fi.func, args) # Not supporting Extensions + else: + _loop = get_current_loop() + call_result = await _loop.run_in_executor( + threadpool, + run_sync_func, + invocation_id, fi_context, fi.func, args) + + if call_result is not None and not fi.has_return: + raise RuntimeError( + 'function %r without a $return binding ' + 'returned a non-None value' % fi.name) + + if http_v2_enabled: + http_coordinator.set_http_response(invocation_id, call_result) + + output_data = [] + if fi.output_types: + for out_name, out_type_info in fi.output_types.items(): + val = args[out_name].get() + if val is None: + # TODO: is the "Out" parameter optional? + # Can "None" be marshaled into protos.TypedData? + continue + + param_binding = to_outgoing_param_binding( + out_type_info.binding_name, val, + pytype=out_type_info.pytype, + out_name=out_name, + protos=protos) + output_data.append(param_binding) + logger.debug("invocation_request: output_data: %s", output_data) + + return_value = None + if fi.return_type is not None and not http_v2_enabled: + return_value = to_outgoing_proto( + fi.return_type.binding_name, + call_result, + pytype=fi.return_type.pytype, + protos=protos + ) + logger.debug("invocation_request: return_value: %s", return_value) + + # Actively flush customer print() function to console + sys.stdout.flush() + return protos.InvocationResponse( + invocation_id=invocation_id, + return_value=return_value, + result=protos.StatusResult( + status=protos.StatusResult.Success), + output_data=output_data) + + except Exception as ex: + if http_v2_enabled: + http_coordinator.set_http_response(invocation_id, ex) + global metadata_exception + metadata_exception = ex + return protos.InvocationResponse( + invocation_id=invocation_id, + result=protos.StatusResult( + status=protos.StatusResult.Failure, + exception=serialize_exception(ex, protos))) + + +async def function_environment_reload_request(request): + """Only runs on Linux Consumption placeholder specialization. + This is called only when placeholder mode is true. On worker restarts + worker init request will be called directly.
+ """ + logger.info("V2 Library Worker: received WorkerInitRequest," + "Version %s", VERSION) + global _host, protos, metadata_exception + try: + + func_env_reload_request = \ + request.request.function_environment_reload_request + directory = func_env_reload_request.function_app_directory + + if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING): + root_logger = logging.getLogger("azure.functions") + root_logger.setLevel(logging.DEBUG) + + # calling load_binding_registry again since the + # reload_customer_libraries call clears the registry + load_binding_registry() + + capabilities = {} + if get_app_setting( + setting=PYTHON_ENABLE_OPENTELEMETRY, + default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): + initialize_azure_monitor() + + if otel_manager.get_azure_monitor_available(): + capabilities[WORKER_OPEN_TELEMETRY_ENABLED] = ( + TRUE) + + try: + _host = request.properties.get("host") + protos = request.properties.get("protos") + load_function_metadata( + directory, + caller_info="environment_reload_request") + if HttpV2Registry.http_v2_enabled(): + capabilities[HTTP_URI] = \ + initialize_http_server(_host) + capabilities[REQUIRES_ROUTE_PARAMETERS] = TRUE + except HttpServerInitError as ex: + metadata_exception = ex + + # Change function app directory + if getattr(func_env_reload_request, + 'function_app_directory', None): + change_cwd( + func_env_reload_request.function_app_directory) + + return protos.FunctionEnvironmentReloadResponse( + capabilities=capabilities, + worker_metadata=get_worker_metadata(protos), + result=protos.StatusResult( + status=protos.StatusResult.Success)) + + except Exception as ex: + metadata_exception = ex + return protos.FunctionEnvironmentReloadResponse( + result=protos.StatusResult( + status=protos.StatusResult.Failure, + exception=serialize_exception(ex, protos))) + + +def load_function_metadata(function_app_directory, caller_info): + global protos, metadata_result + """ + This method is called to index the functions in the function app + directory and save the results in function_metadata_result or + function_metadata_exception in case of an exception. + """ + try: + script_file_name = get_app_setting( + setting=PYTHON_SCRIPT_FILE_NAME, + default_value=PYTHON_SCRIPT_FILE_NAME_DEFAULT) + + logger.debug( + 'Received load metadata request from %s, ' + 'script_file_name: %s', + caller_info, script_file_name) + + validate_script_file_name(script_file_name) + function_path = os.path.join(function_app_directory, + script_file_name) + + # For V1, the function path will not exist and + # return None. 
+ metadata_result = (index_functions(function_path, function_app_directory)) \ + if os.path.exists(function_path) else None + logger.debug("load_function_metadata: metadata_result: %s", metadata_result) + except Exception as ex: + logger.error("Error in load_function_metadata: %s", ex) + global metadata_exception + metadata_exception = ex + + +def index_functions(function_path: str, function_dir: str): + global protos + indexed_functions = index_function_app(function_path) + logger.info( + "Indexed function app and found %s functions", + len(indexed_functions) + ) + + if indexed_functions: + fx_metadata_results, fx_bindings_logs = ( + process_indexed_function( + protos, + _functions, + indexed_functions, + function_dir)) + + indexed_function_logs: List[str] = [] + indexed_function_bindings_logs = [] + for func in indexed_functions: + func_binding_logs = fx_bindings_logs.get(func) + for binding in func.get_bindings(): + deferred_binding_info = func_binding_logs.get( + binding.name)\ + if func_binding_logs.get(binding.name) else "" + indexed_function_bindings_logs.append(( + binding.type, binding.name, deferred_binding_info)) + + function_log = "Function Name: " + func.get_function_name() + ", Function Binding: " + str(indexed_function_bindings_logs) + indexed_function_logs.append(function_log) + + logger.info( + 'Successfully processed FunctionMetadataRequest for ' + 'functions: %s. Deferred bindings enabled: %s.', " ".join( + indexed_function_logs), + _functions.deferred_bindings_enabled()) + + return fx_metadata_results diff --git a/azure_functions_worker/http_v2.py b/azure_functions_worker_v2/http_v2.py similarity index 95% rename from azure_functions_worker/http_v2.py rename to azure_functions_worker_v2/http_v2.py index 4eeeea9d9..5f1fb772e 100644 --- a/azure_functions_worker/http_v2.py +++ b/azure_functions_worker_v2/http_v2.py @@ -6,15 +6,13 @@ import importlib import socket import sys -from typing import Dict +from typing import Any, Dict -from azure_functions_worker.constants import ( +from azure_functions_worker_v2.utils.constants import ( BASE_EXT_SUPPORTED_PY_MINOR_VERSION, - PYTHON_ENABLE_INIT_INDEXING, X_MS_INVOCATION_ID, ) -from azure_functions_worker.logging import logger -from azure_functions_worker.utils.common import is_envvar_false +from azure_functions_worker_v2.logging import logger # Http V2 Exceptions @@ -119,7 +117,7 @@ class SingletonMeta(type): """ Metaclass for implementing the singleton pattern.
""" - _instances = {} + _instances: Dict[Any, Any] = {} def __call__(cls, *args, **kwargs): if cls not in cls._instances: @@ -214,7 +212,7 @@ def initialize_http_server(host_addr, **kwargs): @app.route async def catch_all(request: request_type): # type: ignore - invoc_id = request.headers.get(X_MS_INVOCATION_ID) + invoc_id = request.headers.get(X_MS_INVOCATION_ID) # type: ignore if invoc_id is None: raise MissingHeaderError("Header %s not found" % X_MS_INVOCATION_ID) @@ -236,7 +234,7 @@ async def catch_all(request: request_type): # type: ignore loop = asyncio.get_event_loop() loop.create_task(web_server_run_task) - web_server_address = f"http://{host_addr}:{unused_port}" + web_server_address = "http://" + str(host_addr) + ":" + str(unused_port) logger.info('HTTP server starting on %s', web_server_address) return web_server_address @@ -279,8 +277,7 @@ def ext_base(cls): @classmethod def _check_http_v2_enabled(cls): - if sys.version_info.minor < BASE_EXT_SUPPORTED_PY_MINOR_VERSION or \ - is_envvar_false(PYTHON_ENABLE_INIT_INDEXING): + if sys.version_info.minor < BASE_EXT_SUPPORTED_PY_MINOR_VERSION: return False import azurefunctions.extensions.base as ext_base diff --git a/azure_functions_worker/loader.py b/azure_functions_worker_v2/loader.py similarity index 55% rename from azure_functions_worker/loader.py rename to azure_functions_worker_v2/loader.py index ce96c1406..b0a5ccf7e 100644 --- a/azure_functions_worker/loader.py +++ b/azure_functions_worker_v2/loader.py @@ -1,22 +1,23 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -"""Python functions loader.""" + import importlib import importlib.machinery -import os import os.path import pathlib import sys import time + from datetime import timedelta -from os import PathLike, fspath -from typing import Dict, Optional +from typing import Any, Dict, Optional, Union -from google.protobuf.duration_pb2 import Duration -from . import bindings, functions, protos +from .functions import Registry +from .logging import logger + +from .bindings.meta import get_deferred_raw_bindings from .bindings.retrycontext import RetryPolicy -from .constants import ( +from .utils.constants import ( CUSTOMER_PACKAGES_PATH, METADATA_PROPERTIES_WORKER_INDEXED, MODULE_NOT_FOUND_TS_URL, @@ -25,31 +26,13 @@ PYTHON_SCRIPT_FILE_NAME_DEFAULT, RETRY_POLICY, ) -from .logging import logger -from .utils.common import get_app_setting +from .utils.env_state import get_app_setting from .utils.wrappers import attach_message_to_exception _AZURE_NAMESPACE = '__app__' _DEFAULT_SCRIPT_FILENAME = '__init__.py' _DEFAULT_ENTRY_POINT = 'main' -_submodule_dirs = [] - - -def register_function_dir(path: PathLike) -> None: - try: - _submodule_dirs.append(fspath(path)) - except TypeError as e: - raise RuntimeError(f'Path ({path}) is incompatible with fspath. ' - f'It is of type {type(path)}.', e) - - -def install() -> None: - if _AZURE_NAMESPACE not in sys.modules: - # Create and register the __app__ namespace package. 
- ns_spec = importlib.machinery.ModuleSpec(_AZURE_NAMESPACE, None) - ns_spec.submodule_search_locations = _submodule_dirs - ns_pkg = importlib.util.module_from_spec(ns_spec) - sys.modules[_AZURE_NAMESPACE] = ns_pkg +_submodule_dirs: list[Any] = [] def convert_to_seconds(timestr: str): @@ -58,11 +41,7 @@ def convert_to_seconds(timestr: str): seconds=x.tm_sec).total_seconds()) -def uninstall() -> None: - pass - - -def build_binding_protos(indexed_function) -> Dict: +def build_binding_protos(protos, indexed_function) -> Dict: binding_protos = {} for binding in indexed_function.get_bindings(): binding_protos[binding.name] = protos.BindingInfo( @@ -73,7 +52,7 @@ def build_binding_protos(indexed_function) -> Dict: return binding_protos -def build_retry_protos(indexed_function) -> Dict: +def build_retry_protos(protos, indexed_function) -> Union[Dict, None]: retry = get_retry_settings(indexed_function) if not retry: @@ -84,9 +63,9 @@ retry_strategy = retry.get(RetryPolicy.STRATEGY.value) if strategy == "fixed_delay": - return build_fixed_delay_retry(retry, max_retry_count, retry_strategy) + return build_fixed_delay_retry(protos, retry, max_retry_count, retry_strategy) else: - return build_variable_interval_retry(retry, max_retry_count, + return build_variable_interval_retry(protos, retry, max_retry_count, retry_strategy) @@ -98,8 +77,8 @@ def get_retry_settings(indexed_function): return None -def build_fixed_delay_retry(retry, max_retry_count, retry_strategy): - delay_interval = Duration( +def build_fixed_delay_retry(protos, retry, max_retry_count, retry_strategy): + delay_interval = protos.Duration( seconds=convert_to_seconds(retry.get(RetryPolicy.DELAY_INTERVAL.value)) ) return protos.RpcRetryOptions( @@ -109,12 +88,12 @@ def build_fixed_delay_retry(retry, max_retry_count, retry_strategy): ) -def build_variable_interval_retry(retry, max_retry_count, retry_strategy): - minimum_interval = Duration( +def build_variable_interval_retry(protos, retry, max_retry_count, retry_strategy): + minimum_interval = protos.Duration( seconds=convert_to_seconds( retry.get(RetryPolicy.MINIMUM_INTERVAL.value)) ) - maximum_interval = Duration( + maximum_interval = protos.Duration( seconds=convert_to_seconds( retry.get(RetryPolicy.MAXIMUM_INTERVAL.value)) ) @@ -126,7 +105,8 @@ -def process_indexed_function(functions_registry: functions.Registry, +def process_indexed_function(protos, + functions_registry: Registry, indexed_functions, function_dir): """ fx_metadata_results is a list of the RpcFunctionMetadata for @@ -143,10 +123,10 @@ def process_indexed_function(functions_registry: functions.Registry, fx_bindings_logs = {} for indexed_function in indexed_functions: function_info = functions_registry.add_indexed_function( - function=indexed_function) + function=indexed_function, protos=protos) - binding_protos = build_binding_protos(indexed_function) - retry_protos = build_retry_protos(indexed_function) + binding_protos = build_binding_protos(protos, indexed_function) + retry_protos = build_retry_protos(protos, indexed_function) raw_bindings, bindings_logs = get_fx_raw_bindings( indexed_function=indexed_function, @@ -174,71 +154,10 @@ @attach_message_to_exception( expt_type=ImportError, - message='Cannot find module. Please check the requirements.txt ' - 'file for the missing module.
For more info, ' - 'please refer the troubleshooting ' - f'guide: {MODULE_NOT_FOUND_TS_URL}. ' - f'Current sys.path: {sys.path}', - debug_logs='Error in load_function. ' - f'Sys Path: {sys.path}, Sys Module: {sys.modules},' - 'python-packages Path exists: ' - f'{os.path.exists(CUSTOMER_PACKAGES_PATH)}') -def load_function(name: str, directory: str, script_file: str, - entry_point: Optional[str]): - dir_path = pathlib.Path(directory) - script_path = pathlib.Path(script_file) if script_file else pathlib.Path( - _DEFAULT_SCRIPT_FILENAME) - if not entry_point: - entry_point = _DEFAULT_ENTRY_POINT - - register_function_dir(dir_path.parent) - - try: - rel_script_path = script_path.relative_to(dir_path.parent) - except ValueError: - raise RuntimeError( - f'script path {script_file} is not relative to the specified ' - f'directory {directory}' - ) - - last_part = rel_script_path.parts[-1] - modname, ext = os.path.splitext(last_part) - if ext != '.py': - raise RuntimeError( - f'cannot load function {name}: ' - f'invalid Python filename {script_file}') - - modname_parts = [_AZURE_NAMESPACE] - modname_parts.extend(rel_script_path.parts[:-1]) - - # If the __init__.py contains the code, we should avoid double loading. - if modname.lower() != '__init__': - modname_parts.append(modname) - - fullmodname = '.'.join(modname_parts) - - mod = importlib.import_module(fullmodname) - - func = getattr(mod, entry_point, None) - if func is None or not callable(func): - raise RuntimeError( - f'cannot load function {name}: function {entry_point}() is not ' - f'present in {rel_script_path}') - - return func - - -@attach_message_to_exception( - expt_type=ImportError, - message='Cannot find module. Please check the requirements.txt ' - 'file for the missing module. For more info, ' - 'please refer the troubleshooting ' - f'guide: {MODULE_NOT_FOUND_TS_URL}. ' - f'Current sys.path: {sys.path}', - debug_logs='Error in index_function_app. ' - f'Sys Path: {sys.path}, Sys Module: {sys.modules},' - 'python-packages Path exists: ' - f'{os.path.exists(CUSTOMER_PACKAGES_PATH)}') + message="Cannot find module. Please check the requirements.txt file for the missing module. For more info, please refer to the troubleshooting guide: " + MODULE_NOT_FOUND_TS_URL + + ". Current sys.path: " + " ".join(sys.path), + debug_logs="Error in index_function_app. Sys Path: " + " ".join(sys.path) + + ", python-packages Path exists: " + str(os.path.exists(CUSTOMER_PACKAGES_PATH))) def index_function_app(function_path: str): module_name = pathlib.Path(function_path).stem imported_module = importlib.import_module(module_name) @@ -251,15 +170,14 @@ app = getattr(imported_module, i, None) else: raise ValueError( - f"More than one {app.__class__.__name__} or other top " - f"level function app instances are defined.") + "More than one %s or other top " "level function app instances are defined." % app.__class__.__name__) if not app: script_file_name = get_app_setting( setting=PYTHON_SCRIPT_FILE_NAME, - default_value=f'{PYTHON_SCRIPT_FILE_NAME_DEFAULT}') - raise ValueError("Could not find top level function app instances in " - f"{script_file_name}.") + default_value=PYTHON_SCRIPT_FILE_NAME_DEFAULT) + raise ValueError("Could not find top level function app instances in %s." % script_file_name) return app.get_functions() @@ -278,7 +196,7 @@ def get_fx_raw_bindings(indexed_function, function_info): for this function.
""" if function_info.deferred_bindings_enabled: - raw_bindings, bindings_logs = bindings.get_deferred_raw_bindings( + raw_bindings, bindings_logs = get_deferred_raw_bindings( indexed_function, function_info.input_types) return raw_bindings, bindings_logs diff --git a/azure_functions_worker_v2/logging.py b/azure_functions_worker_v2/logging.py new file mode 100644 index 000000000..49be533f6 --- /dev/null +++ b/azure_functions_worker_v2/logging.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import logging.handlers +import traceback + +# Logging Prefixes +SDK_LOG_PREFIX = "azure.functions" + +logger: logging.Logger = logging.getLogger(SDK_LOG_PREFIX) + + +def format_exception(exception: Exception) -> str: + msg = str(exception) + "\n" + msg += ''.join(traceback.format_exception(exception)) + return msg diff --git a/azure_functions_worker_v2/otel.py b/azure_functions_worker_v2/otel.py new file mode 100644 index 000000000..cadb3ca3a --- /dev/null +++ b/azure_functions_worker_v2/otel.py @@ -0,0 +1,106 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import os + +from .logging import logger + +from .utils.env_state import get_app_setting +from .utils.constants import (APPLICATIONINSIGHTS_CONNECTION_STRING, + PYTHON_AZURE_MONITOR_LOGGER_NAME, + PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT, + TRACESTATE, TRACEPARENT) + + +class OTelManager: + def __init__(self): + self._azure_monitor_available = False + self._context_api = None + self._trace_context_propagator = None + + def set_azure_monitor_available(self, azure_monitor_available): + self._azure_monitor_available = azure_monitor_available + + def get_azure_monitor_available(self): + return self._azure_monitor_available + + def set_context_api(self, context_api): + self._context_api = context_api + + def get_context_api(self): + return self._context_api + + def set_trace_context_propagator(self, trace_context_propagator): + self._trace_context_propagator = trace_context_propagator + + def get_trace_context_propagator(self): + return self._trace_context_propagator + + +def update_opentelemetry_status(): + """Check for OpenTelemetry library availability and + update the status attribute.""" + try: + from opentelemetry import context as context_api + from opentelemetry.trace.propagation.tracecontext import ( + TraceContextTextMapPropagator, + ) + + OTelManager.set_context_api(context_api) + OTelManager.set_trace_context_propagator(TraceContextTextMapPropagator()) + + except ImportError: + logger.exception( + "Cannot import OpenTelemetry libraries." 
+ ) + + +def initialize_azure_monitor(): + """Initializes OpenTelemetry and Azure monitor distro + """ + update_opentelemetry_status() + try: + from azure.monitor.opentelemetry import configure_azure_monitor + + # Set functions resource detector manually until officially + # included in Azure monitor distro + os.environ.setdefault( + "OTEL_EXPERIMENTAL_RESOURCE_DETECTORS", + "azure_functions", + ) + + configure_azure_monitor( + # Connection string can be explicitly specified in Appsetting + # If not set, defaults to env var + # APPLICATIONINSIGHTS_CONNECTION_STRING + connection_string=get_app_setting( + setting=APPLICATIONINSIGHTS_CONNECTION_STRING + ), + logger_name=get_app_setting( + setting=PYTHON_AZURE_MONITOR_LOGGER_NAME, + default_value=PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT + ), + ) + otel_manager.set_azure_monitor_available(True) + + logger.info("Successfully configured Azure monitor distro.") + except ImportError: + logger.exception( + "Cannot import Azure Monitor distro." + ) + otel_manager.set_azure_monitor_available(False) + except Exception: + logger.exception( + "Error initializing Azure monitor distro." + ) + otel_manager.set_azure_monitor_available(False) + + +def configure_opentelemetry(invocation_context): + carrier = {TRACEPARENT: invocation_context.trace_context.trace_parent, + TRACESTATE: invocation_context.trace_context.trace_state} + ctx = otel_manager.get_trace_context_propagator().extract(carrier) + otel_manager.get_context_api().attach(ctx) + + +otel_manager = OTelManager() diff --git a/azure_functions_worker/__init__.py b/azure_functions_worker_v2/utils/__init__.py similarity index 100% rename from azure_functions_worker/__init__.py rename to azure_functions_worker_v2/utils/__init__.py diff --git a/azure_functions_worker/constants.py b/azure_functions_worker_v2/utils/constants.py similarity index 86% rename from azure_functions_worker/constants.py rename to azure_functions_worker_v2/utils/constants.py index b916252cf..974c4d15b 100644 --- a/azure_functions_worker/constants.py +++ b/azure_functions_worker_v2/utils/constants.py @@ -1,7 +1,13 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License.
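Taken together, the hooks above run at two points: initialize_azure_monitor() once during worker init or environment reload, and configure_opentelemetry(context) once per invocation to re-attach the host-provided trace context. A condensed sketch of the per-invocation path, mirroring the invocation_request handler earlier in this diff (variable names such as fi_context come from that handler):

    # Sketch: re-attach traceparent/tracestate before running user code.
    if otel_manager.get_azure_monitor_available():
        configure_opentelemetry(fi_context)  # extracts and attaches the carrier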
+# TODO: organize this better + import sys +TRUE = "true" +TRACEPARENT = "traceparent" +TRACESTATE = "tracestate" + # Capabilities RAW_HTTP_BODY_BYTES = "RawHttpBodyBytes" TYPED_DATA_COLLECTION = "TypedDataCollection" @@ -21,12 +27,8 @@ # Platform Environment Variables AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" CONTAINER_NAME = "CONTAINER_NAME" - # Python Specific Feature Flags and App Settings -PYTHON_ROLLBACK_CWD_PATH = "PYTHON_ROLLBACK_CWD_PATH" PYTHON_THREADPOOL_THREAD_COUNT = "PYTHON_THREADPOOL_THREAD_COUNT" -PYTHON_ISOLATE_WORKER_DEPENDENCIES = "PYTHON_ISOLATE_WORKER_DEPENDENCIES" -PYTHON_ENABLE_WORKER_EXTENSIONS = "PYTHON_ENABLE_WORKER_EXTENSIONS" PYTHON_ENABLE_DEBUG_LOGGING = "PYTHON_ENABLE_DEBUG_LOGGING" FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED = \ "FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED" @@ -42,12 +44,6 @@ PYTHON_THREADPOOL_THREAD_COUNT_MAX = sys.maxsize PYTHON_THREADPOOL_THREAD_COUNT_MAX_37 = 32 -PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT = False -PYTHON_ISOLATE_WORKER_DEPENDENCIES_DEFAULT_310 = False -PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT = False -PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT_39 = True -PYTHON_EXTENSIONS_RELOAD_FUNCTIONS = "PYTHON_EXTENSIONS_RELOAD_FUNCTIONS" - # new programming model default script file name PYTHON_SCRIPT_FILE_NAME = "PYTHON_SCRIPT_FILE_NAME" PYTHON_SCRIPT_FILE_NAME_DEFAULT = "function_app.py" diff --git a/azure_functions_worker_v2/utils/current.py b/azure_functions_worker_v2/utils/current.py new file mode 100644 index 000000000..264697bf6 --- /dev/null +++ b/azure_functions_worker_v2/utils/current.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import asyncio +import functools + +from typing import Any + +from ..otel import otel_manager, configure_opentelemetry + + +def get_current_loop(): + return asyncio.events.get_event_loop() + + +async def execute_async(function, args) -> Any: + return await function(**args) + + +def execute_sync(function, args) -> Any: + return function(**args) + + +def run_sync_func(invocation_id, context, func, params): + # This helper exists because we need to access the current + # invocation_id from ThreadPoolExecutor's threads. + context.thread_local_storage.invocation_id = invocation_id + try: + if otel_manager.get_azure_monitor_available(): + configure_opentelemetry(context) + result = functools.partial(execute_sync, func) + return result(params) + finally: + context.thread_local_storage.invocation_id = None diff --git a/azure_functions_worker_v2/utils/env_state.py b/azure_functions_worker_v2/utils/env_state.py new file mode 100644 index 000000000..431bc4051 --- /dev/null +++ b/azure_functions_worker_v2/utils/env_state.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
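# A minimal usage sketch of run_sync_func above, not part of the diff: the
# sync handler runs on a ThreadPoolExecutor thread while the invocation id is
# propagated via thread-local storage. The SimpleNamespace context and the
# handler are hypothetical stand-ins for the worker's real types.
import asyncio
import threading
from concurrent.futures import ThreadPoolExecutor
from types import SimpleNamespace

from azure_functions_worker_v2.utils.current import run_sync_func


def handler(name):
    return f"Hello, {name}!"


async def dispatch():
    ctx = SimpleNamespace(thread_local_storage=threading.local())
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=1) as pool:
        # run_sync_func(invocation_id, context, func, params)
        return await loop.run_in_executor(
            pool, run_sync_func, "inv-123", ctx, handler, {"name": "world"})


print(asyncio.run(dispatch()))  # Hello, world!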
+import os +from typing import Callable, Optional + + +def is_true_like(setting: Optional[str]) -> bool: + if setting is None: + return False + + return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'} + + +def is_false_like(setting: Optional[str]) -> bool: + if setting is None: + return False + + return setting.lower().strip() in {'0', 'false', 'f', 'no', 'n'} + + +def is_envvar_true(env_key: str) -> bool: + if os.getenv(env_key) is None: + return False + + return is_true_like(os.environ[env_key]) + + +def is_envvar_false(env_key: str) -> bool: + if os.getenv(env_key) is None: + return False + + return is_false_like(os.environ[env_key]) + + +def get_app_setting( + setting: str, + default_value: Optional[str] = None, + validator: Optional[Callable[[str], bool]] = None +) -> Optional[str]: + """Returns the application setting from the environment. + + Parameters + ---------- + setting: str + The name of the application setting (e.g. FUNCTIONS_RUNTIME_VERSION) + + default_value: Optional[str] + The expected return value when the application setting is not found, + or the app setting does not pass the validator. + + validator: Optional[Callable[[str], bool]] + A function that accepts the app setting value and returns True when + the app setting value is acceptable. + + Returns + ------- + Optional[str] + The application setting value, or default_value when unset or invalid + """ + app_setting_value = os.getenv(setting) + + # If an app setting is not configured, we return the default value + if app_setting_value is None: + return default_value + + # If there's no validator, we should return the app setting value directly + if validator is None: + return app_setting_value + + # If a validator is provided, return the app setting value + # only when it passes validation; + # otherwise fall back to the default value + if validator(app_setting_value): + return app_setting_value + return default_value diff --git a/azure_functions_worker_v2/utils/helpers.py b/azure_functions_worker_v2/utils/helpers.py new file mode 100644 index 000000000..a6592088c --- /dev/null +++ b/azure_functions_worker_v2/utils/helpers.py @@ -0,0 +1,28 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import os +import platform +import sys + +from .constants import PYTHON_LANGUAGE_RUNTIME + +from ..logging import logger +from ..version import VERSION + + +def change_cwd(new_cwd: str): + if os.path.exists(new_cwd): + os.chdir(new_cwd) + logger.info('Changing current working directory to %s', new_cwd) + else: + logger.warning('Directory %s not found when reloading', new_cwd) + + +def get_worker_metadata(protos): + return protos.WorkerMetadata( + runtime_name=PYTHON_LANGUAGE_RUNTIME, + runtime_version=f"{sys.version_info.major}.{sys.version_info.minor}", + worker_version=VERSION, + worker_bitness=platform.machine(), + custom_properties={}) diff --git a/azure_functions_worker/utils/tracing.py b/azure_functions_worker_v2/utils/tracing.py similarity index 77% rename from azure_functions_worker/utils/tracing.py rename to azure_functions_worker_v2/utils/tracing.py index 0e08bf84a..01f12ef3e 100644 --- a/azure_functions_worker/utils/tracing.py +++ b/azure_functions_worker_v2/utils/tracing.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License.
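# A minimal usage sketch of get_app_setting above, not part of the diff: a
# value that fails the validator falls back to the default instead of
# raising. The setting name is one of the constants defined earlier.
import os

from azure_functions_worker_v2.utils.env_state import get_app_setting

os.environ["PYTHON_THREADPOOL_THREAD_COUNT"] = "not-a-number"
count = get_app_setting(
    setting="PYTHON_THREADPOOL_THREAD_COUNT",
    default_value="1",
    validator=lambda value: value.isdigit() and int(value) > 0)
print(count)  # "1" -- the unparsable value failed validation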
+ import traceback + from traceback import StackSummary, extract_tb from typing import List @@ -37,3 +39,18 @@ def _remove_frame_from_stack(tbss: StackSummary, 'filename') != framename, tbss)) filtered_stack: StackSummary = StackSummary.from_list(filtered_stack_list) return filtered_stack + + +def serialize_exception(exc: Exception, protos): + try: + message = str(type(exc).__name__) + ": " + str(exc) + except Exception: + message = ('Unhandled exception in function. ' + 'Could not serialize original exception message.') + + try: + stack_trace = marshall_exception_trace(exc) + except Exception: + stack_trace = '' + + return protos.RpcException(message=message, stack_trace=stack_trace) diff --git a/azure_functions_worker/_thirdparty/typing_inspect.py b/azure_functions_worker_v2/utils/typing_inspect.py similarity index 95% rename from azure_functions_worker/_thirdparty/typing_inspect.py rename to azure_functions_worker_v2/utils/typing_inspect.py index f5ae783d2..67726ee3a 100644 --- a/azure_functions_worker/_thirdparty/typing_inspect.py +++ b/azure_functions_worker_v2/utils/typing_inspect.py @@ -1,3 +1,4 @@ +# type: ignore # Imported from https://github.com/ilevkivskyi/typing_inspect/blob/168fa6f7c5c55f720ce6282727211cf4cf6368f6/typing_inspect.py # NoQA E501 # Author: Ivan Levkivskyi # License: MIT @@ -12,12 +13,8 @@ # NOTE: This module must support Python 2.7 in addition to Python 3.x import collections.abc -import sys -from typing import Callable, ClassVar, Generic, Tuple, TypeVar, Union, _GenericAlias - -NEW_39_TYPING = sys.version_info[:3] >= (3, 9, 0) # PEP 560 -if NEW_39_TYPING: - from typing import _SpecialGenericAlias +from typing import (Callable, ClassVar, Generic, Tuple, + TypeVar, Union, _GenericAlias, _SpecialGenericAlias) # from mypy_extensions import _TypedDictMeta @@ -40,14 +37,9 @@ def is_generic_type(tp): is_generic_type(MutableMapping[T, List[int]]) == True is_generic_type(Sequence[Union[str, bytes]]) == True """ - if NEW_39_TYPING: - return (isinstance(tp, type) and issubclass(tp, Generic) + return (isinstance(tp, type) and issubclass(tp, Generic) or ((isinstance(tp, _GenericAlias) or isinstance(tp, _SpecialGenericAlias)) # NoQA E501 and tp.__origin__ not in (Union, tuple, ClassVar, collections.abc.Callable))) # NoQA E501 - return (isinstance(tp, type) - and issubclass(tp, Generic) - or isinstance(tp, _GenericAlias) - and tp.__origin__ not in (Union, tuple, ClassVar, collections.abc.Callable)) # NoQA E501 def is_callable_type(tp): diff --git a/azure_functions_worker_v2/utils/validators.py b/azure_functions_worker_v2/utils/validators.py new file mode 100644 index 000000000..5fed95d0d --- /dev/null +++ b/azure_functions_worker_v2/utils/validators.py @@ -0,0 +1,20 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
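# A minimal usage sketch of serialize_exception above, not part of the diff:
# any protos module exposing an RpcException message type works, so a stub
# stands in here to keep the snippet self-contained.
from types import SimpleNamespace

from azure_functions_worker_v2.utils.tracing import serialize_exception

protos = SimpleNamespace(
    RpcException=lambda message, stack_trace: {"message": message,
                                               "stack_trace": stack_trace})

try:
    1 / 0
except Exception as exc:
    rpc_exc = serialize_exception(exc, protos)

print(rpc_exc["message"])  # ZeroDivisionError: division by zero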
+ +import re + + +class InvalidFileNameError(Exception): + + def __init__(self, file_name: str) -> None: + super().__init__( + f'Invalid file name: {file_name}') + + +def validate_script_file_name(file_name: str): + # First character can be a letter, number, or underscore + # Following characters can be letters, numbers, underscores, or hyphens + # The name must end with .py + pattern = re.compile(r'^[a-zA-Z0-9_][a-zA-Z0-9_\-]*\.py$') + if not pattern.match(file_name): + raise InvalidFileNameError(file_name) diff --git a/azure_functions_worker/utils/wrappers.py b/azure_functions_worker_v2/utils/wrappers.py similarity index 86% rename from azure_functions_worker/utils/wrappers.py rename to azure_functions_worker_v2/utils/wrappers.py index 29f379da3..1761da37e 100644 --- a/azure_functions_worker/utils/wrappers.py +++ b/azure_functions_worker_v2/utils/wrappers.py @@ -3,8 +3,8 @@ from typing import Any, Callable -from ..logging import error_logger, logger -from .common import is_envvar_false, is_envvar_true +from ..logging import logger +from .env_state import is_envvar_false, is_envvar_true from .tracing import extend_exception_message @@ -36,7 +36,7 @@ def call(*args, **kwargs): return decorate -def attach_message_to_exception(expt_type: Exception, message: str, +def attach_message_to_exception(expt_type: type[Exception], message: str, debug_logs=None) -> Callable: def decorate(func): def call(*args, **kwargs): @@ -45,7 +45,6 @@ def call(*args, **kwargs): except expt_type as e: if debug_logs is not None: logger.error(debug_logs) - error_logger.exception("Error: %s, %s", e, message) raise extend_exception_message(e, message) return call return decorate diff --git a/azure_functions_worker/version.py b/azure_functions_worker_v2/version.py similarity index 81% rename from azure_functions_worker/version.py rename to azure_functions_worker_v2/version.py index adb421530..f5c0b9635 100644 --- a/azure_functions_worker/version.py +++ b/azure_functions_worker_v2/version.py @@ -1,4 +1,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -VERSION = '4.34.0' +VERSION = '1.0.0a39' diff --git a/docs/.gitignore b/docs/.gitignore deleted file mode 100644 index 9b8b5673e..000000000 --- a/docs/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -_build -_templates diff --git a/docs/Azure.Functions.svg b/docs/Azure.Functions.svg deleted file mode 100755 index e555956ca..000000000 --- a/docs/Azure.Functions.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - diff --git a/docs/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.md deleted file mode 100644 index 6364e4045..000000000 --- a/docs/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,38 +0,0 @@ -# [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) - -This code of conduct outlines expectations for participation in Microsoft-managed open source communities, as well as steps for reporting unacceptable behavior. We are committed to providing a welcoming and inspiring community for all. People violating this code of conduct may be banned from the community. - -Our open source communities strive to: - -- Be friendly and patient: Remember you might not be communicating in someone else's primary spoken or programming language, and others may not have your level of understanding. -- Be welcoming: Our communities welcome and support people of all backgrounds and identities.
This includes, but is not limited to members of any race, ethnicity, culture, national origin, color, immigration status, social and economic class, educational level, sex, sexual orientation, gender identity and expression, age, size, family status, political belief, religion, and mental and physical ability. -- Be respectful: We are a world-wide community of professionals, and we conduct ourselves professionally. Disagreement is no excuse for poor behavior and poor manners. Disrespectful and unacceptable behavior includes, but is not limited to: - - Violent threats or language. - - Discriminatory or derogatory jokes and language. - - Posting sexually explicit or violent material. - - Posting, or threatening to post, people's personally identifying information ("doxing"). - - Insults, especially those using discriminatory terms or slurs. - - Behavior that could be perceived as sexual attention. - - Advocating for or encouraging any of the above behaviors. -- Understand disagreements: Disagreements, both social and technical, are useful learning opportunities. Seek to understand the other viewpoints and resolve differences constructively. -- This code is not exhaustive or complete. It serves to capture our common understanding of a productive, collaborative environment. We expect the code to be followed in spirit as much as in the letter. - -## Scope - -This code of conduct applies to all repos and communities for Microsoft-managed open source projects regardless of whether or not the repo explicitly calls out its use of this code. The code also applies in public spaces when an individual is representing a project or its community. Examples include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. - -> Note: Some Microsoft-managed communities have codes of conduct that pre-date this document and issue resolution process. While communities are not required to change their code, they are expected to use the resolution process outlined here. The review team will coordinate with the communities involved to address your concerns. - -## Reporting Code of Conduct Issues - -We encourage all communities to resolve issues on their own whenever possible. This builds a broader and deeper understanding and ultimately a healthier interaction. In the event that an issue cannot be resolved locally, please feel free to report your concerns by contacting opencode@microsoft.com. Your report will be handled in accordance with the issue resolution process described in the Code of Conduct FAQ. - -In your report please include: - -- Your contact information. -- Names (real, usernames or pseudonyms) of any individuals involved. If there are additional witnesses, please include them as well. -- Your account of what occurred, and if you believe the incident is ongoing. If there is a publicly available record (e.g. a mailing list archive or a public chat log), please include a link or attachment. -- Any additional information that may be helpful. -- All reports will be reviewed by a multi-person team and will result in a response that is deemed necessary and appropriate to the circumstances. Where additional perspectives are needed, the team may seek insight from others with relevant expertise or experience. The confidentiality of the person reporting the incident will be kept at all times. 
Involved parties are never part of the review team. - -Anyone asked to stop unacceptable behavior is expected to comply immediately. If an individual engages in unacceptable behavior, the review team may take any action they deem appropriate, including a permanent ban from the community. diff --git a/docs/Functions.Fox.Python.png b/docs/Functions.Fox.Python.png deleted file mode 100644 index a867d8d94..000000000 Binary files a/docs/Functions.Fox.Python.png and /dev/null differ diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 568f65d77..000000000 --- a/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = AzureFunctionsforPython -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 982725ce6..000000000 --- a/docs/api.rst +++ /dev/null @@ -1,85 +0,0 @@ -.. _azure-functions-reference: - -============= -API Reference -============= - -.. module:: azure.functions - :synopsis: Azure Functions bindings. - -.. currentmodule:: azure.functions - - -.. _azure-functions-bindings-blob: - -Blob Bindings -============= - -.. autoclass:: azure.functions.InputStream - :members: - - -.. _azure-functions-bindings-http: - -HTTP Bindings -============= - -.. autoclass:: azure.functions.HttpRequest - :members: - -.. autoclass:: azure.functions.HttpResponse - :members: - - -.. _azure-functions-bindings-queue: - -Queue Bindings -============== - -.. autoclass:: azure.functions.QueueMessage - :members: - - -.. _azure-functions-bindings-timer: - -Timer Bindings -============== - -.. autoclass:: azure.functions.TimerRequest - :members: - - -.. _azure-functions-bindings-cosmosdb: - -CosmosDB Bindings -================= - -.. autoclass:: azure.functions.Document - :members: - - .. describe:: doc[field] - - Return the field of *doc* with field name *field*. - - .. describe:: doc[field] = value - - Set field of *doc* with field name *field* to *value*. - -.. autoclass:: azure.functions.DocumentList - :members: - - -.. _azure-functions-bindings-context: - -Function Context -================ - -.. autoclass:: azure.functions.Context - :members: - - -Out Parameters -============== - -.. autoclass:: azure.functions.Out - :members: diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index cf21c799d..000000000 --- a/docs/conf.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Configuration file for the Sphinx documentation builder. -# -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/stable/config - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - -# -- Project information ----------------------------------------------------- - -project = 'Azure Functions for Python' -copyright = '2018, Microsoft Corporation' -author = 'Microsoft Corporation' - -# The short X.Y version -version = '1.0' -# The full version, including alpha/beta/rc tags -release = '1.0' - - -# -- General configuration --------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {} - - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = 'AzureFunctionsforPythondoc' - - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'AzureFunctionsforPython.tex', 'Azure Functions for Python Documentation', - 'Microsoft Corporation', 'manual'), -] - - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'azurefunctionsforpython', 'Azure Functions for Python Documentation', - [author], 1) -] - - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'AzureFunctionsforPython', 'Azure Functions for Python Documentation', - author, 'AzureFunctionsforPython', 'One line description of project.', - 'Miscellaneous'), -] - - -# -- Extension configuration ------------------------------------------------- - -# -- Options for intersphinx extension --------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index d6f0e8e1f..000000000 --- a/docs/index.rst +++ /dev/null @@ -1,466 +0,0 @@ -.. image:: https://travis-ci.org/Azure/azure-functions-python-worker.svg?branch=master - :target: https://travis-ci.org/Azure/azure-functions-python-worker - - -========================== -Azure Functions for Python -========================== - -Requirements -============ - -Azure Functions for Python support Python 3.7 or later. - - -Programming Model -================= - -An Azure function is implemented as a global Python function ``main()`` in the -file called ``__init__.py``. The name of the Python function can be changed by -specifying the ``entryPoint`` attribute in ``function.json``, and the name of -the file can be changed by specifying the ``scriptFile`` attribute in -``function.json``. - -Currently, the Azure function and its bindings must be declared in the -``function.json`` file. Optionally, the function parameters and the -return type may also be declared as Python type annotations. The annotations -must match the types expected by the bindings declared in ``function.json``. - -Below is an example of a simple function triggerred by an HTTP request. - -``function.json``: - -.. code-block:: json - - { - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } - - -``__init__.py``: - -.. code-block:: python - - import azure.functions - - def main(req: azure.functions.HttpRequest) -> str: - user = req.params.get('user', 'User') - return f'Hello, {user}!' - - -The annotations are optional, so the function may also be written as: - -.. code-block:: python - - def main(req): - user = req.params.get('user', 'User') - return f'Hello, {user}!' - - -Logging -======= - -Azure Functions adds a root :mod:`logging ` handler -automatically, and any log output produced using the standard logging output -is captured by the Functions runtime. 
- - -Context -======= - -A function can obtain the invocation context by including the special -``context`` argument in its signature. The context is passed as a -:class:`Context ` instance: - -.. code-block:: python - - import azure.functions - - def main(req: azure.functions.HttpRequest, - context: azure.functions.Context) -> str: - return f'{context.invocation_id}' - - -Bindings -======== - -Azure Functions for Python supports the following binding types: - -* :ref:`HTTP and webhooks `: trigger, output; -* :ref:`Blob storage `: trigger, input, output; -* :ref:`Queue `: trigger, output; -* :ref:`Timers `: trigger. - - -.. _azure-bindings-http: - -HTTP and webhook bindings -------------------------- - -The trigger binding is passed as a -:class:`HttpRequest ` object. Output bindings -can be returned as a ``str`` or an -:class:`HttpResponse ` object. - -Example -~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } - - -``__init__.py``: - -.. code-block:: python - - import azure.functions - - def main(req: azure.functions.HttpRequest) -> str: - user = req.params.get('user', 'User') - return f'Hello, {user}!' - - - -.. _azure-bindings-blob: - -Blob storage bindings ---------------------- - -The trigger and input bindings are passed as -:class:`InputStream ` instances. Output can -be a ``bytes``, ``str`` or a :term:`file-like object `. - -Blob storage trigger example -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "bindings": [ - { - "type": "blobTrigger", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "file.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] - } - - -``__init__.py``: - - -.. code-block:: python - - import azure.functions - - def main(file: azure.functions.InputStream) -> bytes: - return file.read() - - -Blob storage output example -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "test-file.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] - } - - -``__init__.py``: - - -.. code-block:: python - - import azure.functions - - def main(req: azure.functions.HttpRequest, - file: azure.functions.Out[bytes]) -> azure.functions.HttpResponse: - # write the request body into the output blob - file.set(req.get_body()) - - return azure.functions.HttpResponse( - content_type='application/json', - body='{"status": "OK"}' - ) - -Note that in the above example we use the :class:`Out ` -interface to set the value of the output binding. - - -.. _azure-bindings-queue: - -Queue storage bindings ----------------------- - -Queue storage trigger bindings are passed as -:class:`QueueMessage ` instances. Output -bindings can be returned as a ``str``, ``bytes`` or a -:class:`QueueMessage ` instance. - - -Example -~~~~~~~ - -``function.json``: - -.. 
code-block:: json - - { - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "queueTrigger", - "direction": "in", - "name": "msg", - "queueName": "inputqueue", - "connection": "AzureWebJobsStorage", - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "queueName": "outputqueue", - "connection": "AzureWebJobsStorage", - } - ] - } - - -``__init__.py``: - -.. code-block:: python - - import azure.functions - - def main( - msg: azure.functions.QueueMessage) -> azure.functions.QueueMessage: - body = msg.get_body() - # ... process message - # Put a message into the output queue signaling that this message - # was processed. - return azure.functions.QueueMessage( - body=f'Processed: {msg.id}' - ) - - -.. _azure-bindings-timer: - -Timer bindings --------------- - -Timer trigger bindings are passwd as -:class:`TimerRequest ` instances. - -Example -~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "timerTrigger", - "direction": "in", - "name": "timer", - "schedule": "*/5 * * * * *" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "queueName": "outputqueue", - "connection": "AzureWebJobsStorage", - } - ] - } - - -``__init__.py``: - -.. code-block:: python - - import datetime - import azure.functions - - def main(timer: azure.functions.TimerRequest) -> str: - # process timer event... - # put the current timestamp into the output queue. - return f'{datetime.datetime.now().timestamp()}' - - -.. _azure-bindings-cosmosdb: - -CosmosDB Bindings ------------------ - -The trigger and input CosmosDB bindings are passed as -:class:`DocumentList ` instances. Output can -be a :class:`Document ` instance, a -:class:`DocumentList ` instance or an iterable -containing ``Document`` instances. - -CosmosDB Trigger Example -~~~~~~~~~~~~~~~~~~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "bindings": [ - { - "direction": "in", - "type": "cosmosDBTrigger", - "name": "docs", - "databaseName": "test", - "collectionName": "items", - "leaseCollectionName": "leases", - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] - } - - -``__init__.py``: - - -.. code-block:: python - - import azure.functions as func - - def main(docs: func.DocumentList) -> str: - return docs[0].to_json() - - -CosmosDB Output Example -~~~~~~~~~~~~~~~~~~~~~~~ - -``function.json``: - -.. code-block:: json - - { - "scriptFile": "__init__.py", - - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "direction": "out", - "type": "cosmosDB", - "name": "doc", - "databaseName": "test", - "collectionName": "items", - "leaseCollectionName": "leases", - "createIfNotExists": true - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] - } - - -``__init__.py``: - - -.. code-block:: python - - import azure.functions as func - - - def main(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - - return 'OK' - - -Reference ---------- - -:ref:`Azure Functions for Python Reference `. - - -.. toctree:: - :maxdepth: 2 - :hidden: - - usage - api diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 831c830da..000000000 --- a/docs/make.bat +++ /dev/null @@ -1,36 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. 
-set BUILDDIR=_build -set SPHINXPROJ=AzureFunctionsforPython - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% - -:end -popd diff --git a/docs/sharedmemory_existing.png b/docs/sharedmemory_existing.png deleted file mode 100644 index 6b59e03d8..000000000 Binary files a/docs/sharedmemory_existing.png and /dev/null differ diff --git a/docs/sharedmemory_new.png b/docs/sharedmemory_new.png deleted file mode 100644 index 174b04e71..000000000 Binary files a/docs/sharedmemory_new.png and /dev/null differ diff --git a/docs/usage.rst b/docs/usage.rst deleted file mode 100644 index 52337420c..000000000 --- a/docs/usage.rst +++ /dev/null @@ -1,5 +0,0 @@ -.. _azure-functions-usage: - - -Azure Functions Usage -===================== diff --git a/eng/ci/code-mirror.yml b/eng/ci/code-mirror.yml index bc010ab3f..c9e965de7 100644 --- a/eng/ci/code-mirror.yml +++ b/eng/ci/code-mirror.yml @@ -1,10 +1,8 @@ trigger: branches: include: - - dev - - release/* - - sdk/* # run for sdk and extension release branches - - extensions/* + - dev-3* + - library-release/* resources: repositories: diff --git a/eng/ci/core-tools-tests.yml b/eng/ci/core-tools-tests.yml deleted file mode 100644 index a62683f69..000000000 --- a/eng/ci/core-tools-tests.yml +++ /dev/null @@ -1,26 +0,0 @@ -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: /eng/templates/utils/official-variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunCoreToolsTests - jobs: - - template: /eng/templates/official/jobs/ci-core-tools-tests.yml@self diff --git a/eng/ci/custom-image-tests.yml b/eng/ci/custom-image-tests.yml deleted file mode 100644 index 95956683a..000000000 --- a/eng/ci/custom-image-tests.yml +++ /dev/null @@ -1,26 +0,0 @@ -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: /eng/templates/utils/official-variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunCustomDockerImageTests - jobs: - - template: /eng/templates/official/jobs/ci-custom-image-tests.yml@self diff --git a/eng/ci/docker-consumption-tests.yml b/eng/ci/docker-consumption-tests.yml deleted file mode 100644 index d1de145a0..000000000 --- a/eng/ci/docker-consumption-tests.yml +++ /dev/null @@ -1,37 +0,0 @@ -# CI only, does not trigger on PRs. 
-pr: none - -schedules: - - cron: "0 10 * * *" - displayName: Run everyday at 5 AM CST - branches: - include: - - dev - always: true - -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: /eng/templates/utils/official-variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunDockerConsumptionTests - jobs: - - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self diff --git a/eng/ci/docker-dedicated-tests.yml b/eng/ci/docker-dedicated-tests.yml deleted file mode 100644 index 518b03392..000000000 --- a/eng/ci/docker-dedicated-tests.yml +++ /dev/null @@ -1,37 +0,0 @@ -# CI only, does not trigger on PRs. -pr: none - -schedules: - - cron: "0 11 * * *" - displayName: Run everyday at 6 AM CST - branches: - include: - - dev - always: true - -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: /eng/templates/utils/official-variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunDockerDedicatedTests - jobs: - - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self diff --git a/eng/ci/emulator-tests.yml b/eng/ci/emulator-tests.yml deleted file mode 100644 index adb1016f3..000000000 --- a/eng/ci/emulator-tests.yml +++ /dev/null @@ -1,46 +0,0 @@ -trigger: none # ensure this is not ran as a CI build - -pr: - branches: - include: - - dev - - release/* - -schedules: - - cron: "0 8 * * 1,2,3,4,5" - displayName: Monday to Friday 3 AM CST build - branches: - include: - - dev - always: true - -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: /ci/variables/build.yml@eng - - template: /ci/variables/cfs.yml@eng - - template: /eng/templates/utils/variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunEmulatorTests - jobs: - - template: /eng/templates/jobs/ci-emulator-tests.yml@self - parameters: - PoolName: 1es-pool-azfunc \ No newline at end of file diff --git a/eng/ci/integration-tests.yml b/eng/ci/integration-tests.yml deleted file mode 100644 index 6f8f69d9e..000000000 --- a/eng/ci/integration-tests.yml +++ /dev/null @@ -1,53 +0,0 @@ -trigger: # run for sdk and extension release branches - batch: true - branches: - include: - - sdk/* - - extensions/* - -pr: - branches: - include: - - dev - - release/* - -schedules: - - cron: "0 8 * * 1,2,3,4,5" - displayName: Monday to Friday 3 AM CST build - branches: - include: - - dev - always: true - -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - template: 
/eng/templates/utils/variables.yml@self - - template: /eng/templates/utils/official-variables.yml@self - -extends: - template: v1/1ES.Unofficial.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - - stages: - - stage: RunE2ETests - dependsOn: [] - jobs: - - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self - - stage: RunLCTests - dependsOn: [] - jobs: - - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml index 568fdf16b..2a5bd84ad 100644 --- a/eng/ci/official-build.yml +++ b/eng/ci/official-build.yml @@ -2,8 +2,8 @@ trigger: batch: true branches: include: - - dev - - release/* + - dev-3* + - library-release/* # CI only, does not trigger on PRs. pr: none @@ -13,7 +13,7 @@ schedules: displayName: At 12:00 AM, only on Monday branches: include: - - dev + - dev-3* always: true resources: @@ -28,8 +28,6 @@ resources: ref: refs/tags/release variables: - - template: /eng/templates/utils/variables.yml@self - - template: /eng/templates/utils/official-variables.yml@self - name: codeql.excludePathPatterns value: deps/,build/ @@ -49,30 +47,7 @@ extends: - stage: Build jobs: - template: /eng/templates/official/jobs/build-artifacts.yml@self - - - stage: RunE2ETests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self - - stage: RunEmulatorTests - dependsOn: Build - jobs: - - template: /eng/templates/jobs/ci-emulator-tests.yml@self - parameters: - PoolName: 1es-pool-azfunc - stage: RunUnitTests dependsOn: Build jobs: - template: /eng/templates/jobs/ci-unit-tests.yml@self - - stage: RunDockerConsumptionTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self - - stage: RunDockerDedicatedTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self - - stage: RunLinuxConsumptionTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/package-worker.yml b/eng/ci/package-worker.yml new file mode 100644 index 000000000..27587a55e --- /dev/null +++ b/eng/ci/package-worker.yml @@ -0,0 +1,37 @@ +trigger: + branches: + exclude: + - '*' # Don't trigger this pipeline automatically + +# CI only, does not trigger on PRs. 
+pr: none + +# Does not run on a schedule + +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +extends: + template: v1/1ES.Official.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + sdl: + codeSignValidation: + enabled: true + break: true + + stages: + - stage: AggregatePackages + jobs: + - template: /eng/templates/official/jobs/aggregate-artifacts.yml@self diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml index 26f1b6625..22fae9b39 100644 --- a/eng/ci/public-build.yml +++ b/eng/ci/public-build.yml @@ -2,21 +2,21 @@ trigger: batch: true branches: include: - - dev + - dev-3* - sdk/* - extensions/* pr: branches: include: - - dev + - dev-3* schedules: - cron: '0 0 * * MON' displayName: At 12:00 AM, only on Monday branches: include: - - dev + - dev-3* always: true resources: @@ -26,17 +26,6 @@ resources: name: 1ESPipelineTemplates/1ESPipelineTemplates ref: refs/tags/release -variables: - - template: /eng/templates/utils/variables.yml@self - - name: codeql.excludePathPatterns - value: deps/,build/ - - name: codeql.compiled.enabled - value: true - - name: codeql.runSourceLanguagesInSourceAnalysis - value: true - - name: codeql.sourceLanguages - value: python, powershell - extends: template: v1/1ES.Unofficial.PipelineTemplate.yml@1es parameters: @@ -44,8 +33,18 @@ extends: name: 1es-pool-azfunc-public image: 1es-windows-2022 os: windows + + sdl: + codeql: + compiled: + enabled: true # still only runs for default branch + sourceLanguages: python, powershell + excludePathPatterns: deps/,build/ + runSourceLanguagesInSourceAnalysis: true + settings: skipBuildTagsForGitHubPullRequests: ${{ variables['System.PullRequest.IsFork'] }} + stages: - stage: Build jobs: @@ -53,10 +52,4 @@ extends: - stage: RunUnitTests dependsOn: Build jobs: - - template: /eng/templates/jobs/ci-unit-tests.yml@self - - stage: RunEmulatorTests - dependsOn: Build - jobs: - - template: /eng/templates/jobs/ci-emulator-tests.yml@self - parameters: - PoolName: 1es-pool-azfunc-public \ No newline at end of file + - template: /eng/templates/jobs/ci-unit-tests.yml@self \ No newline at end of file diff --git a/eng/scripts/install-dependencies.sh b/eng/scripts/install-dependencies.sh index d1b953642..913cbb04f 100644 --- a/eng/scripts/install-dependencies.sh +++ b/eng/scripts/install-dependencies.sh @@ -4,9 +4,5 @@ python -m pip install --upgrade pip python -m pip install -U azure-functions --pre python -m pip install -U -e .[dev] -if [[ $1 != "3.7" ]]; then - python -m pip install --pre -U -e .[test-http-v2] -fi -if [[ $1 != "3.7" && $1 != "3.8" ]]; then - python -m pip install --pre -U -e .[test-deferred-bindings] -fi \ No newline at end of file +python -m pip install --pre -U -e .[test-http-v2] +python -m pip install --pre -U -e .[test-deferred-bindings] \ No newline at end of file diff --git a/eng/scripts/test-extensions.sh b/eng/scripts/test-extensions.sh deleted file mode 100644 index 7166dc8e4..000000000 --- a/eng/scripts/test-extensions.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -python -m pip install --upgrade pip -if [[ $2 != "3.7" ]]; then - python -m pip install -e $1/PythonExtensionArtifact - python -m pip install --pre -e .[test-http-v2] -fi -if [[ $2 != "3.7" && $2 != "3.8" ]]; then - python -m pip install -e $1/PythonExtensionArtifact - python -m pip install --pre -U -e 
.[test-deferred-bindings] -fi - -python -m pip install -U -e .[dev] \ No newline at end of file diff --git a/eng/scripts/test-sdk.sh b/eng/scripts/test-sdk.sh deleted file mode 100644 index 649a58a2c..000000000 --- a/eng/scripts/test-sdk.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -python -m pip install --upgrade pip -python -m pip install -e $1/PythonSdkArtifact -python -m pip install -e .[dev] - -if [[ $2 != "3.7" ]]; then - python -m pip install --pre -U -e .[test-http-v2] -fi -if [[ $2 != "3.7" && $2 != "3.8" ]]; then - python -m pip install --pre -U -e .[test-deferred-bindings] -fi \ No newline at end of file diff --git a/eng/scripts/test-setup.sh b/eng/scripts/test-setup.sh deleted file mode 100644 index d062021dc..000000000 --- a/eng/scripts/test-setup.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -cd tests -python -m invoke -c test_setup build-protos -python -m invoke -c test_setup webhost --branch-name=dev -python -m invoke -c test_setup extensions \ No newline at end of file diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml index dd422f4fa..285680642 100644 --- a/eng/templates/jobs/build.yml +++ b/eng/templates/jobs/build.yml @@ -7,22 +7,37 @@ jobs: image: 1es-ubuntu-22.04 os: linux + variables: + # Default Python minor version (expanded below as 3.$(pythonVersion)) + pythonVersion: '13' + steps: + - script: | + echo "Branch name: $(Build.SourceBranchName)" + # Extract the last two digits (minor version) from the branch name + version=$(echo $(Build.SourceBranchName) | sed 's/dev-\([0-9]*\)/\1/') + minor_version=${version: -2} # Take the last two characters (the minor version) + + # Check if minor_version is a number; if not, set default to 13 + if ! [[ "$minor_version" =~ ^[0-9]+$ ]]; then + minor_version=13 + fi + + echo "Extracted minor version: $minor_version" + echo "##vso[task.setvariable variable=pythonVersion]$minor_version" + displayName: 'Extract Python version from branch name' - task: UsePythonVersion@0 inputs: - versionSpec: "3.11" + versionSpec: '3.$(pythonVersion)' - bash: | python --version displayName: 'Check python version' - bash: | - python -m venv .env - .env\Scripts\Activate.ps1 python -m pip install --upgrade pip - python -m pip install . + python -m pip install build + python -m build displayName: 'Build python worker' - # Skip the build stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - bash: | pip install pip-audit - pip-audit -r requirements.txt + pip-audit .
displayName: 'Run vulnerability scan' \ No newline at end of file diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml deleted file mode 100644 index d2ab3ce87..000000000 --- a/eng/templates/jobs/ci-emulator-tests.yml +++ /dev/null @@ -1,100 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Emulator Tests" - - pool: - name: ${{ parameters.PoolName }} - image: 1es-ubuntu-22.04 - os: linux - - strategy: - matrix: - Python37: - PYTHON_VERSION: '3.7' - Python38: - PYTHON_VERSION: '3.8' - Python39: - PYTHON_VERSION: '3.9' - Python310: - PYTHON_VERSION: '3.10' - Python311: - PYTHON_VERSION: '3.11' - Python312: - PYTHON_VERSION: '3.12' - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - - task: UseDotNet@2 - displayName: 'Install .NET 8' - inputs: - version: 8.0.x - - bash: | - chmod +x eng/scripts/install-dependencies.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install dependencies and the worker' - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python SDK Artifact' - inputs: - buildType: specific - artifactName: 'azure-functions' - project: 'internal' - definition: 679 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonSdkArtifact' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - bash: | - chmod +x eng/scripts/test-sdk.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/test-sdk.sh $(Pipeline.Workspace) $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install test python sdk, dependencies and the worker' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python Extension Artifact' - inputs: - buildType: specific - artifactName: $(PYTHONEXTENSIONNAME) - project: 'internal' - definition: 798 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonExtensionArtifact' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - - bash: | - chmod +x eng/scripts/test-setup.sh - chmod +x eng/scripts/test-extensions.sh - - eng/scripts/test-extensions.sh $(Pipeline.Workspace) $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install test python extension, dependencies and the worker' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - - bash: | - docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml pull - docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml up -d - displayName: 'Install Azurite and Start EventHub Emulator' - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --ignore=tests/emulator_tests/test_servicebus_functions.py tests/emulator_tests - env: - AzureWebJobsStorage: "UseDevelopmentStorage=true" - AzureWebJobsEventHubConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" - displayName: "Running $(PYTHON_VERSION) Python Linux Emulator Tests" - - bash: | - # Stop and remove EventHub Emulator container to free up the port - docker stop 
eventhubs-emulator - docker container rm --force eventhubs-emulator - docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml pull - docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml up -d - env: - AzureWebJobsSQLPassword: $(AzureWebJobsSQLPassword) - displayName: 'Install Azurite and Start ServiceBus Emulator' - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 tests/emulator_tests/test_servicebus_functions.py - env: - AzureWebJobsStorage: "UseDevelopmentStorage=true" - AzureWebJobsServiceBusConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" - displayName: "Running $(PYTHON_VERSION) Python ServiceBus Linux Emulator Tests" diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml index 5ff54888c..e2414e565 100644 --- a/eng/templates/jobs/ci-unit-tests.yml +++ b/eng/templates/jobs/ci-unit-tests.yml @@ -4,18 +4,8 @@ jobs: strategy: matrix: - Python37: - PYTHON_VERSION: '3.7' - Python38: - PYTHON_VERSION: '3.8' - Python39: - PYTHON_VERSION: '3.9' - Python310: - PYTHON_VERSION: '3.10' - Python311: - PYTHON_VERSION: '3.11' - Python312: - PYTHON_VERSION: '3.12' + Python313: + PYTHON_VERSION: '3.13' steps: - task: UsePythonVersion@0 @@ -26,16 +16,9 @@ jobs: inputs: version: 8.0.x - bash: | - chmod +x eng/scripts/install-dependencies.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - eng/scripts/test-setup.sh + chmod +x eng/scripts/install-dependencies.sh + eng/scripts/install-dependencies.sh displayName: 'Install dependencies' - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests + python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker_v2 --cov-report xml --cov-branch tests/unittests displayName: "Running $(PYTHON_VERSION) Unit Tests" - # Skip running tests for SDK and Extensions release branches. Public pipeline doesn't have permissions to download artifact. 
- condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - \ No newline at end of file diff --git a/eng/templates/official/jobs/aggregate-artifacts.yml b/eng/templates/official/jobs/aggregate-artifacts.yml new file mode 100644 index 000000000..fdacc8378 --- /dev/null +++ b/eng/templates/official/jobs/aggregate-artifacts.yml @@ -0,0 +1,37 @@ +jobs: + - job: "Aggregate" + displayName: 'Aggregate Python Library Workers' + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory)/final_package + artifactName: "dist" + + steps: + # Download each per-version worker artifact to aggregate + - task: DownloadPipelineArtifact@2 + displayName: 'Download Python Worker Artifact' + inputs: + buildType: specific + project: 'internal' + definition: 652 + buildVersionToDownload: specific + pipelineId: $(BuildId1) + artifactName: 'azure-functions-runtime-py313' + targetPath: "azure-functions-runtime-py313" + # Copies the built wheels into a single output folder + - script: | + mkdir -p $(Build.SourcesDirectory)/final_package + cp $(Build.SourcesDirectory)/azure-functions-runtime-py3*/*.whl $(Build.SourcesDirectory)/final_package/ + displayName: 'Merge Python Packages' + - script: | + echo "Contents of final_package folder:" + ls -al $(Build.SourcesDirectory)/final_package + displayName: 'Verify folder contents post merge' \ No newline at end of file diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml index 631115b4c..85d63f1f9 100644 --- a/eng/templates/official/jobs/build-artifacts.yml +++ b/eng/templates/official/jobs/build-artifacts.yml @@ -1,242 +1,77 @@ jobs: -- job: Build_WINDOWS_X64 - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_WINDOWS_X64" - steps: - - template: ../../../../pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x64' - artifactName: '$(pythonVersion)_WINDOWS_X64' -- job: Build_WINDOWS_X86 - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory:
$(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_WINDOWS_X86" - steps: - - template: ../../../../pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x86' - artifactName: '$(pythonVersion)_WINDOWS_x86' -- job: Build_LINUX_X64 - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_LINUX_X64" - steps: - - template: ../../../../pack/templates/nix_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_LINUX_X64' -- job: Build_OSX_X64 - pool: - name: Azure Pipelines - image: macOS-latest - os: macOS - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_OSX_X64" - steps: - - template: ../../../../pack/templates/nix_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_X64' -- job: Build_OSX_ARM64 - pool: - name: Azure Pipelines - image: macOS-latest - os: macOS - strategy: - matrix: - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_OSX_ARM4" - steps: - - template: ../../../../pack/templates/macos_64_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_ARM64' + - job: "Build" + displayName: 'Build Python Library Worker' -- job: PackageWorkers - dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64'] - templateContext: + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + variables: + # Default version + pythonVersion: '3.13' + + templateContext: 
outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "PythonWorker" - steps: - - bash: | - echo "Releasing from $BUILD_SOURCEBRANCHNAME" - sudo apt-get install -y jq + targetPath: $(Build.SourcesDirectory)/dist + artifactName: "azure-functions-runtime-py3$(pythonVersion)" + + steps: + - script: | + echo "Branch name: $(Build.SourceBranchName)" + # Extract the last two digits (minor version) from the branch name + version=$(echo $(Build.SourceBranchName) | sed 's/dev-\([0-9]*\)/\1/') + minor_version=${version: -2} # Get last two digits + + # Check if minor_version is a number; if not, set default to 13 + if ! [[ "$minor_version" =~ ^[0-9]+$ ]]; then + minor_version=13 + fi - if [[ $BUILD_SOURCEBRANCHNAME = 4\.* ]] - then - echo "Generating V4 Release Package for $BUILD_SOURCEBRANCHNAME" - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - WKVERSION="$BUILD_SOURCEBRANCHNAME" - elif [[ $BUILD_SOURCEBRANCHNAME = dev ]] - then - echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME" - VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g") - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - WKVERSION="$VERSION-$(Build.BuildNumber)" - else - # this is only to test nuget related workflow because we are setting nuspec here - echo "Generating Integration Test Package for $BUILD_SOURCEBRANCHNAME for testing purpose" - LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - # Only required for Integration Test. Version number contains date (e.g. 
3.1.2.20211028-dev)
-        WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST"
-        echo "No Matching Release Tag For $BUILD_SOURCEBRANCH"
-      fi
+          echo "Extracted minor version: $minor_version"
+          echo "##vso[task.setvariable variable=pythonVersion]$minor_version"
+        displayName: 'Extract Python version from branch name'
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: '3.$(pythonVersion)'
+      - bash: |
+          python --version
+        displayName: 'Check Python version'
+      - bash: |
+          python -m pip install -U pip
+          python -m pip install build
+          python -m build
+        displayName: 'Build Python Library Worker'
+      - script: |
+          echo "Contents of dist folder:"
+          ls -al $(Build.SourcesDirectory)/dist
+        displayName: 'Verify dist folder contents'
+      - script: |
+          echo "Branch name: $(Build.SourceBranchName)"
+          # Strip the refs/heads/ prefix from the branch name, if present
+          branch_name=$(echo $(Build.SourceBranchName) | sed 's/refs\/heads\///')
+
+          # Extract the package version (the segment between 'release/' and the final dash)
+          package_version=$(echo $branch_name | sed 's|^release/\([^/]*\)-.*|\1|')
 
-      echo "##vso[task.setvariable variable=nuspec_path]$NUSPEC"
-      echo "##vso[task.setvariable variable=worker_version]$WKVERSION"
-    displayName: "Generate Worker NuGet Package for Release $BUILD_SOURCEBRANCHNAME"
-  - task: DownloadPipelineArtifact@2
-    inputs:
-      buildType: 'current'
-      targetPath: '$(Build.SourcesDirectory)'
-  - task: ManifestGeneratorTask@0
-    displayName: 'SBOM Generation Task'
-    inputs:
-      BuildDropPath: '$(Build.ArtifactStagingDirectory)'
-      BuildComponentPath: '$(Build.SourcesDirectory)'
-      Verbosity: 'Verbose'
-  - task: CopyFiles@2
-    inputs:
-      SourceFolder: '$(Build.ArtifactStagingDirectory)'
-      Contents: '**'
-      TargetFolder: '$(Build.SourcesDirectory)'
-  - task: NuGetCommand@2
-    inputs:
-      command: pack
-      packagesToPack: '$(nuspec_path)'
-      packDestination: $(Build.ArtifactStagingDirectory)
-      versioningScheme: 'byEnvVar'
-      versionEnvVar: WORKER_VERSION
+          if [[ ! "$package_version" =~ ^[0-9]+\.[0-9]+\.[0-9]+([a-zA-Z]+[0-9]*)?$ ]]; then
+            echo "Invalid package version detected. Setting to default: 1.0.0"
+            package_version="1.0.0"
+          fi
+
+          # Print the extracted package version
+          echo "Extracted package version: $package_version"
+
+          # Set the package version as a pipeline variable
+          echo "##vso[task.setvariable variable=packageVersion]$package_version"
+        displayName: 'Extract Package Version from Branch Name'
+      - script: |
+          wheel_file=$(ls $(Build.SourcesDirectory)/dist/*.whl)
+          new_wheel_name="azure-functions-runtime-$(packageVersion)-py3$(pythonVersion)-none-any.whl"
+          mv "$wheel_file" "$(Build.SourcesDirectory)/dist/$new_wheel_name"
+      - bash: |
+          pip install pip-audit
+          pip-audit .
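Both extraction steps above drive the rest of the job: the minor version feeds `UsePythonVersion` and the artifact name, and the package version feeds the wheel rename, so a `dev-313` branch builds on Python 3.13 and, with the default package version, the rename step produces `azure-functions-runtime-1.0.0-py313-none-any.whl`. As a rough, illustration-only Python sketch of the same parsing logic (the branch-name shapes are assumptions inferred from the sed patterns):

```python
import re


def minor_version_from_branch(branch: str) -> str:
    # Mirrors the 'Extract Python version from branch name' step:
    # apply the same dev-NNN substitution the sed command performs,
    # keep the last two characters, and fall back to 13 when they
    # are not numeric.
    version = re.sub(r"dev-([0-9]*)", r"\1", branch, count=1)
    minor = version[-2:]
    return minor if minor.isdigit() else "13"


def package_version_from_branch(branch: str) -> str:
    # Mirrors the 'Extract Package Version from Branch Name' step:
    # capture the segment between 'release/' and the final dash, then
    # validate it as X.Y.Z with an optional pre-release suffix.
    candidate = re.sub(r"^release/([^/]*)-.*", r"\1", branch)
    if not re.fullmatch(r"[0-9]+\.[0-9]+\.[0-9]+([a-zA-Z]+[0-9]*)?", candidate):
        return "1.0.0"
    return candidate


print(minor_version_from_branch("dev-313"))             # 13
print(package_version_from_branch("release/1.2.3-rc"))  # 1.2.3
```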
+ displayName: 'Run vulnerability scan' diff --git a/eng/templates/official/jobs/ci-core-tools-tests.yml b/eng/templates/official/jobs/ci-core-tools-tests.yml deleted file mode 100644 index 3e8a9b622..000000000 --- a/eng/templates/official/jobs/ci-core-tools-tests.yml +++ /dev/null @@ -1,35 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Core Tools E2E Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - steps: - - task: UsePythonVersion@0 - displayName: 'Install Python' - inputs: - versionSpec: "3.10" - addToPath: true - - task: UseDotNet@2 - displayName: 'Install DotNet 3' - inputs: - packageType: 'sdk' - version: "3.1.x" - - task: UseDotNet@2 - displayName: 'Install DotNet 6' - inputs: - packageType: 'sdk' - version: "6.x" - - pwsh: '$(Build.SourcesDirectory)/.ci/e2e_integration_test/start-e2e.ps1' - env: - AzureWebJobsStorage: $(LinuxStorageConnectionString311) - AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311) - AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311) - AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311) - AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311) - AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311) - AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311) - displayName: 'Running Python Language Worker E2E Tests' diff --git a/eng/templates/official/jobs/ci-custom-image-tests.yml b/eng/templates/official/jobs/ci-custom-image-tests.yml deleted file mode 100644 index cb08f8a5b..000000000 --- a/eng/templates/official/jobs/ci-custom-image-tests.yml +++ /dev/null @@ -1,34 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Docker Custom Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(CUSTOM_PYTHON_VERSION) - - bash: | - chmod +x eng/scripts/install-dependencies.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - cd tests - python -m invoke -c test_setup build-protos - displayName: 'Install dependencies' - - bash: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests - env: - DEDICATED_DOCKER_TEST: $(CUSTOM_DED_IMAGE) - CONSUMPTION_DOCKER_TEST: $(CUSTOM_CON_IMAGE) - IMAGE_NAME: $(CUSTOM_IMAGE_NAME) - AzureWebJobsStorage: $(LinuxStorageConnectionString311) - AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311) - AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311) - AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311) - AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311) - AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311) - AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311) - displayName: "Running Python DockerCustom tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-docker-consumption-tests.yml b/eng/templates/official/jobs/ci-docker-consumption-tests.yml deleted file mode 100644 index f1a4a23f1..000000000 --- a/eng/templates/official/jobs/ci-docker-consumption-tests.yml +++ /dev/null @@ -1,71 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Docker Consumption Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - strategy: - matrix: - Python38: - PYTHON_VERSION: '3.8' - STORAGE_CONNECTION: 
$(LinuxStorageConnectionString38) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) - SQL_CONNECTION: $(LinuxSqlConnectionString38) - EVENTGRID_URI: $(LinuxEventGridTopicUriString38) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) - Python39: - PYTHON_VERSION: '3.9' - STORAGE_CONNECTION: $(LinuxStorageConnectionString39) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) - SQL_CONNECTION: $(LinuxSqlConnectionString39) - EVENTGRID_URI: $(LinuxEventGridTopicUriString39) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) - Python310: - PYTHON_VERSION: '3.10' - STORAGE_CONNECTION: $(LinuxStorageConnectionString310) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) - SQL_CONNECTION: $(LinuxSqlConnectionString310) - EVENTGRID_URI: $(LinuxEventGridTopicUriString310) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) - Python311: - PYTHON_VERSION: '3.11' - STORAGE_CONNECTION: $(LinuxStorageConnectionString311) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) - SQL_CONNECTION: $(LinuxSqlConnectionString311) - EVENTGRID_URI: $(LinuxEventGridTopicUriString311) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - - bash: | - chmod +x eng/scripts/install-dependencies.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - cd tests - python -m invoke -c test_setup build-protos - displayName: 'Install dependencies' - - bash: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests - env: - CONSUMPTION_DOCKER_TEST: "true" - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - displayName: "Running $(PYTHON_VERSION) Docker Consumption tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml deleted file mode 100644 index b76b1f883..000000000 --- a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml +++ /dev/null @@ -1,71 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Docker Dedicated Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - strategy: - matrix: - Python38: - PYTHON_VERSION: '3.8' - STORAGE_CONNECTION: $(LinuxStorageConnectionString38) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) - SQL_CONNECTION: $(LinuxSqlConnectionString38) - EVENTGRID_URI: $(LinuxEventGridTopicUriString38) - EVENTGRID_CONNECTION: 
$(LinuxEventGridConnectionKeyString38) - Python39: - PYTHON_VERSION: '3.9' - STORAGE_CONNECTION: $(LinuxStorageConnectionString39) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) - SQL_CONNECTION: $(LinuxSqlConnectionString39) - EVENTGRID_URI: $(LinuxEventGridTopicUriString39) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) - Python310: - PYTHON_VERSION: '3.10' - STORAGE_CONNECTION: $(LinuxStorageConnectionString310) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) - SQL_CONNECTION: $(LinuxSqlConnectionString310) - EVENTGRID_URI: $(LinuxEventGridTopicUriString310) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) - Python311: - PYTHON_VERSION: '3.11' - STORAGE_CONNECTION: $(LinuxStorageConnectionString311) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) - SQL_CONNECTION: $(LinuxSqlConnectionString311) - EVENTGRID_URI: $(LinuxEventGridTopicUriString311) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - - bash: | - chmod +x eng/scripts/install-dependencies.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - cd tests - python -m invoke -c test_setup build-protos - displayName: 'Install dependencies' - - bash: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests - env: - DEDICATED_DOCKER_TEST: "true" - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - displayName: "Running $(PYTHON_VERSION) Docker Dedicated tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml deleted file mode 100644 index b3ff4c57d..000000000 --- a/eng/templates/official/jobs/ci-e2e-tests.yml +++ /dev/null @@ -1,148 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python E2E Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - strategy: - matrix: - Python37: - PYTHON_VERSION: '3.7' - STORAGE_CONNECTION: $(LinuxStorageConnectionString37) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString37) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString37) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString37) - SQL_CONNECTION: $(LinuxSqlConnectionString37) - EVENTGRID_URI: $(LinuxEventGridTopicUriString37) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString37) - Python38: - PYTHON_VERSION: '3.8' - STORAGE_CONNECTION: $(LinuxStorageConnectionString38) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) - SQL_CONNECTION: $(LinuxSqlConnectionString38) - EVENTGRID_URI: $(LinuxEventGridTopicUriString38) - 
EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) - Python39: - PYTHON_VERSION: '3.9' - STORAGE_CONNECTION: $(LinuxStorageConnectionString39) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) - SQL_CONNECTION: $(LinuxSqlConnectionString39) - EVENTGRID_URI: $(LinuxEventGridTopicUriString39) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) - Python310: - PYTHON_VERSION: '3.10' - STORAGE_CONNECTION: $(LinuxStorageConnectionString310) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) - SQL_CONNECTION: $(LinuxSqlConnectionString310) - EVENTGRID_URI: $(LinuxEventGridTopicUriString310) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) - Python311: - PYTHON_VERSION: '3.11' - STORAGE_CONNECTION: $(LinuxStorageConnectionString311) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) - SQL_CONNECTION: $(LinuxSqlConnectionString311) - EVENTGRID_URI: $(LinuxEventGridTopicUriString311) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) - Python312: - PYTHON_VERSION: '3.12' - STORAGE_CONNECTION: $(LinuxStorageConnectionString312) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312) - SQL_CONNECTION: $(LinuxSqlConnectionString312) - EVENTGRID_URI: $(LinuxEventGridTopicUriString312) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - - task: UseDotNet@2 - displayName: 'Install .NET 8' - inputs: - version: 8.0.x - - bash: | - chmod +x eng/scripts/install-dependencies.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/install-dependencies.sh $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install dependencies and the worker' - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python SDK Artifact' - inputs: - buildType: specific - artifactName: 'azure-functions' - project: 'internal' - definition: 679 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonSdkArtifact' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - bash: | - chmod +x eng/scripts/test-sdk.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/test-sdk.sh $(Pipeline.Workspace) $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install test python sdk, dependencies and the worker' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python Extension Artifact' - inputs: - buildType: specific - artifactName: $(PYTHONEXTENSIONNAME) - project: 'internal' - definition: 798 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonExtensionArtifact' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - - bash: | - chmod +x 
eng/scripts/test-setup.sh - chmod +x eng/scripts/test-extensions.sh - - eng/scripts/test-extensions.sh $(Pipeline.Workspace) $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install test python extension, dependencies and the worker' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - - powershell: | - $pipelineVarSet = "$(USETESTPYTHONSDK)" - Write-Host "pipelineVarSet: $pipelineVarSet" - $branch = "$(Build.SourceBranch)" - Write-Host "Branch: $branch" - if($branch.StartsWith("refs/heads/sdk/") -or $pipelineVarSet -eq "true") - { - Write-Host "##vso[task.setvariable variable=skipTest;]true" - } - else - { - Write-Host "##vso[task.setvariable variable=skipTest;]false" - } - displayName: 'Set skipTest variable' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - powershell: | - Write-Host "skipTest: $(skipTest)" - displayName: 'Display skipTest variable' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python E2E Tests" diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml deleted file mode 100644 index b5229d901..000000000 --- a/eng/templates/official/jobs/ci-lc-tests.yml +++ /dev/null @@ -1,42 +0,0 @@ -jobs: - - job: "TestPython" - displayName: "Run Python Linux Consumption Tests" - - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - - strategy: - matrix: - Python37: - PYTHON_VERSION: '3.7' - Python38: - PYTHON_VERSION: '3.8' - Python39: - PYTHON_VERSION: '3.9' - Python310: - PYTHON_VERSION: '3.10' - Python311: - PYTHON_VERSION: '3.11' - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PYTHON_VERSION) - - bash: | - python -m pip install --upgrade pip - python -m pip install -U -e .[dev] - - cd tests - python -m invoke -c test_setup build-protos - displayName: 'Install dependencies and the worker' - # Skip the installation stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - - bash: | - python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests - env: - AzureWebJobsStorage: $(LinuxStorageConnectionString312) - _DUMMY_CONT_KEY: $(_DUMMY_CONT_KEY) - displayName: "Running $(PYTHON_VERSION) Linux Consumption tests" - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) \ No newline at end of file diff --git a/eng/templates/utils/official-variables.yml b/eng/templates/utils/official-variables.yml deleted file mode 100644 index 1654d7e6c..000000000 --- a/eng/templates/utils/official-variables.yml +++ /dev/null @@ -1,4 +0,0 @@ -variables: - - template: /ci/variables/build.yml@eng - - template: /ci/variables/cfs.yml@eng - - group: python-integration-resources \ No newline at end of file diff --git a/eng/templates/utils/variables.yml b/eng/templates/utils/variables.yml deleted file mode 100644 index 6361d2d19..000000000 --- a/eng/templates/utils/variables.yml +++ /dev/null @@ -1,5 +0,0 @@ -variables: - - name: isSdkRelease - value: $[startsWith(variables['Build.SourceBranch'], 'refs/heads/sdk/')] - - name: isExtensionsRelease - value: $[startsWith(variables['Build.SourceBranch'], 'refs/heads/extensions/')] \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..fb0651860 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,5 @@ +[azure_functions_worker_v2/utils/typing_inspect.py] +ignore_errors = True + +[mypy] +ignore_missing_imports = True diff --git a/pack/Microsoft.Azure.Functions.PythonWorker.targets b/pack/Microsoft.Azure.Functions.PythonWorker.targets deleted file mode 100644 index 887dc8d52..000000000 --- a/pack/Microsoft.Azure.Functions.PythonWorker.targets +++ /dev/null @@ -1,43 +0,0 @@ - - - - <_PythonWorkerToolsDir>$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)../tools')) - - - - <_PythonSupportedRuntime Include="win-x86" WorkerPath="WINDOWS/X86" /> - <_PythonSupportedRuntime Include="win-x64" WorkerPath="WINDOWS/X64" /> - <_PythonSupportedRuntime Include="linux-x64" WorkerPath="LINUX/X64" /> - <_PythonSupportedRuntime Include="osx-x64" WorkerPath="OSX/X64" /> - <_PythonSupportedRuntime Include="osx-arm64" WorkerPath="OSX/Arm64" /> - - - - - - - - - - - - <_PythonWorkerFiles Include="$(_PythonWorkerToolsDir)/**" CopyToOutputDirectory="PreserveNewest" CopyToPublishDirectory="PreserveNewest" /> - - - - - - - <_PythonWorkersRuntimeFolder>@(_PythonSupportedRuntime->WithMetadataValue('Identity', '$(RuntimeIdentifier)')->Metadata('WorkerPath')) - - - - <_PythonWorkerFiles Include="$(_PythonWorkerToolsDir)/*" CopyToOutputDirectory="PreserveNewest" CopyToPublishDirectory="PreserveNewest" /> - <_PythonWorkerFiles Include="$(_PythonWorkerToolsDir)/**/$(_PythonWorkersRuntimeFolder)/**" CopyToOutputDirectory="PreserveNewest" CopyToPublishDirectory="PreserveNewest" /> - - - - - - - diff --git a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec deleted file mode 100644 index b3ce47d0c..000000000 --- a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec +++ /dev/null @@ -1,45 +0,0 @@ - - - - 
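One note on the new mypy.ini above: mypy matches per-module sections by dotted module pattern, not by file path, so a header written as `[azure_functions_worker_v2/utils/typing_inspect.py]` would not take effect. Assuming the intent mirrors the `[mypy-azure_functions_worker_v2._thirdparty.typing_inspect]` convention the updated setup.cfg below already uses, the override would presumably read `[mypy-azure_functions_worker_v2.utils.typing_inspect]`, conventionally placed after the global `[mypy]` section.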
Microsoft.Azure.Functions.PythonWorker - 1.1.0 - Microsoft - Microsoft - false - Microsoft Azure Functions Python Worker - © .NET Foundation. All rights reserved. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/pack/scripts/mac_arm64_deps.sh b/pack/scripts/mac_arm64_deps.sh deleted file mode 100644 index 2d70bafad..000000000 --- a/pack/scripts/mac_arm64_deps.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -python -m venv .env -source .env/bin/activate -python -m pip install --upgrade pip - -python -m pip install . -python -m pip install . --no-compile --target "$BUILD_SOURCESDIRECTORY/deps" - -python -m pip install invoke -cd tests -python -m invoke -c test_setup build-protos - -cd .. -cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file diff --git a/pack/scripts/nix_deps.sh b/pack/scripts/nix_deps.sh deleted file mode 100644 index 2d70bafad..000000000 --- a/pack/scripts/nix_deps.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -python -m venv .env -source .env/bin/activate -python -m pip install --upgrade pip - -python -m pip install . -python -m pip install . --no-compile --target "$BUILD_SOURCESDIRECTORY/deps" - -python -m pip install invoke -cd tests -python -m invoke -c test_setup build-protos - -cd .. -cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file diff --git a/pack/scripts/win_deps.ps1 b/pack/scripts/win_deps.ps1 deleted file mode 100644 index a7be372e7..000000000 --- a/pack/scripts/win_deps.ps1 +++ /dev/null @@ -1,18 +0,0 @@ -python -m venv .env -.env\Scripts\Activate.ps1 -python -m pip install --upgrade pip - -python -m pip install . - -$depsPath = Join-Path -Path $env:BUILD_SOURCESDIRECTORY -ChildPath "deps" -$protosPath = Join-Path -Path $depsPath -ChildPath "azure_functions_worker/protos" - -python -m pip install . azure-functions --no-compile --target $depsPath.ToString() - -python -m pip install invoke -cd tests -python -m invoke -c test_setup build-protos - -cd .. 
-Copy-Item -Path ".artifactignore" -Destination $depsPath.ToString() -Copy-Item -Path "azure_functions_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force diff --git a/pack/templates/macos_64_env_gen.yml b/pack/templates/macos_64_env_gen.yml deleted file mode 100644 index 90a3578d7..000000000 --- a/pack/templates/macos_64_env_gen.yml +++ /dev/null @@ -1,44 +0,0 @@ -parameters: - pythonVersion: '' - artifactName: '' - workerPath: '' - -steps: -- task: UsePythonVersion@0 - inputs: - versionSpec: ${{ parameters.pythonVersion }} - addToPath: true -- task: ShellScript@2 - inputs: - disableAutoCwd: true - scriptPath: 'pack/scripts/mac_arm64_deps.sh' -- bash: | - pip install pip-audit - pip-audit -r requirements.txt - displayName: 'Run vulnerability scan' - condition: ne(variables['pythonVersion'], '3.7') -- task: CopyFiles@2 - inputs: - contents: | - ${{ parameters.workerPath }} - targetFolder: '$(Build.ArtifactStagingDirectory)' - flattenFolders: true -- task: CopyFiles@2 - inputs: - sourceFolder: '$(Build.SourcesDirectory)/deps' - contents: | - ** - !grpc_tools/**/* - !grpcio_tools*/* - !build/** - !docs/** - !pack/** - !python/** - !tests/** - !setuptools*/** - !_distutils_hack/** - !distutils-precedence.pth - !pkg_resources/** - !*.dist-info/** - !werkzeug/debug/shared/debugger.js - targetFolder: '$(Build.ArtifactStagingDirectory)' diff --git a/pack/templates/nix_env_gen.yml b/pack/templates/nix_env_gen.yml deleted file mode 100644 index ae3cf4330..000000000 --- a/pack/templates/nix_env_gen.yml +++ /dev/null @@ -1,44 +0,0 @@ -parameters: - pythonVersion: '' - artifactName: '' - workerPath: '' - -steps: -- task: UsePythonVersion@0 - inputs: - versionSpec: ${{ parameters.pythonVersion }} - addToPath: true -- task: ShellScript@2 - inputs: - disableAutoCwd: true - scriptPath: 'pack/scripts/nix_deps.sh' -- bash: | - pip install pip-audit - pip-audit -r requirements.txt - displayName: 'Run vulnerability scan' - condition: ne(variables['pythonVersion'], '3.7') -- task: CopyFiles@2 - inputs: - contents: | - ${{ parameters.workerPath }} - targetFolder: '$(Build.ArtifactStagingDirectory)' - flattenFolders: true -- task: CopyFiles@2 - inputs: - sourceFolder: '$(Build.SourcesDirectory)/deps' - contents: | - ** - !grpc_tools/**/* - !grpcio_tools*/* - !build/** - !docs/** - !pack/** - !python/** - !tests/** - !setuptools*/** - !_distutils_hack/** - !distutils-precedence.pth - !pkg_resources/** - !*.dist-info/** - !werkzeug/debug/shared/debugger.js - targetFolder: '$(Build.ArtifactStagingDirectory)' diff --git a/pack/templates/win_env_gen.yml b/pack/templates/win_env_gen.yml deleted file mode 100644 index 2eee3411a..000000000 --- a/pack/templates/win_env_gen.yml +++ /dev/null @@ -1,44 +0,0 @@ -parameters: - pythonVersion: '' - artifactName: '' - workerPath: '' - -steps: -- task: UsePythonVersion@0 - inputs: - versionSpec: ${{ parameters.pythonVersion }} - architecture: ${{ parameters.architecture }} - addToPath: true -- task: PowerShell@2 - inputs: - filePath: 'pack\scripts\win_deps.ps1' -- bash: | - pip install pip-audit - pip-audit -r requirements.txt - displayName: 'Run vulnerability scan' - condition: ne(variables['pythonVersion'], '3.7') -- task: CopyFiles@2 - inputs: - contents: | - ${{ parameters.workerPath }} - targetFolder: '$(Build.ArtifactStagingDirectory)' - flattenFolders: true -- task: CopyFiles@2 - inputs: - sourceFolder: '$(Build.SourcesDirectory)\deps' - contents: | - ** - !grpc_tools\**\* - !grpcio_tools*\* - !build\** - !docs\** - !pack\** - !python\** - !tests\** - 
!setuptools*\** - !_distutils_hack\** - !distutils-precedence.pth - !pkg_resources\** - !*.dist-info\** - !werkzeug\debug\shared\debugger.js - targetFolder: '$(Build.ArtifactStagingDirectory)' diff --git a/pack/utils/__init__.py b/pack/utils/__init__.py deleted file mode 100644 index 71c835333..000000000 --- a/pack/utils/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Needed to make azure a namespace for package discovery -from pkgutil import extend_path -import typing -__path__: typing.Iterable[str] = extend_path(__path__, __name__) diff --git a/pyproject.toml b/pyproject.toml index c7f41970c..b4a1db9c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,7 @@ [project] -name = "azure-functions-worker" +name = "test-worker" dynamic = ["version"] +requires-python = ">=3.11" description = "Python Language Worker for Azure Functions Runtime" authors = [ { name = "Azure Functions team at Microsoft Corp.", email = "azurefunctions@microsoft.com" } @@ -13,11 +14,7 @@ classifiers = [ "Development Status :: 5 - Production/Stable", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.13", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", @@ -26,15 +23,8 @@ classifiers = [ "Intended Audience :: Developers" ] dependencies = [ - "azure-functions==1.23.0b1", - "python-dateutil ~=2.9.0", - "protobuf~=3.19.3; python_version == '3.7'", - "protobuf~=4.25.3; python_version >= '3.8'", - "grpcio-tools~=1.43.0; python_version == '3.7'", - "grpcio-tools~=1.59.0; python_version >= '3.8'", - "grpcio~=1.43.0; python_version == '3.7'", - "grpcio~=1.59.0; python_version >= '3.8'", - "azurefunctions-extensions-base; python_version >= '3.8'" + "azurefunctions-extensions-base; python_version >= '3.8'", + "azure-functions" ] [project.urls] @@ -48,12 +38,15 @@ dev = [ "flask", "fastapi~=0.103.2", "pydantic", + "pycryptodome==3.*", "flake8==5.*; python_version == '3.7'", "flake8==6.*; python_version >= '3.8'", "mypy", - "pytest~=7.4.4", + "pytest", "requests==2.*", "coverage", + "grpcio~=1.70.0", + "grpcio-tools~=1.70.0", "pytest-sugar", "pytest-cov", "pytest-xdist", @@ -68,16 +61,15 @@ dev = [ "pandas", "numpy", "pre-commit", - "invoke", - "cryptography" + "invoke" ] test-http-v2 = [ - "azurefunctions-extensions-http-fastapi==1.0.0b1", + "azurefunctions-extensions-http-fastapi", "ujson", "orjson" ] test-deferred-bindings = [ - "azurefunctions-extensions-bindings-blob==1.0.0b2" + "azurefunctions-extensions-bindings-blob" ] [build-system] @@ -87,7 +79,7 @@ build-backend = "setuptools.build_meta" [tool.setuptools.packages.find] exclude = [ - 'eng', 'tests', 'pack' + 'eng', 'tests', 'pack', 'azure_functions_worker' ] [tool.isort] @@ -95,9 +87,9 @@ profile = "black" line_length = 88 multi_line_output = 3 include_trailing_comma = true -known_first_party = ["azure_functions_worker"] +known_first_party = ["azure_functions_worker_v2"] default_section = "THIRDPARTY" -src_paths = ["azure_functions_worker"] +src_paths = ["azure_functions_worker_v2"] skip_glob = [ "build", "dist", @@ -118,4 +110,4 @@ skip_glob = [ ] [tool.setuptools.dynamic] -version = {attr = "azure_functions_worker.version.VERSION"} +version = {attr = "azure_functions_worker_v2.version.VERSION"} diff --git 
a/python/prodV4/worker.config.json b/python/prodV4/worker.config.json deleted file mode 100644 index 548822af9..000000000 --- a/python/prodV4/worker.config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "description":{ - "language":"python", - "defaultRuntimeVersion":"3.11", - "supportedOperatingSystems":["LINUX", "OSX", "WINDOWS"], - "supportedRuntimeVersions":["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], - "supportedArchitectures":["X64", "X86", "Arm64"], - "extensions":[".py"], - "defaultExecutablePath":"python", - "defaultWorkerPath":"%FUNCTIONS_WORKER_RUNTIME_VERSION%/{os}/{architecture}/worker.py", - "workerIndexing": "true" - }, - "processOptions": { - "initializationTimeout": "00:02:00", - "environmentReloadTimeout": "00:02:00" - } -} \ No newline at end of file diff --git a/python/prodV4/worker.py b/python/prodV4/worker.py deleted file mode 100644 index 021fa3f03..000000000 --- a/python/prodV4/worker.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import pathlib -import sys - -# User packages -PKGS_PATH = "/home/site/wwwroot/.python_packages" -VENV_PKGS_PATH = "site/wwwroot/worker_venv" - -PKGS = "lib/site-packages" - -# Azure environment variables -AZURE_WEBSITE_INSTANCE_ID = "WEBSITE_INSTANCE_ID" -AZURE_CONTAINER_NAME = "CONTAINER_NAME" -AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" - - -def is_azure_environment(): - """Check if the function app is running on the cloud""" - return (AZURE_CONTAINER_NAME in os.environ - or AZURE_WEBSITE_INSTANCE_ID in os.environ) - - -def add_script_root_to_sys_path(): - """Append function project root to module finding sys.path""" - functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) - if functions_script_root is not None: - sys.path.append(functions_script_root) - - -def determine_user_pkg_paths(): - """This finds the user packages when function apps are running on the cloud - - For Python 3.7+, we only accept: - /home/site/wwwroot/.python_packages/lib/site-packages - """ - minor_version = sys.version_info[1] - - if not (7 <= minor_version <= 12): - raise RuntimeError(f'Unsupported Python version: 3.{minor_version}') - - usr_packages_path = [os.path.join(PKGS_PATH, PKGS)] - return usr_packages_path - - -if __name__ == '__main__': - # worker.py lives in the same directory as azure_functions_worker - func_worker_dir = str(pathlib.Path(__file__).absolute().parent) - env = os.environ - - # Setting up python path for all environments to prioritize - # third-party user packages over worker packages in PYTHONPATH - user_pkg_paths = determine_user_pkg_paths() - joined_pkg_paths = os.pathsep.join(user_pkg_paths) - env['PYTHONPATH'] = f'{joined_pkg_paths}:{func_worker_dir}' - - if is_azure_environment(): - os.execve(sys.executable, - [sys.executable, '-m', 'azure_functions_worker'] - + sys.argv[1:], - env) - else: - # On local development, we prioritize worker packages over - # third-party user packages (in .venv) - sys.path.insert(1, func_worker_dir) - add_script_root_to_sys_path() - from azure_functions_worker import main - - main.main() diff --git a/python/test/worker.config.json b/python/test/worker.config.json deleted file mode 100644 index a0b0ad3fe..000000000 --- a/python/test/worker.config.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "description":{ - "language":"python", - "extensions":[".py"], - "defaultExecutablePath":"python", - "defaultWorkerPath":"worker.py", - "workerIndexing": "true" - }, - "processOptions": { - "initializationTimeout": "00:02:00", - "environmentReloadTimeout": "00:02:00" - } -} \ No newline at end of file diff --git 
a/python/test/worker.py b/python/test/worker.py deleted file mode 100644 index e2ef12d22..000000000 --- a/python/test/worker.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -import os -from azure_functions_worker import main - - -# Azure environment variables -AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" - - -def add_script_root_to_sys_path(): - '''Append function project root to module finding sys.path''' - functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) - if functions_script_root is not None: - sys.path.append(functions_script_root) - - -if __name__ == '__main__': - add_script_root_to_sys_path() - main.main() diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3fdb69c81..000000000 --- a/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -# Required dependencies listed in pyproject.toml -. diff --git a/setup.cfg b/setup.cfg index 6f5a7fb98..5ed48789c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,8 +13,8 @@ warn_return_any = True disallow_subclassing_any = False ignore_missing_imports = True -[mypy-azure_functions_worker.protos.*] +[mypy-azure_functions_worker_v2.protos.*] ignore_errors = True -[mypy-azure_functions_worker._thirdparty.typing_inspect] +[mypy-azure_functions_worker_v2._thirdparty.typing_inspect] ignore_errors = True diff --git a/tests/.gitignore b/tests/.gitignore deleted file mode 100644 index 3e4ede76c..000000000 --- a/tests/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*_functions/bin/ -*_functions/host.json -*_functions/ping/ diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index b45b30f61..000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -"""Bootstrap for '$ python setup.py test' command.""" - -import os.path -import sys -import unittest -import unittest.runner - - -def suite(): - test_loader = unittest.TestLoader() - test_suite = test_loader.discover( - os.path.dirname(__file__), pattern='test_*.py') - return test_suite - - -if __name__ == '__main__': - runner = unittest.runner.TextTestRunner() - result = runner.run(suite()) - sys.exit(not result.wasSuccessful()) diff --git a/tests/consumption_tests/test_linux_consumption.py b/tests/consumption_tests/test_linux_consumption.py deleted file mode 100644 index 09f10fd91..000000000 --- a/tests/consumption_tests/test_linux_consumption.py +++ /dev/null @@ -1,383 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os -import sys -from time import sleep -from unittest import TestCase, skipIf - -from requests import Request -from tests.utils.testutils_lc import LinuxConsumptionWebHostController - -from azure_functions_worker.constants import ( - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_WORKER_EXTENSIONS, - PYTHON_ISOLATE_WORKER_DEPENDENCIES, -) - -_DEFAULT_HOST_VERSION = "4" - - -class TestLinuxConsumption(TestCase): - """Test worker behaviors on specific scenarios. 
- - SCM_RUN_FROM_PACKAGE: built function apps are acquired from - -> "Simple Batch" Subscription - -> "AzureFunctionsPythonWorkerCILinuxDevOps" Resource Group - -> "pythonworkersa" Storage Account - -> "python-worker-lc-apps" Blob Container - - For a list of scenario names: - https://pythonworker39sa.blob.core.windows.net/python-worker-lc-apps?restype=container&comp=list - """ - - @classmethod - def setUpClass(cls): - cls._py_version = f'{sys.version_info.major}.{sys.version_info.minor}' - cls._py_shortform = f'{sys.version_info.major}{sys.version_info.minor}' - - cls._storage = os.getenv('AzureWebJobsStorage') - if cls._storage is None: - raise RuntimeError('Environment variable AzureWebJobsStorage is ' - 'required before running Linux Consumption test') - - def test_placeholder_mode_root_returns_ok(self): - """In any circumstances, a placeholder container should returns 200 - even when it is not specialized. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - req = Request('GET', ctrl.url) - resp = ctrl.send_request(req) - self.assertTrue(resp.ok) - - def test_http_no_auth(self): - """An HttpTrigger function app with 'azure-functions' library - should return 200. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("HttpNoAuth") - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - def test_common_libraries(self): - """A function app with the following requirements.txt: - - azure-functions - azure-eventhub - azure-storage-blob - numpy - cryptography - pyodbc - requests - - should return 200 after importing all libraries. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("CommonLibraries") - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - content = resp.json() - self.assertIn('azure.functions', content) - self.assertIn('azure.storage.blob', content) - self.assertIn('numpy', content) - self.assertIn('cryptography', content) - self.assertIn('pyodbc', content) - self.assertIn('requests', content) - - @skipIf(sys.version_info.minor in (10, 11), - "Protobuf pinning fails during remote build") - def test_new_protobuf(self): - """A function app with the following requirements.txt: - - azure-functions==1.7.0 - protobuf==3.15.8 - grpcio==1.33.2 - - should return 200 after importing all libraries. 
- """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("NewProtobuf"), - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1" - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - content = resp.json() - - # Worker always picks up the SDK version bundled with the image - # Version of the packages are inconsistent due to isolation's bug - self.assertEqual(content['azure.functions'], '1.7.0') - self.assertEqual(content['google.protobuf'], '3.15.8') - self.assertEqual(content['grpc'], '1.33.2') - - @skipIf(sys.version_info.minor in (10, 11), - "Protobuf pinning fails during remote build") - def test_old_protobuf(self): - """A function app with the following requirements.txt: - - azure-functions==1.5.0 - protobuf==3.8.0 - grpcio==1.27.1 - - should return 200 after importing all libraries. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("OldProtobuf"), - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1" - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - content = resp.json() - - # Worker always picks up the SDK version bundled with the image - # Version of the packages are inconsistent due to isolation's bug - self.assertIn(content['azure.functions'], '1.5.0') - self.assertIn(content['google.protobuf'], '3.8.0') - self.assertIn(content['grpc'], '1.27.1') - - def test_debug_logging_disabled(self): - """An HttpTrigger function app with 'azure-functions' library - should return 200 and by default customer debug logging should be - disabled. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("EnableDebugLogging") - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger1') - resp = ctrl.send_request(req) - - self.assertEqual(resp.status_code, 200) - container_log = ctrl.get_container_logs() - func_start_idx = container_log.find( - "Executing 'Functions.HttpTrigger1'") - self.assertTrue(func_start_idx > -1, - "HttpTrigger function is not executed.") - func_log = container_log[func_start_idx:] - - self.assertIn('logging info', func_log) - self.assertIn('logging warning', func_log) - self.assertIn('logging error', func_log) - self.assertNotIn('logging debug', func_log) - - def test_debug_logging_enabled(self): - """An HttpTrigger function app with 'azure-functions' library - should return 200 and with customer debug logging enabled, debug logs - should be written to container logs. 
- """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url( - "EnableDebugLogging"), - PYTHON_ENABLE_DEBUG_LOGGING: "1" - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger1') - resp = ctrl.send_request(req) - - self.assertEqual(resp.status_code, 200) - container_log = ctrl.get_container_logs() - func_start_idx = container_log.find( - "Executing 'Functions.HttpTrigger1'") - self.assertTrue(func_start_idx > -1) - func_log = container_log[func_start_idx:] - - self.assertIn('logging info', func_log) - self.assertIn('logging warning', func_log) - self.assertIn('logging error', func_log) - self.assertIn('logging debug', func_log) - - def test_pinning_functions_to_older_version(self): - """An HttpTrigger function app with 'azure-functions==1.11.1' library - should return 200 with the azure functions version set to 1.11.1 - since dependency isolation is enabled by default for all py versions - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url( - "PinningFunctions"), - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1", - }) - req = Request('GET', f'{ctrl.url}/api/HttpTrigger1') - resp = ctrl.send_request(req) - - self.assertEqual(resp.status_code, 200) - self.assertIn("Func Version: 1.11.1", resp.text) - - @skipIf(sys.version_info.minor != 10, - "This is testing only for python310") - def test_opencensus_with_extensions_enabled(self): - """A function app with extensions enabled containing the - following libraries: - - azure-functions, opencensus - - should return 200 after importing all libraries. - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("Opencensus"), - PYTHON_ENABLE_WORKER_EXTENSIONS: "1" - }) - req = Request('GET', f'{ctrl.url}/api/opencensus') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - @skipIf(sys.version_info.minor != 10, - "This is testing only for python310") - def test_opencensus_with_extensions_enabled_init_indexing(self): - """ - A function app with init indexing enabled - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url("Opencensus"), - PYTHON_ENABLE_WORKER_EXTENSIONS: "1", - PYTHON_ENABLE_INIT_INDEXING: "true" - }) - req = Request('GET', f'{ctrl.url}/api/opencensus') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - @skipIf(sys.version_info.minor != 9, - "This is testing only for python39 where extensions" - "enabled by default") - def test_reload_variables_after_timeout_error(self): - """ - A function app with HTTPtrigger which has a function timeout of - 20s. 
The app as a sleep of 30s which should trigger a timeout - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url( - "TimeoutError"), - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1" - }) - req = Request('GET', f'{ctrl.url}/api/hello') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 500) - - sleep(2) - logs = ctrl.get_container_logs() - self.assertRegex( - logs, - r"Applying prioritize_customer_dependencies: " - r"worker_dependencies_path: \/azure-functions-host\/" - r"workers\/python\/.*?\/LINUX\/X64," - r" customer_dependencies_path: \/home\/site\/wwwroot\/" - r"\.python_packages\/lib\/site-packages, working_directory:" - r" \/home\/site\/wwwroot, Linux Consumption: True," - r" Placeholder: False") - self.assertNotIn("Failure Exception: ModuleNotFoundError", - logs) - - @skipIf(sys.version_info.minor != 9, - "This is testing only for python39 where extensions" - "enabled by default") - def test_reload_variables_after_oom_error(self): - """ - A function app with HTTPtrigger mocking error code 137 - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": self._get_blob_url( - "OOMError"), - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1" - }) - req = Request('GET', f'{ctrl.url}/api/httptrigger') - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 500) - - sleep(2) - logs = ctrl.get_container_logs() - self.assertRegex( - logs, - r"Applying prioritize_customer_dependencies: " - r"worker_dependencies_path: \/azure-functions-host\/" - r"workers\/python\/.*?\/LINUX\/X64," - r" customer_dependencies_path: \/home\/site\/wwwroot\/" - r"\.python_packages\/lib\/site-packages, working_directory:" - r" \/home\/site\/wwwroot, Linux Consumption: True," - r" Placeholder: False") - - self.assertNotIn("Failure Exception: ModuleNotFoundError", - logs) - - @skipIf(sys.version_info.minor != 10, - "This is testing only for python310") - def test_http_v2_fastapi_streaming_upload_download(self): - """ - A function app using http v2 fastapi extension with streaming upload and - download - """ - with LinuxConsumptionWebHostController(_DEFAULT_HOST_VERSION, - self._py_version) as ctrl: - ctrl.assign_container(env={ - "AzureWebJobsStorage": self._storage, - "SCM_RUN_FROM_PACKAGE": - self._get_blob_url("HttpV2FastApiStreaming"), - PYTHON_ENABLE_INIT_INDEXING: "true", - PYTHON_ISOLATE_WORKER_DEPENDENCIES: "1" - }) - - def generate_random_bytes_stream(): - """Generate a stream of random bytes.""" - yield b'streaming' - yield b'testing' - yield b'response' - yield b'is' - yield b'returned' - - req = Request('POST', - f'{ctrl.url}/api/http_v2_fastapi_streaming', - data=generate_random_bytes_stream()) - resp = ctrl.send_request(req) - self.assertEqual(resp.status_code, 200) - - streamed_data = b'' - for chunk in resp.iter_content(chunk_size=1024): - if chunk: - streamed_data += chunk - - self.assertEqual( - streamed_data, b'streamingtestingresponseisreturned') - - def _get_blob_url(self, scenario_name: str) -> str: - return ( - f'https://pythonworker{self._py_shortform}sa.blob.core.windows.net/' - f'python-worker-lc-apps/{scenario_name}{self._py_shortform}.zip' - ) diff --git a/tests/emulator_tests/blob_functions/blob_functions_stein/function_app.py 
b/tests/emulator_tests/blob_functions/blob_functions_stein/function_app.py deleted file mode 100644 index 24489b0e6..000000000 --- a/tests/emulator_tests/blob_functions/blob_functions_stein/function_app.py +++ /dev/null @@ -1,445 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import hashlib -import io -import json -import random -import string - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="blob_trigger") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-blob-triggered.txt", - connection="AzureWebJobsStorage") -def blob_trigger(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) - - -@app.function_name(name="get_blob_as_bytes") -@app.route(route="get_blob_as_bytes") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-bytes.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -def get_blob_as_bytes(req: func.HttpRequest, file: bytes) -> str: - assert isinstance(file, bytes) - return file.decode('utf-8') - - -@app.function_name(name="get_blob_as_bytes_return_http_response") -@app.route(route="get_blob_as_bytes_return_http_response") -@app.blob_input(arg_name="file", - path="python-worker-tests/shmem-test-bytes.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -def get_blob_as_bytes_return_http_response(req: func.HttpRequest, file: bytes) \ - -> func.HttpResponse: - """ - Read a blob (bytes) and respond back (in HTTP response) with the number of - bytes read and the SHA256 digest of the content. - """ - assert isinstance(file, bytes) - - content_size = len(file) - content_sha256 = hashlib.sha256(file).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_as_bytes_stream_return_http_response") -@app.route(route="get_blob_as_bytes_stream_return_http_response") -@app.blob_input(arg_name="file", - path="python-worker-tests/shmem-test-bytes.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -def get_blob_as_bytes_stream_return_http_response(req: func.HttpRequest, - file: func.InputStream) \ - -> func.HttpResponse: - """ - Read a blob (as azf.InputStream) and respond back (in HTTP response) with - the number of bytes read and the SHA256 digest of the content.
- """ - file_bytes = file.read() - - content_size = len(file_bytes) - content_sha256 = hashlib.sha256(file_bytes).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_as_str") -@app.route(route="get_blob_as_str") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -def get_blob_as_str(req: func.HttpRequest, file: str) -> str: - assert isinstance(file, str) - return file - - -@app.function_name(name="get_blob_as_str_return_http_response") -@app.route(route="get_blob_as_str_return_http_response") -@app.blob_input(arg_name="file", - path="python-worker-tests/shmem-test-bytes.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -def get_blob_as_str_return_http_response(req: func.HttpRequest, - file: str) -> func.HttpResponse: - """ - Read a blob (string) and respond back (in HTTP response) with the number of - characters read and the SHA256 digest of the utf-8 encoded content. - """ - assert isinstance(file, str) - - num_chars = len(file) - content_bytes = file.encode('utf-8') - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - response_dict = { - 'num_chars': num_chars, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_bytes") -@app.route(route="get_blob_bytes") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-bytes.txt", - connection="AzureWebJobsStorage") -def get_blob_bytes(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_filelike") -@app.route(route="get_blob_filelike") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-filelike.txt", - connection="AzureWebJobsStorage") -def get_blob_filelike(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_return") -@app.route(route="get_blob_return") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-return.txt", - connection="AzureWebJobsStorage") -def get_blob_return(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_str") -@app.route(route="get_blob_str") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-str.txt", - connection="AzureWebJobsStorage") -def get_blob_str(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_triggered") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-blob-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="get_blob_triggered") -def get_blob_triggered(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="put_blob_as_bytes_return_http_response") -@app.blob_output(arg_name="file", - path="python-worker-tests/shmem-test-bytes-out.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_as_bytes_return_http_response") -def put_blob_as_bytes_return_http_response(req:
func.HttpRequest, - file: func.Out[ - bytes]) -> func.HttpResponse: - """ - Write a blob (bytes) and respond back (in HTTP response) with the number of - bytes written and the SHA256 digest of the content. - The number of bytes to write is specified in the input HTTP request. - """ - content_size = int(req.params['content_size']) - - # When this is set, then 0x01 byte is repeated content_size number of - # times to use as input. - # This is to avoid generating random input for large size which can be - # slow. - if 'no_random_input' in req.params: - content = b'\x01' * content_size - else: - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - - file.set(content) - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="put_blob_as_str_return_http_response") -@app.blob_output(arg_name="file", - path="python-worker-tests/shmem-test-str-out.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_as_str_return_http_response") -def put_blob_as_str_return_http_response(req: func.HttpRequest, file: func.Out[ - str]) -> func.HttpResponse: - """ - Write a blob (string) and respond back (in HTTP response) with the number of - characters written and the SHA256 digest of the utf-8 encoded content. - The number of characters to write is specified in the input HTTP request. - """ - num_chars = int(req.params['num_chars']) - - content = ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - content_bytes = content.encode('utf-8') - content_size = len(content_bytes) - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - file.set(content) - - response_dict = { - 'num_chars': num_chars, - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="put_blob_bytes") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-bytes.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_bytes") -def put_blob_bytes(req: func.HttpRequest, file: func.Out[bytes]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="put_blob_filelike") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-filelike.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_filelike") -def put_blob_filelike(req: func.HttpRequest, - file: func.Out[io.StringIO]) -> str: - file.set(io.StringIO('filelike')) - return 'OK' - - -@app.function_name(name="put_blob_return") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-return.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_return", binding_arg_name="resp") -def put_blob_return(req: func.HttpRequest, - resp: func.Out[func.HttpResponse]) -> str: - return 'FROM RETURN' - - -@app.function_name(name="put_blob_str") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-str.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_str") -def put_blob_str(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="put_blob_trigger")
-@app.blob_output(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_trigger") -def put_blob_trigger(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -def _generate_content_and_digest(content_size): - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - return content, content_sha256 - - -@app.function_name(name="put_get_multiple_blobs_as_bytes_return_http_response") -@app.blob_input(arg_name="inputfile1", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes-1.txt", - connection="AzureWebJobsStorage") -@app.blob_input(arg_name="inputfile2", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes-2.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="outputfile1", - path="python-worker-tests/shmem-test-bytes-out-1.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="outputfile2", - path="python-worker-tests/shmem-test-bytes-out-2.txt", - data_type="BINARY", - connection="AzureWebJobsStorage") -@app.route(route="put_get_multiple_blobs_as_bytes_return_http_response") -def put_get_multiple_blobs_as_bytes_return_http_response( - req: func.HttpRequest, - inputfile1: bytes, - inputfile2: bytes, - outputfile1: func.Out[bytes], - outputfile2: func.Out[bytes]) -> func.HttpResponse: - """ - Read two blobs (bytes) and respond back (in HTTP response) with the number - of bytes read from each blob and the SHA256 digest of the content of each. - Write two blobs (bytes) and respond back (in HTTP response) with the number - of bytes written to each blob and the SHA256 digest of the content of each. - The number of bytes to write is specified in the input HTTP request.
- """ - input_content_size_1 = len(inputfile1) - input_content_size_2 = len(inputfile2) - - input_content_sha256_1 = hashlib.sha256(inputfile1).hexdigest() - input_content_sha256_2 = hashlib.sha256(inputfile2).hexdigest() - - output_content_size_1 = int(req.params['output_content_size_1']) - output_content_size_2 = int(req.params['output_content_size_2']) - - output_content_1, output_content_sha256_1 = \ - _generate_content_and_digest(output_content_size_1) - output_content_2, output_content_sha256_2 = \ - _generate_content_and_digest(output_content_size_2) - - outputfile1.set(output_content_1) - outputfile2.set(output_content_2) - - response_dict = { - 'input_content_size_1': input_content_size_1, - 'input_content_size_2': input_content_size_2, - 'input_content_sha256_1': input_content_sha256_1, - 'input_content_sha256_2': input_content_sha256_2, - 'output_content_size_1': output_content_size_1, - 'output_content_size_2': output_content_size_2, - 'output_content_sha256_1': output_content_sha256_1, - 'output_content_sha256_2': output_content_sha256_2 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="blob_trigger_default_source_enum") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage", - source=func.BlobSource.LOGS_AND_CONTAINER_SCAN) -def blob_trigger_default_source_enum(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) - - -@app.function_name(name="blob_trigger_eventgrid_source_enum") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage", - source=func.BlobSource.EVENT_GRID) -def blob_trigger_eventgrid_source_enum(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) - - -@app.function_name(name="blob_trigger_default_source_str") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage", - source="LogsAndContainerScan") -def blob_trigger_default_source_str(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) - - -@app.function_name(name="blob_trigger_eventgrid_source_str") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage", - source="EventGrid") -def blob_trigger_eventgrid_source_str(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) diff --git a/tests/emulator_tests/blob_functions/blob_functions_stein/generic/function_app.py b/tests/emulator_tests/blob_functions/blob_functions_stein/generic/function_app.py deleted file mode 100644 index 77e9dc596..000000000 --- a/tests/emulator_tests/blob_functions/blob_functions_stein/generic/function_app.py +++ /dev/null @@ -1,471 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import hashlib -import io -import json -import random -import string - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="blob_trigger") -@app.generic_trigger( - arg_name="file", - type="blobTrigger", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage") -@app.generic_output_binding( - arg_name="$return", - type="blob", - path="python-worker-tests/test-blob-triggered.txt", - connection="AzureWebJobsStorage") -def blob_trigger(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) - - -@app.function_name(name="get_blob_as_bytes") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_as_bytes") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/test-bytes.txt") -def get_blob_as_bytes(req: func.HttpRequest, file: bytes) -> str: - assert isinstance(file, bytes) - return file.decode('utf-8') - - -@app.function_name(name="get_blob_as_bytes_return_http_response") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_as_bytes_return_http_response") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes.txt") -def get_blob_as_bytes_return_http_response(req: func.HttpRequest, file: bytes) \ - -> func.HttpResponse: - """ - Read a blob (bytes) and respond back (in HTTP response) with the number of - bytes read and the SHA256 digest of the content. - """ - assert isinstance(file, bytes) - - content_size = len(file) - content_sha256 = hashlib.sha256(file).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_as_bytes_stream_return_http_response") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_as_bytes_stream_return_http_response") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes.txt") -def get_blob_as_bytes_stream_return_http_response(req: func.HttpRequest, - file: func.InputStream) \ - -> func.HttpResponse: - """ - Read a blob (as azf.InputStream) and respond back (in HTTP response) with - the number of bytes read and the SHA256 digest of the content.
- """ - file_bytes = file.read() - - content_size = len(file_bytes) - content_sha256 = hashlib.sha256(file_bytes).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_as_str") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_as_str") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - data_type="STRING", - path="python-worker-tests/test-str.txt") -def get_blob_as_str(req: func.HttpRequest, file: str) -> str: - assert isinstance(file, str) - return file - - -@app.function_name(name="get_blob_as_str_return_http_response") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_as_str_return_http_response") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - data_type="STRING", - path="python-worker-tests/shmem-test-bytes.txt") -def get_blob_as_str_return_http_response(req: func.HttpRequest, - file: str) -> func.HttpResponse: - """ - Read a blob (string) and respond back (in HTTP response) with the number of - characters read and the SHA256 digest of the utf-8 encoded content. - """ - assert isinstance(file, str) - - num_chars = len(file) - content_bytes = file.encode('utf-8') - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - response_dict = { - 'num_chars': num_chars, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="get_blob_bytes") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_bytes") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-bytes.txt") -def get_blob_bytes(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_filelike") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_filelike") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-filelike.txt") -def get_blob_filelike(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_return") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_return") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-return.txt") -def get_blob_return(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_str") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_str") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage",
- type="blob", - path="python-worker-tests/test-str.txt") -def get_blob_str(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_blob_triggered") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_blob_triggered") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-blob-triggered.txt") -def get_blob_triggered(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="put_blob_as_bytes_return_http_response") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_as_bytes_return_http_response") -@app.generic_output_binding( - arg_name="file", - type="blob", - data_type="BINARY", - connection="AzureWebJobsStorage", - path="python-worker-tests/shmem-test-bytes-out.txt") -def put_blob_as_bytes_return_http_response(req: func.HttpRequest, - file: func.Out[ - bytes]) -> func.HttpResponse: - """ - Write a blob (bytes) and respond back (in HTTP response) with the number of - bytes written and the SHA256 digest of the content. - The number of bytes to write is specified in the input HTTP request. - """ - content_size = int(req.params['content_size']) - - # When this is set, then 0x01 byte is repeated content_size number of - # times to use as input. - # This is to avoid generating random input for large size which can be - # slow. - if 'no_random_input' in req.params: - content = b'\x01' * content_size - else: - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - - file.set(content) - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="put_blob_as_str_return_http_response") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_as_str_return_http_response") -@app.generic_output_binding( - arg_name="file", - type="blob", - data_type="STRING", - connection="AzureWebJobsStorage", - path="python-worker-tests/shmem-test-str-out.txt") -def put_blob_as_str_return_http_response( - req: func.HttpRequest, file: func.Out[str]) -> func.HttpResponse: - """ - Write a blob (string) and respond back (in HTTP response) with the number of - characters written and the SHA256 digest of the utf-8 encoded content. - The number of characters to write is specified in the input HTTP request.
- """ - num_chars = int(req.params['num_chars']) - - content = ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - content_bytes = content.encode('utf-8') - content_size = len(content_bytes) - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - file.set(content) - - response_dict = { - 'num_chars': num_chars, - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) - - -@app.function_name(name="put_blob_bytes") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_bytes") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-bytes.txt") -def put_blob_bytes(req: func.HttpRequest, file: func.Out[bytes]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="put_blob_filelike") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_filelike") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-filelike.txt") -def put_blob_filelike(req: func.HttpRequest, - file: func.Out[io.StringIO]) -> str: - file.set(io.StringIO('filelike')) - return 'OK' - - -@app.function_name(name="put_blob_return") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_return") -@app.generic_output_binding( - arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-return.txt") -def put_blob_return(req: func.HttpRequest) -> str: - return 'FROM RETURN' - - -@app.function_name(name="put_blob_str") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_str") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-str.txt") -def put_blob_str(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="put_blob_trigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="put_blob_trigger") -@app.generic_output_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-blob-trigger.txt") -def put_blob_trigger(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -def _generate_content_and_digest(content_size): - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - return content, content_sha256 - - -@app.function_name(name="put_get_multiple_blobs_as_bytes_return_http_response") -@app.generic_trigger( - arg_name="req", type="httpTrigger", - route="put_get_multiple_blobs_as_bytes_return_http_response") -@app.generic_input_binding( - arg_name="inputfile1", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes-1.txt") -@app.generic_input_binding( - arg_name="inputfile2", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - 
path="python-worker-tests/shmem-test-bytes-2.txt") -@app.generic_output_binding( - arg_name="outputfile1", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes-out-1.txt") -@app.generic_output_binding( - arg_name="outputfile2", - connection="AzureWebJobsStorage", - type="blob", - data_type="BINARY", - path="python-worker-tests/shmem-test-bytes-out-2.txt") -def put_get_multiple_blobs_as_bytes_return_http_response( - req: func.HttpRequest, - inputfile1: bytes, - inputfile2: bytes, - outputfile1: func.Out[bytes], - outputfile2: func.Out[bytes]) -> func.HttpResponse: - """ - Read two blobs (bytes) and respond back (in HTTP response) with the number - of bytes read from each blob and the MD5 digest of the content of each. - Write two blobs (bytes) and respond back (in HTTP response) with the number - bytes written in each blob and the MD5 digest of the content of each. - The number of bytes to write are specified in the input HTTP request. - """ - input_content_size_1 = len(inputfile1) - input_content_size_2 = len(inputfile2) - - input_content_sha256_1 = hashlib.sha256(inputfile1).hexdigest() - input_content_sha256_2 = hashlib.sha256(inputfile2).hexdigest() - - output_content_size_1 = int(req.params['output_content_size_1']) - output_content_size_2 = int(req.params['output_content_size_2']) - - output_content_1, output_content_sha256_1 = \ - _generate_content_and_digest(output_content_size_1) - output_content_2, output_content_sha256_2 = \ - _generate_content_and_digest(output_content_size_2) - - outputfile1.set(output_content_1) - outputfile2.set(output_content_2) - - response_dict = { - 'input_content_size_1': input_content_size_1, - 'input_content_size_2': input_content_size_2, - 'input_content_sha256_1': input_content_sha256_1, - 'input_content_sha256_2': input_content_sha256_2, - 'output_content_size_1': output_content_size_1, - 'output_content_size_2': output_content_size_2, - 'output_content_sha256_1': output_content_sha256_1, - 'output_content_sha256_2': output_content_sha256_2 - } - - response_body = json.dumps(response_dict, indent=2) - - return func.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/blob_trigger/function.json b/tests/emulator_tests/blob_functions/blob_trigger/function.json deleted file mode 100644 index 85f59728d..000000000 --- a/tests/emulator_tests/blob_functions/blob_trigger/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "blobTrigger", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-blob-trigger.txt" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-blob-triggered.txt" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/blob_trigger/main.py b/tests/emulator_tests/blob_functions/blob_trigger/main.py deleted file mode 100644 index 5f162baaf..000000000 --- a/tests/emulator_tests/blob_functions/blob_trigger/main.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json - -import azure.functions as azf - - -def main(file: azf.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes/function.json b/tests/emulator_tests/blob_functions/get_blob_as_bytes/function.json deleted file mode 100644 index 79caf12a9..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-bytes.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes/main.py b/tests/emulator_tests/blob_functions/get_blob_as_bytes/main.py deleted file mode 100644 index 94a73d99e..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: bytes) -> str: - assert isinstance(file, bytes) - return file.decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/function.json b/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/function.json deleted file mode 100644 index 59e8d01cf..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/main.py b/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/main.py deleted file mode 100644 index f7069cab6..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes_return_http_response/main.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: bytes) -> azf.HttpResponse: - """ - Read a blob (bytes) and respond back (in HTTP response) with the number of - bytes read and the SHA256 digest of the content.
- """ - assert isinstance(file, bytes) - - content_size = len(file) - content_sha256 = hashlib.sha256(file).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/function.json b/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/function.json deleted file mode 100644 index 59e8d01cf..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/main.py b/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/main.py deleted file mode 100644 index bd65835b5..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_bytes_stream_return_http_response/main.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> azf.HttpResponse: - """ - Read a blob (as azf.InputStream) and respond back (in HTTP response) with - the number of bytes read and the MD5 digest of the content. - """ - file_bytes = file.read() - - content_size = len(file_bytes) - content_sha256 = hashlib.sha256(file_bytes).hexdigest() - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/get_blob_as_str/function.json b/tests/emulator_tests/blob_functions/get_blob_as_str/function.json deleted file mode 100644 index ef991b625..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_str/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "dataType": "string", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-str.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_as_str/main.py b/tests/emulator_tests/blob_functions/get_blob_as_str/main.py deleted file mode 100644 index 59034f9ba..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_str/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf - - -def main(req: azf.HttpRequest, file: str) -> str: - assert isinstance(file, str) - return file diff --git a/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/function.json b/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/function.json deleted file mode 100644 index 5da04fd22..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "dataType": "string", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-str.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/main.py b/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/main.py deleted file mode 100644 index 16f98375f..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_as_str_return_http_response/main.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: str) -> azf.HttpResponse: - """ - Read a blob (string) and respond back (in HTTP response) with the number of - characters read and the SHA256 digest of the utf-8 encoded content. - """ - assert isinstance(file, str) - - num_chars = len(file) - content_bytes = file.encode('utf-8') - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - response_dict = { - 'num_chars': num_chars, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/get_blob_bytes/function.json b/tests/emulator_tests/blob_functions/get_blob_bytes/function.json deleted file mode 100644 index 36e6472a2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_bytes/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-bytes.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_bytes/main.py b/tests/emulator_tests/blob_functions/get_blob_bytes/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_bytes/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
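-# With no explicit dataType in function.json, the blob input below arrives as -# an azf.InputStream, so the function reads and decodes it explicitly.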
-import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/get_blob_filelike/function.json b/tests/emulator_tests/blob_functions/get_blob_filelike/function.json deleted file mode 100644 index bea089b58..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_filelike/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-filelike.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_filelike/main.py b/tests/emulator_tests/blob_functions/get_blob_filelike/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_filelike/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/get_blob_return/function.json b/tests/emulator_tests/blob_functions/get_blob_return/function.json deleted file mode 100644 index 208e1dd53..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_return/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-return.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_return/main.py b/tests/emulator_tests/blob_functions/get_blob_return/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/get_blob_str/function.json b/tests/emulator_tests/blob_functions/get_blob_str/function.json deleted file mode 100644 index 7117f87d7..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_str/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-str.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_str/main.py b/tests/emulator_tests/blob_functions/get_blob_str/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_str/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/get_blob_triggered/function.json b/tests/emulator_tests/blob_functions/get_blob_triggered/function.json deleted file mode 100644 index 9ba913e86..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-blob-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/get_blob_triggered/main.py b/tests/emulator_tests/blob_functions/get_blob_triggered/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/blob_functions/get_blob_triggered/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/function.json b/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/function.json deleted file mode 100644 index 4f8821813..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes-out.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/main.py b/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/main.py deleted file mode 100644 index 583258820..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_as_bytes_return_http_response/main.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json -import random - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[bytes]) -> azf.HttpResponse: - """ - Write a blob (bytes) and respond back (in HTTP response) with the number of - bytes written and the SHA256 digest of the content. - The number of bytes to write is specified in the input HTTP request. - """ - content_size = int(req.params['content_size']) - - # When this is set, then 0x01 byte is repeated content_size number of - # times to use as input. - # This is to avoid generating random input for large size which can be - # slow.
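- # For example, a request with ?content_size=1024&no_random_input=1 writes - # 1024 bytes of b'\x01' instead of 1024 random bytes.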
- if 'no_random_input' in req.params: - content = b'\x01' * content_size - else: - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - - file.set(content) - - response_dict = { - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/function.json b/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/function.json deleted file mode 100644 index 59a6ff68a..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "dataType": "string", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-str-out.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/main.py b/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/main.py deleted file mode 100644 index 3174d3cf0..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_as_str_return_http_response/main.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json -import random -import string - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[str]) -> azf.HttpResponse: - """ - Write a blob (string) and respond back (in HTTP response) with the number of - characters written and the SHA256 digest of the utf-8 encoded content. - The number of characters to write is specified in the input HTTP request.
- """ - num_chars = int(req.params['num_chars']) - - content = ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - content_bytes = content.encode('utf-8') - content_size = len(content_bytes) - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - - file.set(content) - - response_dict = { - 'num_chars': num_chars, - 'content_size': content_size, - 'content_sha256': content_sha256 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/blob_functions/put_blob_bytes/function.json b/tests/emulator_tests/blob_functions/put_blob_bytes/function.json deleted file mode 100644 index 21b47df00..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_bytes/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-bytes.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_bytes/main.py b/tests/emulator_tests/blob_functions/put_blob_bytes/main.py deleted file mode 100644 index 605677ab2..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_bytes/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' diff --git a/tests/emulator_tests/blob_functions/put_blob_filelike/function.json b/tests/emulator_tests/blob_functions/put_blob_filelike/function.json deleted file mode 100644 index 09a1bb480..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_filelike/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-filelike.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_filelike/main.py b/tests/emulator_tests/blob_functions/put_blob_filelike/main.py deleted file mode 100644 index e5b4e51b5..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_filelike/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import io - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[io.StringIO]) -> str: - file.set(io.StringIO('filelike')) - return 'OK' diff --git a/tests/emulator_tests/blob_functions/put_blob_return/function.json b/tests/emulator_tests/blob_functions/put_blob_return/function.json deleted file mode 100644 index 662d67396..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_return/function.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-return.txt" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_return/main.py b/tests/emulator_tests/blob_functions/put_blob_return/main.py deleted file mode 100644 index 73491c93d..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest) -> str: - return 'FROM RETURN' diff --git a/tests/emulator_tests/blob_functions/put_blob_str/function.json b/tests/emulator_tests/blob_functions/put_blob_str/function.json deleted file mode 100644 index 8b2543f90..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_str/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-str.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_str/main.py b/tests/emulator_tests/blob_functions/put_blob_str/main.py deleted file mode 100644 index 605677ab2..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_str/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' diff --git a/tests/emulator_tests/blob_functions/put_blob_trigger/function.json b/tests/emulator_tests/blob_functions/put_blob_trigger/function.json deleted file mode 100644 index b6bb70d32..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_trigger/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "out", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-blob-trigger.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_blob_trigger/main.py b/tests/emulator_tests/blob_functions/put_blob_trigger/main.py deleted file mode 100644 index 605677ab2..000000000 --- a/tests/emulator_tests/blob_functions/put_blob_trigger/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' diff --git a/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json b/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json deleted file mode 100644 index 8ec3b7737..000000000 --- a/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "inputfile1", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes-1.txt" - }, - { - "type": "blob", - "direction": "in", - "name": "inputfile2", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes-2.txt" - }, - { - "type": "blob", - "direction": "out", - "name": "outputfile1", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes-out-1.txt" - }, - { - "type": "blob", - "direction": "out", - "name": "outputfile2", - "dataType": "binary", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/shmem-test-bytes-out-2.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py b/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py deleted file mode 100644 index 95710c9c5..000000000 --- a/tests/emulator_tests/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json -import random - -import azure.functions as azf - - -def _generate_content_and_digest(content_size): - content = bytearray(random.getrandbits(8) for _ in range(content_size)) - content_sha256 = hashlib.sha256(content).hexdigest() - return content, content_sha256 - - -def main( - req: azf.HttpRequest, - inputfile1: bytes, - inputfile2: bytes, - outputfile1: azf.Out[bytes], - outputfile2: azf.Out[bytes]) -> azf.HttpResponse: - """ - Read two blobs (bytes) and respond back (in HTTP response) with the number - of bytes read from each blob and the SHA256 digest of the content of each. - Write two blobs (bytes) and respond back (in HTTP response) with the number - of bytes written to each blob and the SHA256 digest of the content of each. - The number of bytes to write is specified in the input HTTP request.
- """ - input_content_size_1 = len(inputfile1) - input_content_size_2 = len(inputfile2) - - input_content_sha256_1 = hashlib.sha256(inputfile1).hexdigest() - input_content_sha256_2 = hashlib.sha256(inputfile2).hexdigest() - - output_content_size_1 = int(req.params['output_content_size_1']) - output_content_size_2 = int(req.params['output_content_size_2']) - - output_content_1, output_content_sha256_1 = \ - _generate_content_and_digest(output_content_size_1) - output_content_2, output_content_sha256_2 = \ - _generate_content_and_digest(output_content_size_2) - - outputfile1.set(output_content_1) - outputfile2.set(output_content_2) - - response_dict = { - 'input_content_size_1': input_content_size_1, - 'input_content_size_2': input_content_size_2, - 'input_content_sha256_1': input_content_sha256_1, - 'input_content_sha256_2': input_content_sha256_2, - 'output_content_size_1': output_content_size_1, - 'output_content_size_2': output_content_size_2, - 'output_content_sha256_1': output_content_sha256_1, - 'output_content_sha256_2': output_content_sha256_2 - } - - response_body = json.dumps(response_dict, indent=2) - - return azf.HttpResponse( - body=response_body, - mimetype="application/json", - status_code=200 - ) diff --git a/tests/emulator_tests/eventhub_batch_functions/eventhub_batch_functions_stein/function_app.py b/tests/emulator_tests/eventhub_batch_functions/eventhub_batch_functions_stein/function_app.py deleted file mode 100644 index 0e4569132..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/eventhub_batch_functions_stein/function_app.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import os -import typing - -import azure.functions as func -from azure.eventhub import EventData, EventHubProducerClient - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -# This is an actual EventHub trigger which handles Eventhub events in batches. -# It serializes multiple event data into a json and store it into a blob. 
-@app.function_name(name="eventhub_multiple") -@app.event_hub_message_trigger( - arg_name="events", - event_hub_name="python-worker-ci-eventhub-batch", - connection="AzureWebJobsEventHubConnectionString", - data_type="string", - cardinality="many") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-eventhub-batch-triggered.txt", - connection="AzureWebJobsStorage") -def eventhub_multiple(events) -> str: - table_entries = [] - for event in events: - json_entry = event.get_body() - table_entry = json.loads(json_entry) - table_entries.append(table_entry) - - table_json = json.dumps(table_entries) - - return table_json - - -# An HttpTrigger to generating EventHub event from EventHub Output Binding -@app.function_name(name="eventhub_output_batch") -@app.event_hub_output(arg_name="$return", - connection="AzureWebJobsEventHubConnectionString", - event_hub_name="python-worker-ci-eventhub-batch") -@app.route(route="eventhub_output_batch", binding_arg_name="out") -def eventhub_output_batch(req: func.HttpRequest, out: func.Out[str]) -> str: - events = req.get_body().decode('utf-8') - return events - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_eventhub_batch_triggered") -@app.route(route="get_eventhub_batch_triggered") -@app.blob_input(arg_name="testEntities", - path="python-worker-tests/test-eventhub-batch-triggered.txt", - connection="AzureWebJobsStorage") -def get_eventhub_batch_triggered(req: func.HttpRequest, testEntities: func.InputStream): - return func.HttpResponse(status_code=200, body=testEntities.read().decode('utf-8')) - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_metadata_batch_triggered") -@app.route(route="get_metadata_batch_triggered") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-metadata-batch-triggered.txt", - connection="AzureWebJobsStorage") -def get_metadata_batch_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return func.HttpResponse(body=file.read().decode('utf-8'), - status_code=200, - mimetype='application/json') - - -# This is an actual EventHub trigger which handles Eventhub events in batches. -# It serializes multiple event data into a json and store it into a blob. -@app.function_name(name="metadata_multiple") -@app.event_hub_message_trigger( - arg_name="events", - event_hub_name="python-worker-ci-eventhub-batch-metadata", - connection="AzureWebJobsEventHubConnectionString", - data_type="binary", - cardinality="many") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-metadata-batch-triggered.txt", - connection="AzureWebJobsStorage") -def metadata_multiple(events: typing.List[func.EventHubEvent]) -> bytes: - event_list = [] - for event in events: - event_dict: typing.Mapping[str, typing.Any] = { - 'body': event.get_body().decode('utf-8'), - 'enqueued_time': event.enqueued_time.isoformat(), - 'partition_key': event.partition_key, - 'sequence_number': event.sequence_number, - 'offset': event.offset, - 'metadata': event.metadata - } - event_list.append(event_dict) - - return json.dumps(event_list) - - -# An HttpTrigger to generating EventHub event from azure-eventhub SDK. -# Events generated from azure-eventhub contain the full metadata. 
-@app.function_name(name="metadata_output_batch") -@app.route(route="metadata_output_batch") -def main(req: func.HttpRequest): - # Get event count from http request query parameter - count = int(req.params.get('count', '1')) - - # Parse event metadata from http request - json_string = req.get_body().decode('utf-8') - event_dict = json.loads(json_string) - - # Create an EventHub Client and event batch - client = EventHubProducerClient.from_connection_string( - os.getenv('AzureWebJobsEventHubConnectionString'), - eventhub_name='python-worker-ci-eventhub-batch-metadata') - - # Generate new event based on http request with full metadata - event_data_batch = client.create_batch() - random_number = int(event_dict.get('body', '0')) - for i in range(count): - event_data_batch.add(EventData(str(random_number + i))) - - # Send out event into event hub - with client: - client.send_batch(event_data_batch) - - return 'OK' diff --git a/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/__init__.py b/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/__init__.py deleted file mode 100644 index ea0a96284..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - - -# This is an actual EventHub trigger which handles Eventhub events in batches. -# It serializes multiple event data into a json and store it into a blob. -def main(events) -> str: - table_entries = [] - for event in events: - json_entry = event.get_body() - table_entry = json.loads(json_entry) - table_entries.append(table_entry) - - table_json = json.dumps(table_entries) - - return table_json diff --git a/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/function.json b/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/function.json deleted file mode 100644 index c4e9626da..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/eventhub_multiple/function.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "events", - "direction": "in", - "cardinality": "many", - "dataType": "string", - "eventHubName": "python-worker-ci-eventhub-batch", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "direction": "out", - "type": "blob", - "name": "$return", - "path": "python-worker-tests/test-eventhub-batch-triggered.txt", - "connection": "AzureWebJobsStorage" - } - ] -} diff --git a/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/__init__.py b/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/__init__.py deleted file mode 100644 index 19be8848e..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -# An HttpTrigger to generating EventHub event from EventHub Output Binding -def main(req: func.HttpRequest) -> str: - events = req.get_body().decode('utf-8') - return events diff --git a/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/function.json b/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/function.json deleted file mode 100644 index 60de05e61..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/eventhub_output_batch/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "eventHub", - "name": "$return", - "direction": "out", - "eventHubName": "python-worker-ci-eventhub-batch", - "connection": "AzureWebJobsEventHubConnectionString" - } - ] -} diff --git a/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/__init__.py b/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/__init__.py deleted file mode 100644 index 153829b31..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -# Retrieve the event data from storage blob and return it as Http response -def main(req: func.HttpRequest, testEntities: func.InputStream): - return func.HttpResponse(status_code=200, body=testEntities.read().decode('utf-8')) diff --git a/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/function.json b/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/function.json deleted file mode 100644 index 8ec2e9d65..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/get_eventhub_batch_triggered/function.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": [ - "get" - ], - "name": "req" - }, - { - "direction": "in", - "type": "blob", - "name": "testEntities", - "path": "python-worker-tests/test-eventhub-batch-triggered.txt", - "connection": "AzureWebJobsStorage" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/__init__.py b/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/__init__.py deleted file mode 100644 index a0cb5a619..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -# Retrieve the event data from storage blob and return it as Http response -def main(req: func.HttpRequest, file: func.InputStream) -> str: - return func.HttpResponse(body=file.read().decode('utf-8'), - status_code=200, - mimetype='application/json') diff --git a/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/function.json b/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/function.json deleted file mode 100644 index 4de82ecd9..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/get_metadata_batch_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-metadata-batch-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/__init__.py b/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/__init__.py deleted file mode 100644 index 3c9845d9f..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json -import typing - -import azure.functions as func - - -# This is an actual EventHub trigger which handles Eventhub events in batches. -# It serializes multiple event data into a json and store it into a blob. -def main(events: typing.List[func.EventHubEvent]) -> bytes: - event_list = [] - for event in events: - event_dict: typing.Mapping[str, typing.Any] = { - 'body': event.get_body().decode('utf-8'), - 'enqueued_time': event.enqueued_time.isoformat(), - 'partition_key': event.partition_key, - 'sequence_number': event.sequence_number, - 'offset': event.offset, - 'metadata': event.metadata - } - event_list.append(event_dict) - - return json.dumps(event_list) diff --git a/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/function.json b/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/function.json deleted file mode 100644 index a3bfaed41..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/metadata_multiple/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "eventHubTrigger", - "name": "events", - "direction": "in", - "cardinality": "many", - "dataType": "binary", - "eventHubName": "python-worker-ci-eventhub-batch-metadata", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-metadata-batch-triggered.txt" - } - ] - } diff --git a/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/__init__.py b/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/__init__.py deleted file mode 100644 index 54322c2af..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import json -import os - -import azure.functions as func -from azure.eventhub import EventData, EventHubProducerClient - - -# An HttpTrigger to generating EventHub event from azure-eventhub SDK. -# Events generated from azure-eventhub contain the full metadata. -def main(req: func.HttpRequest): - # Get event count from http request query parameter - count = int(req.params.get('count', '1')) - - # Parse event metadata from http request - json_string = req.get_body().decode('utf-8') - event_dict = json.loads(json_string) - - # Create an EventHub Client and event batch - client = EventHubProducerClient.from_connection_string( - os.getenv('AzureWebJobsEventHubConnectionString'), - eventhub_name='python-worker-ci-eventhub-batch-metadata') - - # Generate new event based on http request with full metadata - event_data_batch = client.create_batch() - random_number = int(event_dict.get('body', '0')) - for i in range(count): - event_data_batch.add(EventData(str(random_number + i))) - - # Send out event into event hub - with client: - client.send_batch(event_data_batch) - - return 'OK' diff --git a/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/function.json b/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/function.json deleted file mode 100644 index 89747eb27..000000000 --- a/tests/emulator_tests/eventhub_batch_functions/metadata_output_batch/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/function_app.py b/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/function_app.py deleted file mode 100644 index 1481f7b55..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/function_app.py +++ /dev/null @@ -1,107 +0,0 @@ -import json -import os -import typing - -import azure.functions as func -from azure.eventhub import EventData -from azure.eventhub.aio import EventHubProducerClient - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -# An HttpTrigger to generating EventHub event from EventHub Output Binding -@app.function_name(name="eventhub_output") -@app.route(route="eventhub_output") -@app.event_hub_output(arg_name="event", - event_hub_name="python-worker-ci-eventhub-one", - connection="AzureWebJobsEventHubConnectionString") -def eventhub_output(req: func.HttpRequest, event: func.Out[str]): - event.set(req.get_body().decode('utf-8')) - return 'OK' - - -# This is an actual EventHub trigger which will convert the event data -# into a storage blob. 
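These trigger/reader pairs exist so the integration tests can assert delivery end to end: one HTTP call pushes an event out, the trigger persists it to a blob, and a second HTTP call reads the blob back. A hypothetical test-side helper (assuming the `requests` package is available and using the routes from the deleted fixture) illustrates that round trip:

```python
import time

import requests  # assumed to be available in the test environment


def wait_for_blob_roundtrip(base_url: str, payload: bytes,
                            timeout: float = 60.0) -> str:
    # Push an event through the EventHub output binding...
    requests.post(f"{base_url}/api/eventhub_output", data=payload)
    # ...then poll until the trigger has written the event body to the blob.
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(f"{base_url}/api/get_eventhub_triggered")
        if resp.status_code == 200 and resp.text:
            return resp.text
        time.sleep(1)
    raise TimeoutError("event never reached the blob")
```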
-@app.function_name(name="eventhub_trigger") -@app.event_hub_message_trigger(arg_name="event", - event_hub_name="python-worker-ci-eventhub-one", - connection="AzureWebJobsEventHubConnectionString" - ) -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-eventhub-triggered.txt", - connection="AzureWebJobsStorage") -def eventhub_trigger(event: func.EventHubEvent) -> bytes: - return event.get_body() - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_eventhub_triggered") -@app.route(route="get_eventhub_triggered") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-eventhub-triggered.txt", - connection="AzureWebJobsStorage") -def get_eventhub_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_metadata_triggered") -@app.route(route="get_metadata_triggered") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-metadata-triggered.txt", - connection="AzureWebJobsStorage") -async def get_metadata_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return func.HttpResponse(body=file.read().decode('utf-8'), - status_code=200, - mimetype='application/json') - - -# An HttpTrigger to generating EventHub event from azure-eventhub SDK. -# Events generated from azure-eventhub contain the full metadata. -@app.function_name(name="metadata_output") -@app.route(route="metadata_output") -async def metadata_output(req: func.HttpRequest): - # Parse event metadata from http request - json_string = req.get_body().decode('utf-8') - event_dict = json.loads(json_string) - - # Create an EventHub Client and event batch - client = EventHubProducerClient.from_connection_string( - os.getenv('AzureWebJobsEventHubConnectionString'), - eventhub_name='python-worker-ci-eventhub-one-metadata') - - # Generate new event based on http request with full metadata - event_data_batch = await client.create_batch() - event_data_batch.add(EventData(event_dict.get('body'))) - - # Send out event into event hub - try: - await client.send_batch(event_data_batch) - finally: - await client.close() - - return 'OK' - - -@app.function_name(name="metadata_trigger") -@app.event_hub_message_trigger( - arg_name="event", - event_hub_name="python-worker-ci-eventhub-one-metadata", - connection="AzureWebJobsEventHubConnectionString") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-metadata-triggered.txt", - connection="AzureWebJobsStorage") -async def metadata_trigger(event: func.EventHubEvent) -> bytes: - event_dict: typing.Mapping[str, typing.Any] = { - 'body': event.get_body().decode('utf-8'), - # Uncomment this when the EnqueuedTimeUtc is fixed in azure-functions - # 'enqueued_time': event.enqueued_time.isoformat(), - 'partition_key': event.partition_key, - 'sequence_number': event.sequence_number, - 'offset': event.offset, - 'metadata': event.metadata - } - - return json.dumps(event_dict) diff --git a/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/generic/function_app.py b/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/generic/function_app.py deleted file mode 100644 index 1b2940488..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_functions_stein/generic/function_app.py +++ /dev/null @@ -1,126 +0,0 @@ -import json -import os -import typing - -import azure.functions as func -from azure.eventhub 
import EventData -from azure.eventhub.aio import EventHubProducerClient - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -# An HttpTrigger to generating EventHub event from EventHub Output Binding -@app.function_name(name="eventhub_output") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="eventhub_output") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding(arg_name="event", type="eventHub", - event_hub_name="python-worker-ci-eventhub-one", - connection="AzureWebJobsEventHubConnectionString") -def eventhub_output(req: func.HttpRequest, event: func.Out[str]): - event.set(req.get_body().decode('utf-8')) - return 'OK' - - -# This is an actual EventHub trigger which will convert the event data -# into a storage blob. -@app.function_name(name="eventhub_trigger") -@app.generic_trigger(arg_name="event", - type="eventHubTrigger", - event_hub_name="python-worker-ci-eventhub-one", - connection="AzureWebJobsEventHubConnectionString") -@app.generic_output_binding( - arg_name="$return", - type="blob", - path="python-worker-tests/test-eventhub-triggered.txt", - connection="AzureWebJobsStorage") -def eventhub_trigger(event: func.EventHubEvent) -> bytes: - return event.get_body() - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_eventhub_triggered") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="get_eventhub_triggered") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - path="python-worker-tests/test-eventhub-triggered.txt", - connection="AzureWebJobsStorage") -def get_eventhub_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -# Retrieve the event data from storage blob and return it as Http response -@app.function_name(name="get_metadata_triggered") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_metadata_triggered") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding(arg_name="file", - type="blob", - path="python-worker-tests/test-metadata-triggered" - ".txt", - connection="AzureWebJobsStorage") -async def get_metadata_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return func.HttpResponse(body=file.read().decode('utf-8'), - status_code=200, - mimetype='application/json') - - -# An HttpTrigger to generating EventHub event from azure-eventhub SDK. -# Events generated from azure-eventhub contain the full metadata. 
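A condensed sketch of the azure-eventhub send pattern used throughout these metadata fixtures, shown here with the synchronous client (connection string and hub name are taken from the fixtures; the deleted function below uses the async client):

```python
import os

from azure.eventhub import EventData, EventHubProducerClient


def send_event(body: str) -> None:
    # Build a producer from the same connection string the fixtures use.
    client = EventHubProducerClient.from_connection_string(
        os.environ["AzureWebJobsEventHubConnectionString"],
        eventhub_name="python-worker-ci-eventhub-one-metadata")
    # Batches are created by the client so they respect the hub's size limits.
    with client:
        batch = client.create_batch()
        batch.add(EventData(body))
        client.send_batch(batch)
```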
-@app.function_name(name="metadata_output") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="metadata_output") -@app.generic_output_binding(arg_name="$return", type="http") -async def metadata_output(req: func.HttpRequest): - # Parse event metadata from http request - json_string = req.get_body().decode('utf-8') - event_dict = json.loads(json_string) - - # Create an EventHub Client and event batch - client = EventHubProducerClient.from_connection_string( - os.getenv('AzureWebJobsEventHubConnectionString'), - eventhub_name='python-worker-ci-eventhub-one-metadata') - - # Generate new event based on http request with full metadata - event_data_batch = await client.create_batch() - event_data_batch.add(EventData(event_dict.get('body'))) - - # Send out event into event hub - try: - await client.send_batch(event_data_batch) - finally: - await client.close() - - return 'OK' - - -@app.function_name(name="metadata_trigger") -@app.generic_trigger( - arg_name="event", - type="eventHubTrigger", - event_hub_name="python-worker-ci-eventhub-one-metadata", - connection="AzureWebJobsEventHubConnectionString") -@app.generic_output_binding( - arg_name="$return", - type="blob", - path="python-worker-tests/test-metadata-triggered.txt", - connection="AzureWebJobsStorage") -async def metadata_trigger(event: func.EventHubEvent) -> bytes: - event_dict: typing.Mapping[str, typing.Any] = { - 'body': event.get_body().decode('utf-8'), - # Uncomment this when the EnqueuedTimeUtc is fixed in azure-functions - # 'enqueued_time': event.enqueued_time.isoformat(), - 'partition_key': event.partition_key, - 'sequence_number': event.sequence_number, - 'offset': event.offset, - 'metadata': event.metadata - } - - return json.dumps(event_dict) diff --git a/tests/emulator_tests/eventhub_functions/eventhub_output/__init__.py b/tests/emulator_tests/eventhub_functions/eventhub_output/__init__.py deleted file mode 100644 index 9a41012b7..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_output/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -# An HttpTrigger to generating EventHub event from EventHub Output Binding -def main(req: func.HttpRequest, event: func.Out[str]): - event.set(req.get_body().decode('utf-8')) - - return 'OK' diff --git a/tests/emulator_tests/eventhub_functions/eventhub_output/function.json b/tests/emulator_tests/eventhub_functions/eventhub_output/function.json deleted file mode 100644 index ec96c1617..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_output/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "eventHub", - "name": "event", - "direction": "out", - "eventHubName": "python-worker-ci-eventhub-one", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/eventhub_trigger/__init__.py b/tests/emulator_tests/eventhub_functions/eventhub_trigger/__init__.py deleted file mode 100644 index bc177d499..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_trigger/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -# This is an actual EventHub trigger which will convert the event data -# into a storage blob. -def main(event: func.EventHubEvent) -> bytes: - return event.get_body() diff --git a/tests/emulator_tests/eventhub_functions/eventhub_trigger/function.json b/tests/emulator_tests/eventhub_functions/eventhub_trigger/function.json deleted file mode 100644 index f8d15f4e6..000000000 --- a/tests/emulator_tests/eventhub_functions/eventhub_trigger/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "event", - "direction": "in", - "eventHubName": "python-worker-ci-eventhub-one", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-triggered.txt" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/function.json b/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/function.json deleted file mode 100644 index 4328b71a5..000000000 --- a/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/main.py b/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/main.py deleted file mode 100644 index 78ba500ab..000000000 --- a/tests/emulator_tests/eventhub_functions/get_eventhub_triggered/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -# Retrieve the event data from storage blob and return it as Http response -def main(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/eventhub_functions/get_metadata_triggered/__init__.py b/tests/emulator_tests/eventhub_functions/get_metadata_triggered/__init__.py deleted file mode 100644 index 597270397..000000000 --- a/tests/emulator_tests/eventhub_functions/get_metadata_triggered/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -# Retrieve the event data from storage blob and return it as Http response -async def main(req: func.HttpRequest, file: func.InputStream) -> str: - return func.HttpResponse(body=file.read().decode('utf-8'), - status_code=200, - mimetype='application/json') diff --git a/tests/emulator_tests/eventhub_functions/get_metadata_triggered/function.json b/tests/emulator_tests/eventhub_functions/get_metadata_triggered/function.json deleted file mode 100644 index 4244ca821..000000000 --- a/tests/emulator_tests/eventhub_functions/get_metadata_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-metadata-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/metadata_output/__init__.py b/tests/emulator_tests/eventhub_functions/metadata_output/__init__.py deleted file mode 100644 index e02c79f13..000000000 --- a/tests/emulator_tests/eventhub_functions/metadata_output/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json -import os - -import azure.functions as func -from azure.eventhub import EventData -from azure.eventhub.aio import EventHubProducerClient - - -# An HttpTrigger to generating EventHub event from azure-eventhub SDK. -# Events generated from azure-eventhub contain the full metadata. -async def main(req: func.HttpRequest): - - # Parse event metadata from http request - json_string = req.get_body().decode('utf-8') - event_dict = json.loads(json_string) - - # Create an EventHub Client and event batch - client = EventHubProducerClient.from_connection_string( - os.getenv('AzureWebJobsEventHubConnectionString'), - eventhub_name='python-worker-ci-eventhub-one-metadata') - - # Generate new event based on http request with full metadata - event_data_batch = await client.create_batch() - event_data_batch.add(EventData(event_dict.get('body'))) - - # Send out event into event hub - try: - await client.send_batch(event_data_batch) - finally: - await client.close() - - return 'OK' diff --git a/tests/emulator_tests/eventhub_functions/metadata_output/function.json b/tests/emulator_tests/eventhub_functions/metadata_output/function.json deleted file mode 100644 index 9b4018660..000000000 --- a/tests/emulator_tests/eventhub_functions/metadata_output/function.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] -} diff --git a/tests/emulator_tests/eventhub_functions/metadata_trigger/__init__.py b/tests/emulator_tests/eventhub_functions/metadata_trigger/__init__.py deleted file mode 100644 index 5088e6637..000000000 --- a/tests/emulator_tests/eventhub_functions/metadata_trigger/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json -import typing - -import azure.functions as func - - -# This is an actual EventHub trigger. 
It picks a few of EventHub properties -# and converts them into a storage blob -async def main(event: func.EventHubEvent) -> bytes: - event_dict: typing.Mapping[str, typing.Any] = { - 'body': event.get_body().decode('utf-8'), - # Uncomment this when the EnqueuedTimeUtc is fixed in azure-functions - # 'enqueued_time': event.enqueued_time.isoformat(), - 'partition_key': event.partition_key, - 'sequence_number': event.sequence_number, - 'offset': event.offset, - 'metadata': event.metadata - } - - return json.dumps(event_dict) diff --git a/tests/emulator_tests/eventhub_functions/metadata_trigger/function.json b/tests/emulator_tests/eventhub_functions/metadata_trigger/function.json deleted file mode 100644 index 9a3b2e13c..000000000 --- a/tests/emulator_tests/eventhub_functions/metadata_trigger/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "eventHubTrigger", - "name": "event", - "direction": "in", - "eventHubName": "python-worker-ci-eventhub-one-metadata", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-metadata-triggered.txt" - } - ] -} diff --git a/tests/emulator_tests/generic_functions/generic_functions_stein/function_app.py b/tests/emulator_tests/generic_functions/generic_functions_stein/function_app.py deleted file mode 100644 index 2da6d44ca..000000000 --- a/tests/emulator_tests/generic_functions/generic_functions_stein/function_app.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import logging -import uuid - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="return_processed_last") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="return_processed_last") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_processed_last(req: func.HttpRequest, testEntity): - return func.HttpResponse(status_code=200) - - -@app.function_name(name="return_not_processed_last") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="return_not_processed_last") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="testEntities", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_not_processed_last(req: func.HttpRequest, testEntities): - return func.HttpResponse(status_code=200) - - -@app.function_name(name="mytimer") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def mytimer(mytimer: func.TimerRequest, testEntity) -> None: - logging.info("This timer trigger function executed successfully") - - -@app.function_name(name="return_string") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_string(mytimer: func.TimerRequest, testEntity): - 
logging.info("Return string") - return "hi!" - - -@app.function_name(name="return_bytes") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_bytes(mytimer: func.TimerRequest, testEntity): - logging.info("Return bytes") - return "test-dată" - - -@app.function_name(name="return_dict") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_dict(mytimer: func.TimerRequest, testEntity): - logging.info("Return dict") - return {"hello": "world"} - - -@app.function_name(name="return_list") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_list(mytimer: func.TimerRequest, testEntity): - logging.info("Return list") - return [1, 2, 3] - - -@app.function_name(name="return_int") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_int(mytimer: func.TimerRequest, testEntity): - logging.info("Return int") - return 12 - - -@app.function_name(name="return_double") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_double(mytimer: func.TimerRequest, testEntity): - logging.info("Return double") - return 12.34 - - -@app.function_name(name="return_bool") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def return_bool(mytimer: func.TimerRequest, testEntity): - logging.info("Return bool") - return True - - -@app.function_name(name="table_out_binding") -@app.route(route="table_out_binding", binding_arg_name="resp") -@app.table_output(arg_name="$return", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def table_out_binding(req: func.HttpRequest, resp: func.Out[func.HttpResponse]): - row_key_uuid = str(uuid.uuid4()) - table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid} - table_json = json.dumps(table_dict) - resp.set(table_json) - return table_json diff --git a/tests/emulator_tests/generic_functions/return_bool/function.json b/tests/emulator_tests/generic_functions/return_bool/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_bool/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - 
"connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_bool/main.py b/tests/emulator_tests/generic_functions/return_bool/main.py deleted file mode 100644 index 08d693dff..000000000 --- a/tests/emulator_tests/generic_functions/return_bool/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return bool") - return True diff --git a/tests/emulator_tests/generic_functions/return_bytes/function.json b/tests/emulator_tests/generic_functions/return_bytes/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_bytes/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_bytes/main.py b/tests/emulator_tests/generic_functions/return_bytes/main.py deleted file mode 100644 index c02b678c0..000000000 --- a/tests/emulator_tests/generic_functions/return_bytes/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return bytes") - return "test-dată" diff --git a/tests/emulator_tests/generic_functions/return_dict/function.json b/tests/emulator_tests/generic_functions/return_dict/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_dict/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_dict/main.py b/tests/emulator_tests/generic_functions/return_dict/main.py deleted file mode 100644 index 27f343fcb..000000000 --- a/tests/emulator_tests/generic_functions/return_dict/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return dict") - return {"hello": "world"} diff --git a/tests/emulator_tests/generic_functions/return_double/function.json b/tests/emulator_tests/generic_functions/return_double/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_double/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_double/main.py b/tests/emulator_tests/generic_functions/return_double/main.py deleted file mode 100644 index 6bfc4b9d7..000000000 --- a/tests/emulator_tests/generic_functions/return_double/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return double") - return 12.34 diff --git a/tests/emulator_tests/generic_functions/return_int/function.json b/tests/emulator_tests/generic_functions/return_int/function.json deleted file mode 100644 index 54c81f8f3..000000000 --- a/tests/emulator_tests/generic_functions/return_int/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} diff --git a/tests/emulator_tests/generic_functions/return_int/main.py b/tests/emulator_tests/generic_functions/return_int/main.py deleted file mode 100644 index 3a5e7175d..000000000 --- a/tests/emulator_tests/generic_functions/return_int/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return int") - return 12 diff --git a/tests/emulator_tests/generic_functions/return_list/function.json b/tests/emulator_tests/generic_functions/return_list/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_list/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_list/main.py b/tests/emulator_tests/generic_functions/return_list/main.py deleted file mode 100644 index feccec7e2..000000000 --- a/tests/emulator_tests/generic_functions/return_list/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return list") - return [1, 2, 3] diff --git a/tests/emulator_tests/generic_functions/return_none/function.json b/tests/emulator_tests/generic_functions/return_none/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_none/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_none/main.py b/tests/emulator_tests/generic_functions/return_none/main.py deleted file mode 100644 index 8f52c716b..000000000 --- a/tests/emulator_tests/generic_functions/return_none/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity) -> None: - logging.info("This timer trigger function executed successfully") diff --git a/tests/emulator_tests/generic_functions/return_none_no_type_hint/function.json b/tests/emulator_tests/generic_functions/return_none_no_type_hint/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_none_no_type_hint/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_none_no_type_hint/main.py b/tests/emulator_tests/generic_functions/return_none_no_type_hint/main.py deleted file mode 100644 index 69877f6d8..000000000 --- a/tests/emulator_tests/generic_functions/return_none_no_type_hint/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Timer trigger with none return and no type hint") diff --git a/tests/emulator_tests/generic_functions/return_not_processed_last/__init__.py b/tests/emulator_tests/generic_functions/return_not_processed_last/__init__.py deleted file mode 100644 index 300fae398..000000000 --- a/tests/emulator_tests/generic_functions/return_not_processed_last/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -# There are 3 bindings defined in function.json: -# 1. req: HTTP trigger -# 2. testEntities: table input (generic) -# 3. $return: HTTP response -# The bindings will be processed by the worker in this order: -# req -> $return -> testEntities -def main(req: func.HttpRequest, testEntities): - return func.HttpResponse(status_code=200) diff --git a/tests/emulator_tests/generic_functions/return_not_processed_last/function.json b/tests/emulator_tests/generic_functions/return_not_processed_last/function.json deleted file mode 100644 index e02ae4d15..000000000 --- a/tests/emulator_tests/generic_functions/return_not_processed_last/function.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": [ - "get" - ], - "name": "req" - }, - { - "direction": "in", - "type": "table", - "name": "testEntities", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_processed_last/__init__.py b/tests/emulator_tests/generic_functions/return_processed_last/__init__.py deleted file mode 100644 index 3d8f56122..000000000 --- a/tests/emulator_tests/generic_functions/return_processed_last/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -# There are 3 bindings defined in function.json: -# 1. req: HTTP trigger -# 2. testEntity: table input (generic) -# 3. $return: HTTP response -# The bindings will be processed by the worker in this order: -# req -> testEntity -> $return -def main(req: func.HttpRequest, testEntity): - return func.HttpResponse(status_code=200) diff --git a/tests/emulator_tests/generic_functions/return_processed_last/function.json b/tests/emulator_tests/generic_functions/return_processed_last/function.json deleted file mode 100644 index d23f01a86..000000000 --- a/tests/emulator_tests/generic_functions/return_processed_last/function.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": [ - "get" - ], - "name": "req" - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_string/function.json b/tests/emulator_tests/generic_functions/return_string/function.json deleted file mode 100644 index 4dc852e37..000000000 --- a/tests/emulator_tests/generic_functions/return_string/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/generic_functions/return_string/main.py b/tests/emulator_tests/generic_functions/return_string/main.py deleted file mode 100644 index 02f7aa432..000000000 --- a/tests/emulator_tests/generic_functions/return_string/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest, testEntity): - logging.info("Return string") - return "hi!" diff --git a/tests/emulator_tests/generic_functions/table_out_binding/__init__.py b/tests/emulator_tests/generic_functions/table_out_binding/__init__.py deleted file mode 100644 index 09c7058e9..000000000 --- a/tests/emulator_tests/generic_functions/table_out_binding/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json -import uuid -import azure.functions as func - - -def main(req: func.HttpRequest, resp: func.Out[func.HttpResponse]): - row_key_uuid = str(uuid.uuid4()) - table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid} - table_json = json.dumps(table_dict) - resp.set(table_json) - return table_json diff --git a/tests/emulator_tests/generic_functions/table_out_binding/function.json b/tests/emulator_tests/generic_functions/table_out_binding/function.json deleted file mode 100644 index 25537873a..000000000 --- a/tests/emulator_tests/generic_functions/table_out_binding/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": ["post"], - "name": "req" - }, - { - "direction": "out", - "type": "table", - "name": "$return", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - }, - { - "name": "resp", - "type": "http", - "direction": "out" - } - ] - } \ No newline at end of file diff --git a/tests/emulator_tests/queue_functions/get_queue_blob/function.json b/tests/emulator_tests/queue_functions/get_queue_blob/function.json deleted file mode 100644 index 4abc22167..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-blob.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] -} diff --git a/tests/emulator_tests/queue_functions/get_queue_blob/main.py b/tests/emulator_tests/queue_functions/get_queue_blob/main.py deleted file mode 100644 index b82ec6efd..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return json.dumps({ - 'queue': json.loads(file.read().decode('utf-8')) - }) diff --git a/tests/emulator_tests/queue_functions/get_queue_blob_message_return/function.json b/tests/emulator_tests/queue_functions/get_queue_blob_message_return/function.json deleted file mode 100644 index 7f040a6be..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob_message_return/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-blob-message-return.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] -} diff --git a/tests/emulator_tests/queue_functions/get_queue_blob_message_return/main.py b/tests/emulator_tests/queue_functions/get_queue_blob_message_return/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob_message_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/queue_functions/get_queue_blob_return/function.json b/tests/emulator_tests/queue_functions/get_queue_blob_return/function.json deleted file mode 100644 index ce3b8c8c2..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob_return/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-blob-return.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return", - } - ] -} diff --git a/tests/emulator_tests/queue_functions/get_queue_blob_return/main.py b/tests/emulator_tests/queue_functions/get_queue_blob_return/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_blob_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/function.json b/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/function.json deleted file mode 100644 index f0f0b8c6c..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - "disabled": false, - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-untyped-blob-return.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/main.py b/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/main.py deleted file mode 100644 index 46dc607e2..000000000 --- a/tests/emulator_tests/queue_functions/get_queue_untyped_blob_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf - - -def main(req: azf.HttpRequest, file: azf.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/emulator_tests/queue_functions/put_queue/function.json b/tests/emulator_tests/queue_functions/put_queue/function.json deleted file mode 100644 index b8e03f2be..000000000 --- a/tests/emulator_tests/queue_functions/put_queue/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "msg", - "queueName": "testqueue", - "connection": "AzureWebJobsStorage", - "type": "queue" - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue/main.py b/tests/emulator_tests/queue_functions/put_queue/main.py deleted file mode 100644 index fde178e41..000000000 --- a/tests/emulator_tests/queue_functions/put_queue/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest, msg: azf.Out[str]): - msg.set(req.get_body()) - - return 'OK' diff --git a/tests/emulator_tests/queue_functions/put_queue_message_return/function.json b/tests/emulator_tests/queue_functions/put_queue_message_return/function.json deleted file mode 100644 index ce1de0819..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_message_return/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "$return", - "queueName": "testqueue-message-return", - "connection": "AzureWebJobsStorage", - "type": "queue" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue_message_return/main.py b/tests/emulator_tests/queue_functions/put_queue_message_return/main.py deleted file mode 100644 index 3550166f3..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_message_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest) -> bytes: - return azf.QueueMessage(body=req.get_body()) diff --git a/tests/emulator_tests/queue_functions/put_queue_multiple_out/function.json b/tests/emulator_tests/queue_functions/put_queue_multiple_out/function.json deleted file mode 100644 index 7fb98a3c2..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_multiple_out/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "name": "resp", - "type": "http", - "direction": "out" - }, - { - "direction": "out", - "name": "msg", - "queueName": "testqueue-return-multiple-outparam", - "connection": "AzureWebJobsStorage", - "type": "queue" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue_multiple_out/main.py b/tests/emulator_tests/queue_functions/put_queue_multiple_out/main.py deleted file mode 100644 index afb174337..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_multiple_out/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -def main(req: func.HttpRequest, resp: func.Out[func.HttpResponse], - msg: func.Out[func.QueueMessage]) -> None: - data = req.get_body().decode() - msg.set(func.QueueMessage(body=data)) - resp.set(func.HttpResponse(body='HTTP response: {}'.format(data))) diff --git a/tests/emulator_tests/queue_functions/put_queue_return/function.json b/tests/emulator_tests/queue_functions/put_queue_return/function.json deleted file mode 100644 index 129b7cb20..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_return/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "$return", - "queueName": "testqueue-return", - "connection": "AzureWebJobsStorage", - "type": "queue" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue_return/main.py b/tests/emulator_tests/queue_functions/put_queue_return/main.py deleted file mode 100644 index 21f3b275b..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest) -> bytes: - return req.get_body() diff --git a/tests/emulator_tests/queue_functions/put_queue_return_multiple/function.json b/tests/emulator_tests/queue_functions/put_queue_return_multiple/function.json deleted file mode 100644 index cc1f2fc14..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_return_multiple/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "msgs", - "queueName": "testqueue-return-multiple", - "connection": "AzureWebJobsStorage", - "type": "queue" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue_return_multiple/main.py b/tests/emulator_tests/queue_functions/put_queue_return_multiple/main.py deleted file mode 100644 index 93152d7bb..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_return_multiple/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import typing - -import azure.functions as azf - - -def main(req: azf.HttpRequest, msgs: azf.Out[typing.List[str]]): - msgs.set(['one', 'two']) diff --git a/tests/emulator_tests/queue_functions/put_queue_untyped_return/function.json b/tests/emulator_tests/queue_functions/put_queue_untyped_return/function.json deleted file mode 100644 index 8dee2e9c5..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_untyped_return/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - "disabled": false, - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "name": "$return", - "queueName": "testqueue-untyped-return", - "connection": "AzureWebJobsStorage", - "type": "queue" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/put_queue_untyped_return/main.py b/tests/emulator_tests/queue_functions/put_queue_untyped_return/main.py deleted file mode 100644 index 3550166f3..000000000 --- a/tests/emulator_tests/queue_functions/put_queue_untyped_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest) -> bytes: - return azf.QueueMessage(body=req.get_body()) diff --git a/tests/emulator_tests/queue_functions/queue_functions_stein/function_app.py b/tests/emulator_tests/queue_functions/queue_functions_stein/function_app.py deleted file mode 100644 index 087cf4592..000000000 --- a/tests/emulator_tests/queue_functions/queue_functions_stein/function_app.py +++ /dev/null @@ -1,185 +0,0 @@ -import json -import logging -import typing - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="get_queue_blob") -@app.route(route="get_queue_blob") -@app.blob_input(arg_name="file", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob.txt") -def get_queue_blob(req: func.HttpRequest, file: func.InputStream) -> str: - return json.dumps({ - 'queue': json.loads(file.read().decode('utf-8')) - }) - - -@app.function_name(name="get_queue_blob_message_return") -@app.route(route="get_queue_blob_message_return") -@app.blob_input(arg_name="file", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-message-return.txt") -def get_queue_blob_message_return(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_queue_blob_return") -@app.route(route="get_queue_blob_return") -@app.blob_input(arg_name="file", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-return.txt") -def get_queue_blob_return(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_queue_untyped_blob_return") -@app.route(route="get_queue_untyped_blob_return") -@app.blob_input(arg_name="file", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-untyped-blob-return.txt") -def get_queue_untyped_blob_return(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="put_queue") -@app.route(route="put_queue") -@app.queue_output(arg_name="msg", - connection="AzureWebJobsStorage", - queue_name="testqueue") -def put_queue(req: func.HttpRequest, msg: func.Out[str]): - msg.set(req.get_body()) - - return 'OK' - - 
-@app.function_name(name="put_queue_message_return") -@app.route(route="put_queue_message_return", binding_arg_name="resp") -@app.queue_output(arg_name="$return", - connection="AzureWebJobsStorage", - queue_name="testqueue-message-return") -def main(req: func.HttpRequest, resp: func.Out[str]) -> bytes: - return func.QueueMessage(body=req.get_body()) - - -@app.function_name("put_queue_multiple_out") -@app.route(route="put_queue_multiple_out", binding_arg_name="resp") -@app.queue_output(arg_name="msg", - connection="AzureWebJobsStorage", - queue_name="testqueue-return-multiple-outparam") -def put_queue_multiple_out(req: func.HttpRequest, - resp: func.Out[func.HttpResponse], - msg: func.Out[func.QueueMessage]) -> None: - data = req.get_body().decode() - msg.set(func.QueueMessage(body=data)) - resp.set(func.HttpResponse(body='HTTP response: {}'.format(data))) - - -@app.function_name("put_queue_return") -@app.route(route="put_queue_return", binding_arg_name="resp") -@app.queue_output(arg_name="$return", - connection="AzureWebJobsStorage", - queue_name="testqueue-return") -def put_queue_return(req: func.HttpRequest, resp: func.Out[str]) -> bytes: - return req.get_body() - - -@app.function_name(name="put_queue_multiple_return") -@app.route(route="put_queue_multiple_return") -@app.queue_output(arg_name="msgs", - connection="AzureWebJobsStorage", - queue_name="testqueue-return-multiple") -def put_queue_multiple_return(req: func.HttpRequest, - msgs: func.Out[typing.List[str]]): - msgs.set(['one', 'two']) - - -@app.function_name(name="put_queue_untyped_return") -@app.route(route="put_queue_untyped_return", binding_arg_name="resp") -@app.queue_output(arg_name="$return", - connection="AzureWebJobsStorage", - queue_name="testqueue-untyped-return") -def put_queue_untyped_return(req: func.HttpRequest, - resp: func.Out[str]) -> bytes: - return func.QueueMessage(body=req.get_body()) - - -@app.function_name(name="queue_trigger") -@app.queue_trigger(arg_name="msg", - queue_name="testqueue", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob.txt") -def queue_trigger(msg: func.QueueMessage) -> str: - result = json.dumps({ - 'id': msg.id, - 'body': msg.get_body().decode('utf-8'), - 'expiration_time': (msg.expiration_time.isoformat() - if msg.expiration_time else None), - 'insertion_time': (msg.insertion_time.isoformat() - if msg.insertion_time else None), - 'time_next_visible': (msg.time_next_visible.isoformat() - if msg.time_next_visible else None), - 'pop_receipt': msg.pop_receipt, - 'dequeue_count': msg.dequeue_count - }) - - return result - - -@app.function_name(name="queue_trigger_message_return") -@app.queue_trigger(arg_name="msg", - queue_name="testqueue-message-return", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-message-return.txt") -def queue_trigger_message_return(msg: func.QueueMessage) -> bytes: - return msg.get_body() - - -@app.function_name(name="queue_trigger_return") -@app.queue_trigger(arg_name="msg", - queue_name="testqueue-return", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-return.txt") -def queue_trigger_return(msg: func.QueueMessage) -> bytes: - return msg.get_body() - - -@app.function_name(name="queue_trigger_return_multiple") -@app.queue_trigger(arg_name="msg", - 
queue_name="testqueue-return-multiple", - connection="AzureWebJobsStorage") -def queue_trigger_return_multiple(msg: func.QueueMessage) -> None: - logging.info('trigger on message: %s', msg.get_body().decode('utf-8')) - - -@app.function_name(name="queue_trigger_untyped") -@app.queue_trigger(arg_name="msg", - queue_name="testqueue-untyped-return", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-untyped-blob-return.txt") -def queue_trigger_untyped(msg: str) -> str: - return msg - - -@app.function_name(name="put_queue_return_multiple") -@app.route(route="put_queue_return_multiple", binding_arg_name="resp") -@app.queue_output(arg_name="msgs", - connection="AzureWebJobsStorage", - queue_name="testqueue-return-multiple") -def put_queue_return_multiple(req: func.HttpRequest, - resp: func.Out[str], - msgs: func.Out[typing.List[str]]): - msgs.set(['one', 'two']) diff --git a/tests/emulator_tests/queue_functions/queue_functions_stein/generic/function_app.py b/tests/emulator_tests/queue_functions/queue_functions_stein/generic/function_app.py deleted file mode 100644 index a2ad14b58..000000000 --- a/tests/emulator_tests/queue_functions/queue_functions_stein/generic/function_app.py +++ /dev/null @@ -1,253 +0,0 @@ -import json -import logging -import typing - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="get_queue_blob") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="get_queue_blob") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob.txt") -def get_queue_blob(req: func.HttpRequest, file: func.InputStream) -> str: - return json.dumps({ - 'queue': json.loads(file.read().decode('utf-8')) - }) - - -@app.function_name(name="get_queue_blob_message_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="get_queue_blob_message_return") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-message-return.txt") -def get_queue_blob_message_return(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_queue_blob_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="get_queue_blob_return") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding(arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob-return" - ".txt") -def get_queue_blob_return(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_queue_untyped_blob_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="get_queue_untyped_blob_return") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-untyped-blob-return.txt") -def get_queue_untyped_blob_return(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="put_queue") 
-@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="msg", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue") -def put_queue(req: func.HttpRequest, msg: func.Out[str]): - msg.set(req.get_body()) - - return 'OK' - - -@app.function_name(name="put_queue_message_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue_message_return") -@app.generic_output_binding(arg_name="resp", type="http") -@app.generic_output_binding( - arg_name="$return", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue-message-return") -def main(req: func.HttpRequest, resp: func.Out[str]) -> bytes: - return func.QueueMessage(body=req.get_body()) - - -@app.function_name(name="put_queue_multiple_out") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue_multiple_out") -@app.generic_output_binding(arg_name="resp", type="http") -@app.generic_output_binding( - arg_name="msg", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue-return-multiple-outparam") -def put_queue_multiple_out(req: func.HttpRequest, - resp: func.Out[func.HttpResponse], - msg: func.Out[func.QueueMessage]) -> None: - data = req.get_body().decode() - msg.set(func.QueueMessage(body=data)) - resp.set(func.HttpResponse(body='HTTP response: {}'.format(data))) - - -@app.function_name("put_queue_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue_return") -@app.generic_output_binding(arg_name="resp", type="http") -@app.generic_output_binding( - arg_name="$return", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue-return") -def put_queue_return(req: func.HttpRequest, resp: func.Out[str]) -> bytes: - return req.get_body() - - -@app.function_name(name="put_queue_multiple_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue_multiple_return") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="msgs", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue-return-multiple") -def put_queue_multiple_return(req: func.HttpRequest, - msgs: func.Out[typing.List[str]]): - msgs.set(['one', 'two']) - - -@app.function_name(name="put_queue_untyped_return") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="put_queue_untyped_return") -@app.generic_output_binding(arg_name="resp", type="http") -@app.generic_output_binding( - arg_name="$return", - type="queue", - connection="AzureWebJobsStorage", - queue_name="testqueue-untyped-return") -def put_queue_untyped_return(req: func.HttpRequest, - resp: func.Out[str]) -> bytes: - return func.QueueMessage(body=req.get_body()) - - -@app.function_name(name="queue_trigger") -@app.generic_trigger(arg_name="msg", - type="queueTrigger", - queue_name="testqueue", - connection="AzureWebJobsStorage") -@app.generic_output_binding(arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-queue-blob.txt") -def queue_trigger(msg: func.QueueMessage) -> str: - result = json.dumps({ - 'id': msg.id, - 'body': msg.get_body().decode('utf-8'), - 'expiration_time': (msg.expiration_time.isoformat() - if msg.expiration_time else None), - 'insertion_time': (msg.insertion_time.isoformat() - if msg.insertion_time else None), - 'time_next_visible': 
(msg.time_next_visible.isoformat()
-                              if msg.time_next_visible else None),
-        'pop_receipt': msg.pop_receipt,
-        'dequeue_count': msg.dequeue_count
-    })
-
-    return result
-
-
-@app.function_name(name="queue_trigger_message_return")
-@app.generic_trigger(arg_name="msg",
-                     type="queueTrigger",
-                     queue_name="testqueue-message-return",
-                     connection="AzureWebJobsStorage")
-@app.generic_output_binding(
-    arg_name="$return",
-    type="blob",
-    connection="AzureWebJobsStorage",
-    path="python-worker-tests/test-queue-blob-message-return.txt")
-def queue_trigger_message_return(msg: func.QueueMessage) -> bytes:
-    return msg.get_body()
-
-
-@app.function_name(name="queue_trigger_return")
-@app.generic_trigger(arg_name="msg",
-                     type="queueTrigger",
-                     queue_name="testqueue-return",
-                     connection="AzureWebJobsStorage")
-@app.generic_output_binding(
-    arg_name="$return",
-    type="blob",
-    connection="AzureWebJobsStorage",
-    path="python-worker-tests/test-queue-blob-return.txt")
-def queue_trigger_return(msg: func.QueueMessage) -> bytes:
-    return msg.get_body()
-
-
-@app.function_name(name="queue_trigger_return_multiple")
-@app.generic_trigger(arg_name="msg",
-                     type="queueTrigger",
-                     queue_name="testqueue-return-multiple",
-                     connection="AzureWebJobsStorage")
-def queue_trigger_return_multiple(msg: func.QueueMessage) -> None:
-    logging.info('trigger on message: %s', msg.get_body().decode('utf-8'))
-
-
-@app.function_name(name="queue_trigger_untyped")
-@app.generic_trigger(arg_name="msg",
-                     type="queueTrigger",
-                     queue_name="testqueue-untyped-return",
-                     connection="AzureWebJobsStorage")
-@app.generic_output_binding(arg_name="$return",
-                            type="blob",
-                            connection="AzureWebJobsStorage",
-                            path="python-worker-tests/test-queue-untyped"
-                                 "-blob-return.txt")
-def queue_trigger_untyped(msg: str) -> str:
-    return msg
-
-
-@app.function_name(name="put_queue_return_multiple")
-@app.generic_trigger(arg_name="req",
-                     type="httpTrigger",
-                     route="put_queue_return_multiple")
-@app.generic_output_binding(arg_name="resp", type="http")
-@app.generic_output_binding(
-    arg_name="msgs",
-    type="queue",
-    connection="AzureWebJobsStorage",
-    queue_name="testqueue-return-multiple")
-def put_queue_return_multiple(req: func.HttpRequest,
-                              resp: func.Out[str],
-                              msgs: func.Out[typing.List[str]]):
-    msgs.set(['one', 'two'])
diff --git a/tests/emulator_tests/queue_functions/queue_trigger/function.json b/tests/emulator_tests/queue_functions/queue_trigger/function.json
deleted file mode 100644
index 9c7f2b322..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger/function.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "scriptFile": "main.py",
-
-  "bindings": [
-    {
-      "type": "queueTrigger",
-      "direction": "in",
-      "name": "msg",
-      "queueName": "testqueue",
-      "connection": "AzureWebJobsStorage"
-    },
-    {
-      "type": "blob",
-      "direction": "out",
-      "name": "$return",
-      "connection": "AzureWebJobsStorage",
-      "path": "python-worker-tests/test-queue-blob.txt"
-    }
-  ]
-}
diff --git a/tests/emulator_tests/queue_functions/queue_trigger/main.py b/tests/emulator_tests/queue_functions/queue_trigger/main.py
deleted file mode 100644
index 08a5d4e9d..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger/main.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import json - -import azure.functions as azf - - -def main(msg: azf.QueueMessage) -> str: - result = json.dumps({ - 'id': msg.id, - 'body': msg.get_body().decode('utf-8'), - 'expiration_time': (msg.expiration_time.isoformat() - if msg.expiration_time else None), - 'insertion_time': (msg.insertion_time.isoformat() - if msg.insertion_time else None), - 'time_next_visible': (msg.time_next_visible.isoformat() - if msg.time_next_visible else None), - 'pop_receipt': msg.pop_receipt, - 'dequeue_count': msg.dequeue_count - }) - - return result diff --git a/tests/emulator_tests/queue_functions/queue_trigger_message_return/function.json b/tests/emulator_tests/queue_functions/queue_trigger_message_return/function.json deleted file mode 100644 index 9c9e5a03a..000000000 --- a/tests/emulator_tests/queue_functions/queue_trigger_message_return/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "queueTrigger", - "direction": "in", - "name": "msg", - "queueName": "testqueue-message-return", - "connection": "AzureWebJobsStorage" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-blob-message-return.txt" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/queue_trigger_message_return/main.py b/tests/emulator_tests/queue_functions/queue_trigger_message_return/main.py deleted file mode 100644 index be573b50b..000000000 --- a/tests/emulator_tests/queue_functions/queue_trigger_message_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(msg: azf.QueueMessage) -> bytes: - return msg.get_body() diff --git a/tests/emulator_tests/queue_functions/queue_trigger_return/function.json b/tests/emulator_tests/queue_functions/queue_trigger_return/function.json deleted file mode 100644 index 3604e0eb8..000000000 --- a/tests/emulator_tests/queue_functions/queue_trigger_return/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - - "bindings": [ - { - "type": "queueTrigger", - "direction": "in", - "name": "msg", - "queueName": "testqueue-return", - "connection": "AzureWebJobsStorage" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-queue-blob-return.txt" - } - ] -} diff --git a/tests/emulator_tests/queue_functions/queue_trigger_return/main.py b/tests/emulator_tests/queue_functions/queue_trigger_return/main.py deleted file mode 100644 index be573b50b..000000000 --- a/tests/emulator_tests/queue_functions/queue_trigger_return/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf
-
-
-def main(msg: azf.QueueMessage) -> bytes:
-    return msg.get_body()
diff --git a/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/function.json b/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/function.json
deleted file mode 100644
index 0eb42ab89..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/function.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "scriptFile": "main.py",
-
-  "bindings": [
-    {
-      "type": "queueTrigger",
-      "direction": "in",
-      "name": "msg",
-      "queueName": "testqueue-return-multiple",
-      "connection": "AzureWebJobsStorage"
-    }
-  ]
-}
diff --git a/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/main.py b/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/main.py
deleted file mode 100644
index 6abb82b0a..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger_return_multiple/main.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import logging
-
-import azure.functions as azf
-
-logger = logging.getLogger(__name__)
-
-
-def main(msg: azf.QueueMessage) -> None:
-    logging.info('trigger on message: %s', msg.get_body().decode('utf-8'))
diff --git a/tests/emulator_tests/queue_functions/queue_trigger_untyped/function.json b/tests/emulator_tests/queue_functions/queue_trigger_untyped/function.json
deleted file mode 100644
index a4d434ad3..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger_untyped/function.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "scriptFile": "main.py",
-  "disabled": false,
-
-  "bindings": [
-    {
-      "type": "queueTrigger",
-      "direction": "in",
-      "name": "msg",
-      "queueName": "testqueue-untyped-return",
-      "connection": "AzureWebJobsStorage"
-    },
-    {
-      "type": "blob",
-      "direction": "out",
-      "name": "$return",
-      "connection": "AzureWebJobsStorage",
-      "path": "python-worker-tests/test-queue-untyped-blob-return.txt"
-    }
-  ]
-}
diff --git a/tests/emulator_tests/queue_functions/queue_trigger_untyped/main.py b/tests/emulator_tests/queue_functions/queue_trigger_untyped/main.py
deleted file mode 100644
index 64fa31587..000000000
--- a/tests/emulator_tests/queue_functions/queue_trigger_untyped/main.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def main(msg: str) -> str:
-    return msg
diff --git a/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/__init__.py b/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/__init__.py
deleted file mode 100644
index 1e5ac3d90..000000000
--- a/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import azure.functions as func
-
-
-def main(req: func.HttpRequest, file: func.InputStream) -> str:
-    return func.HttpResponse(
-        file.read().decode('utf-8'), mimetype='application/json')
diff --git a/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/function.json b/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/function.json
deleted file mode 100644
index 944dd45a9..000000000
--- a/tests/emulator_tests/servicebus_functions/get_servicebus_triggered/function.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "scriptFile": "__init__.py",
-  "bindings": [
-    {
-      "type": "httpTrigger",
-      "direction": "in",
-      "name": "req",
-      "authLevel": "anonymous"
-    },
-    {
-      "type": "blob",
-      "direction": "in",
-      "name": "file",
-      "connection": "AzureWebJobsStorage",
-      "path": "python-worker-tests/test-servicebus-triggered.txt"
-    },
-    {
-      "type": "http",
-      "direction": "out",
-      "name": "$return"
-    }
-  ]
-}
diff --git a/tests/emulator_tests/servicebus_functions/put_message/__init__.py b/tests/emulator_tests/servicebus_functions/put_message/__init__.py
deleted file mode 100644
index 85ad99bf7..000000000
--- a/tests/emulator_tests/servicebus_functions/put_message/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import azure.functions as azf
-
-
-def main(req: azf.HttpRequest, msg: azf.Out[str]):
-    msg.set(req.get_body().decode('utf-8'))
-
-    return 'OK'
diff --git a/tests/emulator_tests/servicebus_functions/put_message/function.json b/tests/emulator_tests/servicebus_functions/put_message/function.json
deleted file mode 100644
index 722f19541..000000000
--- a/tests/emulator_tests/servicebus_functions/put_message/function.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
-  "scriptFile": "__init__.py",
-
-  "bindings": [
-    {
-      "type": "httpTrigger",
-      "direction": "in",
-      "name": "req",
-      "authLevel": "anonymous"
-    },
-    {
-      "direction": "out",
-      "name": "msg",
-      "queueName": "testqueue",
-      "connection": "AzureWebJobsServiceBusConnectionString",
-      "type": "serviceBus"
-    },
-    {
-      "direction": "out",
-      "name": "$return",
-      "type": "http"
-    }
-  ]
-}
diff --git a/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/function_app.py b/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/function_app.py
deleted file mode 100644
index 9e9d12246..000000000
--- a/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/function_app.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import json
-
-import azure.functions as func
-
-app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
-
-
-@app.route(route="put_message")
-@app.service_bus_queue_output(
-    arg_name="msg",
-    connection="AzureWebJobsServiceBusConnectionString",
-    queue_name="testqueue")
-def put_message(req: func.HttpRequest, msg: func.Out[str]):
-    msg.set(req.get_body().decode('utf-8'))
-    return 'OK'
-
-
-@app.route(route="get_servicebus_triggered")
-@app.blob_input(arg_name="file",
-                path="python-worker-tests/test-servicebus-triggered.txt",
-                connection="AzureWebJobsStorage")
-def get_servicebus_triggered(req: func.HttpRequest,
-                             file: func.InputStream) -> str:
-    return func.HttpResponse(
-        file.read().decode('utf-8'), mimetype='application/json')
-
-
-@app.service_bus_queue_trigger(
-    arg_name="msg",
-    connection="AzureWebJobsServiceBusConnectionString",
-    queue_name="testqueue")
-@app.blob_output(arg_name="$return",
-                 path="python-worker-tests/test-servicebus-triggered.txt",
-                 connection="AzureWebJobsStorage")
-def
servicebus_trigger(msg: func.ServiceBusMessage) -> str: - result = json.dumps({ - 'message_id': msg.message_id, - 'body': msg.get_body().decode('utf-8'), - 'content_type': msg.content_type, - 'delivery_count': msg.delivery_count, - 'expiration_time': (msg.expiration_time.isoformat() if - msg.expiration_time else None), - 'label': msg.label, - 'partition_key': msg.partition_key, - 'reply_to': msg.reply_to, - 'reply_to_session_id': msg.reply_to_session_id, - 'scheduled_enqueue_time': (msg.scheduled_enqueue_time.isoformat() if - msg.scheduled_enqueue_time else None), - 'session_id': msg.session_id, - 'time_to_live': msg.time_to_live, - 'to': msg.to, - 'user_properties': msg.user_properties, - - 'application_properties': msg.application_properties, - 'correlation_id': msg.correlation_id, - 'dead_letter_error_description': msg.dead_letter_error_description, - 'dead_letter_reason': msg.dead_letter_reason, - 'dead_letter_source': msg.dead_letter_source, - 'enqueued_sequence_number': msg.enqueued_sequence_number, - 'enqueued_time_utc': (msg.enqueued_time_utc.isoformat() if - msg.enqueued_time_utc else None), - 'expires_at_utc': (msg.expires_at_utc.isoformat() if - msg.expires_at_utc else None), - 'locked_until': (msg.locked_until.isoformat() if - msg.locked_until else None), - 'lock_token': msg.lock_token, - 'sequence_number': msg.sequence_number, - 'state': msg.state, - 'subject': msg.subject, - 'transaction_partition_key': msg.transaction_partition_key - }) - - return result diff --git a/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/generic/function_app.py b/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/generic/function_app.py deleted file mode 100644 index 4fd48785a..000000000 --- a/tests/emulator_tests/servicebus_functions/servicebus_functions_stein/generic/function_app.py +++ /dev/null @@ -1,81 +0,0 @@ -import json - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="put_message") -@app.generic_trigger(arg_name="req", type="httpTrigger", route="put_message") -@app.generic_output_binding(arg_name="msg", - type="serviceBus", - connection="AzureWebJobsServiceBusConnectionString", - queue_name="testqueue") -@app.generic_output_binding(arg_name="$return", type="http") -def put_message(req: func.HttpRequest, msg: func.Out[str]): - msg.set(req.get_body().decode('utf-8')) - return 'OK' - - -@app.function_name(name="get_servicebus_triggered") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_servicebus_triggered") -@app.generic_input_binding(arg_name="file", - type="blob", - path="python-worker-tests/test-servicebus-triggered.txt", # NoQA - connection="AzureWebJobsStorage") -@app.generic_output_binding(arg_name="$return", type="http") -def get_servicebus_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return func.HttpResponse( - file.read().decode('utf-8'), mimetype='application/json') - - -@app.generic_trigger( - arg_name="msg", - type="serviceBusTrigger", - connection="AzureWebJobsServiceBusConnectionString", - queue_name="testqueue") -@app.generic_output_binding(arg_name="$return", - path="python-worker-tests/test-servicebus-triggered.txt", # NoQA - type="blob", - connection="AzureWebJobsStorage") -def servicebus_trigger(msg: func.ServiceBusMessage) -> str: - result = json.dumps({ - 'message_id': msg.message_id, - 'body': msg.get_body().decode('utf-8'), - 'content_type': msg.content_type, - 'delivery_count': msg.delivery_count, - 
'expiration_time': (msg.expiration_time.isoformat() if - msg.expiration_time else None), - 'label': msg.label, - 'partition_key': msg.partition_key, - 'reply_to': msg.reply_to, - 'reply_to_session_id': msg.reply_to_session_id, - 'scheduled_enqueue_time': (msg.scheduled_enqueue_time.isoformat() if - msg.scheduled_enqueue_time else None), - 'session_id': msg.session_id, - 'time_to_live': msg.time_to_live, - 'to': msg.to, - 'user_properties': msg.user_properties, - - 'application_properties': msg.application_properties, - 'correlation_id': msg.correlation_id, - 'dead_letter_error_description': msg.dead_letter_error_description, - 'dead_letter_reason': msg.dead_letter_reason, - 'dead_letter_source': msg.dead_letter_source, - 'enqueued_sequence_number': msg.enqueued_sequence_number, - 'enqueued_time_utc': (msg.enqueued_time_utc.isoformat() if - msg.enqueued_time_utc else None), - 'expires_at_utc': (msg.expires_at_utc.isoformat() if - msg.expires_at_utc else None), - 'locked_until': (msg.locked_until.isoformat() if - msg.locked_until else None), - 'lock_token': msg.lock_token, - 'sequence_number': msg.sequence_number, - 'state': msg.state, - 'subject': msg.subject, - 'transaction_partition_key': msg.transaction_partition_key - }) - - return result diff --git a/tests/emulator_tests/servicebus_functions/servicebus_trigger/__init__.py b/tests/emulator_tests/servicebus_functions/servicebus_trigger/__init__.py deleted file mode 100644 index 341779a4d..000000000 --- a/tests/emulator_tests/servicebus_functions/servicebus_trigger/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as azf - - -def main(msg: azf.ServiceBusMessage) -> str: - result = json.dumps({ - 'message_id': msg.message_id, - 'body': msg.get_body().decode('utf-8'), - 'content_type': msg.content_type, - 'delivery_count': msg.delivery_count, - 'expiration_time': (msg.expiration_time.isoformat() if - msg.expiration_time else None), - 'label': msg.label, - 'partition_key': msg.partition_key, - 'reply_to': msg.reply_to, - 'reply_to_session_id': msg.reply_to_session_id, - 'scheduled_enqueue_time': (msg.scheduled_enqueue_time.isoformat() if - msg.scheduled_enqueue_time else None), - 'session_id': msg.session_id, - 'time_to_live': msg.time_to_live, - 'to': msg.to, - 'user_properties': msg.user_properties, - - 'application_properties': msg.application_properties, - 'correlation_id': msg.correlation_id, - 'dead_letter_error_description': msg.dead_letter_error_description, - 'dead_letter_reason': msg.dead_letter_reason, - 'dead_letter_source': msg.dead_letter_source, - 'enqueued_sequence_number': msg.enqueued_sequence_number, - 'enqueued_time_utc': (msg.enqueued_time_utc.isoformat() if - msg.enqueued_time_utc else None), - 'expires_at_utc': (msg.expires_at_utc.isoformat() if - msg.expires_at_utc else None), - 'locked_until': (msg.locked_until.isoformat() if - msg.locked_until else None), - 'lock_token': msg.lock_token, - 'sequence_number': msg.sequence_number, - 'state': msg.state, - 'subject': msg.subject, - 'transaction_partition_key': msg.transaction_partition_key - }) - - return result diff --git a/tests/emulator_tests/servicebus_functions/servicebus_trigger/function.json b/tests/emulator_tests/servicebus_functions/servicebus_trigger/function.json deleted file mode 100644 index b6fe4355e..000000000 --- a/tests/emulator_tests/servicebus_functions/servicebus_trigger/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - 
"scriptFile": "__init__.py", - - "bindings": [ - { - "type": "serviceBusTrigger", - "direction": "in", - "name": "msg", - "queueName": "testqueue", - "connection": "AzureWebJobsServiceBusConnectionString", - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-servicebus-triggered.txt" - } - ] -} diff --git a/tests/emulator_tests/table_functions/table_functions_stein/function_app.py b/tests/emulator_tests/table_functions/table_functions_stein/function_app.py deleted file mode 100644 index 5ebd10e07..000000000 --- a/tests/emulator_tests/table_functions/table_functions_stein/function_app.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import uuid - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="table_in_binding") -@app.route(route="table_in_binding/{id}") -@app.table_input(arg_name="testEntity", - connection="AzureWebJobsStorage", - table_name="BindingTestTable", - row_key='{id}', - partition_key="test") -def table_in_binding(req: func.HttpRequest, testEntity): - return func.HttpResponse(status_code=200, body=testEntity) - - -@app.function_name(name="table_out_binding") -@app.route(route="table_out_binding", binding_arg_name="resp") -@app.table_output(arg_name="$return", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def table_out_binding(req: func.HttpRequest, resp: func.Out[func.HttpResponse]): - row_key_uuid = str(uuid.uuid4()) - table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid} - table_json = json.dumps(table_dict) - resp.set(table_json) - return table_json diff --git a/tests/emulator_tests/table_functions/table_functions_stein/generic/function_app.py b/tests/emulator_tests/table_functions/table_functions_stein/generic/function_app.py deleted file mode 100644 index 0a03a2366..000000000 --- a/tests/emulator_tests/table_functions/table_functions_stein/generic/function_app.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json -import uuid - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="table_in_binding") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="table_in_binding/{id}") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="testEntity", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable", - row_key="{id}", - partition_key="test") -def table_in_binding(req: func.HttpRequest, testEntity): - return func.HttpResponse(status_code=200, body=testEntity) - - -@app.function_name(name="table_out_binding") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="table_out_binding") -@app.generic_output_binding(arg_name="resp", type="http") -@app.generic_output_binding( - arg_name="$return", - type="table", - connection="AzureWebJobsStorage", - table_name="BindingTestTable") -def table_out_binding(req: func.HttpRequest, resp: func.Out[func.HttpResponse]): - row_key_uuid = str(uuid.uuid4()) - table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid} - table_json = json.dumps(table_dict) - resp.set(table_json) - return table_json diff --git a/tests/emulator_tests/table_functions/table_in_binding/__init__.py b/tests/emulator_tests/table_functions/table_in_binding/__init__.py deleted file mode 100644 index a125e2bdb..000000000 --- a/tests/emulator_tests/table_functions/table_in_binding/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -def main(req: func.HttpRequest, testEntity): - return func.HttpResponse(status_code=200, body=testEntity) diff --git a/tests/emulator_tests/table_functions/table_in_binding/function.json b/tests/emulator_tests/table_functions/table_in_binding/function.json deleted file mode 100644 index d62461d0b..000000000 --- a/tests/emulator_tests/table_functions/table_in_binding/function.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": [ - "get" - ], - "name": "req" - }, - { - "direction": "in", - "type": "table", - "name": "testEntity", - "partitionKey": "test", - "rowKey": "WillBePopulatedWithGuid", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/table_functions/table_out_binding/__init__.py b/tests/emulator_tests/table_functions/table_out_binding/__init__.py deleted file mode 100644 index 5e869a2ae..000000000 --- a/tests/emulator_tests/table_functions/table_out_binding/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json -import uuid - -import azure.functions as func - - -def main(req: func.HttpRequest, resp: func.Out[func.HttpResponse]): - row_key_uuid = str(uuid.uuid4()) - table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid} - table_json = json.dumps(table_dict) - resp.set(table_json) - return table_json diff --git a/tests/emulator_tests/table_functions/table_out_binding/function.json b/tests/emulator_tests/table_functions/table_out_binding/function.json deleted file mode 100644 index 416920ca4..000000000 --- a/tests/emulator_tests/table_functions/table_out_binding/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "authLevel": "anonymous", - "methods": ["post"], - "name": "req" - }, - { - "direction": "out", - "type": "table", - "name": "$return", - "tableName": "BindingTestTable", - "connection": "AzureWebJobsStorage" - }, - { - "name": "resp", - "type": "http", - "direction": "out" - } - ] -} \ No newline at end of file diff --git a/tests/emulator_tests/test_blob_functions.py b/tests/emulator_tests/test_blob_functions.py deleted file mode 100644 index d6a840a38..000000000 --- a/tests/emulator_tests/test_blob_functions.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import time - -from requests import JSONDecodeError -from tests.utils import testutils - - -class TestBlobFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'blob_functions' - - @testutils.retryable_test(3, 5) - def test_blob_io_str(self): - r = self.webhost.request('POST', 'put_blob_str', data='test-data') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('GET', 'get_blob_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - r = self.webhost.request('GET', 'get_blob_as_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - def test_blob_io_large_str(self): - large_string = 'DummyDataDummyDataDummyData' * 1024 * 1024 # 27 MB - - r = self.webhost.request('POST', 'put_blob_str', data=large_string) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('GET', 'get_blob_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, large_string) - - r = self.webhost.request('GET', 'get_blob_as_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, large_string) - - def test_blob_io_bytes(self): - r = self.webhost.request('POST', 'put_blob_bytes', - data='test-dată'.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('POST', 'get_blob_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-dată') - - r = self.webhost.request('POST', 'get_blob_as_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-dată') - - def test_blob_io_large_bytes(self): - large_string = 'DummyDataDummyDataDummyData' * 1024 * 1024 # 27 MB - - r = self.webhost.request('POST', 'put_blob_bytes', - data=large_string.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('POST', 'get_blob_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, large_string) - - r = self.webhost.request('POST', 'get_blob_as_bytes') - 
self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, large_string)
-
-    def test_blob_io_filelike(self):
-        r = self.webhost.request('POST', 'put_blob_filelike')
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, 'OK')
-
-        r = self.webhost.request('POST', 'get_blob_filelike')
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, 'filelike')
-
-    def test_blob_io_return(self):
-        r = self.webhost.request('POST', 'put_blob_return')
-        self.assertEqual(r.status_code, 200)
-
-        r = self.webhost.request('POST', 'get_blob_return')
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, 'FROM RETURN')
-
-    def test_blob_trigger(self):
-        data = "DummyData"
-
-        r = self.webhost.request('POST', 'put_blob_trigger',
-                                 data=data.encode('utf-8'))
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, 'OK')
-
-        # Blob trigger may be processed after some delay
-        # We check it every 2 seconds to allow the trigger to fire
-        max_retries = 10
-        for try_no in range(max_retries):
-            time.sleep(2)
-
-            try:
-                # Check that the trigger has fired
-                r = self.webhost.request('GET', 'get_blob_triggered')
-                self.assertEqual(r.status_code, 200)
-                response = r.json()
-
-                self.assertEqual(response['name'],
-                                 'python-worker-tests/test-blob-trigger.txt')
-                self.assertEqual(response['content'], data)
-
-                break
-            # JSONDecodeError will be thrown if the response is empty.
-            except (AssertionError, JSONDecodeError):
-                if try_no == max_retries - 1:
-                    raise
-
-    def test_blob_trigger_with_large_content(self):
-        data = 'DummyDataDummyDataDummyData' * 1024 * 1024  # 27 MB
-
-        r = self.webhost.request('POST', 'put_blob_trigger',
-                                 data=data.encode('utf-8'))
-        self.assertEqual(r.status_code, 200)
-        self.assertEqual(r.text, 'OK')
-
-        # Blob trigger may be processed after some delay
-        # We check it every 2 seconds to allow the trigger to fire
-        max_retries = 10
-        for try_no in range(max_retries):
-            try:
-                # Check that the trigger has fired
-                r = self.webhost.request('GET', 'get_blob_triggered')
-
-                # Waiting for the blob to get updated
-                time.sleep(2)
-
-                self.assertEqual(r.status_code, 200)
-                response = r.json()
-
-                self.assertEqual(response['name'],
-                                 'python-worker-tests/test-blob-trigger.txt')
-                self.assertEqual(response['content'], data)
-                break
-            # JSONDecodeError will be thrown if the response is empty.
-            except (AssertionError, JSONDecodeError):
-                if try_no == max_retries - 1:
-                    raise
-
-
-class TestBlobFunctionsStein(TestBlobFunctions):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.EMULATOR_TESTS_FOLDER / 'blob_functions' / \
-            'blob_functions_stein'
-
-
-class TestBlobFunctionsSteinGeneric(TestBlobFunctions):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.EMULATOR_TESTS_FOLDER / 'blob_functions' / \
-            'blob_functions_stein' / 'generic'
diff --git a/tests/emulator_tests/test_eventhub_batch_functions.py b/tests/emulator_tests/test_eventhub_batch_functions.py
deleted file mode 100644
index 1a8ae2a9e..000000000
--- a/tests/emulator_tests/test_eventhub_batch_functions.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import json
-import sys
-import time
-from datetime import datetime
-from unittest.case import skipIf
-
-from dateutil import parser
-from tests.utils import testutils
-
-
-class TestEventHubFunctions(testutils.WebHostTestCase):
-    """Test EventHub Trigger and Output Bindings (cardinality: many).
-
-    Each test case consists of 3 parts:
-    1. An eventhub_output_batch HTTP trigger for generating an EventHub event
-    2. An eventhub_multiple EventHub trigger for converting the event into a blob
-    3. A get_eventhub_batch_triggered HTTP trigger for retrieving the event body
-    """
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_batch_functions'
-
-    @classmethod
-    def get_libraries_to_install(cls):
-        return ['azure-eventhub']
-
-    @testutils.retryable_test(3, 5)
-    def test_eventhub_multiple(self):
-        NUM_EVENTS = 3
-        all_row_keys_seen = dict([(i, True) for i in range(NUM_EVENTS)])
-        partition_key = str(round(time.time()))
-
-        # wait for host to restart after change
-        time.sleep(5)
-
-        docs = []
-        for i in range(NUM_EVENTS):
-            doc = {'PartitionKey': partition_key, 'RowKey': i}
-            docs.append(doc)
-
-        r = self.webhost.request('POST', 'eventhub_output_batch',
-                                 data=json.dumps(docs))
-        self.assertEqual(r.status_code, 200)
-
-        row_keys = [i for i in range(NUM_EVENTS)]
-        seen = [False] * NUM_EVENTS
-        row_keys_seen = dict(zip(row_keys, seen))
-
-        # Allow trigger to fire.
-        time.sleep(5)
-
-        r = self.webhost.request('GET', 'get_eventhub_batch_triggered')
-
-        # Wait for the blob to get updated with the latest data from the
-        # eventhub output binding
-        time.sleep(2)
-        self.assertEqual(r.status_code, 200)
-        entries = r.json()
-        for entry in entries:
-            self.assertEqual(entry['PartitionKey'], partition_key)
-            row_key = entry['RowKey']
-            row_keys_seen[row_key] = True
-
-        self.assertDictEqual(all_row_keys_seen, row_keys_seen)
-
-    @skipIf(sys.version_info.minor == 7,
-            "Using azure-eventhub SDK with the EventHub Emulator "
-            "requires Python 3.8+")
-    @testutils.retryable_test(3, 5)
-    def test_eventhub_multiple_with_metadata(self):
-        # Generate a unique event body for the EventHub event
-        # Record the start_time and end_time for checking event enqueue time
-        start_time = datetime.utcnow()
-        count = 10
-        random_number = str(round(time.time()) % 1000)
-        req_body = {
-            'body': random_number
-        }
-
-        # Invoke metadata_output HttpTrigger to generate an EventHub event
-        # from azure-eventhub SDK
-        r = self.webhost.request('POST',
-                                 f'metadata_output_batch?count={count}',
-                                 data=json.dumps(req_body))
-        self.assertEqual(r.status_code, 200)
-        self.assertIn('OK', r.text)
-        end_time = datetime.utcnow()
-
-        # Once the event is generated, allow the function host to poll from
-        # EventHub and wait for metadata_multiple to execute,
-        # converting the event metadata into a blob.
-        time.sleep(5)
-
-        # Call get_metadata_batch_triggered to retrieve event metadata
-        r = self.webhost.request('GET', 'get_metadata_batch_triggered')
-        self.assertEqual(r.status_code, 200)
-
-        # Check metadata and events length; events should be batch processed
-        events = r.json()
-        self.assertIsInstance(events, list)
-        self.assertGreater(len(events), 1)
-
-        # EventhubEvent property check
-        for event_index in range(len(events)):
-            event = events[event_index]
-
-            # Check if the event is enqueued between start_time and end_time
-            enqueued_time = parser.isoparse(event['enqueued_time'])
-            self.assertTrue(start_time < enqueued_time < end_time)
-
-            # Check if event properties are properly set
-            self.assertIsNone(event['partition_key'])  # only 1 partition
-            self.assertGreaterEqual(event['sequence_number'], 0)
-            self.assertIsNotNone(event['offset'])
-
-            # Check if event.metadata field is properly set
-            self.assertIsNotNone(event['metadata'])
-            metadata = event['metadata']
-            sys_props_array = metadata['SystemPropertiesArray']
-            sys_props = sys_props_array[event_index]
-            enqueued_time = parser.isoparse(sys_props['EnqueuedTimeUtc'])
-
-            # Check event trigger time and other system properties
-            self.assertTrue(
-                start_time.timestamp() < enqueued_time.timestamp() < end_time.timestamp())  # NoQA
-            self.assertIsNone(sys_props['PartitionKey'])
-            self.assertGreaterEqual(sys_props['SequenceNumber'], 0)
-            self.assertIsNotNone(sys_props['Offset'])
-
-
-class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_batch_functions' / \
-            'eventhub_batch_functions_stein'
-
-    @classmethod
-    def get_libraries_to_install(cls):
-        return ['azure-eventhub']
-
-    @testutils.retryable_test(3, 5)
-    def test_eventhub_multiple(self):
-        NUM_EVENTS = 3
-        all_row_keys_seen = dict([(i, True) for i in range(NUM_EVENTS)])
-        partition_key = str(round(time.time()))
-
-        docs = []
-        for i in range(NUM_EVENTS):
-            doc = {'PartitionKey': partition_key, 'RowKey': i}
-            docs.append(doc)
-
-        r = self.webhost.request('POST', 'eventhub_output_batch',
-                                 data=json.dumps(docs))
-        self.assertEqual(r.status_code, 200)
-
-        row_keys = [i for i in range(NUM_EVENTS)]
-        seen = [False] * NUM_EVENTS
-        row_keys_seen = dict(zip(row_keys, seen))
-
-        # Allow trigger to fire.
- time.sleep(5) - - r = self.webhost.request( - 'GET', - 'get_eventhub_batch_triggered') - self.assertEqual(r.status_code, 200) - entries = r.json() - for entry in entries: - self.assertEqual(entry['PartitionKey'], partition_key) - row_key = entry['RowKey'] - row_keys_seen[row_key] = True - - self.assertDictEqual(all_row_keys_seen, row_keys_seen) - - @skipIf(sys.version_info.minor == 7, - "Using azure-eventhub SDK with the EventHub Emulator " - "requires Python 3.8+") - @testutils.retryable_test(3, 5) - def test_eventhub_multiple_with_metadata(self): - # Generate a unique event body for the EventHub events - # Record the start_time and end_time for checking event enqueue time - start_time = datetime.utcnow() - count = 10 - random_number = str(round(time.time()) % 1000) - req_body = { - 'body': random_number - } - - # Invoke the metadata_output HttpTrigger to generate EventHub events - # via the azure-eventhub SDK - r = self.webhost.request('POST', - f'metadata_output_batch?count={count}', - data=json.dumps(req_body)) - self.assertEqual(r.status_code, 200) - self.assertIn('OK', r.text) - end_time = datetime.utcnow() - - # Once the events are generated, allow the function host to poll from - # EventHub and wait for metadata_multiple to execute, - # converting the event metadata into a blob. - time.sleep(5) - - # Call get_metadata_batch_triggered to retrieve event metadata - r = self.webhost.request('GET', 'get_metadata_batch_triggered') - self.assertEqual(r.status_code, 200) - - # Check metadata and events length, events should be batch processed - events = r.json() - self.assertIsInstance(events, list) - self.assertGreater(len(events), 1) - - # EventHubEvent property check - for event_index in range(len(events)): - event = events[event_index] - - # Check if the event is enqueued between start_time and end_time - enqueued_time = parser.isoparse(event['enqueued_time']) - self.assertTrue(start_time < enqueued_time < end_time) - - # Check if event properties are properly set - self.assertIsNone(event['partition_key']) # only 1 partition - self.assertGreaterEqual(event['sequence_number'], 0) - self.assertIsNotNone(event['offset']) - - # Check if event.metadata field is properly set - self.assertIsNotNone(event['metadata']) - metadata = event['metadata'] - sys_props_array = metadata['SystemPropertiesArray'] - sys_props = sys_props_array[event_index] - enqueued_time = parser.isoparse(sys_props['EnqueuedTimeUtc']) - - # Check event trigger time and other system properties - self.assertTrue( - start_time.timestamp() < enqueued_time.timestamp() - < end_time.timestamp()) # NoQA - self.assertIsNone(sys_props['PartitionKey']) - self.assertGreaterEqual(sys_props['SequenceNumber'], 0) - self.assertIsNotNone(sys_props['Offset']) diff --git a/tests/emulator_tests/test_eventhub_functions.py b/tests/emulator_tests/test_eventhub_functions.py deleted file mode 100644 index 03088c731..000000000 --- a/tests/emulator_tests/test_eventhub_functions.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import sys -import time - -from unittest import skipIf - -from tests.utils import testutils - - -class TestEventHubFunctions(testutils.WebHostTestCase): - """Test EventHub Trigger and Output Bindings (cardinality: one). - - Each testcase consists of 3 parts: - 1. An eventhub_output HTTP trigger for generating an EventHub event - 2. An actual eventhub_trigger EventHub trigger for storing the event into a blob - 3.
A get_eventhub_triggered HTTP trigger for retrieving the event info blob - """ - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_functions' - - @classmethod - def get_libraries_to_install(cls): - return ['azure-eventhub'] - - @testutils.retryable_test(3, 5) - def test_eventhub_trigger(self): - # Generate a unique event body for the EventHub event - data = str(round(time.time())) - doc = {'id': data} - - # Invoke eventhub_output HttpTrigger to generate an EventHub event. - r = self.webhost.request('POST', 'eventhub_output', - data=json.dumps(doc)) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - # Once the event is generated, allow the function host to poll from - # EventHub and wait for eventhub_trigger to execute, - # converting the event metadata into a blob. - time.sleep(5) - - # Call get_eventhub_triggered to retrieve event metadata from blob. - r = self.webhost.request('GET', 'get_eventhub_triggered') - - # Wait for the blob to be updated with the latest data from the - # eventhub output binding - time.sleep(5) - self.assertEqual(r.status_code, 200) - response = r.json() - - # Check if the event body matches the initial data - self.assertEqual(response, doc) - - @skipIf(sys.version_info.minor == 7, - "Using azure-eventhub SDK with the EventHub Emulator " - "requires Python 3.8+") - @testutils.retryable_test(3, 5) - def test_eventhub_trigger_with_metadata(self): - # Generate a unique event body for the EventHub event - # Record the start_time and end_time for checking event enqueue time - random_number = str(round(time.time()) % 1000) - req_body = { - 'body': random_number - } - - # Invoke the metadata_output HttpTrigger to generate an EventHub event - # via the azure-eventhub SDK - r = self.webhost.request('POST', 'metadata_output', - data=json.dumps(req_body)) - self.assertEqual(r.status_code, 200) - self.assertIn('OK', r.text) - - # Once the event is generated, allow the function host to poll from - # EventHub and wait for eventhub_trigger to execute, - # converting the event metadata into a blob.
- time.sleep(5) - - # Call get_metadata_triggered to retrieve event metadata from blob - r = self.webhost.request('GET', 'get_metadata_triggered') - - # Wait for the blob to be updated with the latest data from the - # eventhub output binding - time.sleep(5) - self.assertEqual(r.status_code, 200) - - # Check if the event body matches the unique random_number - event = r.json() - self.assertEqual(event['body'], random_number) - - # EventHubEvent property check - # Re-enable these lines after the enqueued_time property is fixed - # enqueued_time = parser.isoparse(event['enqueued_time']) - # self.assertIsNotNone(enqueued_time) - self.assertIsNone(event['partition_key']) # There's only 1 partition - self.assertGreaterEqual(event['sequence_number'], 0) - self.assertIsNotNone(event['offset']) - - # Check if the event contains proper metadata fields - self.assertIsNotNone(event['metadata']) - metadata = event['metadata'] - sys_props = metadata['SystemProperties'] - self.assertIsNone(sys_props['PartitionKey']) - self.assertGreaterEqual(sys_props['SequenceNumber'], 0) - self.assertIsNotNone(sys_props['Offset']) - - -class TestEventHubFunctionsStein(TestEventHubFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_functions' / \ - 'eventhub_functions_stein' - - -class TestEventHubFunctionsSteinGeneric(TestEventHubFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'eventhub_functions' / \ - 'eventhub_functions_stein' / 'generic' diff --git a/tests/emulator_tests/test_generic_functions.py b/tests/emulator_tests/test_generic_functions.py deleted file mode 100644 index 8dc44c835..000000000 --- a/tests/emulator_tests/test_generic_functions.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import time -import typing - -from tests.utils import testutils - - -class TestGenericFunctions(testutils.WebHostTestCase): - """Test Generic Functions with implicit output enabled - - With implicit output enabled for generic types, these tests cover - scenarios where a function has both explicit and implicit output - set to true. We prioritize explicit output. These tests check - that no matter the ordering, the return type is still correctly set. - """ - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'generic_functions' - - def test_return_processed_last(self): - # Tests the case where implicit and explicit return are true - # in the same function and $return is processed before - # the generic binding is - out_resp = self.webhost.request('POST', 'table_out_binding') - self.assertEqual(out_resp.status_code, 200) - - r = self.webhost.request('GET', 'return_processed_last') - self.assertEqual(r.status_code, 200) - - def test_return_not_processed_last(self): - # Tests the case where implicit and explicit return are true - # in the same function and the generic binding is processed - # before $return - out_resp = self.webhost.request('POST', 'table_out_binding') - self.assertEqual(out_resp.status_code, 200) - - r = self.webhost.request('GET', 'return_not_processed_last') - self.assertEqual(r.status_code, 200) - - def test_return_types(self): - out_resp = self.webhost.request('POST', 'table_out_binding') - self.assertEqual(out_resp.status_code, 200) - # Checking that the function app is okay - time.sleep(10) - # Checking webhost status.
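These tests pace themselves with fixed sleeps and point-in-time status checks like the one here. A sketch of an alternative readiness probe that polls the web host root endpoint until it responds, assuming a requests-style HTTP client (poll_ready is illustrative, not part of testutils):

    import time

    import requests

    def poll_ready(url: str, timeout: float = 30.0) -> bool:
        # Poll the web host until it answers with a 2xx status or the
        # timeout elapses; True means the host is ready to serve.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            try:
                if requests.get(url, timeout=5).ok:
                    return True
            except requests.RequestException:
                pass
            time.sleep(1)
        return False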
- r = self.webhost.request('GET', '', no_prefix=True, - timeout=5) - self.assertTrue(r.ok) - - def check_log_return_types(self, host_out: typing.List[str]): - # Checks that functions executed correctly - self.assertIn("This timer trigger function executed " - "successfully", host_out) - self.assertIn("Return string", host_out) - self.assertIn("Return bytes", host_out) - self.assertIn("Return dict", host_out) - self.assertIn("Return list", host_out) - self.assertIn("Return int", host_out) - self.assertIn("Return double", host_out) - self.assertIn("Return bool", host_out) - - # Checks for failed executions (TypeErrors, etc.) - errors_found = False - for log in host_out: - if "Exception" in log: - errors_found = True - break - self.assertFalse(errors_found) - - -class TestGenericFunctionsStein(TestGenericFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'generic_functions' / \ - 'generic_functions_stein' diff --git a/tests/emulator_tests/test_queue_functions.py b/tests/emulator_tests/test_queue_functions.py deleted file mode 100644 index 793628169..000000000 --- a/tests/emulator_tests/test_queue_functions.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import time - -from tests.utils import testutils - - -class TestQueueFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'queue_functions' - - def test_queue_basic(self): - r = self.webhost.request('POST', 'put_queue', - data='test-message') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - # wait for queue_trigger to process the queue item - time.sleep(1) - - r = self.webhost.request('GET', 'get_queue_blob') - self.assertEqual(r.status_code, 200) - msg_info = r.json() - - self.assertIn('queue', msg_info) - msg = msg_info['queue'] - - self.assertEqual(msg['body'], 'test-message') - for attr in {'id', 'expiration_time', 'insertion_time', - 'time_next_visible', 'pop_receipt', 'dequeue_count'}: - self.assertIsNotNone(msg.get(attr)) - - def test_queue_return(self): - r = self.webhost.request('POST', 'put_queue_return', - data='test-message-return') - self.assertEqual(r.status_code, 200) - - # wait for queue_trigger to process the queue item - time.sleep(1) - - r = self.webhost.request('GET', 'get_queue_blob_return') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-message-return') - - def test_queue_message_object_return(self): - r = self.webhost.request('POST', 'put_queue_message_return', - data='test-message-object-return') - self.assertEqual(r.status_code, 200) - - # wait for queue_trigger to process the queue item - time.sleep(1) - - r = self.webhost.request('GET', 'get_queue_blob_message_return') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-message-object-return') - - def test_queue_untyped_return(self): - r = self.webhost.request('POST', 'put_queue_untyped_return', - data='test-untyped-return') - self.assertEqual(r.status_code, 200) - - # wait for queue_trigger to process the queue item - time.sleep(1) - - r = self.webhost.request('GET', 'get_queue_untyped_blob_return') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-untyped-return') - - def test_queue_return_multiple(self): - r = self.webhost.request('POST', 'put_queue_return_multiple', - data='foo') - self.assertTrue(200 <= r.status_code < 300, - f"Returned status code {r.status_code}, " - 
"not in the 200-300 range.") - - # wait for queue_trigger to process the queue item - time.sleep(1) - - def test_queue_return_multiple_outparam(self): - r = self.webhost.request('POST', 'put_queue_multiple_out', - data='foo') - self.assertTrue(200 <= r.status_code < 300, - f"Returned status code {r.status_code}, " - "not in the 200-300 range.") - self.assertEqual(r.text, 'HTTP response: foo') - - -class TestQueueFunctionsStein(TestQueueFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'queue_functions' / \ - 'queue_functions_stein' - - -class TestQueueFunctionsSteinGeneric(TestQueueFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'queue_functions' / \ - 'queue_functions_stein' / 'generic' diff --git a/tests/emulator_tests/test_servicebus_functions.py b/tests/emulator_tests/test_servicebus_functions.py deleted file mode 100644 index 2e6bd7310..000000000 --- a/tests/emulator_tests/test_servicebus_functions.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import time - -from tests.utils import testutils - - -class TestServiceBusFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'servicebus_functions' - - @testutils.retryable_test(3, 5) - def test_servicebus_basic(self): - data = str(round(time.time())) - r = self.webhost.request('POST', 'put_message', - data=data) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - max_retries = 10 - - for try_no in range(max_retries): - # wait for trigger to process the queue item - time.sleep(1) - - try: - r = self.webhost.request('GET', 'get_servicebus_triggered') - self.assertEqual(r.status_code, 200) - msg = r.json() - self.assertEqual(msg['body'], data) - for attr in {'message_id', 'body', 'content_type', 'delivery_count', - 'expiration_time', 'label', 'partition_key', 'reply_to', - 'reply_to_session_id', 'scheduled_enqueue_time', - 'session_id', 'time_to_live', 'to', 'user_properties', - 'application_properties', 'correlation_id', - 'dead_letter_error_description', 'dead_letter_reason', - 'dead_letter_source', 'enqueued_sequence_number', - 'enqueued_time_utc', 'expires_at_utc', 'locked_until', - 'lock_token', 'sequence_number', 'state', 'subject', - 'transaction_partition_key'}: - self.assertIn(attr, msg) - except (AssertionError, json.JSONDecodeError): - if try_no == max_retries - 1: - raise - else: - break - - -class TestServiceBusFunctionsStein(TestServiceBusFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'servicebus_functions' / \ - 'servicebus_functions_stein' - - -class TestServiceBusFunctionsSteinGeneric(TestServiceBusFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'servicebus_functions' / \ - 'servicebus_functions_stein' / 'generic' diff --git a/tests/emulator_tests/test_table_functions.py b/tests/emulator_tests/test_table_functions.py deleted file mode 100644 index b5282bd92..000000000 --- a/tests/emulator_tests/test_table_functions.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json -import pathlib -import time - -from tests.utils import testutils - - -class TestTableFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'table_functions' - - def test_table_bindings(self): - out_resp = self.webhost.request('POST', 'table_out_binding') - self.assertEqual(out_resp.status_code, 200) - row_key = json.loads(out_resp.text)['RowKey'] - - script_dir = pathlib.Path(self.get_script_dir()) - json_path = pathlib.Path('table_in_binding/function.json') - full_json_path = testutils.TESTS_ROOT / script_dir / json_path - # Dynamically rewrite function.json to point to new row key - with open(full_json_path, 'r') as f: - func_dict = json.load(f) - func_dict['bindings'][1]['rowKey'] = row_key - - with open(full_json_path, 'w') as f: - json.dump(func_dict, f, indent=2) - - # wait for host to restart after change - time.sleep(1) - - in_resp = self.webhost.request('GET', 'table_in_binding') - self.assertEqual(in_resp.status_code, 200) - row_key_present = False - for row in json.loads(in_resp.text): - if row["RowKey"] == row_key: - row_key_present = True - break - self.assertTrue(row_key_present) - - -class TestTableFunctionsStein(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'table_functions' / \ - 'table_functions_stein' - - def test_table_bindings(self): - out_resp = self.webhost.request('POST', 'table_out_binding') - self.assertEqual(out_resp.status_code, 200) - row_key = json.loads(out_resp.text)['RowKey'] - - in_resp = self.webhost.request('GET', f'table_in_binding/{row_key}') - self.assertEqual(in_resp.status_code, 200) - row_key_present = False - for row in json.loads(in_resp.text): - if row["RowKey"] == row_key: - row_key_present = True - break - self.assertTrue(row_key_present) - - -class TestTableFunctionsGeneric(TestTableFunctionsStein): - - @classmethod - def get_script_dir(cls): - return testutils.EMULATOR_TESTS_FOLDER / 'table_functions' / \ - 'table_functions_stein' / \ - 'generic' diff --git a/tests/emulator_tests/utils/eventhub/config.json b/tests/emulator_tests/utils/eventhub/config.json deleted file mode 100644 index 710935c14..000000000 --- a/tests/emulator_tests/utils/eventhub/config.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "UserConfig": { - "NamespaceConfig": [ - { - "Type": "EventHub", - "Name": "emulatorNs1", - "Entities": [ - { - "Name": "python-worker-ci-eventhub-batch", - "PartitionCount": 2, - "ConsumerGroups": [ - { - "Name": "cg1" - } - ] - }, - { - "Name": "python-worker-ci-eventhub-batch-metadata", - "PartitionCount": 2, - "ConsumerGroups": [ - { - "Name": "cg1" - } - ] - }, - { - "Name": "python-worker-ci-eventhub-one", - "PartitionCount": 2, - "ConsumerGroups": [ - { - "Name": "cg1" - } - ] - }, - { - "Name": "python-worker-ci-eventhub-one-metadata", - "PartitionCount": 2, - "ConsumerGroups": [ - { - "Name": "cg1" - } - ] - } - ] - } - ], - "LoggingConfig": { - "Type": "File" - } - } -} \ No newline at end of file diff --git a/tests/emulator_tests/utils/eventhub/docker-compose.yml b/tests/emulator_tests/utils/eventhub/docker-compose.yml deleted file mode 100644 index 2c40aa042..000000000 --- a/tests/emulator_tests/utils/eventhub/docker-compose.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: microsoft-azure-eventhubs -services: - # Service for the Event Hubs Emulator - emulator: - container_name: "eventhubs-emulator" - image: "mcr.microsoft.com/azure-messaging/eventhubs-emulator:latest" - volumes: - - 
"./config.json:/Eventhubs_Emulator/ConfigFiles/Config.json" - ports: - - "5672:5672" - environment: - BLOB_SERVER: azurite - METADATA_SERVER: azurite - ACCEPT_EULA: Y - depends_on: - - azurite - networks: - eh-emulator: - aliases: - - "eventhubs-emulator" - # Service for the Azurite Storage Emulator - azurite: - container_name: "azurite" - image: "mcr.microsoft.com/azure-storage/azurite:latest" - ports: - - "10000:10000" - - "10001:10001" - - "10002:10002" - networks: - eh-emulator: - aliases: - - "azurite" -networks: - eh-emulator: \ No newline at end of file diff --git a/tests/emulator_tests/utils/servicebus/config.json b/tests/emulator_tests/utils/servicebus/config.json deleted file mode 100644 index 20cf83447..000000000 --- a/tests/emulator_tests/utils/servicebus/config.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "UserConfig": { - "Namespaces": [ - { - "Name": "sbemulatorns", - "Queues": [ - { - "Name": "testqueue", - "Properties": { - "DeadLetteringOnMessageExpiration": false, - "DefaultMessageTimeToLive": "PT1H", - "DuplicateDetectionHistoryTimeWindow": "PT20S", - "ForwardDeadLetteredMessagesTo": "", - "ForwardTo": "", - "LockDuration": "PT1M", - "MaxDeliveryCount": 10, - "RequiresDuplicateDetection": false, - "RequiresSession": false - } - } - ] - } - ], - "Logging": { - "Type": "File" - } - } -} \ No newline at end of file diff --git a/tests/emulator_tests/utils/servicebus/docker-compose.yml b/tests/emulator_tests/utils/servicebus/docker-compose.yml deleted file mode 100644 index c1781a858..000000000 --- a/tests/emulator_tests/utils/servicebus/docker-compose.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: microsoft-azure-servicebus -services: - # Service for the Service Bus Emulator - sbemulator: - container_name: "servicebus-emulator" - image: mcr.microsoft.com/azure-messaging/servicebus-emulator:latest - volumes: - - "./config.json:/ServiceBus_Emulator/ConfigFiles/Config.json" - ports: - - "5672:5672" - environment: - SQL_SERVER: sqledge - MSSQL_SA_PASSWORD: ${AzureWebJobsSQLPassword} - ACCEPT_EULA: Y - depends_on: - - sqledge - networks: - sb-emulator: - aliases: - - "sb-emulator" - sqledge: - container_name: "sqledge" - image: "mcr.microsoft.com/azure-sql-edge:latest" - networks: - sb-emulator: - aliases: - - "sqledge" - environment: - ACCEPT_EULA: Y - MSSQL_SA_PASSWORD: ${AzureWebJobsSQLPassword} - # Service for the Azurite Storage Emulator - azurite: - container_name: "azurite-sb" - image: "mcr.microsoft.com/azure-storage/azurite:latest" - ports: - - "10003:10003" - - "10004:10004" - - "10005:10005" -networks: - sb-emulator: \ No newline at end of file diff --git a/tests/endtoend/blueprint_functions/blueprint_different_dir/blueprint_directory/blueprint.py b/tests/endtoend/blueprint_functions/blueprint_different_dir/blueprint_directory/blueprint.py deleted file mode 100644 index d232dcead..000000000 --- a/tests/endtoend/blueprint_functions/blueprint_different_dir/blueprint_directory/blueprint.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -import time -from datetime import datetime - -import azure.functions as func - -bp = func.Blueprint() - - -@bp.route(route="default_template") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. 
This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. " - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) - - -@bp.route(route="http_func") -def http_func(req: func.HttpRequest) -> func.HttpResponse: - time.sleep(1) - - current_time = datetime.now().strftime("%H:%M:%S") - return func.HttpResponse(f"{current_time}") diff --git a/tests/endtoend/blueprint_functions/blueprint_different_dir/function_app.py b/tests/endtoend/blueprint_functions/blueprint_different_dir/function_app.py deleted file mode 100644 index 3f1ba52d6..000000000 --- a/tests/endtoend/blueprint_functions/blueprint_different_dir/function_app.py +++ /dev/null @@ -1,6 +0,0 @@ -import azure.functions as func -from blueprint_directory.blueprint import bp - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - -app.register_functions(bp) diff --git a/tests/endtoend/blueprint_functions/functions_in_blueprint_only/blueprint.py b/tests/endtoend/blueprint_functions/functions_in_blueprint_only/blueprint.py deleted file mode 100644 index d232dcead..000000000 --- a/tests/endtoend/blueprint_functions/functions_in_blueprint_only/blueprint.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging -import time -from datetime import datetime - -import azure.functions as func - -bp = func.Blueprint() - - -@bp.route(route="default_template") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. 
" - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) - - -@bp.route(route="http_func") -def http_func(req: func.HttpRequest) -> func.HttpResponse: - time.sleep(1) - - current_time = datetime.now().strftime("%H:%M:%S") - return func.HttpResponse(f"{current_time}") diff --git a/tests/endtoend/blueprint_functions/functions_in_blueprint_only/function_app.py b/tests/endtoend/blueprint_functions/functions_in_blueprint_only/function_app.py deleted file mode 100644 index 44712bfee..000000000 --- a/tests/endtoend/blueprint_functions/functions_in_blueprint_only/function_app.py +++ /dev/null @@ -1,6 +0,0 @@ -import azure.functions as func -from blueprint import bp - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - -app.register_functions(bp) diff --git a/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/blueprint.py b/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/blueprint.py deleted file mode 100644 index 785049396..000000000 --- a/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/blueprint.py +++ /dev/null @@ -1,31 +0,0 @@ -import logging - -import azure.functions as func - -bp = func.Blueprint() - - -@bp.route(route="default_template") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. " - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) diff --git a/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/function_app.py b/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/function_app.py deleted file mode 100644 index 1813e0578..000000000 --- a/tests/endtoend/blueprint_functions/functions_in_both_blueprint_functionapp/function_app.py +++ /dev/null @@ -1,12 +0,0 @@ -import azure.functions as func -from blueprint import bp - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - -app.register_blueprint(bp) - - -@app.route(route="return_http") -def return_http(req: func.HttpRequest): - return func.HttpResponse('
Hello World™
', - mimetype='text/html') diff --git a/tests/endtoend/blueprint_functions/multiple_function_registers/function_app.py b/tests/endtoend/blueprint_functions/multiple_function_registers/function_app.py deleted file mode 100644 index 2b212266d..000000000 --- a/tests/endtoend/blueprint_functions/multiple_function_registers/function_app.py +++ /dev/null @@ -1,12 +0,0 @@ -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="return_http") -def return_http(req: func.HttpRequest): - return func.HttpResponse('
Hello World™
', - mimetype='text/html') - - -asgi_app = func.AsgiFunctionApp() diff --git a/tests/endtoend/blueprint_functions/only_blueprint/function_app.py b/tests/endtoend/blueprint_functions/only_blueprint/function_app.py deleted file mode 100644 index 785049396..000000000 --- a/tests/endtoend/blueprint_functions/only_blueprint/function_app.py +++ /dev/null @@ -1,31 +0,0 @@ -import logging - -import azure.functions as func - -bp = func.Blueprint() - - -@bp.route(route="default_template") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. " - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/function_app.py b/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/function_app.py deleted file mode 100644 index c0ddcaad1..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/function_app.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route() -@app.cosmos_db_input( - arg_name="docs", database_name="test", - container_name="items", - id="cosmosdb-input-test", - connection="AzureWebJobsCosmosDBConnectionString") -def cosmosdb_input(req: func.HttpRequest, docs: func.DocumentList) -> str: - return func.HttpResponse(docs[0].to_json(), mimetype='application/json') - - -@app.cosmos_db_trigger( - arg_name="docs", database_name="test", - container_name="items", - lease_container_name="leases", - connection="AzureWebJobsCosmosDBConnectionString", - create_lease_container_if_not_exists=True) -@app.blob_output(arg_name="$return", connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def cosmosdb_trigger(docs: func.DocumentList) -> str: - return docs[0].to_json() - - -@app.route() -@app.blob_input(arg_name="file", connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def get_cosmosdb_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.route() -@app.cosmos_db_output( - arg_name="doc", database_name="test", - container_name="items", - create_if_not_exists=True, - connection="AzureWebJobsCosmosDBConnectionString") -def put_document(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - return 'OK' diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/generic/function_app.py b/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/generic/function_app.py deleted file mode 100644 index baf665be6..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_functions_stein/generic/function_app.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="docs", - type="cosmosDB", - database_name="test", - container_name="items", - id="cosmosdb-input-test", - connection="AzureWebJobsCosmosDBConnectionString") -def cosmosdb_input(req: func.HttpRequest, docs: func.DocumentList) -> str: - return func.HttpResponse(docs[0].to_json(), mimetype='application/json') - - -@app.generic_trigger( - arg_name="docs", - type="cosmosDBTrigger", - database_name="test", - container_name="items", - lease_container_name="leases", - connection="AzureWebJobsCosmosDBConnectionString", - create_lease_container_if_not_exists=True) -@app.generic_output_binding( - arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def cosmosdb_trigger(docs: func.DocumentList) -> str: - return docs[0].to_json() - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def get_cosmosdb_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="doc", - database_name="test", - type="cosmosDB", - container_name="items", - create_if_not_exists=True, - connection="AzureWebJobsCosmosDBConnectionString") -def put_document(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - return 'OK' diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_input/__init__.py b/tests/endtoend/cosmosdb_functions/cosmosdb_input/__init__.py deleted file mode 100644 index 313d63137..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_input/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
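The generic-binding app above expresses the same wiring as the typed decorators by passing each binding type as a string (httpTrigger, http, cosmosDB, blob). A minimal sketch of an HTTP echo function in that style, reusing only decorator names and parameters that appear in this diff:

    import azure.functions as func

    app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)


    @app.generic_trigger(arg_name="req", type="httpTrigger")
    @app.generic_output_binding(arg_name="$return", type="http")
    def echo(req: func.HttpRequest) -> str:
        # Same wiring as @app.route(), but declared through generic
        # bindings with the binding type given as a string.
        return req.get_body().decode('utf-8')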
-import azure.functions as func - - -def main(req: func.HttpRequest, docs: func.DocumentList) -> str: - return func.HttpResponse(docs[0].to_json(), mimetype='application/json') diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_input/function.json b/tests/endtoend/cosmosdb_functions/cosmosdb_input/function.json deleted file mode 100644 index 23608f043..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_input/function.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "in", - "type": "cosmosDB", - "name": "docs", - "databaseName": "test", - "containerName": "items", - "id": "cosmosdb-input-test", - "leaseContainerName": "leases", - "connection": "AzureWebJobsCosmosDBConnectionString", - "createLeaseContainerIfNotExists": true - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/__init__.py b/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/__init__.py deleted file mode 100644 index a8868aa79..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(docs: azf.DocumentList) -> str: - return docs[0].to_json() diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/function.json b/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/function.json deleted file mode 100644 index 76a24c07d..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_trigger/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "direction": "in", - "type": "cosmosDBTrigger", - "name": "docs", - "databaseName": "test", - "containerName": "items", - "id": "cosmosdb-trigger-test", - "leaseContainerName": "leases", - "connection": "AzureWebJobsCosmosDBConnectionString", - "createLeaseContainerIfNotExists": true - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-cosmosdb-triggered.txt" - } - ] -} diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/function_app.py b/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/function_app.py deleted file mode 100644 index 27f51e38a..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/function_app.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - -app = func.FunctionApp() - - -@app.route() -@app.cosmos_db_input_v3( - arg_name="docs", database_name="test", - collection_name="items", - id="cosmosdb-input-test", - connection_string_setting="AzureWebJobsCosmosDBConnectionString") -def cosmosdb_input(req: func.HttpRequest, docs: func.DocumentList) -> str: - return func.HttpResponse(docs[0].to_json(), mimetype='application/json') - - -@app.cosmos_db_trigger_v3( - arg_name="docs", database_name="test", - collection_name="items", - lease_collection_name="leases", - connection_string_setting="AzureWebJobsCosmosDBConnectionString", - create_lease_collection_if_not_exists=True) -@app.blob_output(arg_name="$return", connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def cosmosdb_trigger(docs: func.DocumentList) -> str: - return docs[0].to_json() - - -@app.route() -@app.blob_input(arg_name="file", connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def get_cosmosdb_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.route() -@app.cosmos_db_output_v3( - arg_name="doc", database_name="test", - collection_name="items", - create_if_not_exists=True, - connection_string_setting="AzureWebJobsCosmosDBConnectionString") -def put_document(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - - return 'OK' diff --git a/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/generic/function_app.py b/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/generic/function_app.py deleted file mode 100644 index dee78952a..000000000 --- a/tests/endtoend/cosmosdb_functions/cosmosdb_v3_functions_stein/generic/function_app.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
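The v3 apps in this diff differ from the current ones mainly in binding vocabulary: the v3 decorators keep the Cosmos "collection" naming and connection_string_setting, while the current decorators use "container" naming and connection. A side-by-side sketch of the same input binding under both spellings, with parameter values taken from the files in this diff:

    import azure.functions as func

    app = func.FunctionApp()


    # Current spelling: container_name / connection.
    @app.route()
    @app.cosmos_db_input(
        arg_name="docs", database_name="test",
        container_name="items", id="cosmosdb-input-test",
        connection="AzureWebJobsCosmosDBConnectionString")
    def read_current(req: func.HttpRequest,
                     docs: func.DocumentList) -> str:
        return docs[0].to_json()


    # v3 spelling: collection_name / connection_string_setting.
    @app.route()
    @app.cosmos_db_input_v3(
        arg_name="docs", database_name="test",
        collection_name="items", id="cosmosdb-input-test",
        connection_string_setting="AzureWebJobsCosmosDBConnectionString")
    def read_v3(req: func.HttpRequest,
                docs: func.DocumentList) -> str:
        return docs[0].to_json()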
-import azure.functions as func - -app = func.FunctionApp() - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="docs", - type="cosmosDB", - database_name="test", - collection_name="items", - id="cosmosdb-input-test", - connection_string_setting="AzureWebJobsCosmosDBConnectionString") -def cosmosdb_input(req: func.HttpRequest, docs: func.DocumentList) -> str: - return func.HttpResponse(docs[0].to_json(), mimetype='application/json') - - -@app.generic_trigger( - arg_name="docs", - type="cosmosDBTrigger", - database_name="test", - collection_name="items", - lease_collection_name="leases", - connection_string_setting="AzureWebJobsCosmosDBConnectionString", - create_lease_collection_if_not_exists=True) -@app.generic_output_binding( - arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def cosmosdb_trigger(docs: func.DocumentList) -> str: - return docs[0].to_json() - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - connection="AzureWebJobsStorage", - type="blob", - path="python-worker-tests/test-cosmosdb-triggered.txt") -def get_cosmosdb_triggered(req: func.HttpRequest, - file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.generic_trigger(arg_name="req", type="httpTrigger") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="doc", - database_name="test", - type="cosmosDB", - collection_name="items", - create_if_not_exists=True, - connection_string_setting="AzureWebJobsCosmosDBConnectionString") -def put_document(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - - return 'OK' diff --git a/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/function.json b/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/function.json deleted file mode 100644 index e3778812e..000000000 --- a/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-cosmosdb-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/main.py b/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/main.py deleted file mode 100644 index 167c7a574..000000000 --- a/tests/endtoend/cosmosdb_functions/get_cosmosdb_triggered/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -def main(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/endtoend/cosmosdb_functions/put_document/__init__.py b/tests/endtoend/cosmosdb_functions/put_document/__init__.py deleted file mode 100644 index 5e481332e..000000000 --- a/tests/endtoend/cosmosdb_functions/put_document/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
-import azure.functions as func - - -def main(req: func.HttpRequest, doc: func.Out[func.Document]): - doc.set(func.Document.from_json(req.get_body())) - - return 'OK' diff --git a/tests/endtoend/cosmosdb_functions/put_document/function.json b/tests/endtoend/cosmosdb_functions/put_document/function.json deleted file mode 100644 index b385fbfd5..000000000 --- a/tests/endtoend/cosmosdb_functions/put_document/function.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "direction": "out", - "type": "cosmosDB", - "name": "doc", - "databaseName": "test", - "containerName": "items", - "leaseContainerName": "leases", - "createLeaseContainerIfNotExists": true, - "connection": "AzureWebJobsCosmosDBConnectionString", - "createIfNotExists": true - }, - { - "direction": "out", - "name": "$return", - "type": "http" - } - ] -} diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/__init__.py deleted file mode 100644 index 88cb6b2dc..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Import binding implementations to register them -from . import http # NoQA -from ._abc import Context, Out -from ._http import HttpRequest, HttpResponse -from .meta import get_binding_registry - -__all__ = ( - # Functions - 'get_binding_registry', - - # Generics. - 'Context', - 'Out', - - # Binding rich types, sorted alphabetically. - 'HttpRequest', - 'HttpResponse', -) - -__version__ = '9.9.9' diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_abc.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_abc.py deleted file mode 100644 index 8add53c99..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_abc.py +++ /dev/null @@ -1,422 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
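The _abc module that follows defines the worker-facing abstract interfaces; Out[T], for instance, is the contract behind output parameters such as the doc argument in put_document above. A minimal concrete holder illustrating that contract (SimpleOut is illustrative, not part of the package):

    import typing

    T = typing.TypeVar('T')


    class SimpleOut(typing.Generic[T]):
        # Concrete stand-in for the abstract Out[T] interface: the
        # function body calls set() with its output value and the
        # binding layer later reads it back with get().
        def __init__(self) -> None:
            self._value: typing.Optional[T] = None

        def set(self, val: T) -> None:
            self._value = val

        def get(self) -> typing.Optional[T]:
            return self._value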
- -import abc -import datetime -import io -import typing - -T = typing.TypeVar('T') - - -class Out(abc.ABC, typing.Generic[T]): - """An interface to set function output parameters.""" - - @abc.abstractmethod - def set(self, val: T) -> None: - """Set the value of the output parameter.""" - pass - - @abc.abstractmethod - def get(self) -> T: - """Get the value of the output parameter.""" - pass - - -class RpcException: - """Rpc Exception object.""" - - @property - @abc.abstractmethod - def source(self) -> str: - """Source of the exception.""" - pass - - @property - @abc.abstractmethod - def stack_trace(self) -> str: - """Stack trace for the exception.""" - pass - - @property - @abc.abstractmethod - def message(self) -> str: - """Textual message describing the exception.""" - pass - - -class TraceContext(abc.ABC): - """Trace context object.""" - - @property - @abc.abstractmethod - def trace_state(self) -> str: - """Gets trace state from trace-context.""" - pass - - @property - @abc.abstractmethod - def trace_parent(self) -> str: - """Gets trace parent from trace-context.""" - pass - - @property - @abc.abstractmethod - def attributes(self) -> typing.Dict[str, str]: - """Gets trace-context attributes.""" - pass - - -class RetryContext(abc.ABC): - """Retry Context object. - For more information refer: https://aka.ms/azfunc-retries-policies - """ - - @property - @abc.abstractmethod - def retry_count(self) -> int: - """Gets the current retry count from retry-context.""" - pass - - @property - @abc.abstractmethod - def max_retry_count(self) -> int: - """Gets the max retry count from retry-context.""" - pass - - @property - @abc.abstractmethod - def exception(self) -> RpcException: - """Gets the RpcException""" - pass - - -class Context(abc.ABC): - """Function invocation context.""" - - @property - @abc.abstractmethod - def invocation_id(self) -> str: - """Function invocation ID.""" - pass - - @property - @abc.abstractmethod - def function_name(self) -> str: - """Function name.""" - pass - - @property - @abc.abstractmethod - def function_directory(self) -> str: - """Function directory.""" - pass - - @property - @abc.abstractmethod - def trace_context(self) -> TraceContext: - """Context for distributed tracing.""" - pass - - @property - @abc.abstractmethod - def retry_context(self) -> RetryContext: - """Context for retries to the function.""" - pass - - -class HttpRequest(abc.ABC): - """HTTP request object.""" - - @property - @abc.abstractmethod - def method(self) -> str: - """Request method.""" - pass - - @property - @abc.abstractmethod - def url(self) -> str: - """Request URL.""" - pass - - @property - @abc.abstractmethod - def headers(self) -> typing.Mapping[str, str]: - """A dictionary containing request headers.""" - pass - - @property - @abc.abstractmethod - def params(self) -> typing.Mapping[str, str]: - """A dictionary containing request GET parameters.""" - pass - - @property - @abc.abstractmethod - def route_params(self) -> typing.Mapping[str, str]: - """A dictionary containing request route parameters.""" - pass - - @abc.abstractmethod - def get_body(self) -> bytes: - """Return request body as bytes.""" - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - """Decode and return request body as JSON. - - :raises ValueError: - when the request does not contain valid JSON data. 
- """ - pass - - -class HttpResponse(abc.ABC): - - @property - @abc.abstractmethod - def status_code(self) -> int: - pass - - @property - @abc.abstractmethod - def mimetype(self): - pass - - @property - @abc.abstractmethod - def charset(self): - pass - - @property - @abc.abstractmethod - def headers(self) -> typing.MutableMapping[str, str]: - pass - - @abc.abstractmethod - def get_body(self) -> bytes: - pass - - -class TimerRequest(abc.ABC): - """Timer request object.""" - - @property - @abc.abstractmethod - def past_due(self) -> bool: - """Whether the timer is past due.""" - pass - - -class InputStream(io.BufferedIOBase, abc.ABC): - """File-like object representing an input blob.""" - - @abc.abstractmethod - def read(self, size=-1) -> bytes: - """Return and read up to *size* bytes. - - :param int size: - The number of bytes to read. If the argument is omitted, - ``None``, or negative, data is read and returned until - EOF is reached. - - :return: - Bytes read from the input stream. - """ - pass - - @property - @abc.abstractmethod - def name(self) -> typing.Optional[str]: - """The name of the blob.""" - pass - - @property - @abc.abstractmethod - def length(self) -> typing.Optional[int]: - """The size of the blob in bytes.""" - pass - - @property - @abc.abstractmethod - def uri(self) -> typing.Optional[str]: - """The blob's primary location URI.""" - pass - - -class QueueMessage(abc.ABC): - - @property - @abc.abstractmethod - def id(self) -> typing.Optional[str]: - pass - - @abc.abstractmethod - def get_body(self) -> typing.Union[str, bytes]: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def dequeue_count(self) -> typing.Optional[int]: - pass - - @property - @abc.abstractmethod - def expiration_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def insertion_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def time_next_visible(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def pop_receipt(self) -> typing.Optional[str]: - pass - - -class EventGridEvent(abc.ABC): - @property - @abc.abstractmethod - def id(self) -> str: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def topic(self) -> str: - pass - - @property - @abc.abstractmethod - def subject(self) -> str: - pass - - @property - @abc.abstractmethod - def event_type(self) -> str: - pass - - @property - @abc.abstractmethod - def event_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def data_version(self) -> str: - pass - - -class EventGridOutputEvent(abc.ABC): - @property - @abc.abstractmethod - def id(self) -> str: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def subject(self) -> str: - pass - - @property - @abc.abstractmethod - def event_type(self) -> str: - pass - - @property - @abc.abstractmethod - def event_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def data_version(self) -> str: - pass - - -class Document(abc.ABC): - - @classmethod - @abc.abstractmethod - def from_json(cls, json_data: str) -> 'Document': - pass - - @classmethod - @abc.abstractmethod - def from_dict(cls, dct: dict) -> 'Document': - pass - - @abc.abstractmethod - def __getitem__(self, key): - pass - - @abc.abstractmethod - def __setitem__(self, key, 
value): - pass - - @abc.abstractmethod - def to_json(self) -> str: - pass - - -class DocumentList(abc.ABC): - pass - - -class EventHubEvent(abc.ABC): - - @abc.abstractmethod - def get_body(self) -> bytes: - pass - - @property - @abc.abstractmethod - def partition_key(self) -> typing.Optional[str]: - pass - - @property - @abc.abstractmethod - def sequence_number(self) -> typing.Optional[int]: - pass - - @property - @abc.abstractmethod - def iothub_metadata(self) -> typing.Optional[typing.Mapping[str, str]]: - pass - - @property - @abc.abstractmethod - def enqueued_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def offset(self) -> typing.Optional[str]: - pass - - -class OrchestrationContext(abc.ABC): - @property - @abc.abstractmethod - def body(self) -> str: - pass diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_http.py deleted file mode 100644 index 89ee2678c..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_http.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import collections.abc -import io -import json -import types -import typing - -from . import _abc -from ._thirdparty.werkzeug import datastructures as _wk_datastructures -from ._thirdparty.werkzeug import formparser as _wk_parser -from ._thirdparty.werkzeug import http as _wk_http - - -class BaseHeaders(collections.abc.Mapping): - - def __init__(self, source: typing.Optional[typing.Mapping] = None) -> None: - self.__http_headers__: typing.Dict[str, str] = {} - - if source is not None: - self.__http_headers__.update( - {k.lower(): v for k, v in source.items()}) - - def __getitem__(self, key: str) -> str: - return self.__http_headers__[key.lower()] - - def __len__(self): - return len(self.__http_headers__) - - def __contains__(self, key: typing.Any): - return key.lower() in self.__http_headers__ - - def __iter__(self): - return iter(self.__http_headers__) - - -class HttpRequestHeaders(BaseHeaders): - pass - - -class HttpResponseHeaders(BaseHeaders, collections.abc.MutableMapping): - - def __setitem__(self, key: str, value: str): - self.__http_headers__[key.lower()] = value - - def __delitem__(self, key: str): - del self.__http_headers__[key.lower()] - - -class HttpResponse(_abc.HttpResponse): - """An HTTP response object. - - :param str/bytes body: - Optional response body. - - :param int status_code: - Response status code. If not specified, defaults to 200. - - :param dict headers: - An optional mapping containing response HTTP headers. - - :param str mimetype: - An optional response MIME type. If not specified, defaults to - ``'text/plain'``. - - :param str charset: - Response content text encoding. If not specified, defaults to - ``'utf-8'``. 
- """ - - def __init__(self, body=None, *, - status_code=None, headers=None, mimetype=None, charset=None): - if status_code is None: - status_code = 200 - self.__status_code = status_code - - if mimetype is None: - mimetype = 'text/plain' - self.__mimetype = mimetype - - if charset is None: - charset = 'utf-8' - self.__charset = charset - - if headers is None: - headers = {} - self.__headers = HttpResponseHeaders(headers) - - if body is not None: - self.__set_body(body) - else: - self.__body = b'' - - @property - def mimetype(self): - """Response MIME type.""" - return self.__mimetype - - @property - def charset(self): - """Response text encoding.""" - return self.__charset - - @property - def headers(self): - """A dictionary of response HTTP headers.""" - return self.__headers - - @property - def status_code(self): - """Response status code.""" - return self.__status_code - - def __set_body(self, body): - if isinstance(body, str): - body = body.encode(self.__charset) - - if not isinstance(body, (bytes, bytearray)): - raise TypeError( - f'response is expected to be either of ' - f'str, bytes, or bytearray, got {type(body).__name__}') - - self.__body = bytes(body) - - def get_body(self) -> bytes: - """Response body as a bytes object.""" - return self.__body - - -class HttpRequest(_abc.HttpRequest): - """An HTTP request object. - - :param str method: - HTTP request method name. - - :param str url: - HTTP URL. - - :param dict headers: - An optional mapping containing HTTP request headers. - - :param dict params: - An optional mapping containing HTTP request params. - - :param dict route_params: - An optional mapping containing HTTP request route params. - - :param bytes body: - HTTP request body. - """ - - def __init__(self, - method: str, - url: str, *, - headers: typing.Optional[typing.Mapping[str, str]] = None, - params: typing.Optional[typing.Mapping[str, str]] = None, - route_params: typing.Optional[ - typing.Mapping[str, str]] = None, - body: bytes) -> None: - self.__method = method - self.__url = url - self.__headers = HttpRequestHeaders(headers or {}) - self.__params = types.MappingProxyType(params or {}) - self.__route_params = types.MappingProxyType(route_params or {}) - self.__body_bytes = body - self.__form_parsed = False - self.__form = None - self.__files = None - - @property - def url(self): - return self.__url - - @property - def method(self): - return self.__method.upper() - - @property - def headers(self): - return self.__headers - - @property - def params(self): - return self.__params - - @property - def route_params(self): - return self.__route_params - - @property - def form(self): - self._parse_form_data() - return self.__form - - @property - def files(self): - self._parse_form_data() - return self.__files - - def get_body(self) -> bytes: - return self.__body_bytes - - def get_json(self) -> typing.Any: - return json.loads(self.__body_bytes.decode('utf-8')) - - def _parse_form_data(self): - if self.__form_parsed: - return - - body = self.get_body() - content_type = self.headers.get('Content-Type', '') - content_length = len(body) - mimetype, options = _wk_http.parse_options_header(content_type) - parser = _wk_parser.FormDataParser( - _wk_parser.default_stream_factory, - options.get('charset') or 'utf-8', - 'replace', - None, - None, - _wk_datastructures.ImmutableMultiDict, - ) - - body_stream = io.BytesIO(body) - - _, self.__form, self.__files = parser.parse( - body_stream, mimetype, content_length, options - ) - - self.__form_parsed = True diff --git 
a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/typing_inspect.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/typing_inspect.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/datastructures.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/datastructures.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/formparser.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/formparser.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/http.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_utils.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_utils.py deleted file mode 100644 index a1bd9f3b8..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/_utils.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -from datetime import datetime, timedelta -from typing import List, Optional, Tuple - - -def try_parse_datetime_with_formats( - datetime_str: str, - datetime_formats: List[str] -) -> Tuple[Optional[datetime], Optional[str], Optional[Exception]]: - """Try parsing the datetime string with a list of formats - Parameters - ---------- - datetime_str: str - The datetime string that needs to be parsed (e.g. 2018-12-12T03:16:34.2191Z) - datetime_formats: List[str] - A list of datetime formats that the parser would try to match - - Returns - ------- - Tuple[Optional[datetime], Optional[str], Optional[Exception]]: - If the datetime can be successfully parsed, the first element is the - parsed datetime object and the second is the matched format. - If the datetime cannot be parsed, the first and second elements will be - None, and the third is the exception from the datetime.strptime() - method. 
- """ - for fmt in datetime_formats: - try: - dt = datetime.strptime(datetime_str, fmt) - return (dt, fmt, None) - except ValueError as ve: - last_exception = ve - - return (None, None, last_exception) - - -def try_parse_timedelta_with_formats( - timedelta_str: str, - timedelta_formats: List[str] -) -> Tuple[Optional[timedelta], Optional[str], Optional[Exception]]: - """Try parsing the timedelta string with a list of formats - Parameters - ---------- - timedelta_str: str - The timedelta string that needs to be parsed (e.g. 12:34:56) - timedelta_formats: List[str] - A list of timedelta formats that the parser would try to match - - Returns - ------- - Tuple[Optional[timedelta], Optional[str], Optional[Exception]]: - If the timedelta can be successfully parsed, the first element is the - parsed timedelta object and the second is the matched format. - If the timedelta cannot be parsed, the first and second elements will be - None, and the third is the exception from the datetime.strptime() - method. - """ - - for fmt in timedelta_formats: - try: - # For the singular forms %S, %M, and %H, build the timedelta directly - if fmt == '%S': - td = timedelta(seconds=int(timedelta_str)) - elif fmt == '%M': - td = timedelta(minutes=int(timedelta_str)) - elif fmt == '%H': - td = timedelta(hours=int(timedelta_str)) - else: - dt = datetime.strptime(timedelta_str, fmt) - td = timedelta(hours=dt.hour, - minutes=dt.minute, - seconds=dt.second) - return (td, fmt, None) - except ValueError as ve: - last_exception = ve - - return (None, None, last_exception) diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/http.py deleted file mode 100644 index 211711d67..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/http.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json -import typing - -from azure.functions import _abc as azf_abc -from azure.functions import _http as azf_http - -from . 
import meta - - -class HttpRequest(azf_http.HttpRequest): - """An HTTP request object.""" - - __body_bytes: typing.Optional[bytes] - __body_str: typing.Optional[str] - - def __init__(self, - method: str, - url: str, *, - headers: typing.Mapping[str, str], - params: typing.Mapping[str, str], - route_params: typing.Mapping[str, str], - body_type: str, - body: typing.Union[str, bytes]) -> None: - - body_str: typing.Optional[str] = None - body_bytes: typing.Optional[bytes] = None - if isinstance(body, str): - body_str = body - body_bytes = body_str.encode('utf-8') - elif isinstance(body, bytes): - body_bytes = body - else: - raise TypeError( - f'unexpected HTTP request body type: {type(body).__name__}') - - super().__init__(method=method, url=url, headers=headers, - params=params, route_params=route_params, - body=body_bytes) - - self.__body_type = body_type - self.__body_str = body_str - self.__body_bytes = body_bytes - - def get_body(self) -> bytes: - if self.__body_bytes is None: - assert self.__body_str is not None - self.__body_bytes = self.__body_str.encode('utf-8') - return self.__body_bytes - - def get_json(self) -> typing.Any: - if self.__body_type in ('json', 'string'): - assert self.__body_str is not None - return json.loads(self.__body_str) - elif self.__body_bytes is not None: - try: - return json.loads(self.__body_bytes.decode('utf-8')) - except ValueError as e: - raise ValueError( - 'HTTP request does not contain valid JSON data') from e - else: - raise ValueError( - 'Request body cannot be empty in JSON deserialization') - - -class HttpResponseConverter(meta.OutConverter, binding='http'): - - @classmethod - def check_output_type_annotation(cls, pytype: type) -> bool: - return issubclass(pytype, (azf_abc.HttpResponse, str)) - - @classmethod - def encode(cls, obj: typing.Any, *, - expected_type: typing.Optional[type]) -> meta.Datum: - if isinstance(obj, str): - return meta.Datum(type='string', value=obj) - - if isinstance(obj, azf_abc.HttpResponse): - status = obj.status_code - headers = dict(obj.headers) - if 'content-type' not in headers: - if obj.mimetype.startswith('text/'): - ct = f'{obj.mimetype}; charset={obj.charset}' - else: - ct = f'{obj.mimetype}' - headers['content-type'] = ct - - body = obj.get_body() - if body is not None: - datum_body = meta.Datum(type='bytes', value=body) - else: - datum_body = meta.Datum(type='bytes', value=b'') - - return meta.Datum( - type='http', - value=dict( - status_code=meta.Datum(type='string', value=str(status)), - headers={ - n: meta.Datum(type='string', value=h) - for n, h in headers.items() - }, - body=datum_body, - ) - ) - - raise NotImplementedError - - -class HttpRequestConverter(meta.InConverter, - binding='httpTrigger', trigger=True): - - @classmethod - def check_input_type_annotation(cls, pytype: type) -> bool: - return issubclass(pytype, azf_abc.HttpRequest) - - @classmethod - def decode(cls, data: meta.Datum, *, - trigger_metadata) -> typing.Any: - if data.type != 'http': - raise NotImplementedError - - val = data.value - - return HttpRequest( - method=val['method'].value, - url=val['url'].value, - headers={n: v.value for n, v in val['headers'].items()}, - params={n: v.value for n, v in val['query'].items()}, - route_params={n: v.value for n, v in val['params'].items()}, - body_type=val['body'].type, - body=val['body'].value, - ) diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/meta.py 
b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/meta.py deleted file mode 100644 index 3dcff6da8..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_newer_version/lib/site-packages/azure/functions/meta.py +++ /dev/null @@ -1,401 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import abc -import collections.abc -import datetime -import json -import re -from typing import Any, Dict, Mapping, Optional, Tuple, Union - -from ._thirdparty import typing_inspect -from ._utils import try_parse_datetime_with_formats, try_parse_timedelta_with_formats - - -def is_iterable_type_annotation(annotation: object, pytype: object) -> bool: - is_iterable_anno = ( - typing_inspect.is_generic_type(annotation) - and issubclass(typing_inspect.get_origin(annotation), - collections.abc.Iterable) - ) - - if not is_iterable_anno: - return False - - args = typing_inspect.get_args(annotation) - if not args: - return False - - if isinstance(pytype, tuple): - return any(isinstance(t, type) and issubclass(t, arg) - for t in pytype for arg in args) - else: - return any(isinstance(pytype, type) and issubclass(pytype, arg) - for arg in args) - - -class Datum: - def __init__(self, value: Any, type: Optional[str]): - self.value: Any = value - self.type: Optional[str] = type - - @property - def python_value(self) -> Any: - if self.value is None or self.type is None: - return None - elif self.type in ('bytes', 'string', 'int', 'double'): - return self.value - elif self.type == 'json': - return json.loads(self.value) - elif self.type == 'collection_string': - return [v for v in self.value.string] - elif self.type == 'collection_bytes': - return [v for v in self.value.bytes] - elif self.type == 'collection_double': - return [v for v in self.value.double] - elif self.type == 'collection_sint64': - return [v for v in self.value.sint64] - else: - return self.value - - @property - def python_type(self) -> type: - return type(self.python_value) - - def __eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.value == other.value and self.type == other.type - - def __hash__(self): - return hash((type(self), (self.value, self.type))) - - def __repr__(self): - val_repr = repr(self.value) - if len(val_repr) > 10: - val_repr = val_repr[:10] + '...' 
- return '<Datum {} {}>'.format(self.type, val_repr) - - -class _ConverterMeta(abc.ABCMeta): - - _bindings: Dict[str, type] = {} - - def __new__(mcls, name, bases, dct, *, - binding: Optional[str], - trigger: Optional[str] = None): - cls = super().__new__(mcls, name, bases, dct) - cls._trigger = trigger # type: ignore - if binding is None: - return cls - - if binding in mcls._bindings: - raise RuntimeError( - f'cannot register a converter for {binding!r} binding: ' - f'another converter for this binding has already been ' - f'registered') - - mcls._bindings[binding] = cls - if trigger is not None: - mcls._bindings[trigger] = cls - - return cls - - @classmethod - def get(cls, binding_name): - return cls._bindings.get(binding_name) - - def has_trigger_support(cls) -> bool: - return cls._trigger is not None # type: ignore - - -class _BaseConverter(metaclass=_ConverterMeta, binding=None): - - @classmethod - def _decode_typed_data( - cls, data: Datum, *, - python_type: Union[type, Tuple[type, ...]], - context: str = 'data') -> Any: - if data is None: - return None - - data_type = data.type - if data_type == 'json': - result = json.loads(data.value) - - elif data_type == 'string': - result = data.value - - elif data_type == 'int': - result = data.value - - elif data_type == 'double': - result = data.value - - elif data_type == 'collection_bytes': - result = data.value - - elif data_type == 'collection_string': - result = data.value - - elif data_type == 'collection_sint64': - result = data.value - - elif data_type is None: - return None - - else: - raise ValueError( - f'unsupported type of {context}: {data_type}') - - if not isinstance(result, python_type): - if isinstance(python_type, (tuple, list, dict)): - raise ValueError( - f'unexpected value type in {context}: ' - f'{type(result).__name__}, expected one of: ' - f'{", ".join(t.__name__ for t in python_type)}') - else: - try: - # Try coercing into the requested type - result = python_type(result) - except (TypeError, ValueError) as e: - raise ValueError( - f'cannot convert value of {context} into ' - f'{python_type.__name__}: {e}') from None - - return result - - @classmethod - def _decode_trigger_metadata_field( - cls, trigger_metadata: Mapping[str, Datum], - field: str, *, - python_type: Union[type, Tuple[type, ...]]) \ - -> Any: - data = trigger_metadata.get(field) - if data is None: - return None - else: - return cls._decode_typed_data( - data, python_type=python_type, - context=f'field {field!r} in trigger metadata') - - @classmethod - def _parse_datetime_metadata( - cls, trigger_metadata: Mapping[str, Datum], - field: str) -> Optional[datetime.datetime]: - - datetime_str = cls._decode_trigger_metadata_field( - trigger_metadata, field, python_type=str) - - if datetime_str is None: - return None - else: - return cls._parse_datetime(datetime_str) - - @classmethod - def _parse_timedelta_metadata( - cls, trigger_metadata: Mapping[str, Datum], - field: str) -> Optional[datetime.timedelta]: - - timedelta_str = cls._decode_trigger_metadata_field( - trigger_metadata, field, python_type=str) - - if timedelta_str is None: - return None - else: - return cls._parse_timedelta(timedelta_str) - - @classmethod - def _parse_datetime( - cls, datetime_str: Optional[str]) -> Optional[datetime.datetime]: - - if not datetime_str: - return None - - too_fractional = re.match( - r'(.*\.\d{6})(\d+)(Z|[\+|-]\d{1,2}:\d{1,2}){0,1}', datetime_str) - - if too_fractional: - # The supplied value contains seven digits in the - # fractional second part, whereas Python expects - # a 
maximum of six, so strip it. - # https://github.com/Azure/azure-functions-python-worker/issues/269 - datetime_str = too_fractional.group(1) + ( - too_fractional.group(3) or '') - - # Try parse time - utc_time, utc_time_error = cls._parse_datetime_utc(datetime_str) - if not utc_time_error and utc_time: - return utc_time.replace(tzinfo=datetime.timezone.utc) - - local_time, local_time_error = cls._parse_datetime_local(datetime_str) - if not local_time_error and local_time: - return local_time.replace(tzinfo=None) - - # Report error - if utc_time_error: - raise utc_time_error - elif local_time_error: - raise local_time_error - else: - return None - - @classmethod - def _parse_timedelta( - cls, - timedelta_str: Optional[str] - ) -> Optional[datetime.timedelta]: - - if not timedelta_str: - return None - - # Try parse timedelta - timedelta, td_error = cls._parse_timedelta_internal(timedelta_str) - if timedelta is not None: - return timedelta - - # Report error - if td_error: - raise td_error - else: - return None - - @classmethod - def _parse_datetime_utc( - cls, - datetime_str: str - ) -> Tuple[Optional[datetime.datetime], Optional[Exception]]: - - # UTC ISO 8601 assumed - # 2018-08-07T23:17:57.461050Z - utc_formats = [ - '%Y-%m-%dT%H:%M:%S+00:00', - '%Y-%m-%dT%H:%M:%S-00:00', - '%Y-%m-%dT%H:%M:%S.%f+00:00', - '%Y-%m-%dT%H:%M:%S.%f-00:00', - '%Y-%m-%dT%H:%M:%SZ', - '%Y-%m-%dT%H:%M:%S.%fZ', - - '%m/%d/%Y %H:%M:%SZ', - '%m/%d/%Y %H:%M:%S.%fZ', - '%m/%d/%Y %H:%M:%S+00:00', - '%m/%d/%Y %H:%M:%S-00:00', - '%m/%d/%Y %H:%M:%S.%f+00:00', - '%m/%d/%Y %H:%M:%S.%f-00:00', - ] - - dt, _, excpt = try_parse_datetime_with_formats( - datetime_str, utc_formats) - - if excpt is not None: - return None, excpt - return dt, None - - @classmethod - def _parse_datetime_local( - cls, datetime_str: str - ) -> Tuple[Optional[datetime.datetime], Optional[Exception]]: - """Parse a string into a datetime object; accepts the following formats - 1. Without fractional seconds (e.g. 2018-08-07T23:17:57) - 2. With fractional seconds (e.g. 2018-08-07T23:17:57.461050) - - Parameters - ---------- - datetime_str: str - The string that represents a datetime - - Returns - ------- - Tuple[Optional[datetime.datetime], Optional[Exception]] - If the datetime_str is None, will return None immediately. - If the datetime_str can be parsed correctly, it will return as the - first element in the tuple. - If the datetime_str cannot be parsed with all attempts, it will - return None in the first element, the exception in the second - element. - """ - - local_formats = [ - '%Y-%m-%dT%H:%M:%S.%f', - '%Y-%m-%dT%H:%M:%S', - - '%m/%d/%YT%H:%M:%S.%f', - '%m/%d/%YT%H:%M:%S' - ] - - dt, _, excpt = try_parse_datetime_with_formats( - datetime_str, local_formats) - - if excpt is not None: - return None, excpt - return dt, None - - @classmethod - def _parse_timedelta_internal( - cls, timedelta_str: str - ) -> Tuple[Optional[datetime.timedelta], Optional[Exception]]: - """Parse a string into a timedelta object; accepts the following formats - 1. HH:MM:SS (e.g. 12:34:56) - 2. MM:SS (e.g. 34:56) - 3. Pure integer as seconds (e.g. 5819) - - Parameters - ---------- - timedelta_str: str - The string that represents a timedelta - - Returns - ------- - Tuple[Optional[datetime.timedelta], Optional[Exception]] - If the timedelta_str is None, will return None immediately. - If the timedelta_str can be parsed correctly, it will return as the - first element in the tuple. 
- If the timedelta_str cannot be parsed with all attempts, it will - return None in the first element, the exception in the second - element. - """ - - timedelta_formats = [ - '%H:%M:%S', - '%M:%S', - '%S' - ] - - td, _, excpt = try_parse_timedelta_with_formats( - timedelta_str, timedelta_formats) - - if td is not None: - return td, None - return None, excpt - - -class InConverter(_BaseConverter, binding=None): - - @abc.abstractclassmethod - def check_input_type_annotation(cls, pytype: type) -> bool: - pass - - @abc.abstractclassmethod - def decode(cls, data: Datum, *, trigger_metadata) -> Any: - raise NotImplementedError - - @abc.abstractclassmethod - def has_implicit_output(cls) -> bool: - return False - - -class OutConverter(_BaseConverter, binding=None): - - @abc.abstractclassmethod - def check_output_type_annotation(cls, pytype: type) -> bool: - pass - - @abc.abstractclassmethod - def encode(cls, obj: Any, *, - expected_type: Optional[type]) -> Optional[Datum]: - raise NotImplementedError - - -def get_binding_registry(): - return _ConverterMeta diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/__init__.py deleted file mode 100644 index 36953c55b..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Import binding implementations to register them -from . import http # NoQA -from ._abc import Context, Out -from ._http import HttpRequest, HttpResponse -from .meta import get_binding_registry - -__all__ = ( - # Functions - 'get_binding_registry', - - # Generics. - 'Context', - 'Out', - - # Binding rich types, sorted alphabetically. - 'HttpRequest', - 'HttpResponse', -) - -__version__ = '1.5.0' diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_abc.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_abc.py deleted file mode 100644 index 8add53c99..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_abc.py +++ /dev/null @@ -1,422 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import abc -import datetime -import io -import typing - -T = typing.TypeVar('T') - - -class Out(abc.ABC, typing.Generic[T]): - """An interface to set function output parameters.""" - - @abc.abstractmethod - def set(self, val: T) -> None: - """Set the value of the output parameter.""" - pass - - @abc.abstractmethod - def get(self) -> T: - """Get the value of the output parameter.""" - pass - - -class RpcException: - """Rpc Exception object.""" - - @property - @abc.abstractmethod - def source(self) -> str: - """Source of the exception.""" - pass - - @property - @abc.abstractmethod - def stack_trace(self) -> str: - """Stack trace for the exception.""" - pass - - @property - @abc.abstractmethod - def message(self) -> str: - """Textual message describing the exception.""" - pass - - -class TraceContext(abc.ABC): - """Trace context object.""" - - @property - @abc.abstractmethod - def trace_state(self) -> str: - """Gets trace state from trace-context.""" - pass - - @property - @abc.abstractmethod - def trace_parent(self) -> str: - """Gets trace parent from trace-context.""" - pass - - @property - @abc.abstractmethod - def attributes(self) -> typing.Dict[str, str]: - """Gets trace-context attributes.""" - pass - - -class RetryContext(abc.ABC): - """Retry Context object. - For more information refer: https://aka.ms/azfunc-retries-policies - """ - - @property - @abc.abstractmethod - def retry_count(self) -> int: - """Gets the current retry count from retry-context.""" - pass - - @property - @abc.abstractmethod - def max_retry_count(self) -> int: - """Gets the max retry count from retry-context.""" - pass - - @property - @abc.abstractmethod - def exception(self) -> RpcException: - """Gets the RpcException""" - pass - - -class Context(abc.ABC): - """Function invocation context.""" - - @property - @abc.abstractmethod - def invocation_id(self) -> str: - """Function invocation ID.""" - pass - - @property - @abc.abstractmethod - def function_name(self) -> str: - """Function name.""" - pass - - @property - @abc.abstractmethod - def function_directory(self) -> str: - """Function directory.""" - pass - - @property - @abc.abstractmethod - def trace_context(self) -> TraceContext: - """Context for distributed tracing.""" - pass - - @property - @abc.abstractmethod - def retry_context(self) -> RetryContext: - """Context for retries to the function.""" - pass - - -class HttpRequest(abc.ABC): - """HTTP request object.""" - - @property - @abc.abstractmethod - def method(self) -> str: - """Request method.""" - pass - - @property - @abc.abstractmethod - def url(self) -> str: - """Request URL.""" - pass - - @property - @abc.abstractmethod - def headers(self) -> typing.Mapping[str, str]: - """A dictionary containing request headers.""" - pass - - @property - @abc.abstractmethod - def params(self) -> typing.Mapping[str, str]: - """A dictionary containing request GET parameters.""" - pass - - @property - @abc.abstractmethod - def route_params(self) -> typing.Mapping[str, str]: - """A dictionary containing request route parameters.""" - pass - - @abc.abstractmethod - def get_body(self) -> bytes: - """Return request body as bytes.""" - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - """Decode and return request body as JSON. - - :raises ValueError: - when the request does not contain valid JSON data. 
- """ - pass - - -class HttpResponse(abc.ABC): - - @property - @abc.abstractmethod - def status_code(self) -> int: - pass - - @property - @abc.abstractmethod - def mimetype(self): - pass - - @property - @abc.abstractmethod - def charset(self): - pass - - @property - @abc.abstractmethod - def headers(self) -> typing.MutableMapping[str, str]: - pass - - @abc.abstractmethod - def get_body(self) -> bytes: - pass - - -class TimerRequest(abc.ABC): - """Timer request object.""" - - @property - @abc.abstractmethod - def past_due(self) -> bool: - """Whether the timer is past due.""" - pass - - -class InputStream(io.BufferedIOBase, abc.ABC): - """File-like object representing an input blob.""" - - @abc.abstractmethod - def read(self, size=-1) -> bytes: - """Return and read up to *size* bytes. - - :param int size: - The number of bytes to read. If the argument is omitted, - ``None``, or negative, data is read and returned until - EOF is reached. - - :return: - Bytes read from the input stream. - """ - pass - - @property - @abc.abstractmethod - def name(self) -> typing.Optional[str]: - """The name of the blob.""" - pass - - @property - @abc.abstractmethod - def length(self) -> typing.Optional[int]: - """The size of the blob in bytes.""" - pass - - @property - @abc.abstractmethod - def uri(self) -> typing.Optional[str]: - """The blob's primary location URI.""" - pass - - -class QueueMessage(abc.ABC): - - @property - @abc.abstractmethod - def id(self) -> typing.Optional[str]: - pass - - @abc.abstractmethod - def get_body(self) -> typing.Union[str, bytes]: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def dequeue_count(self) -> typing.Optional[int]: - pass - - @property - @abc.abstractmethod - def expiration_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def insertion_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def time_next_visible(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def pop_receipt(self) -> typing.Optional[str]: - pass - - -class EventGridEvent(abc.ABC): - @property - @abc.abstractmethod - def id(self) -> str: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def topic(self) -> str: - pass - - @property - @abc.abstractmethod - def subject(self) -> str: - pass - - @property - @abc.abstractmethod - def event_type(self) -> str: - pass - - @property - @abc.abstractmethod - def event_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def data_version(self) -> str: - pass - - -class EventGridOutputEvent(abc.ABC): - @property - @abc.abstractmethod - def id(self) -> str: - pass - - @abc.abstractmethod - def get_json(self) -> typing.Any: - pass - - @property - @abc.abstractmethod - def subject(self) -> str: - pass - - @property - @abc.abstractmethod - def event_type(self) -> str: - pass - - @property - @abc.abstractmethod - def event_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def data_version(self) -> str: - pass - - -class Document(abc.ABC): - - @classmethod - @abc.abstractmethod - def from_json(cls, json_data: str) -> 'Document': - pass - - @classmethod - @abc.abstractmethod - def from_dict(cls, dct: dict) -> 'Document': - pass - - @abc.abstractmethod - def __getitem__(self, key): - pass - - @abc.abstractmethod - def __setitem__(self, key, 
value): - pass - - @abc.abstractmethod - def to_json(self) -> str: - pass - - -class DocumentList(abc.ABC): - pass - - -class EventHubEvent(abc.ABC): - - @abc.abstractmethod - def get_body(self) -> bytes: - pass - - @property - @abc.abstractmethod - def partition_key(self) -> typing.Optional[str]: - pass - - @property - @abc.abstractmethod - def sequence_number(self) -> typing.Optional[int]: - pass - - @property - @abc.abstractmethod - def iothub_metadata(self) -> typing.Optional[typing.Mapping[str, str]]: - pass - - @property - @abc.abstractmethod - def enqueued_time(self) -> typing.Optional[datetime.datetime]: - pass - - @property - @abc.abstractmethod - def offset(self) -> typing.Optional[str]: - pass - - -class OrchestrationContext(abc.ABC): - @property - @abc.abstractmethod - def body(self) -> str: - pass diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_http.py deleted file mode 100644 index 89ee2678c..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_http.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import collections.abc -import io -import json -import types -import typing - -from . import _abc -from ._thirdparty.werkzeug import datastructures as _wk_datastructures -from ._thirdparty.werkzeug import formparser as _wk_parser -from ._thirdparty.werkzeug import http as _wk_http - - -class BaseHeaders(collections.abc.Mapping): - - def __init__(self, source: typing.Optional[typing.Mapping] = None) -> None: - self.__http_headers__: typing.Dict[str, str] = {} - - if source is not None: - self.__http_headers__.update( - {k.lower(): v for k, v in source.items()}) - - def __getitem__(self, key: str) -> str: - return self.__http_headers__[key.lower()] - - def __len__(self): - return len(self.__http_headers__) - - def __contains__(self, key: typing.Any): - return key.lower() in self.__http_headers__ - - def __iter__(self): - return iter(self.__http_headers__) - - -class HttpRequestHeaders(BaseHeaders): - pass - - -class HttpResponseHeaders(BaseHeaders, collections.abc.MutableMapping): - - def __setitem__(self, key: str, value: str): - self.__http_headers__[key.lower()] = value - - def __delitem__(self, key: str): - del self.__http_headers__[key.lower()] - - -class HttpResponse(_abc.HttpResponse): - """An HTTP response object. - - :param str/bytes body: - Optional response body. - - :param int status_code: - Response status code. If not specified, defaults to 200. - - :param dict headers: - An optional mapping containing response HTTP headers. - - :param str mimetype: - An optional response MIME type. If not specified, defaults to - ``'text/plain'``. - - :param str charset: - Response content text encoding. If not specified, defaults to - ``'utf-8'``. 
- """ - - def __init__(self, body=None, *, - status_code=None, headers=None, mimetype=None, charset=None): - if status_code is None: - status_code = 200 - self.__status_code = status_code - - if mimetype is None: - mimetype = 'text/plain' - self.__mimetype = mimetype - - if charset is None: - charset = 'utf-8' - self.__charset = charset - - if headers is None: - headers = {} - self.__headers = HttpResponseHeaders(headers) - - if body is not None: - self.__set_body(body) - else: - self.__body = b'' - - @property - def mimetype(self): - """Response MIME type.""" - return self.__mimetype - - @property - def charset(self): - """Response text encoding.""" - return self.__charset - - @property - def headers(self): - """A dictionary of response HTTP headers.""" - return self.__headers - - @property - def status_code(self): - """Response status code.""" - return self.__status_code - - def __set_body(self, body): - if isinstance(body, str): - body = body.encode(self.__charset) - - if not isinstance(body, (bytes, bytearray)): - raise TypeError( - f'response is expected to be either of ' - f'str, bytes, or bytearray, got {type(body).__name__}') - - self.__body = bytes(body) - - def get_body(self) -> bytes: - """Response body as a bytes object.""" - return self.__body - - -class HttpRequest(_abc.HttpRequest): - """An HTTP request object. - - :param str method: - HTTP request method name. - - :param str url: - HTTP URL. - - :param dict headers: - An optional mapping containing HTTP request headers. - - :param dict params: - An optional mapping containing HTTP request params. - - :param dict route_params: - An optional mapping containing HTTP request route params. - - :param bytes body: - HTTP request body. - """ - - def __init__(self, - method: str, - url: str, *, - headers: typing.Optional[typing.Mapping[str, str]] = None, - params: typing.Optional[typing.Mapping[str, str]] = None, - route_params: typing.Optional[ - typing.Mapping[str, str]] = None, - body: bytes) -> None: - self.__method = method - self.__url = url - self.__headers = HttpRequestHeaders(headers or {}) - self.__params = types.MappingProxyType(params or {}) - self.__route_params = types.MappingProxyType(route_params or {}) - self.__body_bytes = body - self.__form_parsed = False - self.__form = None - self.__files = None - - @property - def url(self): - return self.__url - - @property - def method(self): - return self.__method.upper() - - @property - def headers(self): - return self.__headers - - @property - def params(self): - return self.__params - - @property - def route_params(self): - return self.__route_params - - @property - def form(self): - self._parse_form_data() - return self.__form - - @property - def files(self): - self._parse_form_data() - return self.__files - - def get_body(self) -> bytes: - return self.__body_bytes - - def get_json(self) -> typing.Any: - return json.loads(self.__body_bytes.decode('utf-8')) - - def _parse_form_data(self): - if self.__form_parsed: - return - - body = self.get_body() - content_type = self.headers.get('Content-Type', '') - content_length = len(body) - mimetype, options = _wk_http.parse_options_header(content_type) - parser = _wk_parser.FormDataParser( - _wk_parser.default_stream_factory, - options.get('charset') or 'utf-8', - 'replace', - None, - None, - _wk_datastructures.ImmutableMultiDict, - ) - - body_stream = io.BytesIO(body) - - _, self.__form, self.__files = parser.parse( - body_stream, mimetype, content_length, options - ) - - self.__form_parsed = True diff --git 
a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/typing_inspect.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/typing_inspect.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/datastructures.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/datastructures.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/formparser.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/formparser.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_thirdparty/werkzeug/http.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_utils.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_utils.py deleted file mode 100644 index a1bd9f3b8..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/_utils.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -from datetime import datetime, timedelta -from typing import List, Optional, Tuple - - -def try_parse_datetime_with_formats( - datetime_str: str, - datetime_formats: List[str] -) -> Tuple[Optional[datetime], Optional[str], Optional[Exception]]: - """Try parsing the datetime string with a list of formats - Parameters - ---------- - datetime_str: str - The datetime string that needs to be parsed (e.g. 2018-12-12T03:16:34.2191Z) - datetime_formats: List[str] - A list of datetime formats that the parser would try to match - - Returns - ------- - Tuple[Optional[datetime], Optional[str], Optional[Exception]]: - If the datetime can be successfully parsed, the first element is the - parsed datetime object and the second is the matched format. - If the datetime cannot be parsed, the first and second elements will be - None, and the third is the exception from the datetime.strptime() - method. 
- """ - for fmt in datetime_formats: - try: - dt = datetime.strptime(datetime_str, fmt) - return (dt, fmt, None) - except ValueError as ve: - last_exception = ve - - return (None, None, last_exception) - - -def try_parse_timedelta_with_formats( - timedelta_str: str, - timedelta_formats: List[str] -) -> Tuple[Optional[timedelta], Optional[str], Optional[Exception]]: - """Try parsing the timedelta string with a list of formats - Parameters - ---------- - timedelta_str: str - The timedelta string that needs to be parsed (e.g. 12:34:56) - timedelta_formats: List[str] - A list of timedelta formats that the parser would try to match - - Returns - ------- - Tuple[Optional[timedelta], Optional[str], Optional[Exception]]: - If the timedelta can be successfully parsed, the first element is the - parsed timedelta object and the second is the matched format. - If the timedelta cannot be parsed, the first and second elements will be - None, and the third is the exception from the datetime.strptime() - method. - """ - - for fmt in timedelta_formats: - try: - # For the singular forms %S, %M, and %H, build the timedelta directly - if fmt == '%S': - td = timedelta(seconds=int(timedelta_str)) - elif fmt == '%M': - td = timedelta(minutes=int(timedelta_str)) - elif fmt == '%H': - td = timedelta(hours=int(timedelta_str)) - else: - dt = datetime.strptime(timedelta_str, fmt) - td = timedelta(hours=dt.hour, - minutes=dt.minute, - seconds=dt.second) - return (td, fmt, None) - except ValueError as ve: - last_exception = ve - - return (None, None, last_exception) diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/http.py b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/http.py deleted file mode 100644 index 211711d67..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/http.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json -import typing - -from azure.functions import _abc as azf_abc -from azure.functions import _http as azf_http - -from . 
import meta - - -class HttpRequest(azf_http.HttpRequest): - """An HTTP request object.""" - - __body_bytes: typing.Optional[bytes] - __body_str: typing.Optional[str] - - def __init__(self, - method: str, - url: str, *, - headers: typing.Mapping[str, str], - params: typing.Mapping[str, str], - route_params: typing.Mapping[str, str], - body_type: str, - body: typing.Union[str, bytes]) -> None: - - body_str: typing.Optional[str] = None - body_bytes: typing.Optional[bytes] = None - if isinstance(body, str): - body_str = body - body_bytes = body_str.encode('utf-8') - elif isinstance(body, bytes): - body_bytes = body - else: - raise TypeError( - f'unexpected HTTP request body type: {type(body).__name__}') - - super().__init__(method=method, url=url, headers=headers, - params=params, route_params=route_params, - body=body_bytes) - - self.__body_type = body_type - self.__body_str = body_str - self.__body_bytes = body_bytes - - def get_body(self) -> bytes: - if self.__body_bytes is None: - assert self.__body_str is not None - self.__body_bytes = self.__body_str.encode('utf-8') - return self.__body_bytes - - def get_json(self) -> typing.Any: - if self.__body_type in ('json', 'string'): - assert self.__body_str is not None - return json.loads(self.__body_str) - elif self.__body_bytes is not None: - try: - return json.loads(self.__body_bytes.decode('utf-8')) - except ValueError as e: - raise ValueError( - 'HTTP request does not contain valid JSON data') from e - else: - raise ValueError( - 'Request body cannot be empty in JSON deserialization') - - -class HttpResponseConverter(meta.OutConverter, binding='http'): - - @classmethod - def check_output_type_annotation(cls, pytype: type) -> bool: - return issubclass(pytype, (azf_abc.HttpResponse, str)) - - @classmethod - def encode(cls, obj: typing.Any, *, - expected_type: typing.Optional[type]) -> meta.Datum: - if isinstance(obj, str): - return meta.Datum(type='string', value=obj) - - if isinstance(obj, azf_abc.HttpResponse): - status = obj.status_code - headers = dict(obj.headers) - if 'content-type' not in headers: - if obj.mimetype.startswith('text/'): - ct = f'{obj.mimetype}; charset={obj.charset}' - else: - ct = f'{obj.mimetype}' - headers['content-type'] = ct - - body = obj.get_body() - if body is not None: - datum_body = meta.Datum(type='bytes', value=body) - else: - datum_body = meta.Datum(type='bytes', value=b'') - - return meta.Datum( - type='http', - value=dict( - status_code=meta.Datum(type='string', value=str(status)), - headers={ - n: meta.Datum(type='string', value=h) - for n, h in headers.items() - }, - body=datum_body, - ) - ) - - raise NotImplementedError - - -class HttpRequestConverter(meta.InConverter, - binding='httpTrigger', trigger=True): - - @classmethod - def check_input_type_annotation(cls, pytype: type) -> bool: - return issubclass(pytype, azf_abc.HttpRequest) - - @classmethod - def decode(cls, data: meta.Datum, *, - trigger_metadata) -> typing.Any: - if data.type != 'http': - raise NotImplementedError - - val = data.value - - return HttpRequest( - method=val['method'].value, - url=val['url'].value, - headers={n: v.value for n, v in val['headers'].items()}, - params={n: v.value for n, v in val['query'].items()}, - route_params={n: v.value for n, v in val['params'].items()}, - body_type=val['body'].type, - body=val['body'].value, - ) diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/meta.py 
b/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/meta.py deleted file mode 100644 index 3dcff6da8..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_azf_older_version/lib/site-packages/azure/functions/meta.py +++ /dev/null @@ -1,401 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import abc -import collections.abc -import datetime -import json -import re -from typing import Any, Dict, Mapping, Optional, Tuple, Union - -from ._thirdparty import typing_inspect -from ._utils import try_parse_datetime_with_formats, try_parse_timedelta_with_formats - - -def is_iterable_type_annotation(annotation: object, pytype: object) -> bool: - is_iterable_anno = ( - typing_inspect.is_generic_type(annotation) - and issubclass(typing_inspect.get_origin(annotation), - collections.abc.Iterable) - ) - - if not is_iterable_anno: - return False - - args = typing_inspect.get_args(annotation) - if not args: - return False - - if isinstance(pytype, tuple): - return any(isinstance(t, type) and issubclass(t, arg) - for t in pytype for arg in args) - else: - return any(isinstance(pytype, type) and issubclass(pytype, arg) - for arg in args) - - -class Datum: - def __init__(self, value: Any, type: Optional[str]): - self.value: Any = value - self.type: Optional[str] = type - - @property - def python_value(self) -> Any: - if self.value is None or self.type is None: - return None - elif self.type in ('bytes', 'string', 'int', 'double'): - return self.value - elif self.type == 'json': - return json.loads(self.value) - elif self.type == 'collection_string': - return [v for v in self.value.string] - elif self.type == 'collection_bytes': - return [v for v in self.value.bytes] - elif self.type == 'collection_double': - return [v for v in self.value.double] - elif self.type == 'collection_sint64': - return [v for v in self.value.sint64] - else: - return self.value - - @property - def python_type(self) -> type: - return type(self.python_value) - - def __eq__(self, other): - if not isinstance(other, type(self)): - return False - - return self.value == other.value and self.type == other.type - - def __hash__(self): - return hash((type(self), (self.value, self.type))) - - def __repr__(self): - val_repr = repr(self.value) - if len(val_repr) > 10: - val_repr = val_repr[:10] + '...' 
- return '<Datum {} {}>'.format(self.type, val_repr) - - -class _ConverterMeta(abc.ABCMeta): - - _bindings: Dict[str, type] = {} - - def __new__(mcls, name, bases, dct, *, - binding: Optional[str], - trigger: Optional[str] = None): - cls = super().__new__(mcls, name, bases, dct) - cls._trigger = trigger # type: ignore - if binding is None: - return cls - - if binding in mcls._bindings: - raise RuntimeError( - f'cannot register a converter for {binding!r} binding: ' - f'another converter for this binding has already been ' - f'registered') - - mcls._bindings[binding] = cls - if trigger is not None: - mcls._bindings[trigger] = cls - - return cls - - @classmethod - def get(cls, binding_name): - return cls._bindings.get(binding_name) - - def has_trigger_support(cls) -> bool: - return cls._trigger is not None # type: ignore - - -class _BaseConverter(metaclass=_ConverterMeta, binding=None): - - @classmethod - def _decode_typed_data( - cls, data: Datum, *, - python_type: Union[type, Tuple[type, ...]], - context: str = 'data') -> Any: - if data is None: - return None - - data_type = data.type - if data_type == 'json': - result = json.loads(data.value) - - elif data_type == 'string': - result = data.value - - elif data_type == 'int': - result = data.value - - elif data_type == 'double': - result = data.value - - elif data_type == 'collection_bytes': - result = data.value - - elif data_type == 'collection_string': - result = data.value - - elif data_type == 'collection_sint64': - result = data.value - - elif data_type is None: - return None - - else: - raise ValueError( - f'unsupported type of {context}: {data_type}') - - if not isinstance(result, python_type): - if isinstance(python_type, (tuple, list, dict)): - raise ValueError( - f'unexpected value type in {context}: ' - f'{type(result).__name__}, expected one of: ' - f'{", ".join(t.__name__ for t in python_type)}') - else: - try: - # Try coercing into the requested type - result = python_type(result) - except (TypeError, ValueError) as e: - raise ValueError( - f'cannot convert value of {context} into ' - f'{python_type.__name__}: {e}') from None - - return result - - @classmethod - def _decode_trigger_metadata_field( - cls, trigger_metadata: Mapping[str, Datum], - field: str, *, - python_type: Union[type, Tuple[type, ...]]) \ - -> Any: - data = trigger_metadata.get(field) - if data is None: - return None - else: - return cls._decode_typed_data( - data, python_type=python_type, - context=f'field {field!r} in trigger metadata') - - @classmethod - def _parse_datetime_metadata( - cls, trigger_metadata: Mapping[str, Datum], - field: str) -> Optional[datetime.datetime]: - - datetime_str = cls._decode_trigger_metadata_field( - trigger_metadata, field, python_type=str) - - if datetime_str is None: - return None - else: - return cls._parse_datetime(datetime_str) - - @classmethod - def _parse_timedelta_metadata( - cls, trigger_metadata: Mapping[str, Datum], - field: str) -> Optional[datetime.timedelta]: - - timedelta_str = cls._decode_trigger_metadata_field( - trigger_metadata, field, python_type=str) - - if timedelta_str is None: - return None - else: - return cls._parse_timedelta(timedelta_str) - - @classmethod - def _parse_datetime( - cls, datetime_str: Optional[str]) -> Optional[datetime.datetime]: - - if not datetime_str: - return None - - too_fractional = re.match( - r'(.*\.\d{6})(\d+)(Z|[\+|-]\d{1,2}:\d{1,2}){0,1}', datetime_str) - - if too_fractional: - # The supplied value contains seven digits in the - # fractional second part, whereas Python expects - # a 
maximum of six, so strip it. - # https://github.com/Azure/azure-functions-python-worker/issues/269 - datetime_str = too_fractional.group(1) + ( - too_fractional.group(3) or '') - - # Try parse time - utc_time, utc_time_error = cls._parse_datetime_utc(datetime_str) - if not utc_time_error and utc_time: - return utc_time.replace(tzinfo=datetime.timezone.utc) - - local_time, local_time_error = cls._parse_datetime_local(datetime_str) - if not local_time_error and local_time: - return local_time.replace(tzinfo=None) - - # Report error - if utc_time_error: - raise utc_time_error - elif local_time_error: - raise local_time_error - else: - return None - - @classmethod - def _parse_timedelta( - cls, - timedelta_str: Optional[str] - ) -> Optional[datetime.timedelta]: - - if not timedelta_str: - return None - - # Try parse timedelta - timedelta, td_error = cls._parse_timedelta_internal(timedelta_str) - if timedelta is not None: - return timedelta - - # Report error - if td_error: - raise td_error - else: - return None - - @classmethod - def _parse_datetime_utc( - cls, - datetime_str: str - ) -> Tuple[Optional[datetime.datetime], Optional[Exception]]: - - # UTC ISO 8601 assumed - # 2018-08-07T23:17:57.461050Z - utc_formats = [ - '%Y-%m-%dT%H:%M:%S+00:00', - '%Y-%m-%dT%H:%M:%S-00:00', - '%Y-%m-%dT%H:%M:%S.%f+00:00', - '%Y-%m-%dT%H:%M:%S.%f-00:00', - '%Y-%m-%dT%H:%M:%SZ', - '%Y-%m-%dT%H:%M:%S.%fZ', - - '%m/%d/%Y %H:%M:%SZ', - '%m/%d/%Y %H:%M:%S.%fZ', - '%m/%d/%Y %H:%M:%S+00:00', - '%m/%d/%Y %H:%M:%S-00:00', - '%m/%d/%Y %H:%M:%S.%f+00:00', - '%m/%d/%Y %H:%M:%S.%f-00:00', - ] - - dt, _, excpt = try_parse_datetime_with_formats( - datetime_str, utc_formats) - - if excpt is not None: - return None, excpt - return dt, None - - @classmethod - def _parse_datetime_local( - cls, datetime_str: str - ) -> Tuple[Optional[datetime.datetime], Optional[Exception]]: - """Parse a string into a datetime object; accepts the following formats - 1. Without fractional seconds (e.g. 2018-08-07T23:17:57) - 2. With fractional seconds (e.g. 2018-08-07T23:17:57.461050) - - Parameters - ---------- - datetime_str: str - The string that represents a datetime - - Returns - ------- - Tuple[Optional[datetime.datetime], Optional[Exception]] - If the datetime_str is None, will return None immediately. - If the datetime_str can be parsed correctly, it will return as the - first element in the tuple. - If the datetime_str cannot be parsed with all attempts, it will - return None in the first element, the exception in the second - element. - """ - - local_formats = [ - '%Y-%m-%dT%H:%M:%S.%f', - '%Y-%m-%dT%H:%M:%S', - - '%m/%d/%YT%H:%M:%S.%f', - '%m/%d/%YT%H:%M:%S' - ] - - dt, _, excpt = try_parse_datetime_with_formats( - datetime_str, local_formats) - - if excpt is not None: - return None, excpt - return dt, None - - @classmethod - def _parse_timedelta_internal( - cls, timedelta_str: str - ) -> Tuple[Optional[datetime.timedelta], Optional[Exception]]: - """Parse a string into a timedelta object; accepts the following formats - 1. HH:MM:SS (e.g. 12:34:56) - 2. MM:SS (e.g. 34:56) - 3. Pure integer as seconds (e.g. 5819) - - Parameters - ---------- - timedelta_str: str - The string that represents a timedelta - - Returns - ------- - Tuple[Optional[datetime.timedelta], Optional[Exception]] - If the timedelta_str is None, will return None immediately. - If the timedelta_str can be parsed correctly, it will return as the - first element in the tuple. 
- If the timedelta_str cannot be parsed with all attempts, it will - return None in the first element, the exception in the second - element. - """ - - timedelta_formats = [ - '%H:%M:%S', - '%M:%S', - '%S' - ] - - td, _, excpt = try_parse_timedelta_with_formats( - timedelta_str, timedelta_formats) - - if td is not None: - return td, None - return None, excpt - - -class InConverter(_BaseConverter, binding=None): - - @abc.abstractclassmethod - def check_input_type_annotation(cls, pytype: type) -> bool: - pass - - @abc.abstractclassmethod - def decode(cls, data: Datum, *, trigger_metadata) -> Any: - raise NotImplementedError - - @abc.abstractclassmethod - def has_implicit_output(cls) -> bool: - return False - - -class OutConverter(_BaseConverter, binding=None): - - @abc.abstractclassmethod - def check_output_type_annotation(cls, pytype: type) -> bool: - pass - - @abc.abstractclassmethod - def encode(cls, obj: Any, *, - expected_type: Optional[type]) -> Optional[Datum]: - raise NotImplementedError - - -def get_binding_registry(): - return _ConverterMeta diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/google/protobuf/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/google/protobuf/__init__.py deleted file mode 100644 index 814beb791..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/google/protobuf/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# This is a dummy protobuf==3.9.0 package used for E2E -# testing in Azure Functions Python Worker - -__version__ = '3.9.0' diff --git a/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/grpc/__init__.py b/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/grpc/__init__.py deleted file mode 100644 index fc42cae68..000000000 --- a/tests/endtoend/dependency_isolation_functions/.python_packages_grpc_protobuf/lib/site-packages/grpc/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# This is a dummy grpcio==1.35.0 package used for E2E -# testing in Azure Functions Python Worker. - -__version__ = '1.35.0' diff --git a/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py b/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py deleted file mode 100644 index 709fd6ca1..000000000 --- a/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py +++ /dev/null @@ -1,50 +0,0 @@ -import json -import os -import sys - -import azure.functions as func -import google.protobuf as proto -import grpc - -# Load dependency manager from the customer's context -from azure_functions_worker.utils.dependency import DependencyManager as dm - - -def main(req: func.HttpRequest) -> func.HttpResponse: - """This function is an HttpTrigger to check if the modules are loaded from - customer's dependencies. We have mocked a .python_packages/ folder in - this e2e test function app which contains the following stub packages: - - protobuf==3.9.0 - grpc==1.35.0 - - If the version we check is the same as the one in local .python_packages/, - that means the isolated worker dependencies are working as expected. 
- """ - result = { - "sys.path": list(sys.path), - "dependency_manager": { - "cx_deps_path": dm._get_cx_deps_path(), - "cx_working_dir": dm._get_cx_working_dir(), - "worker_deps_path": dm._get_worker_deps_path(), - }, - "libraries": { - "func.version": func.__version__, - "func.file": func.__file__, - "proto.expected.version": "3.9.0", - "proto.version": proto.__version__, - "proto.file": proto.__file__, - "grpc.expected.version": "1.35.0", - "grpc.version": grpc.__version__, - "grpc.file": grpc.__file__, - }, - "environments": { - "PYTHON_ISOLATE_WORKER_DEPENDENCIES": ( - os.getenv('PYTHON_ISOLATE_WORKER_DEPENDENCIES') - ), - "AzureWebJobsScriptRoot": os.getenv('AzureWebJobsScriptRoot'), - "PYTHONPATH": os.getenv('PYTHONPATH'), - "HOST_VERSION": os.getenv('HOST_VERSION') - } - } - return func.HttpResponse(json.dumps(result)) diff --git a/tests/endtoend/dependency_isolation_functions/report_dependencies/function.json b/tests/endtoend/dependency_isolation_functions/report_dependencies/function.json deleted file mode 100644 index c76954425..000000000 --- a/tests/endtoend/dependency_isolation_functions/report_dependencies/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } diff --git a/tests/endtoend/durable_functions/DurableFunctionsHttpStart/__init__.py b/tests/endtoend/durable_functions/DurableFunctionsHttpStart/__init__.py deleted file mode 100644 index 18709fccd..000000000 --- a/tests/endtoend/durable_functions/DurableFunctionsHttpStart/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# This function an HTTP starter function for Durable Functions. 
-# Before running this sample, please: -# - create a Durable orchestration function -# - create a Durable activity function (default name is "Hello") -# - add azure-functions-durable to requirements.txt -# - run pip install -r requirements.txt -import logging - -import azure.durable_functions as df -import azure.functions as func - - -async def main(req: func.HttpRequest, starter: str) -> func.HttpResponse: - client = df.DurableOrchestrationClient(starter) - instance_id = await client.start_new(req.route_params["functionName"], None, - None) - - logging.info(f"Started orchestration with ID = '{instance_id}'.") - - return client.create_check_status_response(req, instance_id) diff --git a/tests/endtoend/durable_functions/DurableFunctionsHttpStart/function.json b/tests/endtoend/durable_functions/DurableFunctionsHttpStart/function.json deleted file mode 100644 index 51a0411ca..000000000 --- a/tests/endtoend/durable_functions/DurableFunctionsHttpStart/function.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "name": "req", - "type": "httpTrigger", - "direction": "in", - "route": "orchestrators/{functionName}", - "methods": [ - "post", - "get" - ] - }, - { - "name": "$return", - "type": "http", - "direction": "out" - }, - { - "name": "starter", - "type": "durableClient", - "direction": "in" - } - ] -} diff --git a/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/__init__.py b/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/__init__.py deleted file mode 100644 index 9a7c43ce8..000000000 --- a/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# This function is not intended to be invoked directly. Instead it will be -# triggered by an HTTP starter function. -# Before running this sample, please: -# - create a Durable activity function (default name is "Hello") -# - create a Durable HTTP starter function -# - add azure-functions-durable to requirements.txt -# - run pip install -r requirements.txt -import azure.durable_functions as df - - -def orchestrator_function(context: df.DurableOrchestrationContext): - result1 = yield context.call_activity('Hello', "Tokyo") - result2 = yield context.call_activity('Hello', "Seattle") - result3 = yield context.call_activity('Hello', "London") - return [result1, result2, result3] - - -main = df.Orchestrator.create(orchestrator_function) diff --git a/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/function.json b/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/function.json deleted file mode 100644 index 83baac61e..000000000 --- a/tests/endtoend/durable_functions/DurableFunctionsOrchestrator/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "name": "context", - "type": "orchestrationTrigger", - "direction": "in" - } - ] -} diff --git a/tests/endtoend/durable_functions/Hello/__init__.py b/tests/endtoend/durable_functions/Hello/__init__.py deleted file mode 100644 index 15cee6a5b..000000000 --- a/tests/endtoend/durable_functions/Hello/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# This function is not intended to be invoked directly. Instead it will be -# triggered by an orchestrator function. 
-# Before running this sample, please: -# - create a Durable orchestration function -# - create a Durable HTTP starter function -# - add azure-functions-durable to requirements.txt -# - run pip install -r requirements.txt - -def main(name: str) -> str: - return f"Hello {name}!" diff --git a/tests/endtoend/durable_functions/Hello/function.json b/tests/endtoend/durable_functions/Hello/function.json deleted file mode 100644 index 1b03f1100..000000000 --- a/tests/endtoend/durable_functions/Hello/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "name": "name", - "type": "activityTrigger", - "direction": "in" - } - ] -} diff --git a/tests/endtoend/durable_functions/durable_functions_stein/function_app.py b/tests/endtoend/durable_functions/durable_functions_stein/function_app.py deleted file mode 100644 index f0e95135b..000000000 --- a/tests/endtoend/durable_functions/durable_functions_stein/function_app.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -import azure.durable_functions as df -import azure.functions as func - -app = df.DFApp() - - -@app.orchestration_trigger(context_name="context") -def durablefunctionsorchestrator(context): - result1 = yield context.call_activity('Hello', "Tokyo") - result2 = yield context.call_activity('Hello', "Seattle") - result3 = yield context.call_activity('Hello', "London") - return [result1, result2, result3] - - -@app.route(route="orchestrators/{functionName}", - auth_level=func.AuthLevel.ANONYMOUS) -@app.durable_client_input(client_name="client") -async def durable_client(req: func.HttpRequest, client) -> func.HttpResponse: - instance_id = await client.start_new(req.route_params["functionName"], None, - None) - logging.info(f"Started orchestration with ID = '{instance_id}'.") - return client.create_check_status_response(req, instance_id) - - -@app.activity_trigger(input_name="name") -def hello(name: str) -> str: - return f"Hello {name}!" 
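The Durable Functions fixtures deleted above (HTTP starter, orchestrator, activity, and their v2 "stein" equivalent) all expose the starter at orchestrators/{functionName} and answer with the standard Durable Functions check-status payload. A hedged sketch of how a client could drive them against a local Functions host; the base URL and one-second polling interval are assumptions, while statusQueryGetUri and runtimeStatus come from the check-status contract:

    import time

    import requests

    BASE = "http://localhost:7071/api"  # assumed local Functions host

    # Kick off the orchestration through the HTTP starter.
    resp = requests.post(f"{BASE}/orchestrators/DurableFunctionsOrchestrator")
    resp.raise_for_status()
    status_url = resp.json()["statusQueryGetUri"]

    # Poll the status endpoint until the orchestration reaches a terminal state.
    while True:
        status = requests.get(status_url).json()
        if status["runtimeStatus"] in ("Completed", "Failed", "Terminated"):
            break
        time.sleep(1)

    # On success, output is the orchestrator's return value:
    # ["Hello Tokyo!", "Hello Seattle!", "Hello London!"]
    print(status.get("output"))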
diff --git a/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py b/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py deleted file mode 100644 index 94f05bf22..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/function_app.py +++ /dev/null @@ -1,83 +0,0 @@ -import json -from datetime import datetime - -import azure.functions as func - -from azure_functions_worker import logging - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="eventGridTrigger") -@app.event_grid_trigger(arg_name="event") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-eventgrid-triggered.txt", - connection="AzureWebJobsStorage") -def event_grid_trigger(event: func.EventGridEvent) -> str: - logging.info("Event grid function is triggered!") - return json.dumps({ - 'id': event.id, - 'data': event.get_json(), - 'topic': event.topic, - 'subject': event.subject, - 'event_type': event.event_type, - }) - - -@app.function_name(name="eventgrid_output_binding") -@app.route(route="eventgrid_output_binding") -@app.event_grid_output( - arg_name="outputEvent", - topic_endpoint_uri="AzureWebJobsEventGridTopicUri", - topic_key_setting="AzureWebJobsEventGridConnectionKey") -def eventgrid_output_binding( - req: func.HttpRequest, - outputEvent: func.Out[func.EventGridOutputEvent]) -> func.HttpResponse: - test_uuid = req.params.get('test_uuid') - data_to_event_grid = func.EventGridOutputEvent(id="test-id", - data={ - "test_uuid": test_uuid - }, - subject="test-subject", - event_type="test-event-1", - event_time=datetime.utcnow(), - data_version="1.0") - - outputEvent.set(data_to_event_grid) - r_value = "Sent event with subject: {}, id: {}, data: {}, event_type: {} " \ - "to EventGrid!".format(data_to_event_grid.subject, - data_to_event_grid.id, - data_to_event_grid.get_json(), - data_to_event_grid.event_type) - return func.HttpResponse(r_value) - - -@app.function_name(name="eventgrid_output_binding_message_to_blobstore") -@app.queue_trigger(arg_name="msg", queue_name="test-event-grid-storage-queue", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-eventgrid-output-binding.txt", - connection="AzureWebJobsStorage") -def eventgrid_output_binding_message_to_blobstore( - msg: func.QueueMessage) -> bytes: - return msg.get_body() - - -@app.function_name(name="eventgrid_output_binding_success") -@app.route(route="eventgrid_output_binding_success") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-eventgrid-output-binding.txt", - connection="AzureWebJobsStorage") -def eventgrid_output_binding_success( - req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_eventgrid_triggered") -@app.route(route="get_eventgrid_triggered") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-eventgrid-triggered.txt", - connection="AzureWebJobsStorage") -def get_eventgrid_triggered( - req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py b/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py deleted file mode 100644 index 5dff24d80..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_functions_stein/generic/function_app.py +++ /dev/null @@ -1,100 +0,0 @@ -import json -from datetime import 
datetime - -import azure.functions as func - -from azure_functions_worker import logging - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="eventGridTrigger") -@app.event_grid_trigger(arg_name="event", type="eventGridTrigger") -@app.generic_output_binding( - arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-eventgrid-triggered.txt") -def event_grid_trigger(event: func.EventGridEvent) -> str: - logging.info("Event grid function is triggered!") - return json.dumps({ - 'id': event.id, - 'data': event.get_json(), - 'topic': event.topic, - 'subject': event.subject, - 'event_type': event.event_type, - }) - - -@app.function_name(name="eventgrid_output_binding") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="eventgrid_output_binding") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding( - arg_name="outputEvent", - type="eventGrid", - topic_endpoint_uri="AzureWebJobsEventGridTopicUri", - topic_key_setting="AzureWebJobsEventGridConnectionKey") -def eventgrid_output_binding( - req: func.HttpRequest, - outputEvent: func.Out[func.EventGridOutputEvent]) -> func.HttpResponse: - test_uuid = req.params.get('test_uuid') - data_to_event_grid = func.EventGridOutputEvent(id="test-id", - data={ - "test_uuid": test_uuid - }, - subject="test-subject", - event_type="test-event-1", - event_time=datetime.utcnow(), - data_version="1.0") - - outputEvent.set(data_to_event_grid) - r_value = "Sent event with subject: {}, id: {}, data: {}, event_type: {} " \ - "to EventGrid!".format(data_to_event_grid.subject, - data_to_event_grid.id, - data_to_event_grid.get_json(), - data_to_event_grid.event_type) - return func.HttpResponse(r_value) - - -@app.function_name(name="eventgrid_output_binding_message_to_blobstore") -@app.generic_trigger(arg_name="msg", - type="queueTrigger", - queue_name="test-event-grid-storage-queue", - connection="AzureWebJobsStorage") -@app.generic_output_binding( - arg_name="$return", - type="blob", - connection="AzureWebJobsStorage", - path="python-worker-tests/test-eventgrid-output-binding.txt") -def eventgrid_output_binding_message_to_blobstore( - msg: func.QueueMessage) -> bytes: - return msg.get_body() - - -@app.function_name(name="eventgrid_output_binding_success") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="eventgrid_output_binding_success") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - path="python-worker-tests/test-eventgrid-output-binding.txt", - connection="AzureWebJobsStorage") -def eventgrid_output_binding_success( - req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') - - -@app.function_name(name="get_eventgrid_triggered") -@app.generic_trigger(arg_name="req", type="httpTrigger", - route="get_eventgrid_triggered") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding( - arg_name="file", - type="blob", - path="python-worker-tests/test-eventgrid-triggered.txt", - connection="AzureWebJobsStorage") -def get_eventgrid_triggered( - req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding/__init__.py b/tests/endtoend/eventgrid_functions/eventgrid_output_binding/__init__.py deleted file mode 100644 index 817e31396..000000000 --- 
a/tests/endtoend/eventgrid_functions/eventgrid_output_binding/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -from datetime import datetime - -import azure.functions as func - - -def main(req: func.HttpRequest, - outputEvent: func.Out[func.EventGridOutputEvent]) -> func.HttpResponse: - test_uuid = req.params.get('test_uuid') - data_to_event_grid = func.EventGridOutputEvent(id="test-id", - data={ - "test_uuid": test_uuid - }, - subject="test-subject", - event_type="test-event-1", - event_time=datetime.utcnow(), - data_version="1.0") - - outputEvent.set(data_to_event_grid) - r_value = "Sent event with subject: {}, id: {}, data: {}, event_type: {} " \ - "to EventGrid!".format(data_to_event_grid.subject, - data_to_event_grid.id, - data_to_event_grid.get_json(), - data_to_event_grid.event_type) - return func.HttpResponse(r_value) diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding/function.json b/tests/endtoend/eventgrid_functions/eventgrid_output_binding/function.json deleted file mode 100644 index 1c0343465..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_output_binding/function.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "eventGrid", - "name": "outputEvent", - "topicEndpointUri": "AzureWebJobsEventGridTopicUri", - "topicKeySetting": "AzureWebJobsEventGridConnectionKey", - "direction": "out" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/__init__.py b/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/__init__.py deleted file mode 100644 index b0d9c3096..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import azure.functions as func - - -def main(msg: func.QueueMessage) -> bytes: - return msg.get_body() diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json b/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json deleted file mode 100644 index f25661fdb..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_message_to_blobstore/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "name": "msg", - "type": "queueTrigger", - "direction": "in", - "queueName": "test-event-grid-storage-queue", - "connection": "AzureWebJobsStorage" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventgrid-output-binding.txt" - } - ] -} diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/__init__.py b/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/__init__.py deleted file mode 100644 index 7e9725f0b..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import azure.functions as func - - -def main(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json b/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json deleted file mode 100644 index e63945d3a..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_output_binding_success/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventgrid-output-binding.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/eventgrid_functions/eventgrid_trigger/__init__.py b/tests/endtoend/eventgrid_functions/eventgrid_trigger/__init__.py deleted file mode 100644 index b2b414623..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_trigger/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func - - -def main(event: func.EventGridEvent) -> str: - return json.dumps({ - 'id': event.id, - 'data': event.get_json(), - 'topic': event.topic, - 'subject': event.subject, - 'event_type': event.event_type, - }) diff --git a/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json b/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json deleted file mode 100644 index bf33c7072..000000000 --- a/tests/endtoend/eventgrid_functions/eventgrid_trigger/function.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventGridTrigger", - "direction": "in", - "name": "event" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventgrid-triggered.txt" - } - ] -} diff --git a/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json b/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json deleted file mode 100644 index 2c2727754..000000000 --- a/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/function.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "authLevel": "anonymous" - }, - { - "type": "blob", - "direction": "in", - "name": "file", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventgrid-triggered.txt" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/main.py b/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/main.py deleted file mode 100644 index 167c7a574..000000000 --- a/tests/endtoend/eventgrid_functions/get_eventgrid_triggered/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as func - - -def main(req: func.HttpRequest, file: func.InputStream) -> str: - return file.read().decode('utf-8') diff --git a/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py b/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py deleted file mode 100644 index e6174cae9..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func -import cv2 -import dotenv -import numpy as np -import plotly -import requests -from pandas import DataFrame -from sklearn.datasets import load_iris - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="dotenv_func") -def dotenv_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "found" if "load_dotenv" in dotenv.__all__ else "not found" - - return func.HttpResponse(res) - - -@app.route(route="numpy_func") -def numpy_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "numpy version: {}".format(np.__version__) - - return func.HttpResponse(res) - - -@app.route(route="opencv_func") -def opencv_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "opencv version: {}".format(cv2.__version__) - - return func.HttpResponse(res) - - -@app.route(route="pandas_func") -def pandas_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - array = np.arange(6).reshape(3, 2) - df = DataFrame(array, columns=['x', 'y'], index=['T1', 'T2', 'T3']) - - res = "two-dimensional DataFrame: \n {}".format(df) - - return func.HttpResponse(res) - - -@app.route(route="plotly_func") -def plotly_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "plotly version: {}".format(plotly.__version__) - - return func.HttpResponse(res) - - -@app.route(route="requests_func") -def requests_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - req = requests.get('https://github.com') - res = "req status code: {}".format(req.status_code) - - return func.HttpResponse(res) - - -@app.route(route="sklearn_func") -def sklearn_func(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - iris = load_iris() - - res = "First 5 records of array: \n {}".format(iris.data[:5]) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/dotenv_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/dotenv_func/__init__.py deleted file mode 100644 index 2046b551a..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/dotenv_func/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func -import dotenv - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "found" if "load_dotenv" in dotenv.__all__ else "not found" - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/dotenv_func/function.json b/tests/endtoend/http_functions/common_libs_functions/dotenv_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/dotenv_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py deleted file mode 100644 index 588ebb061..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func -import numpy as np - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "numpy version: {}".format(np.__version__) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/numpy_func/function.json b/tests/endtoend/http_functions/common_libs_functions/numpy_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/numpy_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/opencv_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/opencv_func/__init__.py deleted file mode 100644 index e102878b9..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/opencv_func/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func -import cv2 - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "opencv version: {}".format(cv2.__version__) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/opencv_func/function.json b/tests/endtoend/http_functions/common_libs_functions/opencv_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/opencv_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/pandas_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/pandas_func/__init__.py deleted file mode 100644 index 4d6692015..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/pandas_func/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func -import numpy as np -from pandas import DataFrame - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - array = np.arange(6).reshape(3, 2) - df = DataFrame(array, columns=['x', 'y'], index=['T1', 'T2', 'T3']) - - res = "two-dimensional DataFrame: \n {}".format(df) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/pandas_func/function.json b/tests/endtoend/http_functions/common_libs_functions/pandas_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/pandas_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/plotly_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/plotly_func/__init__.py deleted file mode 100644 index 4d93da913..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/plotly_func/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func -import plotly - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - res = "plotly version: {}".format(plotly.__version__) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/plotly_func/function.json b/tests/endtoend/http_functions/common_libs_functions/plotly_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/plotly_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/requests_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/requests_func/__init__.py deleted file mode 100644 index ffee90749..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/requests_func/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func -import requests - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - req = requests.get('https://github.com') - res = "req status code: {}".format(req.status_code) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/requests_func/function.json b/tests/endtoend/http_functions/common_libs_functions/requests_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/requests_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/common_libs_functions/sklearn_func/__init__.py b/tests/endtoend/http_functions/common_libs_functions/sklearn_func/__init__.py deleted file mode 100644 index 578e925e8..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/sklearn_func/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func -from sklearn.datasets import load_iris - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - iris = load_iris() - - res = "First 5 records of array: \n {}".format(iris.data[:5]) - - return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/sklearn_func/function.json b/tests/endtoend/http_functions/common_libs_functions/sklearn_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/common_libs_functions/sklearn_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/default_template/__init__.py b/tests/endtoend/http_functions/default_template/__init__.py deleted file mode 100644 index 464477bb9..000000000 --- a/tests/endtoend/http_functions/default_template/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# flake8: noqa -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse(f"Hello, {name}. This HTTP triggered function executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.", - status_code=200 - ) diff --git a/tests/endtoend/http_functions/default_template/function.json b/tests/endtoend/http_functions/default_template/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/default_template/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/http_func/__init__.py b/tests/endtoend/http_functions/http_func/__init__.py deleted file mode 100644 index 0743efe9a..000000000 --- a/tests/endtoend/http_functions/http_func/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import time -from datetime import datetime - -# flake8: noqa -import azure.functions as func - - -def main(req: func.HttpRequest) -> func.HttpResponse: - time.sleep(1) - - current_time = datetime.now().strftime("%H:%M:%S") - return func.HttpResponse(f"{current_time}") diff --git a/tests/endtoend/http_functions/http_func/function.json b/tests/endtoend/http_functions/http_func/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/http_func/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/http_functions_stein/function_app.py b/tests/endtoend/http_functions/http_functions_stein/function_app.py deleted file mode 100644 index f60697475..000000000 --- a/tests/endtoend/http_functions/http_functions_stein/function_app.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging -import time -from datetime import datetime - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="default_template") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. " - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) - - -@app.route(route="http_func") -def http_func(req: func.HttpRequest) -> func.HttpResponse: - time.sleep(1) - - current_time = datetime.now().strftime("%H:%M:%S") - return func.HttpResponse(f"{current_time}") diff --git a/tests/endtoend/http_functions/user_thread_logging/async_thread/__init__.py b/tests/endtoend/http_functions/user_thread_logging/async_thread/__init__.py deleted file mode 100644 index 64da1d3ae..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/async_thread/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -# flake8: noqa -import logging -import threading - -import azure.functions as func - - -async def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse: - logging.info('Before threads.') - - t1 = threading.Thread(target=thread_function, args=(context, 'Thread1 used.')) - t2 = threading.Thread(target=thread_function, args=(context, 'Thread2 used.')) - t3 = threading.Thread(target=thread_function, args=(context, 'Thread3 used.')) - - t1.start() - t2.start() - t3.start() - - t1.join() - t2.join() - t3.join() - - logging.info('After threads.') - - return func.HttpResponse('This HTTP triggered function executed successfully.', status_code=200) - - -def thread_function(context: func.Context, message: str): - context.thread_local_storage.invocation_id = context.invocation_id - logging.info(message) diff --git a/tests/endtoend/http_functions/user_thread_logging/async_thread/function.json b/tests/endtoend/http_functions/user_thread_logging/async_thread/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/async_thread/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/__init__.py b/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/__init__.py deleted file mode 100644 index 3c124f4da..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import concurrent.futures - -# flake8: noqa -import logging - -import azure.functions as func - - -async def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse: - logging.info('Before TPE.') - - with concurrent.futures.ThreadPoolExecutor() as tpe: - tpe.submit(thread_function, context, 'Using TPE.') - - logging.info('After TPE.') - - return func.HttpResponse('This HTTP triggered function executed successfully.', status_code=200) - - -def thread_function(context: func.Context, message: str): - context.thread_local_storage.invocation_id = context.invocation_id - logging.info(message) diff --git a/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/function.json b/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/async_thread_pool_executor/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/user_thread_logging/thread/__init__.py b/tests/endtoend/http_functions/user_thread_logging/thread/__init__.py deleted file mode 100644 index 327b91f83..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/thread/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -# flake8: noqa -import logging -import threading - -import azure.functions as func - - -def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse: - logging.info('Before threads.') - - t1 = threading.Thread(target=thread_function, args=(context, 'Thread1 used.')) - t2 = threading.Thread(target=thread_function, args=(context, 'Thread2 used.')) - t3 = threading.Thread(target=thread_function, args=(context, 'Thread3 used.')) - - t1.start() - t2.start() - t3.start() - - t1.join() - t2.join() - t3.join() - - logging.info('After threads.') - - return func.HttpResponse('This HTTP triggered function executed successfully.', status_code=200) - - -def thread_function(context: func.Context, message: str): - context.thread_local_storage.invocation_id = context.invocation_id - logging.info(message) diff --git a/tests/endtoend/http_functions/user_thread_logging/thread/function.json b/tests/endtoend/http_functions/user_thread_logging/thread/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/thread/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/__init__.py b/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/__init__.py deleted file mode 100644 index 4ed94266e..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
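The four user_thread_logging fixtures in this hunk repeat a single idiom: a thread spawned by user code does not inherit the worker's thread-local invocation id, so log records written from it cannot be correlated with the invocation unless the thread first copies context.invocation_id into context.thread_local_storage. A condensed sketch of just that idiom; the run_in_thread wrapper is an illustrative name, not part of the worker or the library:

    import logging
    import threading

    import azure.functions as func

    def run_in_thread(context: func.Context, work, *args) -> threading.Thread:
        def wrapper():
            # Without this assignment the worker cannot attribute this
            # thread's log records to the current invocation.
            context.thread_local_storage.invocation_id = context.invocation_id
            work(*args)

        t = threading.Thread(target=wrapper)
        t.start()
        return t

    # Inside a function body: run_in_thread(context, logging.info, 'in thread')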
- -import concurrent.futures - -# flake8: noqa -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse: - logging.info('Before TPE.') - - with concurrent.futures.ThreadPoolExecutor() as tpe: - tpe.submit(thread_function, context, 'Using TPE.') - - logging.info('After TPE.') - - return func.HttpResponse('This HTTP triggered function executed successfully.', status_code=200) - - -def thread_function(context: func.Context, message: str): - context.thread_local_storage.invocation_id = context.invocation_id - logging.info(message) diff --git a/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/function.json b/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/function.json deleted file mode 100644 index 8c4cbe307..000000000 --- a/tests/endtoend/http_functions/user_thread_logging/thread_pool_executor/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get", - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/endtoend/retry_policy_functions/exponential_strategy/function_app.py b/tests/endtoend/retry_policy_functions/exponential_strategy/function_app.py deleted file mode 100644 index 5180119ee..000000000 --- a/tests/endtoend/retry_policy_functions/exponential_strategy/function_app.py +++ /dev/null @@ -1,23 +0,0 @@ -import logging - -from azure.functions import AuthLevel, Context, FunctionApp, TimerRequest - -app = FunctionApp(http_auth_level=AuthLevel.ANONYMOUS) - - -@app.timer_trigger(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.retry(strategy="exponential_backoff", max_retry_count="3", - minimum_interval="00:00:01", - maximum_interval="00:00:02") -def mytimer(mytimer: TimerRequest, context: Context) -> None: - logging.info(f'Current retry count: {context.retry_context.retry_count}') - - if context.retry_context.retry_count == \ - context.retry_context.max_retry_count: - logging.info( - f"Max retries of {context.retry_context.max_retry_count} for " - f"function {context.function_name} has been reached") - else: - raise Exception("This is a retryable exception") diff --git a/tests/endtoend/retry_policy_functions/fixed_strategy/function_app.py b/tests/endtoend/retry_policy_functions/fixed_strategy/function_app.py deleted file mode 100644 index 1f5863f47..000000000 --- a/tests/endtoend/retry_policy_functions/fixed_strategy/function_app.py +++ /dev/null @@ -1,22 +0,0 @@ -import logging - -from azure.functions import AuthLevel, Context, FunctionApp, TimerRequest - -app = FunctionApp(http_auth_level=AuthLevel.ANONYMOUS) - - -@app.timer_trigger(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -@app.retry(strategy="fixed_delay", max_retry_count="3", - delay_interval="00:00:01") -def mytimer(mytimer: TimerRequest, context: Context) -> None: - logging.info(f'Current retry count: {context.retry_context.retry_count}') - - if context.retry_context.retry_count == \ - context.retry_context.max_retry_count: - logging.info( - f"Max retries of {context.retry_context.max_retry_count} for " - f"function {context.function_name} has been reached") - else: - raise Exception("This is a retryable exception") diff --git a/tests/endtoend/sql_functions/sql_functions_stein/function_app.py 
b/tests/endtoend/sql_functions/sql_functions_stein/function_app.py deleted file mode 100644 index 07c78c385..000000000 --- a/tests/endtoend/sql_functions/sql_functions_stein/function_app.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="sql_input/{productid}") -@app.sql_input(arg_name="products", - command_text="SELECT * FROM Products WHERE ProductId = @ProductId", - command_type="Text", - parameters="@ProductId={productid}", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_input(req: func.HttpRequest, products: func.SqlRowList) \ - -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) - - -@app.route(route="sql_input2/{productid}") -@app.sql_input(arg_name="products", - command_text="SELECT * FROM Products2 WHERE ProductId = @ProductId", - command_type="Text", - parameters="@ProductId={productid}", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_input2(req: func.HttpRequest, products: func.SqlRowList) -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) - - -@app.route(route="sql_output") -@app.sql_output(arg_name="r", - command_text="[dbo].[Products]", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_output(req: func.HttpRequest, r: func.Out[func.SqlRow]) -> func.HttpResponse: - body = json.loads(req.get_body()) - row = func.SqlRow.from_dict(body) - r.set(row) - - return func.HttpResponse( - body=req.get_body(), - status_code=201, - mimetype="application/json" - ) - - -@app.sql_trigger(arg_name="changes", - table_name="Products", - connection_string_setting="AzureWebJobsSqlConnectionString") -@app.sql_output(arg_name="r", - command_text="[dbo].[Products2]", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_trigger(changes, r: func.Out[func.SqlRow]) -> str: - row = func.SqlRow.from_dict(json.loads(changes)[0]["Item"]) - r.set(row) - return "OK" diff --git a/tests/endtoend/sql_functions/sql_functions_stein/generic/function_app.py b/tests/endtoend/sql_functions/sql_functions_stein/generic/function_app.py deleted file mode 100644 index 2e796f8bb..000000000 --- a/tests/endtoend/sql_functions/sql_functions_stein/generic/function_app.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import json - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.generic_trigger(arg_name="req", type="httpTrigger", route="sql_input/{productid}") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding(arg_name="products", type="sql", - command_text="SELECT * FROM Products " - "WHERE ProductId = @ProductId", - command_type="Text", - parameters="@ProductId={productid}", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_input(req: func.HttpRequest, products: func.SqlRowList) -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) - - -@app.generic_trigger(arg_name="req", type="httpTrigger", route="sql_input2/{productid}") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_input_binding(arg_name="products", type="sql", - command_text="SELECT * FROM Products2 " - "WHERE ProductId = @ProductId", - command_type="Text", - parameters="@ProductId={productid}", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_input2(req: func.HttpRequest, products: func.SqlRowList) -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) - - -@app.generic_trigger(arg_name="req", type="httpTrigger", route="sql_output") -@app.generic_output_binding(arg_name="$return", type="http") -@app.generic_output_binding(arg_name="r", type="sql", - command_text="[dbo].[Products]", - connection_string_setting="AzureWebJobs" - "SqlConnectionString") -def sql_output(req: func.HttpRequest, r: func.Out[func.SqlRow]) \ - -> func.HttpResponse: - body = json.loads(req.get_body()) - row = func.SqlRow.from_dict(body) - r.set(row) - - return func.HttpResponse( - body=req.get_body(), - status_code=201, - mimetype="application/json" - ) - - -@app.generic_trigger(arg_name="changes", type="sqlTrigger", - table_name="Products", - connection_string_setting="AzureWebJobsSqlConnectionString") -@app.generic_output_binding(arg_name="r", type="sql", - command_text="[dbo].[Products2]", - connection_string_setting="AzureWebJobsSqlConnectionString") -def sql_trigger(changes, r: func.Out[func.SqlRow]) -> str: - row = func.SqlRow.from_dict(json.loads(changes)[0]["Item"]) - r.set(row) - return "OK" diff --git a/tests/endtoend/sql_functions/sql_input/__init__.py b/tests/endtoend/sql_functions/sql_input/__init__.py deleted file mode 100644 index 03c622492..000000000 --- a/tests/endtoend/sql_functions/sql_input/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import json - -import azure.functions as func - - -def main(req: func.HttpRequest, products: func.SqlRowList) -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) diff --git a/tests/endtoend/sql_functions/sql_input/function.json b/tests/endtoend/sql_functions/sql_input/function.json deleted file mode 100644 index 38ec00f6f..000000000 --- a/tests/endtoend/sql_functions/sql_input/function.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "name": "req", - "type": "httpTrigger", - "direction": "in", - "methods": [ - "get" - ], - "route": "sql_input/{productid}" - }, - { - "name": "$return", - "type": "http", - "direction": "out" - }, - { - "name": "products", - "type": "sql", - "direction": "in", - "commandText": "SELECT * FROM Products WHERE ProductId = @ProductId", - "commandType": "Text", - "parameters": "@ProductId={productid}", - "connectionStringSetting": "AzureWebJobsSqlConnectionString" - } - ] -} \ No newline at end of file diff --git a/tests/endtoend/sql_functions/sql_input2/__init__.py b/tests/endtoend/sql_functions/sql_input2/__init__.py deleted file mode 100644 index 03c622492..000000000 --- a/tests/endtoend/sql_functions/sql_input2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import json - -import azure.functions as func - - -def main(req: func.HttpRequest, products: func.SqlRowList) -> func.HttpResponse: - rows = list(map(lambda r: json.loads(r.to_json()), products)) - - return func.HttpResponse( - json.dumps(rows), - status_code=200, - mimetype="application/json" - ) diff --git a/tests/endtoend/sql_functions/sql_input2/function.json b/tests/endtoend/sql_functions/sql_input2/function.json deleted file mode 100644 index 07f8d7722..000000000 --- a/tests/endtoend/sql_functions/sql_input2/function.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "name": "req", - "type": "httpTrigger", - "direction": "in", - "methods": [ - "get" - ], - "route": "sql_input2/{productid}" - }, - { - "name": "$return", - "type": "http", - "direction": "out" - }, - { - "name": "products", - "type": "sql", - "direction": "in", - "commandText": "SELECT * FROM Products2 WHERE ProductId = @ProductId", - "commandType": "Text", - "parameters": "@ProductId={productid}", - "connectionStringSetting": "AzureWebJobsSqlConnectionString" - } - ] -} \ No newline at end of file diff --git a/tests/endtoend/sql_functions/sql_output/__init__.py b/tests/endtoend/sql_functions/sql_output/__init__.py deleted file mode 100644 index 42a21ff24..000000000 --- a/tests/endtoend/sql_functions/sql_output/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import json - -import azure.functions as func - - -def main(req: func.HttpRequest, r: func.Out[func.SqlRow]) -> func.HttpResponse: - body = json.loads(req.get_body()) - row = func.SqlRow.from_dict(body) - r.set(row) - - return func.HttpResponse( - body=req.get_body(), - status_code=201, - mimetype="application/json" - ) diff --git a/tests/endtoend/sql_functions/sql_output/function.json b/tests/endtoend/sql_functions/sql_output/function.json deleted file mode 100644 index 44ede8421..000000000 --- a/tests/endtoend/sql_functions/sql_output/function.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "authLevel": "anonymous", - "name": "req", - "type": "httpTrigger", - "direction": "in", - "methods": [ - "post" - ] - }, - { - "name": "$return", - "type": "http", - "direction": "out" - }, - { - "name": "r", - "type": "sql", - "direction": "out", - "commandText": "[dbo].[Products]", - "connectionStringSetting": "AzureWebJobsSqlConnectionString" - } - ] -} diff --git a/tests/endtoend/sql_functions/sql_trigger/__init__.py b/tests/endtoend/sql_functions/sql_trigger/__init__.py deleted file mode 100644 index 56115c75d..000000000 --- a/tests/endtoend/sql_functions/sql_trigger/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func - - -def main(changes, r: func.Out[func.SqlRow]) -> str: - row = func.SqlRow.from_dict(json.loads(changes)[0]["Item"]) - r.set(row) - return "OK" diff --git a/tests/endtoend/sql_functions/sql_trigger/function.json b/tests/endtoend/sql_functions/sql_trigger/function.json deleted file mode 100644 index db68da83d..000000000 --- a/tests/endtoend/sql_functions/sql_trigger/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - "disabled": false, - "bindings": [ - { - "name": "changes", - "type": "sqlTrigger", - "direction": "in", - "tableName": "dbo.Products", - "connectionStringSetting": "AzureWebJobsSqlConnectionString" - }, - { - "name": "r", - "type": "sql", - "direction": "out", - "commandText": "[dbo].[Products2]", - "connectionStringSetting": "AzureWebJobsSqlConnectionString" - } - ] -} \ No newline at end of file diff --git a/tests/endtoend/test_blueprint_functions.py b/tests/endtoend/test_blueprint_functions.py deleted file mode 100644 index c421f583b..000000000 --- a/tests/endtoend/test_blueprint_functions.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -from tests.utils import testutils - - -class TestFunctionInBluePrintOnly(testutils.WebHostTestCase): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ - 'functions_in_blueprint_only' - - def test_function_in_blueprint_only(self): - r = self.webhost.request('GET', 'default_template') - self.assertTrue(r.ok) - - -class TestFunctionsInBothBlueprintAndFuncApp(testutils.WebHostTestCase): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ - 'functions_in_both_blueprint_functionapp' - - def test_functions_in_both_blueprint_functionapp(self): - r = self.webhost.request('GET', 'default_template') - self.assertTrue(r.ok) - - r = self.webhost.request('GET', 'return_http') - self.assertTrue(r.ok) - - -class TestMultipleFunctionRegisters(testutils.WebHostTestCase): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ - 'multiple_function_registers' - - def test_function_in_blueprint_only(self): - r = self.webhost.request('GET', 'return_http') - self.assertEqual(r.status_code, 404) - - -class TestOnlyBlueprint(testutils.WebHostTestCase): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ - 'only_blueprint' - - def test_only_blueprint(self): - """Test if the default template of Http trigger in Python - Function app - will return OK - """ - r = self.webhost.request('GET', 'default_template') - self.assertEqual(r.status_code, 404) - - -class TestBlueprintDifferentDirectory(testutils.WebHostTestCase): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ - 'blueprint_different_dir' - - def test_blueprint_in_different_dir(self): - r = self.webhost.request('GET', 'default_template') - self.assertTrue(r.ok) - - r = self.webhost.request('GET', 'http_func') - self.assertTrue(r.ok) diff --git a/tests/endtoend/test_cosmosdb_functions.py b/tests/endtoend/test_cosmosdb_functions.py deleted file mode 100644 index 13b32c9cf..000000000 --- a/tests/endtoend/test_cosmosdb_functions.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
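# [Editor's note] A minimal sketch (assumed, not from this diff) of the
# blueprint pattern the tests above exercise: functions declared on a
# func.Blueprint only become routable once registered on the app, which is why
# the blueprint-only and unregistered cases above expect 404 responses.
import azure.functions as func

bp = func.Blueprint()


@bp.route(route="default_template")
def default_template(req: func.HttpRequest) -> func.HttpResponse:
    return func.HttpResponse("OK")


app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
app.register_functions(bp)  # omit this and GET /api/default_template is a 404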
-import json -import time - -from tests.utils import testutils - - -class TestCosmosDBFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'cosmosdb_functions' - - @testutils.retryable_test(3, 5) - def test_cosmosdb_trigger(self): - time.sleep(5) - data = str(round(time.time())) - doc = {'id': 'cosmosdb-trigger-test', - 'data': data} - r = self.webhost.request('POST', 'put_document', - data=json.dumps(doc)) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - max_retries = 10 - - for try_no in range(max_retries): - # Allow trigger to fire - time.sleep(2) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'get_cosmosdb_triggered') - self.assertEqual(r.status_code, 200) - response = r.json() - response.pop('_metadata', None) - - self.assertEqual(response['id'], doc['id']) - self.assertEqual(response['data'], doc['data']) - self.assertTrue('_etag' in response) - self.assertTrue('_lsn' in response) - self.assertTrue('_rid' in response) - self.assertTrue('_self' in response) - self.assertTrue('_ts' in response) - except AssertionError: - if try_no == max_retries - 1: - raise - else: - break - - def test_cosmosdb_input(self): - time.sleep(5) - data = str(round(time.time())) - doc = {'id': 'cosmosdb-input-test', - 'data': data} - r = self.webhost.request('POST', 'put_document', - data=json.dumps(doc)) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - max_retries = 10 - - for try_no in range(max_retries): - # Allow trigger to fire - time.sleep(2) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'cosmosdb_input') - self.assertEqual(r.status_code, 200) - response = r.json() - - # _lsn is present for cosmosdb change feed only, - # ref https://aka.ms/cosmos-change-feed - self.assertEqual(response['id'], doc['id']) - self.assertEqual(response['data'], doc['data']) - self.assertTrue('_etag' in response) - self.assertTrue('_rid' in response) - self.assertTrue('_self' in response) - self.assertTrue('_ts' in response) - except AssertionError: - if try_no == max_retries - 1: - raise - else: - break - - -class TestCosmosDBFunctionsStein(TestCosmosDBFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'cosmosdb_functions' / \ - 'cosmosdb_functions_stein' - - -class TestCosmosDBFunctionsSteinGeneric(TestCosmosDBFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'cosmosdb_functions' / \ - 'cosmosdb_functions_stein' / 'generic' diff --git a/tests/endtoend/test_dependency_isolation_functions.py b/tests/endtoend/test_dependency_isolation_functions.py deleted file mode 100644 index 7ada0ae69..000000000 --- a/tests/endtoend/test_dependency_isolation_functions.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
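# [Editor's note] The poll-until-the-trigger-fires pattern used throughout the
# CosmosDB tests above, factored into a standalone sketch (illustrative only;
# no such helper exists in the test utilities):
import time


def poll_until(check, max_retries=10, delay=2):
    """Re-run `check` until it stops raising AssertionError, or give up."""
    for try_no in range(max_retries):
        time.sleep(delay)  # allow the trigger to fire
        try:
            check()
            return
        except AssertionError:
            if try_no == max_retries - 1:
                raise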
-import importlib.util -import os -from unittest import skip -from unittest.case import skipIf -from unittest.mock import patch - -from requests import Response -from tests.utils import testutils -from tests.utils.constants import ( - CONSUMPTION_DOCKER_TEST, - DEDICATED_DOCKER_TEST, - PYAZURE_INTEGRATION_TEST, -) - -from azure_functions_worker.utils.common import is_envvar_true - -REQUEST_TIMEOUT_SEC = 5 - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - 'Docker tests do not work with dependency isolation ') -class TestGRPCandProtobufDependencyIsolationOnDedicated( - testutils.WebHostTestCase): - """Test the dependency manager E2E scenario via Http Trigger. - - The following E2E tests ensures the dependency manager is behaving as - expected. They are tested against the dependency_isolation_functions/ - folder which contain a dummy .python_packages_grpc_protobuf folder. - This testcase checks if the customers library version of grpc and protobuf - are being loaded in the functionapp - """ - function_name = 'dependency_isolation_functions' - package_name = '.python_packages_grpc_protobuf' - project_root = testutils.E2E_TESTS_ROOT / function_name - customer_deps = project_root / package_name / 'lib' / 'site-packages' - - @classmethod - def setUpClass(cls): - # Turn on feature flag - cls.env_variables['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = '1' - - # Emulate Python worker in Azure environment. - # For how the PYTHONPATH is set in Azure, check prodV4/worker.py. - cls.env_variables['PYTHONPATH'] = str(cls.customer_deps) - - os_environ = os.environ.copy() - os_environ.update(cls.env_variables) - - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(self): - super().tearDownClass() - self._patch_environ.stop() - - @classmethod - def get_script_dir(cls): - return cls.project_root - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - def test_dependency_function_should_return_ok(self): - """The common scenario of general import should return OK in any - circumstances - """ - r: Response = self.webhost.request('GET', 'report_dependencies') - self.assertTrue(r.ok) - - def test_feature_flag_is_turned_on(self): - """Since passing the feature flag PYTHON_ISOLATE_WORKER_DEPENDENCIES to - the host, the customer's function should also be able to receive it - """ - r: Response = self.webhost.request('GET', 'report_dependencies') - environments = r.json()['environments'] - flag_value = environments['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] - self.assertEqual(flag_value, '1') - - def test_working_directory_resolution(self): - """Check from the dependency manager and see if the current working - directory is resolved correctly - """ - r: Response = self.webhost.request('GET', 'report_dependencies') - environments = r.json()['environments'] - - dir = os.path.dirname(__file__) - self.assertEqual( - environments['AzureWebJobsScriptRoot'].lower(), - os.path.join(dir, 'dependency_isolation_functions').lower() - ) - - @skipIf(is_envvar_true(PYAZURE_INTEGRATION_TEST), - 'Integration test expects dependencies derived from core ' - 'tools folder') - def test_paths_resolution(self): - """Dependency manager requires paths to be resolved correctly before - switching to customer's modules. This test is to ensure when the app - is in ready state, check if the paths are in good state. 
- """ - r: Response = self.webhost.request('GET', 'report_dependencies') - dm = r.json()['dependency_manager'] - self.assertEqual( - dm['cx_working_dir'].lower(), str(self.project_root).lower() - ) - self.assertEqual( - dm['cx_deps_path'].lower(), str(self.customer_deps).lower() - ) - - # Should derive the package location from the built-in azure.functions - azf_spec = importlib.util.find_spec('azure.functions') - self.assertEqual( - dm['worker_deps_path'].lower(), - os.path.abspath( - os.path.join(os.path.dirname(azf_spec.origin), '..', '..') - ).lower() - ) - - @skipIf(is_envvar_true('skipTest'), - 'Running tests using an editable azure-functions package.') - def test_loading_libraries_from_customers_package(self): - """Since the Python now loaded the customer's dependencies, the - libraries version should match the ones in - .python_packages_grpc_protobuf/ folder - """ - r: Response = self.webhost.request('GET', 'report_dependencies') - libraries = r.json()['libraries'] - self.assertEqual( - libraries['proto.expected.version'], libraries['proto.version'] - ) - - self.assertEqual( - libraries['grpc.expected.version'], libraries['grpc.version'] - ) - - -@skip("Skipping dependency isolation test for dedicated. Needs investigation") -class TestOlderVersionOfAzFuncDependencyIsolationOnDedicated( - testutils.WebHostTestCase): - """Test the dependency manager E2E scenario via Http Trigger. - - The following E2E tests ensures the dependency manager is behaving as - expected. They are tested against the dependency_isolation_functions/ - folder which contain a dummy .python_packages_azf_older_version folder. - This testcase checks if the customers older library version of azure - functions is being loaded in the functionapp - """ - - function_name = 'dependency_isolation_functions' - package_name = '.python_packages_azf_older_version' - project_root = testutils.E2E_TESTS_ROOT / function_name - customer_deps = project_root / package_name / 'lib' / 'site-packages' - expected_azfunc_version = '1.5.0' - - @classmethod - def setUpClass(cls): - os_environ = os.environ.copy() - # Turn on feature flag - os_environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = '1' - # Emulate Python worker in Azure environment. - # For how the PYTHONPATH is set in Azure, check prodV3/worker.py. - os_environ['PYTHONPATH'] = str(cls.customer_deps) - - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(self): - super().tearDownClass() - self._patch_environ.stop() - - @classmethod - def get_script_dir(cls): - return cls.project_root - - def test_loading_libraries_from_customers_package(self): - r: Response = self.webhost.request('GET', 'report_dependencies') - libraries = r.json()['libraries'] - - self.assertEqual( - self.expected_azfunc_version, libraries['func.version']) - - -@skip("Skipping dependency isolation test for dedicated. Needs investigation") -class TestNewerVersionOfAzFuncDependencyIsolationOnDedicated( - testutils.WebHostTestCase): - """Test the dependency manager E2E scenario via Http Trigger. - - The following E2E tests ensures the dependency manager is behaving as - expected. They are tested against the dependency_isolation_functions/ - folder which contain a dummy .python_packages_azf_newer_version folder. 
- This testcase checks if the customers newer library version of azure - functions is being loaded in the functionapp - """ - - function_name = 'dependency_isolation_functions' - package_name = '.python_packages_azf_newer_version' - project_root = testutils.E2E_TESTS_ROOT / function_name - customer_deps = project_root / package_name / 'lib' / 'site-packages' - expected_azfunc_version = '9.9.9' - - @classmethod - def setUpClass(cls): - os_environ = os.environ.copy() - # Turn on feature flag - os_environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = '1' - # Emulate Python worker in Azure environment. - # For how the PYTHONPATH is set in Azure, check prodV3/worker.py. - os_environ['PYTHONPATH'] = str(cls.customer_deps) - - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(self): - super().tearDownClass() - self._patch_environ.stop() - - @classmethod - def get_script_dir(cls): - return cls.project_root - - def test_loading_libraries_from_customers_package(self): - r: Response = self.webhost.request('GET', 'report_dependencies') - libraries = r.json()['libraries'] - - self.assertEqual( - self.expected_azfunc_version, libraries['func.version']) diff --git a/tests/endtoend/test_durable_functions.py b/tests/endtoend/test_durable_functions.py deleted file mode 100644 index 8cab19b6f..000000000 --- a/tests/endtoend/test_durable_functions.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import os -import time -from unittest import skipIf - -import requests -from tests.utils import testutils -from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST - -from azure_functions_worker.utils.common import is_envvar_true - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Docker tests cannot retrieve port needed for a webhook") -class TestDurableFunctions(testutils.WebHostTestCase): - - @classmethod - def setUpClass(cls): - os.environ["WEBSITE_HOSTNAME"] = "http:" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - # Remove the WEBSITE_HOSTNAME environment variable - os.environ.pop('WEBSITE_HOSTNAME') - super().tearDownClass() - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - @classmethod - def get_libraries_to_install(cls): - return ['azure-functions-durable'] - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'durable_functions' - - @testutils.retryable_test(3, 5) - def test_durable(self): - r = self.webhost.request('GET', - 'orchestrators/DurableFunctionsOrchestrator') - time.sleep(4) # wait for the activity to complete - self.assertEqual(r.status_code, 202) - content = json.loads(r.content) - - status = requests.get(content['statusQueryGetUri']) - self.assertEqual(status.status_code, 200) - - status_content = json.loads(status.content) - self.assertEqual(status_content['runtimeStatus'], 'Completed') - self.assertEqual(status_content['output'], - ['Hello Tokyo!', 'Hello Seattle!', 'Hello London!']) - - -class TestDurableFunctionsStein(TestDurableFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'durable_functions' / \ - 'durable_functions_stein' diff --git a/tests/endtoend/test_eventgrid_functions.py b/tests/endtoend/test_eventgrid_functions.py deleted file mode 100644 index 7a878ca32..000000000 --- 
a/tests/endtoend/test_eventgrid_functions.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import time -import unittest -import uuid - -import requests -from tests.utils import testutils - - -class TestEventGridFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'eventgrid_functions' - - def eventgrid_webhook_request(self, meth, funcname, *args, **kwargs): - request_method = getattr(requests, meth.lower()) - url = f'{self.webhost._addr}/runtime/webhooks/eventgrid' - params = dict(kwargs.pop('params', {})) - params['functionName'] = funcname - if 'code' not in params: - params['code'] = 'testSystemKey' - headers = dict(kwargs.pop('headers', {})) - headers['aeg-event-type'] = 'Notification' - return request_method(url, *args, params=params, headers=headers, - **kwargs) - - @unittest.skip("Run locally. Running on Azure fails with 401/403 as the" - "host does not pick up the SecretKey from the" - "azure_functions_worker.testutils.py.SECRETS_TEMPLATE and" - "because of which we cannot test eventGrid webhook" - "invocation correctly.") - def test_eventgrid_trigger(self): - """test event_grid trigger - - This test calls the eventgrid_trigger function, sends in `data` as body - to the webhook for eventgrid. Once the event is received, the function - writes the data to the blob store. - - Then get_eventgrid_triggered gets called (httpTrigger) and takes blob - input binding, reading the previously written text in blob store - `python-worker-tests/test-eventgrid-triggered.txt`, and then we validate - that the written text matches the one passed to the eventgrid trigger. - """ - data = [{ - "topic": "test-topic", - "subject": "test-subject", - "eventType": "Microsoft.Storage.BlobCreated", - "eventTime": "2018-01-01T00:00:00.000000123Z", - "id": str(uuid.uuid4()), - "data": { - "api": "PutBlockList", - "clientRequestId": "2c169f2f-7b3b-4d99-839b-c92a2d25801b", - "requestId": "44d4f022-001e-003c-466b-940cba000000", - "eTag": "0x8D562831044DDD0", - "contentType": "application/octet-stream", - "contentLength": 2248, - "blobType": "BlockBlob", - "ur1": "foo", - "sequencer": "000000000000272D000000000003D60F", - "storageDiagnostics": { - "batchId": "b4229b3a-4d50-4ff4-a9f2-039ccf26efe9" - } - }, - "dataVersion": "", - "metadataVersion": "1" - }] - - r = self.eventgrid_webhook_request('POST', 'eventgrid_trigger', - json=data) - self.assertEqual(r.status_code, 202) - - max_retries = 10 - - for try_no in range(max_retries): - # Allow trigger to fire. - time.sleep(2) - - try: - # Check that the trigger has fired. - r = self.webhost.request('GET', 'get_eventgrid_triggered') - self.assertEqual(r.status_code, 200) - - response = r.json() - self.assertLessEqual(response.items(), data[0].items()) - except AssertionError: - if try_no == max_retries - 1: - raise - else: - break - - def test_eventgrid_output_binding(self): - """test event_grid output binding - - This test needs three functions to work. - 1. `eventgrid_output_binding` - 2. `eventgrid_output_binding_message_to_blobstore` - 3. `eventgrid_output_binding_success` - - This test calls the eventgrid_output_binding function, sends in a unique - uuid as `data` in the body to the httpTrigger which sends in that value - in the eventGrid output data. The eventGrid topic is configured to - send the event to a storage queue. 
- - The second function (`eventgrid_output_binding_message_to_blobstore`) - reads from that storage queue and puts into a blob store. - - The third function (`eventgrid_output_binding_success`) reads the - text from the blob store and compares with the expected result. The - unique uuid should confirm if the message went through correctly to - EventGrid and came back as a blob. - """ - - test_uuid = uuid.uuid4().__str__() - expected_response = "Sent event with subject: {}, id: {}, data: {}, " \ - "event_type: {} to EventGrid!".format( - "test-subject", "test-id", - f"{{'test_uuid': '{test_uuid}'}}", - "test-event-1") - expected_final_data = { - 'id': 'test-id', 'subject': 'test-subject', 'dataVersion': '1.0', - 'eventType': 'test-event-1', - 'data': {'test_uuid': test_uuid} - } - - r = self.webhost.request('GET', 'eventgrid_output_binding', - params={'test_uuid': test_uuid}) - self.assertEqual(r.status_code, 200) - response = r.text - - self.assertEqual(expected_response, response) - - max_retries = 10 - for try_no in range(max_retries): - # Allow trigger to fire. - time.sleep(2) - - try: - # Check that the trigger has fired. - r = self.webhost.request('GET', - 'eventgrid_output_binding_success') - self.assertEqual(r.status_code, 200) - response = r.json() - - # list of fields to check are limited as other fields contain - # datetime or other uncertain values - for f in ['data', 'id', 'eventType', 'subject', 'dataVersion']: - self.assertEqual(response[f], expected_final_data[f]) - - except AssertionError: - if try_no == max_retries - 1: - raise - else: - break - - -class TestEventGridFunctionsStein(TestEventGridFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'eventgrid_functions' / \ - 'eventgrid_functions_stein' - - -class TestEventGridFunctionsGeneric(TestEventGridFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'eventgrid_functions' / \ - 'eventgrid_functions_stein' / 'generic' diff --git a/tests/endtoend/test_file_name_functions.py b/tests/endtoend/test_file_name_functions.py deleted file mode 100644 index faf3b722f..000000000 --- a/tests/endtoend/test_file_name_functions.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os - -import requests -from tests.utils import testutils - -from azure_functions_worker.constants import PYTHON_SCRIPT_FILE_NAME - -REQUEST_TIMEOUT_SEC = 10 - - -class TestHttpFunctionsFileName(testutils.WebHostTestCase): - """Test the native Http Trigger in the local webhost. - - This test class will spawn a webhost from your /build/webhost - folder and replace the built-in Python with azure_functions_worker from - your code base. Since the Http Trigger is a native suport from host, we - don't need to setup any external resources. - - Compared to the unittests/test_http_functions.py, this file is more focus - on testing the E2E flow scenarios. 
- """ - - @classmethod - def setUpClass(cls): - os.environ["PYTHON_SCRIPT_FILE_NAME"] = "main.py" - cls.env_variables['PYTHON_SCRIPT_FILE_NAME'] = 'main.py' - super().setUpClass() - - @classmethod - def tearDownClass(cls): - # Remove the PYTHON_SCRIPT_FILE_NAME environment variable - os.environ.pop('PYTHON_SCRIPT_FILE_NAME') - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_stein' / \ - 'file_name' - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - @testutils.retryable_test(3, 5) - def test_index_page_should_return_ok(self): - """The index page of Azure Functions should return OK in any - circumstances - """ - r = self.webhost.request('GET', '', no_prefix=True, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - @testutils.retryable_test(3, 5) - def test_default_http_template_should_return_ok(self): - """Test if the default template of Http trigger in Python Function app - will return OK - """ - r = self.webhost.request('GET', 'default_template', - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - @testutils.retryable_test(3, 5) - def test_default_http_template_should_accept_query_param(self): - """Test if the azure.functions SDK is able to deserialize query - parameter from the default template - """ - r = self.webhost.request('GET', 'default_template', - params={'name': 'query'}, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual( - r.content, - b'Hello, query. This HTTP triggered function executed successfully.' - ) - - @testutils.retryable_test(3, 5) - def test_default_http_template_should_accept_body(self): - """Test if the azure.functions SDK is able to deserialize http body - and pass it to default template - """ - r = self.webhost.request('POST', 'default_template', - data='{ "name": "body" }'.encode('utf-8'), - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual( - r.content, - b'Hello, body. This HTTP triggered function executed successfully.' - ) - - @testutils.retryable_test(3, 5) - def test_worker_status_endpoint_should_return_ok(self): - """Test if the worker status endpoint will trigger - _handle__worker_status_request and sends a worker status response back - to host - """ - root_url = self.webhost._addr - health_check_url = f'{root_url}/admin/host/ping' - r = requests.post(health_check_url, - params={'checkHealth': '1'}, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - @testutils.retryable_test(3, 5) - def test_worker_status_endpoint_should_return_ok_when_disabled(self): - """Test if the worker status endpoint will trigger - _handle__worker_status_request and sends a worker status response back - to host - """ - os.environ['WEBSITE_PING_METRICS_SCALE_ENABLED'] = '0' - root_url = self.webhost._addr - health_check_url = f'{root_url}/admin/host/ping' - r = requests.post(health_check_url, - params={'checkHealth': '1'}, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - def test_correct_file_name(self): - self.assertIsNotNone(os.environ.get(PYTHON_SCRIPT_FILE_NAME)) - self.assertEqual(os.environ.get(PYTHON_SCRIPT_FILE_NAME), - 'main.py') diff --git a/tests/endtoend/test_http_functions.py b/tests/endtoend/test_http_functions.py deleted file mode 100644 index 3128dfd38..000000000 --- a/tests/endtoend/test_http_functions.py +++ /dev/null @@ -1,289 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
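# [Editor's note] Context for the file-name tests above (an assumed layout, not
# part of this diff): setting PYTHON_SCRIPT_FILE_NAME=main.py makes the worker
# index main.py instead of the default function_app.py, so the app under
# http_functions_stein/file_name/ is an ordinary v2 app in a renamed file,
# along the lines of this hedged sketch:
import azure.functions as func

app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)


@app.route(route="default_template")
def default_template(req: func.HttpRequest) -> func.HttpResponse:
    name = req.params.get("name", "world")
    return func.HttpResponse(
        f"Hello, {name}. This HTTP triggered function executed successfully.")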
-import os
-import typing
-from unittest.mock import patch
-
-import requests
-from tests.utils import testutils
-
-from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING
-
-REQUEST_TIMEOUT_SEC = 5
-
-
-class TestHttpFunctions(testutils.WebHostTestCase):
-    """Test the native Http Trigger in the local webhost.
-
-    This test class will spawn a webhost from your <project_root>/build/webhost
-    folder and replace the built-in Python with azure_functions_worker from
-    your code base. Since the Http Trigger is natively supported by the host,
-    we don't need to set up any external resources.
-
-    Compared to the unittests/test_http_functions.py, this file is more
-    focused on testing the E2E flow scenarios.
-    """
-
-    def setUp(self):
-        self._patch_environ = patch.dict('os.environ', os.environ.copy())
-        self._patch_environ.start()
-        super().setUp()
-
-    def tearDown(self):
-        super().tearDown()
-        self._patch_environ.stop()
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'http_functions'
-
-    @testutils.retryable_test(3, 5)
-    def test_function_index_page_should_return_ok(self):
-        """The index page of Azure Functions should return OK in any
-        circumstances
-        """
-        r = self.webhost.request('GET', '', no_prefix=True,
-                                 timeout=REQUEST_TIMEOUT_SEC)
-        self.assertTrue(r.ok)
-
-    @testutils.retryable_test(3, 5)
-    def test_default_http_template_should_return_ok(self):
-        """Test if the default template of Http trigger in Python Function app
-        will return OK
-        """
-        r = self.webhost.request('GET', 'default_template',
-                                 timeout=REQUEST_TIMEOUT_SEC)
-        self.assertTrue(r.ok)
-
-    @testutils.retryable_test(3, 5)
-    def test_default_http_template_should_accept_query_param(self):
-        """Test if the azure.functions SDK is able to deserialize query
-        parameter from the default template
-        """
-        r = self.webhost.request('GET', 'default_template',
-                                 params={'name': 'query'},
-                                 timeout=REQUEST_TIMEOUT_SEC)
-        self.assertTrue(r.ok)
-        self.assertEqual(
-            r.content,
-            b'Hello, query. This HTTP triggered function executed successfully.'
-        )
-
-    @testutils.retryable_test(3, 5)
-    def test_default_http_template_should_accept_body(self):
-        """Test if the azure.functions SDK is able to deserialize http body
-        and pass it to default template
-        """
-        r = self.webhost.request('POST', 'default_template',
-                                 data='{ "name": "body" }'.encode('utf-8'),
-                                 timeout=REQUEST_TIMEOUT_SEC)
-        self.assertTrue(r.ok)
-        self.assertEqual(
-            r.content,
-            b'Hello, body. This HTTP triggered function executed successfully.'
- ) - - @testutils.retryable_test(3, 5) - def test_worker_status_endpoint_should_return_ok(self): - """Test if the worker status endpoint will trigger - _handle__worker_status_request and sends a worker status response back - to host - """ - root_url = self.webhost._addr - health_check_url = f'{root_url}/admin/host/ping' - r = requests.post(health_check_url, - params={'checkHealth': '1'}, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - @testutils.retryable_test(3, 5) - def test_worker_status_endpoint_should_return_ok_when_disabled(self): - """Test if the worker status endpoint will trigger - _handle__worker_status_request and sends a worker status response back - to host - """ - os.environ['WEBSITE_PING_METRICS_SCALE_ENABLED'] = '0' - root_url = self.webhost._addr - health_check_url = f'{root_url}/admin/host/ping' - r = requests.post(health_check_url, - params={'checkHealth': '1'}, - timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - - -class TestHttpFunctionsStein(TestHttpFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_stein' - - -class TestHttpFunctionsSteinGeneric(TestHttpFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_stein' / \ - 'generic' - - -class TestCommonLibsHttpFunctions(testutils.WebHostTestCase): - """Test the common libs scenarios in the local webhost. - - This test class will spawn a webhost from your /build/webhost - folder and replace the built-in Python with azure_functions_worker from - your code base. this file is more focus on testing the E2E flow scenarios. - """ - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'common_libs_functions' - - @classmethod - def get_libraries_to_install(cls): - return ['requests', 'python-dotenv', "plotly", "scikit-learn", - "opencv-python", "pandas", "numpy"] - - @testutils.retryable_test(3, 5) - def test_numpy(self): - r = self.webhost.request('GET', 'numpy_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertIn("numpy version", r.content.decode("UTF-8")) - - def test_requests(self): - r = self.webhost.request('GET', 'requests_func', - timeout=10) - - self.assertTrue(r.ok) - self.assertEqual(r.content.decode("UTF-8"), 'req status code: 200') - - def test_pandas(self): - r = self.webhost.request('GET', 'pandas_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertIn("two-dimensional", - r.content.decode("UTF-8")) - - def test_sklearn(self): - r = self.webhost.request('GET', 'sklearn_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertIn("First 5 records of array:", - r.content.decode("UTF-8")) - - def test_opencv(self): - r = self.webhost.request('GET', 'opencv_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertIn("opencv version:", - r.content.decode("UTF-8")) - - def test_dotenv(self): - r = self.webhost.request('GET', 'dotenv_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertEqual(r.content.decode("UTF-8"), "found") - - def test_plotly(self): - r = self.webhost.request('GET', 'plotly_func', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertIn("plotly version:", - r.content.decode("UTF-8")) - - -class TestCommonLibsHttpFunctionsStein(TestCommonLibsHttpFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'common_libs_functions' / \ - 'common_libs_functions_stein' - - -class TestHttpFunctionsWithInitIndexing(TestHttpFunctions): - - 
@classmethod - def setUpClass(cls): - cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1' - os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.pop(PYTHON_ENABLE_INIT_INDEXING) - super().tearDownClass() - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - -class TestUserThreadLoggingHttpFunctions(testutils.WebHostTestCase): - """Test the Http trigger that contains logging with user threads. - - This test class will spawn a webhost from your /build/webhost - folder and replace the built-in Python with azure_functions_worker from - your code base. this file is more focus on testing the E2E flow scenarios. - """ - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'user_thread_logging' - - @testutils.retryable_test(3, 5) - def test_http_thread(self): - r = self.webhost.request('GET', 'thread', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertTrue(r.ok) - - def check_log_http_thread(self, host_out: typing.List[str]): - self.assertEqual(host_out.count('Before threads.'), 1) - self.assertEqual(host_out.count('Thread1 used.'), 1) - self.assertEqual(host_out.count('Thread2 used.'), 1) - self.assertEqual(host_out.count('Thread3 used.'), 1) - self.assertEqual(host_out.count('After threads.'), 1) - - @testutils.retryable_test(3, 5) - def test_http_async_thread(self): - r = self.webhost.request('GET', 'async_thread', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertTrue(r.ok) - - def check_log_http_async_thread(self, host_out: typing.List[str]): - self.assertEqual(host_out.count('Before threads.'), 1) - self.assertEqual(host_out.count('Thread1 used.'), 1) - self.assertEqual(host_out.count('Thread2 used.'), 1) - self.assertEqual(host_out.count('Thread3 used.'), 1) - self.assertEqual(host_out.count('After threads.'), 1) - - @testutils.retryable_test(3, 5) - def test_http_thread_pool_executor(self): - r = self.webhost.request('GET', 'thread_pool_executor', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertTrue(r.ok) - - def check_log_http_thread_pool_executor(self, host_out: typing.List[str]): - self.assertEqual(host_out.count('Before TPE.'), 1) - self.assertEqual(host_out.count('Using TPE.'), 1) - self.assertEqual(host_out.count('After TPE.'), 1) - - @testutils.retryable_test(3, 5) - def test_http_async_thread_pool_executor(self): - r = self.webhost.request('GET', 'async_thread_pool_executor', - timeout=REQUEST_TIMEOUT_SEC) - - self.assertTrue(r.ok) - - def check_log_http_async_thread_pool_executor(self, - host_out: typing.List[str]): - self.assertEqual(host_out.count('Before TPE.'), 1) - self.assertEqual(host_out.count('Using TPE.'), 1) - self.assertEqual(host_out.count('After TPE.'), 1) diff --git a/tests/endtoend/test_retry_policy_functions.py b/tests/endtoend/test_retry_policy_functions.py deleted file mode 100644 index 58851f353..000000000 --- a/tests/endtoend/test_retry_policy_functions.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import time -import typing - -from tests.utils import testutils - - -class TestFixedRetryPolicyFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'retry_policy_functions' / \ - 'fixed_strategy' - - def test_fixed_retry_policy(self): - # Checking webhost status. 
- time.sleep(5) - r = self.webhost.request('GET', '', no_prefix=True) - self.assertTrue(r.ok) - - def check_log_fixed_retry_policy(self, host_out: typing.List[str]): - self.assertIn('Current retry count: 0', host_out) - self.assertIn('Current retry count: 1', host_out) - self.assertIn("Max retries of 3 for function mytimer" - " has been reached", host_out) - - -class TestExponentialRetryPolicyFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'retry_policy_functions' / \ - 'exponential_strategy' - - def test_retry_policy(self): - # Checking webhost status. - r = self.webhost.request('GET', '', no_prefix=True, - timeout=5) - time.sleep(5) - self.assertTrue(r.ok) - - def check_log_retry_policy(self, host_out: typing.List[str]): - self.assertIn('Current retry count: 1', host_out) - self.assertIn('Current retry count: 2', host_out) - self.assertIn('Current retry count: 3', host_out) - self.assertIn("Max retries of 3 for function mytimer" - " has been reached", host_out) diff --git a/tests/endtoend/test_sql_functions.py b/tests/endtoend/test_sql_functions.py deleted file mode 100644 index 842f0a27a..000000000 --- a/tests/endtoend/test_sql_functions.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json -import time - -from tests.utils import testutils - - -class TestSqlFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'sql_functions' - - @testutils.retryable_test(3, 5) - def test_sql_binding_trigger(self): - id = str(round(time.time())) - row = {"ProductId": id, "Name": "test", "Cost": 100} - # Insert a row into Products table using sql_output function - r = self.webhost.request('POST', 'sql_output', - data=json.dumps(row)) - self.assertEqual(r.status_code, 201) - - # Check that the row was successfully inserted using sql_input function - r = self.webhost.request('GET', 'sql_input/' + id) - self.assertEqual(r.status_code, 200) - expectedText = "[{\"ProductId\": " + id + \ - ", \"Name\": \"test\", \"Cost\": 100}]" - self.assertEqual(r.text, expectedText) - - # Check that the sql_trigger function has been triggered and - # the row has been inserted into Products2 table using sql_input2 - # function - max_retries = 10 - - for try_no in range(max_retries): - # Allow trigger to fire - time.sleep(2) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'sql_input2/' + id) - self.assertEqual(r.status_code, 200) - expectedText = "[{\"ProductId\": " + id + \ - ", \"Name\": \"test\", \"Cost\": 100}]" - self.assertEqual(r.text, expectedText) - - except AssertionError: - if try_no == max_retries - 1: - raise - else: - break - - -class TestSqlFunctionsStein(TestSqlFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'sql_functions' / \ - 'sql_functions_stein' - - -class TestSqlFunctionsSteinGeneric(TestSqlFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'sql_functions' / \ - 'sql_functions_stein' / 'generic' diff --git a/tests/endtoend/test_third_party_http_functions.py b/tests/endtoend/test_third_party_http_functions.py deleted file mode 100644 index 5fa01a2bd..000000000 --- a/tests/endtoend/test_third_party_http_functions.py +++ /dev/null @@ -1,162 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
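# [Editor's note] The SQL end-to-end flow verified above, in miniature (hedged,
# with illustrative values): insert via sql_output, read back via sql_input,
# then poll sql_input2 until the sqlTrigger has copied the row into Products2.
import json

row = {"ProductId": 42, "Name": "test", "Cost": 100}
expected = json.dumps([row])  # '[{"ProductId": 42, "Name": "test", "Cost": 100}]'
# POST /api/sql_output with json.dumps(row)  -> 201
# GET  /api/sql_input/42                     -> 200, body == expected
# GET  /api/sql_input2/42 (after a delay)    -> 200, body == expected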
-import os
-
-import requests
-from tests.utils import testutils as utils
-from tests.utils.testutils import E2E_TESTS_ROOT
-
-HOST_JSON_TEMPLATE = """\
-{
-    "version": "2.0",
-    "logging": {
-        "logLevel": {
-           "default": "Trace"
-        }
-    },
-    "extensions": {
-        "http": {
-            "routePrefix": ""
-        }
-    },
-    "functionTimeout": "00:05:00"
-}
-"""
-
-
-class ThirdPartyHttpFunctionsTestBase:
-    """Base test class containing common asgi/wsgi testcases; only testcases
-    in classes extending TestThirdPartyHttpFunctions will be run"""
-
-    class TestThirdPartyHttpFunctions(utils.WebHostTestCase):
-        @classmethod
-        def setUpClass(cls):
-            host_json = cls.get_script_dir() / 'host.json'
-            with open(host_json, 'w+') as f:
-                f.write(HOST_JSON_TEMPLATE)
-            super().setUpClass()
-
-        @classmethod
-        def tearDownClass(cls):
-            super().tearDownClass()
-
-        @classmethod
-        def get_script_dir(cls):
-            pass
-
-        @classmethod
-        def get_libraries_to_install(cls):
-            libraries_required = ["flask", "fastapi"]
-            return libraries_required
-
-        @utils.retryable_test(3, 5)
-        def test_function_index_page_should_return_undefined(self):
-            root_url = self.webhost._addr
-            r = requests.get(root_url)
-            self.assertEqual(r.status_code, 404)
-
-        @utils.retryable_test(3, 5)
-        def test_get_endpoint_should_return_ok(self):
-            """Test if the GET endpoint of the asgi/wsgi app returns the
-            default response when no query parameter is passed
-            """
-            r = self.webhost.request('GET', 'get_query_param', no_prefix=True)
-            self.assertTrue(r.ok)
-            self.assertEqual(r.text, "hello world")
-
-        @utils.retryable_test(3, 5)
-        def test_get_endpoint_should_accept_query_param(self):
-            """Test if the azure.functions SDK is able to deserialize query
-            parameter from the default template
-            """
-            r = self.webhost.request('GET', 'get_query_param',
-                                     params={'name': 'dummy'}, no_prefix=True)
-            self.assertTrue(r.ok)
-            self.assertEqual(
-                r.text,
-                "hello dummy"
-            )
-
-        @utils.retryable_test(3, 5)
-        def test_post_endpoint_should_accept_body(self):
-            """Test if the azure.functions SDK is able to deserialize http body
-            and pass it to default template
-            """
-            r = self.webhost.request('POST', 'post_str',
-                                     data="dummy",
-                                     headers={'content-type': 'text/plain'},
-                                     no_prefix=True)
-            self.assertTrue(r.ok)
-            self.assertEqual(
-                r.text,
-                "hello dummy"
-            )
-
-        @utils.retryable_test(3, 5)
-        def test_worker_status_endpoint_should_return_ok(self):
-            """Test if the worker status endpoint will trigger
-            _handle__worker_status_request and send a worker status
-            response back to the host
-            """
-            root_url = self.webhost._addr
-            health_check_url = f'{root_url}/admin/host/ping'
-            r = requests.post(health_check_url,
-                              params={'checkHealth': '1'})
-            self.assertTrue(r.ok)
-
-        @utils.retryable_test(3, 5)
-        def test_worker_status_endpoint_should_return_ok_when_disabled(self):
-            """Test if the worker status endpoint will trigger
-            _handle__worker_status_request and send a worker status
-            response back to the host
-            """
-            os.environ['WEBSITE_PING_METRICS_SCALE_ENABLED'] = '0'
-            root_url = self.webhost._addr
-            health_check_url = f'{root_url}/admin/host/ping'
-            r = requests.post(health_check_url,
-                              params={'checkHealth': '1'})
-            self.assertTrue(r.ok)
-
-        @utils.retryable_test(3, 5)
-        def test_get_endpoint_should_accept_path_param(self):
-            r = self.webhost.request('GET', 'get_path_param/1', no_prefix=True)
-            self.assertTrue(r.ok)
-            self.assertEqual(r.text, "hello 1")
-
-        @utils.retryable_test(3, 5)
-        def test_post_json_body_and_return_json_response(self):
-            test_data = {
-                "name": "apple",
-                "description": "yummy"
-            }
-            r = self.webhost.request('POST', 'post_json_return_json_response',
-                                     json=test_data,
-                                     no_prefix=True)
-            self.assertTrue(r.ok)
-            self.assertEqual(r.json(), test_data)
-
-        @utils.retryable_test(3, 5)
-        def test_raise_exception_should_return_not_found(self):
-            r = self.webhost.request('GET', 'raise_http_exception',
-                                     no_prefix=True)
-            self.assertEqual(r.status_code, 404)
-            self.assertEqual(r.json(), {"detail": "Item not found"})
-
-
-class TestAsgiHttpFunctions(
-        ThirdPartyHttpFunctionsTestBase.TestThirdPartyHttpFunctions):
-    @classmethod
-    def get_script_dir(cls):
-        return E2E_TESTS_ROOT / 'third_party_http_functions' / 'stein' / \
-            'asgi_function'
-
-
-class TestWsgiHttpFunctions(
-        ThirdPartyHttpFunctionsTestBase.TestThirdPartyHttpFunctions):
-    @classmethod
-    def get_script_dir(cls):
-        return E2E_TESTS_ROOT / 'third_party_http_functions' / 'stein' / \
-            'wsgi_function'
diff --git a/tests/endtoend/test_threadpool_thread_count_functions.py b/tests/endtoend/test_threadpool_thread_count_functions.py
deleted file mode 100644
index 2388aae9c..000000000
--- a/tests/endtoend/test_threadpool_thread_count_functions.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import os
-from datetime import datetime
-from threading import Thread
-
-from tests.utils import testutils
-
-
-class TestPythonThreadpoolThreadCount(testutils.WebHostTestCase):
-    """ Test the Http Trigger with the Python threadpool thread count set
-    to 2. This test checks that both requests are processed at the same
-    time. This file is more focused on testing the E2E flow scenarios.
-    """
-
-    @classmethod
-    def setUpClass(cls):
-        os.environ["PYTHON_THREADPOOL_THREAD_COUNT"] = "2"
-
-        super().setUpClass()
-
-    def tearDown(self):
-        os.environ.pop('PYTHON_THREADPOOL_THREAD_COUNT')
-
-        super().tearDown()
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'http_functions'
-
-    @testutils.retryable_test(4, 5)
-    def test_http_func_with_thread_count(self):
-        response = [None, None]
-
-        def http_req(res_num):
-            r = self.webhost.request('GET', 'http_func')
-            self.assertTrue(r.ok)
-            response[res_num] = datetime.strptime(
-                r.content.decode("utf-8"), "%H:%M:%S")
-
-        # creating 2 different threads to send the HTTP requests
-        thread1 = Thread(target=http_req, args=(0,))
-        thread2 = Thread(target=http_req, args=(1,))
-        thread1.start()
-        thread2.start()
-        thread1.join()
-        thread2.join()
-        """The function execution time difference between both HTTP requests
-        should be less than 1 second, since both requests should be processed
-        at the same time because PYTHON_THREADPOOL_THREAD_COUNT is 2.
-        """
-        time_diff_in_seconds = abs((response[0] - response[1]).total_seconds())
-        self.assertTrue(time_diff_in_seconds < 1)
-
-
-class TestPythonThreadpoolThreadCountStein(TestPythonThreadpoolThreadCount):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'http_functions' / \
-            'http_functions_stein'
diff --git a/tests/endtoend/test_timer_functions.py b/tests/endtoend/test_timer_functions.py
deleted file mode 100644
index 7923637bd..000000000
--- a/tests/endtoend/test_timer_functions.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import time
-import typing
-
-from tests.utils import testutils
-
-REQUEST_TIMEOUT_SEC = 5
-
-
-class TestTimerFunctions(testutils.WebHostTestCase):
-    """Test the Timer in the local webhost.
-
-    This test class will spawn a webhost from your <project_root>/build/webhost
-    folder and replace the built-in Python with azure_functions_worker from
-    your code base. Since the Timer Trigger is natively supported by the host,
-    we don't need to set up any external resources.
-
-    Compared to the unittests/test_timer_functions.py, this file is more
-    focused on testing the E2E flow scenarios.
-    """
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'timer_functions'
-
-    def test_timer(self):
-        time.sleep(1)
-        # Checking webhost status.
-        r = self.webhost.request('GET', '', no_prefix=True,
-                                 timeout=REQUEST_TIMEOUT_SEC)
-        self.assertTrue(r.ok)
-
-    def check_log_timer(self, host_out: typing.List[str]):
-        self.assertEqual(host_out.count("This timer trigger function executed "
-                                        "successfully"), 1)
-
-
-class TestTimerFunctionsStein(TestTimerFunctions):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'timer_functions' / \
-            'timer_functions_stein'
diff --git a/tests/endtoend/test_warmup_functions.py b/tests/endtoend/test_warmup_functions.py
deleted file mode 100644
index b33eee26f..000000000
--- a/tests/endtoend/test_warmup_functions.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-import typing
-from unittest import skipIf
-
-from tests.utils import testutils
-from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST
-
-from azure_functions_worker.utils.common import is_envvar_true
-
-
-@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)
-        or is_envvar_true(CONSUMPTION_DOCKER_TEST),
-        "Docker tests cannot call admin functions")
-class TestWarmupFunctions(testutils.WebHostTestCase):
-    """Test the Warmup Trigger in the local webhost.
-
-    This test class will spawn a webhost from your <project_root>/build/webhost
-    folder and replace the built-in Python with azure_functions_worker from
-    your code base. This test is more focused on testing the E2E scenario for
-    the warmup trigger function.
-
-    """
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'warmup_functions'
-
-    def test_warmup(self):
-        r = self.webhost.request('GET', 'admin/warmup', no_prefix=True)
-
-        self.assertTrue(r.ok)
-
-    def check_log_warmup(self, host_out: typing.List[str]):
-        self.assertEqual(host_out.count("Function App instance is warm"), 1)
-
-
-@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)
-        or is_envvar_true(CONSUMPTION_DOCKER_TEST),
-        "Docker tests cannot call admin functions")
-class TestWarmupFunctionsStein(TestWarmupFunctions):
-
-    @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'warmup_functions' / \
-            'warmup_functions_stein'
diff --git a/tests/endtoend/test_worker_process_count_functions.py b/tests/endtoend/test_worker_process_count_functions.py
deleted file mode 100644
index 44abcd2e2..000000000
--- a/tests/endtoend/test_worker_process_count_functions.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
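# [Editor's note] How the warmup test above reaches its trigger (a hedged
# sketch; the host address and port are illustrative). The warmup endpoint
# lives under /admin rather than the usual /api prefix, hence no_prefix=True:
import requests

r = requests.get("http://localhost:7071/admin/warmup", timeout=5)
assert r.ok  # the host should log "Function App instance is warm" exactly once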
-import os - -from datetime import datetime -from threading import Thread -from unittest import skipIf - -from tests.utils import testutils -from azure_functions_worker.utils.common import is_envvar_true -from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Tests are flaky when running on Docker") -class TestWorkerProcessCount(testutils.WebHostTestCase): - """Test the Http Trigger with setting up the python worker process count - to 2. this test will check if both requests should be processed at the - same time. this file is more focused on testing the E2E flow scenario for - FUNCTIONS_WORKER_PROCESS_COUNT feature. - """ - @classmethod - def setUpClass(cls): - cls.env_variables['PYTHON_THREADPOOL_THREAD_COUNT'] = '1' - cls.env_variables['FUNCTIONS_WORKER_PROCESS_COUNT'] = '2' - - os.environ["PYTHON_THREADPOOL_THREAD_COUNT"] = "1" - os.environ["FUNCTIONS_WORKER_PROCESS_COUNT"] = "2" - - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.pop('PYTHON_THREADPOOL_THREAD_COUNT') - os.environ.pop('FUNCTIONS_WORKER_PROCESS_COUNT') - - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - @testutils.retryable_test(4, 5) - def test_http_func_with_worker_process_count_2(self): - response = [None, None] - - def http_req(res_num): - r = self.webhost.request('GET', 'http_func') - self.assertTrue(r.ok) - response[res_num] = datetime.strptime( - r.content.decode("utf-8"), "%H:%M:%S") - - # creating 2 different threads to send HTTP request - thread1 = Thread(target=http_req, args=(0,)) - thread2 = Thread(target=http_req, args=(1,)) - thread1.start() - thread2.start() - thread1.join() - thread2.join() - '''function execution time difference between both HTTP request - should be less than 1 since both request should be processed at the - same time because FUNCTIONS_WORKER_PROCESS_COUNT is 2. 
- ''' - time_diff_in_seconds = abs((response[0] - response[1]).total_seconds()) - self.assertTrue(time_diff_in_seconds < 1) - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Tests are flaky when running on Docker") -class TestWorkerProcessCountStein(TestWorkerProcessCount): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'http_functions' /\ - 'http_functions_stein' - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Tests are flaky when running on Docker") -class TestWorkerProcessCountWithBlueprintStein(TestWorkerProcessCount): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' /\ - 'functions_in_blueprint_only' - - -@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Tests are flaky when running on Docker") -class TestWorkerProcessCountWithBlueprintDiffDirStein(TestWorkerProcessCount): - @classmethod - def get_script_dir(cls): - return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' /\ - 'blueprint_different_dir' diff --git a/tests/endtoend/third_party_http_functions/stein/asgi_function/function_app.py b/tests/endtoend/third_party_http_functions/stein/asgi_function/function_app.py deleted file mode 100644 index 391d036c0..000000000 --- a/tests/endtoend/third_party_http_functions/stein/asgi_function/function_app.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Optional - -import azure.functions as func -from fastapi import Body, FastAPI, HTTPException, Response -from pydantic import BaseModel - -fast_app = FastAPI() - - -class Fruit(BaseModel): - name: str - description: Optional[str] = None - - -@fast_app.get("/get_query_param") -async def get_query_param(name: str = "world"): - return Response(content=f"hello {name}", media_type="text/plain") - - -@fast_app.post("/post_str") -async def post_str(person: str = Body(...)): - return Response(content=f"hello {person}", media_type="text/plain") - - -@fast_app.post("/post_json_return_json_response") -async def post_json_return_json_response(fruit: Fruit): - return fruit - - -@fast_app.get("/get_path_param/{id}") -async def get_path_param(id): - return Response(content=f"hello {id}", media_type="text/plain") - - -@fast_app.get("/raise_http_exception") -async def raise_http_exception(): - raise HTTPException(status_code=404, detail="Item not found") - - -app = func.AsgiFunctionApp(app=fast_app, - http_auth_level=func.AuthLevel.ANONYMOUS) diff --git a/tests/endtoend/third_party_http_functions/stein/wsgi_function/function_app.py b/tests/endtoend/third_party_http_functions/stein/wsgi_function/function_app.py deleted file mode 100644 index 264a67a03..000000000 --- a/tests/endtoend/third_party_http_functions/stein/wsgi_function/function_app.py +++ /dev/null @@ -1,36 +0,0 @@ -import azure.functions as func -from flask import Flask, request - -flask_app = Flask(__name__) - - -@flask_app.get("/get_query_param") -def get_query_param(): - name = request.args.get("name") - if name is None: - name = "world" - return f"hello {name}" - - -@flask_app.post("/post_str") -def post_str(): - return f"hello {request.data.decode()}" - - -@flask_app.post("/post_json_return_json_response") -def post_json_return_json_response(): - return request.get_json() - - -@flask_app.get("/get_path_param/") -def get_path_param(id): - return f"hello {id}" - - -@flask_app.get("/raise_http_exception") -def raise_http_exception(): - return {"detail": "Item 
not found"}, 404 - - -app = func.WsgiFunctionApp(app=flask_app.wsgi_app, - http_auth_level=func.AuthLevel.ANONYMOUS) diff --git a/tests/endtoend/timer_functions/timer_func/__init__.py b/tests/endtoend/timer_functions/timer_func/__init__.py deleted file mode 100644 index 5cdd1a102..000000000 --- a/tests/endtoend/timer_functions/timer_func/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - - -def main(mytimer: func.TimerRequest) -> None: - logging.info("This timer trigger function executed successfully") diff --git a/tests/endtoend/timer_functions/timer_func/function.json b/tests/endtoend/timer_functions/timer_func/function.json deleted file mode 100644 index dba900a90..000000000 --- a/tests/endtoend/timer_functions/timer_func/function.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "name": "mytimer", - "type": "timerTrigger", - "direction": "in", - "schedule": "*/1 * * * * *", - "runOnStartup": false - } - ] -} \ No newline at end of file diff --git a/tests/endtoend/timer_functions/timer_functions_stein/function_app.py b/tests/endtoend/timer_functions/timer_functions_stein/function_app.py deleted file mode 100644 index 25937d316..000000000 --- a/tests/endtoend/timer_functions/timer_functions_stein/function_app.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="mytimer") -@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer", - run_on_startup=False, - use_monitor=False) -def mytimer(mytimer: func.TimerRequest) -> None: - logging.info("This timer trigger function executed successfully") diff --git a/tests/endtoend/warmup_functions/warmup/__init__.py b/tests/endtoend/warmup_functions/warmup/__init__.py deleted file mode 100644 index 0d186eab6..000000000 --- a/tests/endtoend/warmup_functions/warmup/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - - -def main(warmupContext) -> None: - logging.info('Function App instance is warm') diff --git a/tests/endtoend/warmup_functions/warmup/function.json b/tests/endtoend/warmup_functions/warmup/function.json deleted file mode 100644 index 04c3f9d07..000000000 --- a/tests/endtoend/warmup_functions/warmup/function.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "bindings": [ - { - "type": "warmupTrigger", - "direction": "in", - "name": "warmupContext" - } - ] -} \ No newline at end of file diff --git a/tests/endtoend/warmup_functions/warmup_functions_stein/function_app.py b/tests/endtoend/warmup_functions/warmup_functions_stein/function_app.py deleted file mode 100644 index 83968cc4d..000000000 --- a/tests/endtoend/warmup_functions/warmup_functions_stein/function_app.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import logging - -import azure.functions as func - -app = func.FunctionApp() - - -@app.warm_up_trigger('warmup') -def warmup(warmup) -> None: - logging.info('Function App instance is warm') diff --git a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py deleted file mode 100644 index 075d8a78a..000000000 --- a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func -import azurefunctions.extensions.bindings.blob as blob - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="put_bc_trigger") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-blobclient-trigger.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_bc_trigger") -def put_bc_trigger(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="bc_blob_trigger") -@app.blob_trigger(arg_name="client", - path="python-worker-tests/test-blobclient-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -def bc_blob_trigger(client: blob.BlobClient) -> str: - blob_properties = client.get_blob_properties() - file = client.download_blob(encoding='utf-8').readall() - return json.dumps({ - 'name': blob_properties.name, - 'length': blob_properties.size, - 'content': file - }) - - -@app.function_name(name="get_bc_blob_triggered") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="get_bc_blob_triggered") -def get_bc_blob_triggered(req: func.HttpRequest, - client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() - - -@app.function_name(name="put_cc_trigger") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-containerclient-trigger.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_cc_trigger") -def put_cc_trigger(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="cc_blob_trigger") -@app.blob_trigger(arg_name="client", - path="python-worker-tests/test-containerclient-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-containerclient-triggered.txt", - connection="AzureWebJobsStorage") -def cc_blob_trigger(client: blob.ContainerClient) -> str: - container_properties = client.get_container_properties() - file = client.download_blob("test-containerclient-trigger.txt", - encoding='utf-8').readall() - return json.dumps({ - 'name': container_properties.name, - 'content': file - }) - - -@app.function_name(name="get_cc_blob_triggered") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-containerclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="get_cc_blob_triggered") -def get_cc_blob_triggered(req: func.HttpRequest, - client: blob.ContainerClient) -> str: - return client.download_blob("test-containerclient-triggered.txt", - encoding='utf-8').readall() - - -@app.function_name(name="put_ssd_trigger") 
-@app.blob_output(arg_name="file", - path="python-worker-tests/test-ssd-trigger.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_ssd_trigger") -def put_ssd_trigger(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="ssd_blob_trigger") -@app.blob_trigger(arg_name="stream", - path="python-worker-tests/test-ssd-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-ssd-triggered.txt", - connection="AzureWebJobsStorage") -def ssd_blob_trigger(stream: blob.StorageStreamDownloader) -> str: - # testing chunking - file = "" - for chunk in stream.chunks(): - file += chunk.decode("utf-8") - return json.dumps({ - 'content': file - }) - - -@app.function_name(name="get_ssd_blob_triggered") -@app.blob_input(arg_name="stream", - path="python-worker-tests/test-ssd-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="get_ssd_blob_triggered") -def get_ssd_blob_triggered(req: func.HttpRequest, - stream: blob.StorageStreamDownloader) -> str: - return stream.readall().decode('utf-8') - - -@app.function_name(name="get_bc_bytes") -@app.route(route="get_bc_bytes") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blob-extension-bytes.txt", - connection="AzureWebJobsStorage") -def get_bc_bytes(req: func.HttpRequest, client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() - - -@app.function_name(name="get_cc_bytes") -@app.route(route="get_cc_bytes") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blob-extension-bytes.txt", - connection="AzureWebJobsStorage") -def get_cc_bytes(req: func.HttpRequest, - client: blob.ContainerClient) -> str: - return client.download_blob("test-blob-extension-bytes.txt", - encoding='utf-8').readall() - - -@app.function_name(name="get_ssd_bytes") -@app.route(route="get_ssd_bytes") -@app.blob_input(arg_name="stream", - path="python-worker-tests/test-blob-extension-bytes.txt", - connection="AzureWebJobsStorage") -def get_ssd_bytes(req: func.HttpRequest, - stream: blob.StorageStreamDownloader) -> str: - return stream.readall().decode('utf-8') - - -@app.function_name(name="get_bc_str") -@app.route(route="get_bc_str") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blob-extension-str.txt", - connection="AzureWebJobsStorage") -def get_bc_str(req: func.HttpRequest, client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() - - -@app.function_name(name="get_cc_str") -@app.route(route="get_cc_str") -@app.blob_input(arg_name="client", - path="python-worker-tests", - connection="AzureWebJobsStorage") -def get_cc_str(req: func.HttpRequest, client: blob.ContainerClient) -> str: - return client.download_blob("test-blob-extension-str.txt", - encoding='utf-8').readall() - - -@app.function_name(name="get_ssd_str") -@app.route(route="get_ssd_str") -@app.blob_input(arg_name="stream", - path="python-worker-tests/test-blob-extension-str.txt", - connection="AzureWebJobsStorage") -def get_ssd_str(req: func.HttpRequest, stream: blob.StorageStreamDownloader) -> str: - return stream.readall().decode('utf-8') - - -@app.function_name(name="bc_and_inputstream_input") -@app.route(route="bc_and_inputstream_input") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blob-extension-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -@app.blob_input(arg_name="blob", - 
path="python-worker-tests/test-blob-extension-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -def bc_and_inputstream_input(req: func.HttpRequest, client: blob.BlobClient, - blob: func.InputStream) -> str: - output_msg = "" - file = blob.read().decode('utf-8') - client_file = client.download_blob(encoding='utf-8').readall() - output_msg = file + " - input stream " + client_file + " - blob client" - return output_msg - - -@app.function_name(name="inputstream_and_bc_input") -@app.route(route="inputstream_and_bc_input") -@app.blob_input(arg_name="blob", - path="python-worker-tests/test-blob-extension-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blob-extension-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -def inputstream_and_bc_input(req: func.HttpRequest, blob: func.InputStream, - client: blob.BlobClient) -> str: - output_msg = "" - file = blob.read().decode('utf-8') - client_file = client.download_blob(encoding='utf-8').readall() - output_msg = file + " - input stream " + client_file + " - blob client" - return output_msg - - -@app.function_name(name="type_undefined") -@app.route(route="type_undefined") -@app.blob_input(arg_name="file", - path="python-worker-tests/test-blob-extension-str.txt", - data_type="STRING", - connection="AzureWebJobsStorage") -def type_undefined(req: func.HttpRequest, file) -> str: - assert not isinstance(file, blob.BlobClient) - assert not isinstance(file, blob.ContainerClient) - assert not isinstance(file, blob.StorageStreamDownloader) - return file.read().decode('utf-8') - - -@app.function_name(name="put_blob_str") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-blob-extension-str.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_str") -def put_blob_str(req: func.HttpRequest, file: func.Out[str]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="put_blob_bytes") -@app.blob_output(arg_name="file", - path="python-worker-tests/test-blob-extension-bytes.txt", - connection="AzureWebJobsStorage") -@app.route(route="put_blob_bytes") -def put_blob_bytes(req: func.HttpRequest, file: func.Out[bytes]) -> str: - file.set(req.get_body()) - return 'OK' - - -@app.function_name(name="blob_cache") -@app.blob_input(arg_name="cachedClient", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="blob_cache") -def blob_cache(req: func.HttpRequest, - cachedClient: blob.BlobClient) -> str: - return func.HttpResponse(repr(cachedClient)) - - -@app.function_name(name="blob_cache2") -@app.blob_input(arg_name="cachedClient", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="blob_cache2") -def blob_cache2(req: func.HttpRequest, - cachedClient: blob.BlobClient) -> func.HttpResponse: - return func.HttpResponse(repr(cachedClient)) - - -@app.function_name(name="blob_cache3") -@app.blob_input(arg_name="cachedClient", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.blob_input(arg_name="cachedClient2", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="blob_cache3") -def blob_cache3(req: func.HttpRequest, - cachedClient: blob.BlobClient, - cachedClient2: blob.BlobClient) -> func.HttpResponse: - return func.HttpResponse("Client 1: " + repr(cachedClient) - + " | 
Client 2: " + repr(cachedClient2)) - - -@app.function_name(name="invalid_connection_info") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="NotARealConnectionString") -@app.route(route="invalid_connection_info") -def invalid_connection_info(req: func.HttpRequest, - client: blob.BlobClient) -> func.HttpResponse: - return func.HttpResponse(repr(client)) diff --git a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_disabled/function_app.py b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_disabled/function_app.py deleted file mode 100644 index 2af0d9c20..000000000 --- a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_disabled/function_app.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="blob_trigger_only") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-blob-triggered.txt", - connection="AzureWebJobsStorage") -def blob_trigger_only(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) diff --git a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled/function_app.py b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled/function_app.py deleted file mode 100644 index 8613ea467..000000000 --- a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled/function_app.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func -import azurefunctions.extensions.bindings.blob as blob - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="blob_input_only") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="blob_input_only") -def blob_input_only(req: func.HttpRequest, - client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() diff --git a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled_dual/function_app.py b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled_dual/function_app.py deleted file mode 100644 index a22d8b7b7..000000000 --- a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_functions/deferred_bindings_enabled_dual/function_app.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json - -import azure.functions as func -import azurefunctions.extensions.bindings.blob as blob - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="get_bc_blob_triggered_dual") -@app.blob_input(arg_name="client", - path="python-worker-tests/test-blobclient-triggered.txt", - connection="AzureWebJobsStorage") -@app.route(route="get_bc_blob_triggered_dual") -def get_bc_blob_triggered_dual(req: func.HttpRequest, - client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() - - -@app.function_name(name="blob_trigger_dual") -@app.blob_trigger(arg_name="file", - path="python-worker-tests/test-blob-trigger.txt", - connection="AzureWebJobsStorage") -@app.blob_output(arg_name="$return", - path="python-worker-tests/test-blob-triggered.txt", - connection="AzureWebJobsStorage") -def blob_trigger_dual(file: func.InputStream) -> str: - return json.dumps({ - 'name': file.name, - 'length': file.length, - 'content': file.read().decode('utf-8') - }) diff --git a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py deleted file mode 100644 index 1899f9e75..000000000 --- a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import sys -import unittest - -import azure.functions as func -from tests.utils import testutils - -from azure_functions_worker import protos -from azure_functions_worker.bindings import datumdef, meta - -# Even if the tests are skipped for <=3.8, the library is still imported as -# it is used for these tests. -if sys.version_info.minor >= 9: - from azurefunctions.extensions.bindings.blob import (BlobClient, - BlobClientConverter, - ContainerClient, - StorageStreamDownloader) - -DEFERRED_BINDINGS_ENABLED_DIR = testutils.EXTENSION_TESTS_FOLDER / \ - 'deferred_bindings_tests' / \ - 'deferred_bindings_functions' / \ - 'deferred_bindings_enabled' -DEFERRED_BINDINGS_DISABLED_DIR = testutils.EXTENSION_TESTS_FOLDER / \ - 'deferred_bindings_tests' / \ - 'deferred_bindings_functions' / \ - 'deferred_bindings_disabled' -DEFERRED_BINDINGS_ENABLED_DUAL_DIR = testutils.EXTENSION_TESTS_FOLDER / \ - 'deferred_bindings_tests' / \ - 'deferred_bindings_functions' / \ - 'deferred_bindings_enabled_dual' - - -class MockMBD: - def __init__(self, version: str, source: str, - content_type: str, content: str): - self.version = version - self.source = source - self.content_type = content_type - self.content = content - - -@unittest.skipIf(sys.version_info.minor <= 8, "The base extension" - "is only supported for 3.9+.") -class TestDeferredBindingsEnabled(testutils.AsyncTestCase): - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_enabled_metadata(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_ENABLED_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - del sys.modules['function_app'] - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_enabled_log(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_ENABLED_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - enabled_log_present = False - for log in 
r.logs: - message = log.message - if "Deferred bindings enabled: True" in message: - enabled_log_present = True - break - self.assertTrue(enabled_log_present) - del sys.modules['function_app'] - - -@unittest.skipIf(sys.version_info.minor <= 8, "The base extension" - "is only supported for 3.9+.") -class TestDeferredBindingsDisabled(testutils.AsyncTestCase): - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_disabled_metadata(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_DISABLED_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - del sys.modules['function_app'] - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_disabled_log(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_DISABLED_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - disabled_log_present = False - for log in r.logs: - message = log.message - if "Deferred bindings enabled: False" in message: - disabled_log_present = True - break - self.assertTrue(disabled_log_present) - del sys.modules['function_app'] - - -@unittest.skipIf(sys.version_info.minor <= 8, "The base extension" - "is only supported for 3.9+.") -class TestDeferredBindingsEnabledDual(testutils.AsyncTestCase): - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_dual_metadata(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_ENABLED_DUAL_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - del sys.modules['function_app'] - - @testutils.retryable_test(3, 5) - async def test_deferred_bindings_dual_enabled_log(self): - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_ENABLED_DUAL_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - enabled_log_present = False - for log in r.logs: - message = log.message - if "Deferred bindings enabled: True" in message: - enabled_log_present = True - break - self.assertTrue(enabled_log_present) - del sys.modules['function_app'] - - -@unittest.skipIf(sys.version_info.minor <= 8, "The base extension" - "is only supported for 3.9+.") -class TestDeferredBindingsHelpers(testutils.AsyncTestCase): - - def test_deferred_bindings_enabled_decode(self): - binding = BlobClientConverter - pb = protos.ParameterBinding(name='test', - data=protos.TypedData( - string='test')) - sample_mbd = MockMBD(version="1.0", - source="AzureStorageBlobs", - content_type="application/json", - content="{\"Connection\":\"AzureWebJobsStorage\"," - "\"ContainerName\":" - "\"python-worker-tests\"," - "\"BlobName\":" - "\"test-blobclient-trigger.txt\"}") - datum = datumdef.Datum(value=sample_mbd, type='model_binding_data') - - obj = meta.deferred_bindings_decode(binding=binding, pb=pb, - pytype=BlobClient, datum=datum, metadata={}, - function_name="test_function") - - self.assertIsNotNone(obj) - - async def test_check_deferred_bindings_enabled(self): - """ - check_deferred_bindings_enabled checks if deferred bindings is enabled at fx - and single binding level. - - The first bool represents if deferred bindings is enabled at a fx level. 
This - means that at least one binding in the function is a deferred binding type. - - The second represents if the current binding is deferred binding. If this is - True, then deferred bindings must also be enabled at the function level. - """ - async with testutils.start_mockhost( - script_root=DEFERRED_BINDINGS_ENABLED_DIR) as host: - await host.init_worker() - - # Type is not supported, deferred_bindings_enabled is not yet set - self.assertEqual(meta.check_deferred_bindings_enabled( - func.InputStream, False), (False, False)) - - # Type is not supported, deferred_bindings_enabled already set - self.assertEqual(meta.check_deferred_bindings_enabled( - func.InputStream, True), (True, False)) - - # Type is supported, deferred_bindings_enabled is not yet set - self.assertEqual(meta.check_deferred_bindings_enabled( - BlobClient, False), (True, True)) - self.assertEqual(meta.check_deferred_bindings_enabled( - ContainerClient, False), (True, True)) - self.assertEqual(meta.check_deferred_bindings_enabled( - StorageStreamDownloader, False), (True, True)) - - # Type is supported, deferred_bindings_enabled is already set - self.assertEqual(meta.check_deferred_bindings_enabled( - BlobClient, True), (True, True)) - self.assertEqual(meta.check_deferred_bindings_enabled( - ContainerClient, True), (True, True)) - self.assertEqual(meta.check_deferred_bindings_enabled( - StorageStreamDownloader, True), (True, True)) diff --git a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py deleted file mode 100644 index ed441a077..000000000 --- a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py +++ /dev/null @@ -1,232 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import sys -import time -import unittest - -from tests.utils import testutils - - -@unittest.skipIf(sys.version_info.minor <= 8, "The base extension" - "is only supported for 3.9+.") -class TestDeferredBindingsBlobFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.EXTENSION_TESTS_FOLDER / 'deferred_bindings_tests' / \ - 'deferred_bindings_blob_functions' - - @classmethod - def get_libraries_to_install(cls): - return ['azurefunctions-extensions-bindings-blob'] - - def test_blob_str(self): - r = self.webhost.request('POST', 'put_blob_str', data='test-data') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - time.sleep(5) - - r = self.webhost.request('GET', 'get_bc_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - r = self.webhost.request('GET', 'get_cc_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - r = self.webhost.request('GET', 'get_ssd_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - def test_blob_bytes(self): - r = self.webhost.request('POST', 'put_blob_bytes', - data='test-dată'.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - time.sleep(5) - - r = self.webhost.request('POST', 'get_bc_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-dată') - - r = self.webhost.request('POST', 'get_cc_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-dată') - - r = self.webhost.request('POST', 'get_ssd_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-dată') - - def test_bc_blob_trigger(self): - data = "DummyData" - - r = self.webhost.request('POST', 'put_bc_trigger', - data=data.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - # Blob trigger may be processed after some delay - # We check it every 2 seconds to allow the trigger to be fired - max_retries = 10 - for try_no in range(max_retries): - time.sleep(5) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'get_bc_blob_triggered') - self.assertEqual(r.status_code, 200) - response = r.json() - - self.assertEqual(response['name'], - 'test-blobclient-trigger.txt') - self.assertEqual(response['content'], data) - - break - except AssertionError: - if try_no == max_retries - 1: - raise - - def test_cc_blob_trigger(self): - data = "DummyData" - - r = self.webhost.request('POST', 'put_cc_trigger', - data=data.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - # Blob trigger may be processed after some delay - # We check it every 2 seconds to allow the trigger to be fired - max_retries = 10 - for try_no in range(max_retries): - time.sleep(5) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'get_cc_blob_triggered') - self.assertEqual(r.status_code, 200) - response = r.json() - - self.assertEqual(response['name'], - 'python-worker-tests') - self.assertEqual(response['content'], data) - - break - except AssertionError: - if try_no == max_retries - 1: - raise - - def test_ssd_blob_trigger(self): - data = "DummyData" - - r = self.webhost.request('POST', 'put_ssd_trigger', - data=data.encode('utf-8')) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - # Blob trigger may be processed after some delay - # We check it every 2 seconds to allow the trigger to be fired - 
max_retries = 10 - for try_no in range(max_retries): - time.sleep(5) - - try: - # Check that the trigger has fired - r = self.webhost.request('GET', 'get_ssd_blob_triggered') - self.assertEqual(r.status_code, 200) - response = r.json() - - self.assertEqual(response['content'], data) - - break - except AssertionError: - if try_no == max_retries - 1: - raise - - def test_bc_and_inputstream_input(self): - r = self.webhost.request('POST', 'put_blob_str', data='test-data') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('GET', 'bc_and_inputstream_input') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data - input stream test-data - blob client') - - def test_inputstream_and_bc_input(self): - r = self.webhost.request('POST', 'put_blob_str', data='test-data') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('GET', 'inputstream_and_bc_input') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data - input stream test-data - blob client') - - def test_type_undefined(self): - r = self.webhost.request('POST', 'put_blob_str', data='test-data') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - r = self.webhost.request('GET', 'type_undefined') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'test-data') - - def test_caching(self): - ''' - The cache returns the same type based on resource and function name. - Two different functions with clients that access the same resource - will have two different clients. This tests that the same client - is returned for each invocation and that the clients are different - between the two functions. - ''' - - r = self.webhost.request('GET', 'blob_cache') - r2 = self.webhost.request('GET', 'blob_cache2') - self.assertEqual(r.status_code, 200) - self.assertEqual(r2.status_code, 200) - client = r.text - client2 = r2.text - self.assertNotEqual(client, client2) - - r = self.webhost.request('GET', 'blob_cache') - r2 = self.webhost.request('GET', 'blob_cache2') - self.assertEqual(r.status_code, 200) - self.assertEqual(r2.status_code, 200) - self.assertEqual(r.text, client) - self.assertEqual(r2.text, client2) - self.assertNotEqual(r.text, r2.text) - - r = self.webhost.request('GET', 'blob_cache') - r2 = self.webhost.request('GET', 'blob_cache2') - self.assertEqual(r.status_code, 200) - self.assertEqual(r2.status_code, 200) - self.assertEqual(r.text, client) - self.assertEqual(r2.text, client2) - self.assertNotEqual(r.text, r2.text) - - def test_caching_same_resource(self): - ''' - The cache returns the same type based on param name. - One functions with two clients that access the same resource - will have two different clients. This tests that the same clients - are returned for each invocation and that the clients are different - between the two bindings. 
- ''' - - r = self.webhost.request('GET', 'blob_cache3') - self.assertEqual(r.status_code, 200) - clients = r.text.split(" | ") - self.assertNotEqual(clients[0], clients[1]) - - r2 = self.webhost.request('GET', 'blob_cache3') - self.assertEqual(r2.status_code, 200) - clients_second_call = r2.text.split(" | ") - self.assertEqual(clients[0], clients_second_call[0]) - self.assertEqual(clients[1], clients_second_call[1]) - self.assertNotEqual(clients_second_call[0], clients_second_call[1]) - - def test_failed_client_creation(self): - r = self.webhost.request('GET', 'invalid_connection_info') - # Without the http_v2_enabled default definition, this request would time out. - # Instead, it fails immediately - self.assertEqual(r.status_code, 500) diff --git a/tests/extension_tests/http_v2_tests/test_http_v2.py b/tests/extension_tests/http_v2_tests/test_http_v2.py deleted file mode 100644 index 8c1d5b48e..000000000 --- a/tests/extension_tests/http_v2_tests/test_http_v2.py +++ /dev/null @@ -1,194 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import concurrent -import os -import sys -import unittest -from concurrent.futures import ThreadPoolExecutor - -import requests -from tests.utils import testutils -from azure_functions_worker.utils.common import is_envvar_true -from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST - -from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING - -REQUEST_TIMEOUT_SEC = 5 - - -@unittest.skipIf(is_envvar_true(DEDICATED_DOCKER_TEST) - or is_envvar_true(CONSUMPTION_DOCKER_TEST), - "Tests are flaky when running on Docker") -@unittest.skipIf(sys.version_info.minor < 8, "HTTPv2" - "is only supported for 3.8+.") -class TestHttpFunctionsWithInitIndexing(testutils.WebHostTestCase): - @classmethod - def setUpClass(cls): - cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1' - os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.pop(PYTHON_ENABLE_INIT_INDEXING) - super().tearDownClass() - - @classmethod - def get_environment_variables(cls): - return cls.env_variables - - @classmethod - def get_script_dir(cls): - return testutils.EXTENSION_TESTS_FOLDER / 'http_v2_tests' / \ - 'http_functions_v2' / \ - 'fastapi' - - @classmethod - def get_libraries_to_install(cls): - return ['azurefunctions-extensions-http-fastapi', 'orjson', 'ujson'] - - @testutils.retryable_test(3, 5) - def test_return_streaming(self): - """Test if the return_streaming function returns a streaming - response""" - root_url = self.webhost._addr - streaming_url = f'{root_url}/api/return_streaming' - r = requests.get( - streaming_url, timeout=REQUEST_TIMEOUT_SEC, stream=True) - self.assertTrue(r.ok) - # Validate streaming content - expected_content = [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n'] - received_content = [] - for chunk in r.iter_content(chunk_size=5): - if chunk: - received_content.append(chunk) - self.assertEqual(received_content, expected_content) - - @testutils.retryable_test(3, 5) - def test_return_streaming_concurrently(self): - """Test if the return_streaming function returns a streaming - response concurrently""" - root_url = self.webhost._addr - streaming_url = f'{root_url}/return_streaming' - - # Function to make a streaming request and validate content - def make_request(): - r = requests.get(streaming_url, timeout=REQUEST_TIMEOUT_SEC, - stream=True) - self.assertTrue(r.ok) - expected_content = [b"First chunk\n", 
b"Second chunk\n"] - received_content = [] - for chunk in r.iter_content(chunk_size=1024): - if chunk: - received_content.append(chunk) - self.assertEqual(received_content, expected_content) - - # Make concurrent requests - with ThreadPoolExecutor(max_workers=2) as executor: - executor.map(make_request, range(2)) - - @testutils.retryable_test(3, 5) - def test_return_html(self): - """Test if the return_html function returns an HTML response""" - root_url = self.webhost._addr - html_url = f'{root_url}/api/return_html' - r = requests.get(html_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], - 'text/html; charset=utf-8') - # Validate HTML content - expected_html = "
<html><body><h1>Hello, World!</h1></body></html>
" - self.assertEqual(r.text, expected_html) - - @testutils.retryable_test(3, 5) - def test_return_ujson(self): - """Test if the return_ujson function returns a UJSON response""" - root_url = self.webhost._addr - ujson_url = f'{root_url}/api/return_ujson' - r = requests.get(ujson_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], 'application/json') - self.assertEqual(r.text, '{"message":"Hello, World!"}') - - @testutils.retryable_test(3, 5) - def test_return_orjson(self): - """Test if the return_orjson function returns an ORJSON response""" - root_url = self.webhost._addr - orjson_url = f'{root_url}/api/return_orjson' - r = requests.get(orjson_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], 'application/json') - self.assertEqual(r.text, '{"message":"Hello, World!"}') - - @testutils.retryable_test(3, 5) - def test_return_file(self): - """Test if the return_file function returns a file response""" - root_url = self.webhost._addr - file_url = f'{root_url}/api/return_file' - r = requests.get(file_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertIn('@app.route(route="default_template")', r.text) - - @testutils.retryable_test(3, 5) - def test_upload_data_stream(self): - """Test if the upload_data_stream function receives streaming data - and returns the complete data""" - root_url = self.webhost._addr - upload_url = f'{root_url}/api/upload_data_stream' - - # Define the streaming data - data_chunks = [b"First chunk\n", b"Second chunk\n"] - - # Define a function to simulate streaming by reading from an - # iterator - def stream_data(data_chunks): - for chunk in data_chunks: - yield chunk - - # Send a POST request with streaming data - r = requests.post(upload_url, data=stream_data(data_chunks)) - - # Assert that the request was successful - self.assertTrue(r.ok) - - # Assert that the response content matches the concatenation of - # all data chunks - complete_data = b"".join(data_chunks) - self.assertEqual(r.content, complete_data) - - @testutils.retryable_test(3, 5) - def test_upload_data_stream_concurrently(self): - """Test if the upload_data_stream function receives streaming data - and returns the complete data""" - root_url = self.webhost._addr - upload_url = f'{root_url}/api/upload_data_stream' - - # Define the streaming data - data_chunks = [b"First chunk\n", b"Second chunk\n"] - - # Define a function to simulate streaming by reading from an - # iterator - def stream_data(data_chunks): - for chunk in data_chunks: - yield chunk - - # Define the number of concurrent requests - num_requests = 5 - - # Define a function to send a single request - def send_request(): - r = requests.post(upload_url, data=stream_data(data_chunks)) - return r.ok, r.content - - # Send multiple requests concurrently - with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [executor.submit(send_request) for _ in - range(num_requests)] - - # Assert that all requests were successful and the response - # contents are correct - for future in concurrent.futures.as_completed(futures): - ok, content = future.result() - self.assertTrue(ok) - complete_data = b"".join(data_chunks) - self.assertEqual(content, complete_data) diff --git a/tests/protos/FunctionRpc_pb2.py b/tests/protos/FunctionRpc_pb2.py new file mode 100644 index 000000000..df996ff4f --- /dev/null +++ b/tests/protos/FunctionRpc_pb2.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: FunctionRpc.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from .identity import ClaimsIdentityRpc_pb2 as identity_dot_ClaimsIdentityRpc__pb2 +from .shared import NullableTypes_pb2 as shared_dot_NullableTypes__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x46unctionRpc.proto\x12\x19\x41zureFunctionsRpcMessages\x1a\x1egoogle/protobuf/duration.proto\x1a identity/ClaimsIdentityRpc.proto\x1a\x1ashared/NullableTypes.proto\"\x8c\x11\n\x10StreamingMessage\x12\x12\n\nrequest_id\x18\x01 \x01(\t\x12>\n\x0cstart_stream\x18\x14 \x01(\x0b\x32&.AzureFunctionsRpcMessages.StartStreamH\x00\x12K\n\x13worker_init_request\x18\x11 \x01(\x0b\x32,.AzureFunctionsRpcMessages.WorkerInitRequestH\x00\x12M\n\x14worker_init_response\x18\x10 \x01(\x0b\x32-.AzureFunctionsRpcMessages.WorkerInitResponseH\x00\x12\x46\n\x10worker_heartbeat\x18\x0f \x01(\x0b\x32*.AzureFunctionsRpcMessages.WorkerHeartbeatH\x00\x12\x46\n\x10worker_terminate\x18\x0e \x01(\x0b\x32*.AzureFunctionsRpcMessages.WorkerTerminateH\x00\x12O\n\x15worker_status_request\x18\x0c \x01(\x0b\x32..AzureFunctionsRpcMessages.WorkerStatusRequestH\x00\x12Q\n\x16worker_status_response\x18\r \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerStatusResponseH\x00\x12V\n\x19\x66ile_change_event_request\x18\x06 \x01(\x0b\x32\x31.AzureFunctionsRpcMessages.FileChangeEventRequestH\x00\x12Q\n\x16worker_action_response\x18\x07 \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerActionResponseH\x00\x12O\n\x15\x66unction_load_request\x18\x08 \x01(\x0b\x32..AzureFunctionsRpcMessages.FunctionLoadRequestH\x00\x12Q\n\x16\x66unction_load_response\x18\t \x01(\x0b\x32/.AzureFunctionsRpcMessages.FunctionLoadResponseH\x00\x12J\n\x12invocation_request\x18\x04 \x01(\x0b\x32,.AzureFunctionsRpcMessages.InvocationRequestH\x00\x12L\n\x13invocation_response\x18\x05 \x01(\x0b\x32-.AzureFunctionsRpcMessages.InvocationResponseH\x00\x12H\n\x11invocation_cancel\x18\x15 \x01(\x0b\x32+.AzureFunctionsRpcMessages.InvocationCancelH\x00\x12\x34\n\x07rpc_log\x18\x02 \x01(\x0b\x32!.AzureFunctionsRpcMessages.RpcLogH\x00\x12j\n#function_environment_reload_request\x18\x19 \x01(\x0b\x32;.AzureFunctionsRpcMessages.FunctionEnvironmentReloadRequestH\x00\x12l\n$function_environment_reload_response\x18\x1a \x01(\x0b\x32<.AzureFunctionsRpcMessages.FunctionEnvironmentReloadResponseH\x00\x12m\n%close_shared_memory_resources_request\x18\x1b \x01(\x0b\x32<.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesRequestH\x00\x12o\n&close_shared_memory_resources_response\x18\x1c \x01(\x0b\x32=.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesResponseH\x00\x12Y\n\x1a\x66unctions_metadata_request\x18\x1d \x01(\x0b\x32\x33.AzureFunctionsRpcMessages.FunctionsMetadataRequestH\x00\x12Y\n\x1a\x66unction_metadata_response\x18\x1e \x01(\x0b\x32\x33.AzureFunctionsRpcMessages.FunctionMetadataResponseH\x00\x12\x64\n function_load_request_collection\x18\x1f \x01(\x0b\x32\x38.AzureFunctionsRpcMessages.FunctionLoadRequestCollectionH\x00\x12\x66\n!function_load_response_collection\x18 
\x01(\x0b\x32\x39.AzureFunctionsRpcMessages.FunctionLoadResponseCollectionH\x00\x12O\n\x15worker_warmup_request\x18! \x01(\x0b\x32..AzureFunctionsRpcMessages.WorkerWarmupRequestH\x00\x12Q\n\x16worker_warmup_response\x18\" \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerWarmupResponseH\x00\x42\t\n\x07\x63ontent\" \n\x0bStartStream\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xa6\x03\n\x11WorkerInitRequest\x12\x14\n\x0chost_version\x18\x01 \x01(\t\x12T\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32>.AzureFunctionsRpcMessages.WorkerInitRequest.CapabilitiesEntry\x12W\n\x0elog_categories\x18\x03 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerInitRequest.LogCategoriesEntry\x12\x18\n\x10worker_directory\x18\x04 \x01(\t\x12\x1e\n\x16\x66unction_app_directory\x18\x05 \x01(\t\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a]\n\x12LogCategoriesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0e\x32\'.AzureFunctionsRpcMessages.RpcLog.Level:\x02\x38\x01\"\xb5\x02\n\x12WorkerInitResponse\x12\x16\n\x0eworker_version\x18\x01 \x01(\t\x12U\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerInitResponse.CapabilitiesEntry\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12\x42\n\x0fworker_metadata\x18\x04 \x01(\x0b\x32).AzureFunctionsRpcMessages.WorkerMetadata\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x84\x02\n\x0eWorkerMetadata\x12\x14\n\x0cruntime_name\x18\x01 \x01(\t\x12\x17\n\x0fruntime_version\x18\x02 \x01(\t\x12\x16\n\x0eworker_version\x18\x03 \x01(\t\x12\x16\n\x0eworker_bitness\x18\x04 \x01(\t\x12Z\n\x11\x63ustom_properties\x18\x05 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerMetadata.CustomPropertiesEntry\x1a\x37\n\x15\x43ustomPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xfe\x01\n\x0cStatusResult\x12>\n\x06status\x18\x04 \x01(\x0e\x32..AzureFunctionsRpcMessages.StatusResult.Status\x12\x0e\n\x06result\x18\x01 \x01(\t\x12:\n\texception\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\x12/\n\x04logs\x18\x03 \x03(\x0b\x32!.AzureFunctionsRpcMessages.RpcLog\"1\n\x06Status\x12\x0b\n\x07\x46\x61ilure\x10\x00\x12\x0b\n\x07Success\x10\x01\x12\r\n\tCancelled\x10\x02\"\x11\n\x0fWorkerHeartbeat\"B\n\x0fWorkerTerminate\x12/\n\x0cgrace_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\xd1\x01\n\x16\x46ileChangeEventRequest\x12\x44\n\x04type\x18\x01 \x01(\x0e\x32\x36.AzureFunctionsRpcMessages.FileChangeEventRequest.Type\x12\x11\n\tfull_path\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"P\n\x04Type\x12\x0b\n\x07Unknown\x10\x00\x12\x0b\n\x07\x43reated\x10\x01\x12\x0b\n\x07\x44\x65leted\x10\x02\x12\x0b\n\x07\x43hanged\x10\x04\x12\x0b\n\x07Renamed\x10\x08\x12\x07\n\x03\x41ll\x10\x0f\"\x91\x01\n\x14WorkerActionResponse\x12\x46\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x36.AzureFunctionsRpcMessages.WorkerActionResponse.Action\x12\x0e\n\x06reason\x18\x02 \x01(\t\"!\n\x06\x41\x63tion\x12\x0b\n\x07Restart\x10\x00\x12\n\n\x06Reload\x10\x01\"\x15\n\x13WorkerStatusRequest\"\x16\n\x14WorkerStatusResponse\"\xf5\x01\n FunctionEnvironmentReloadRequest\x12t\n\x15\x65nvironment_variables\x18\x01 \x03(\x0b\x32U.AzureFunctionsRpcMessages.FunctionEnvironmentReloadRequest.EnvironmentVariablesEntry\x12\x1e\n\x16\x66unction_app_directory\x18\x02 \x01(\t\x1a;\n\x19\x45nvironmentVariablesEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x02\n!FunctionEnvironmentReloadResponse\x12\x42\n\x0fworker_metadata\x18\x01 \x01(\x0b\x32).AzureFunctionsRpcMessages.WorkerMetadata\x12\x64\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32N.AzureFunctionsRpcMessages.FunctionEnvironmentReloadResponse.CapabilitiesEntry\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"6\n!CloseSharedMemoryResourcesRequest\x12\x11\n\tmap_names\x18\x01 \x03(\t\"\xcb\x01\n\"CloseSharedMemoryResourcesResponse\x12m\n\x11\x63lose_map_results\x18\x01 \x03(\x0b\x32R.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesResponse.CloseMapResultsEntry\x1a\x36\n\x14\x43loseMapResultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"o\n\x1d\x46unctionLoadRequestCollection\x12N\n\x16\x66unction_load_requests\x18\x01 \x03(\x0b\x32..AzureFunctionsRpcMessages.FunctionLoadRequest\"r\n\x1e\x46unctionLoadResponseCollection\x12P\n\x17\x66unction_load_responses\x18\x01 \x03(\x0b\x32/.AzureFunctionsRpcMessages.FunctionLoadResponse\"\x90\x01\n\x13\x46unctionLoadRequest\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\t\x12@\n\x08metadata\x18\x02 \x01(\x0b\x32..AzureFunctionsRpcMessages.RpcFunctionMetadata\x12\"\n\x1amanaged_dependency_enabled\x18\x03 \x01(\x08\"\x86\x01\n\x14\x46unctionLoadResponse\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\t\x12\x37\n\x06result\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12 \n\x18is_dependency_downloaded\x18\x03 \x01(\x08\"\xff\x04\n\x13RpcFunctionMetadata\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x11\n\tdirectory\x18\x01 \x01(\t\x12\x13\n\x0bscript_file\x18\x02 \x01(\t\x12\x13\n\x0b\x65ntry_point\x18\x03 \x01(\t\x12N\n\x08\x62indings\x18\x06 \x03(\x0b\x32<.AzureFunctionsRpcMessages.RpcFunctionMetadata.BindingsEntry\x12\x10\n\x08is_proxy\x18\x07 \x01(\x08\x12\x37\n\x06status\x18\x08 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12\x10\n\x08language\x18\t \x01(\t\x12\x14\n\x0craw_bindings\x18\n \x03(\t\x12\x13\n\x0b\x66unction_id\x18\r \x01(\t\x12\"\n\x1amanaged_dependency_enabled\x18\x0e \x01(\x08\x12\x41\n\rretry_options\x18\x0f \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcRetryOptions\x12R\n\nproperties\x18\x10 \x03(\x0b\x32>.AzureFunctionsRpcMessages.RpcFunctionMetadata.PropertiesEntry\x1aW\n\rBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.AzureFunctionsRpcMessages.BindingInfo:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\":\n\x18\x46unctionsMetadataRequest\x12\x1e\n\x16\x66unction_app_directory\x18\x01 \x01(\t\"\xcd\x01\n\x18\x46unctionMetadataResponse\x12Q\n\x19\x66unction_metadata_results\x18\x01 \x03(\x0b\x32..AzureFunctionsRpcMessages.RpcFunctionMetadata\x12\x37\n\x06result\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12%\n\x1duse_default_metadata_indexing\x18\x03 \x01(\x08\"\xbe\x03\n\x11InvocationRequest\x12\x15\n\rinvocation_id\x18\x01 \x01(\t\x12\x13\n\x0b\x66unction_id\x18\x02 \x01(\t\x12?\n\ninput_data\x18\x03 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ParameterBinding\x12[\n\x10trigger_metadata\x18\x04 \x03(\x0b\x32\x41.AzureFunctionsRpcMessages.InvocationRequest.TriggerMetadataEntry\x12\x41\n\rtrace_context\x18\x05 \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcTraceContext\x12>\n\rretry_context\x18\x06 
\x01(\x0b\x32\'.AzureFunctionsRpcMessages.RetryContext\x1a\\\n\x14TriggerMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData:\x02\x38\x01\"\xbf\x01\n\x0fRpcTraceContext\x12\x14\n\x0ctrace_parent\x18\x01 \x01(\t\x12\x13\n\x0btrace_state\x18\x02 \x01(\t\x12N\n\nattributes\x18\x03 \x03(\x0b\x32:.AzureFunctionsRpcMessages.RpcTraceContext.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"x\n\x0cRetryContext\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x17\n\x0fmax_retry_count\x18\x02 \x01(\x05\x12:\n\texception\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\"Z\n\x10InvocationCancel\x12\x15\n\rinvocation_id\x18\x02 \x01(\t\x12/\n\x0cgrace_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\xe2\x01\n\x12InvocationResponse\x12\x15\n\rinvocation_id\x18\x01 \x01(\t\x12@\n\x0boutput_data\x18\x02 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ParameterBinding\x12:\n\x0creturn_value\x18\x04 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\"/\n\x13WorkerWarmupRequest\x12\x18\n\x10worker_directory\x18\x01 \x01(\t\"O\n\x14WorkerWarmupResponse\x12\x37\n\x06result\x18\x01 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\"\xfe\x04\n\tTypedData\x12\x10\n\x06string\x18\x01 \x01(\tH\x00\x12\x0e\n\x04json\x18\x02 \x01(\tH\x00\x12\x0f\n\x05\x62ytes\x18\x03 \x01(\x0cH\x00\x12\x10\n\x06stream\x18\x04 \x01(\x0cH\x00\x12\x32\n\x04http\x18\x05 \x01(\x0b\x32\".AzureFunctionsRpcMessages.RpcHttpH\x00\x12\r\n\x03int\x18\x06 \x01(\x12H\x00\x12\x10\n\x06\x64ouble\x18\x07 \x01(\x01H\x00\x12\x46\n\x10\x63ollection_bytes\x18\x08 \x01(\x0b\x32*.AzureFunctionsRpcMessages.CollectionBytesH\x00\x12H\n\x11\x63ollection_string\x18\t \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionStringH\x00\x12H\n\x11\x63ollection_double\x18\n \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionDoubleH\x00\x12H\n\x11\x63ollection_sint64\x18\x0b \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionSInt64H\x00\x12I\n\x12model_binding_data\x18\x0c \x01(\x0b\x32+.AzureFunctionsRpcMessages.ModelBindingDataH\x00\x12^\n\x1d\x63ollection_model_binding_data\x18\r \x01(\x0b\x32\x35.AzureFunctionsRpcMessages.CollectionModelBindingDataH\x00\x42\x06\n\x04\x64\x61ta\"t\n\x0fRpcSharedMemory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\r\n\x05\x63ount\x18\x03 \x01(\x03\x12\x34\n\x04type\x18\x04 \x01(\x0e\x32&.AzureFunctionsRpcMessages.RpcDataType\"\"\n\x10\x43ollectionString\x12\x0e\n\x06string\x18\x01 \x03(\t\" \n\x0f\x43ollectionBytes\x12\r\n\x05\x62ytes\x18\x01 \x03(\x0c\"\"\n\x10\x43ollectionDouble\x12\x0e\n\x06\x64ouble\x18\x01 \x03(\x01\"\"\n\x10\x43ollectionSInt64\x12\x0e\n\x06sint64\x18\x01 \x03(\x12\"\xab\x01\n\x10ParameterBinding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedDataH\x00\x12G\n\x11rpc_shared_memory\x18\x03 \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcSharedMemoryH\x00\x42\n\n\x08rpc_data\"\x8b\x03\n\x0b\x42indingInfo\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x43\n\tdirection\x18\x03 \x01(\x0e\x32\x30.AzureFunctionsRpcMessages.BindingInfo.Direction\x12\x42\n\tdata_type\x18\x04 \x01(\x0e\x32/.AzureFunctionsRpcMessages.BindingInfo.DataType\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.AzureFunctionsRpcMessages.BindingInfo.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\'\n\tDirection\x12\x06\n\x02in\x10\x00\x12\x07\n\x03out\x10\x01\x12\t\n\x05inout\x10\x02\"=\n\x08\x44\x61taType\x12\r\n\tundefined\x10\x00\x12\n\n\x06string\x10\x01\x12\n\n\x06\x62inary\x10\x02\x12\n\n\x06stream\x10\x03\"\xe7\x04\n\x06RpcLog\x12\x15\n\rinvocation_id\x18\x01 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x02 \x01(\t\x12\x36\n\x05level\x18\x03 \x01(\x0e\x32\'.AzureFunctionsRpcMessages.RpcLog.Level\x12\x0f\n\x07message\x18\x04 \x01(\t\x12\x10\n\x08\x65vent_id\x18\x05 \x01(\t\x12:\n\texception\x18\x06 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\x12\x12\n\nproperties\x18\x07 \x01(\t\x12\x46\n\x0clog_category\x18\x08 \x01(\x0e\x32\x30.AzureFunctionsRpcMessages.RpcLog.RpcLogCategory\x12K\n\rpropertiesMap\x18\t \x03(\x0b\x32\x34.AzureFunctionsRpcMessages.RpcLog.PropertiesMapEntry\x1aZ\n\x12PropertiesMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData:\x02\x38\x01\"^\n\x05Level\x12\t\n\x05Trace\x10\x00\x12\t\n\x05\x44\x65\x62ug\x10\x01\x12\x0f\n\x0bInformation\x10\x02\x12\x0b\n\x07Warning\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x0c\n\x08\x43ritical\x10\x05\x12\x08\n\x04None\x10\x06\"8\n\x0eRpcLogCategory\x12\x08\n\x04User\x10\x00\x12\n\n\x06System\x10\x01\x12\x10\n\x0c\x43ustomMetric\x10\x02\"m\n\x0cRpcException\x12\x0e\n\x06source\x18\x03 \x01(\t\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x19\n\x11is_user_exception\x18\x04 \x01(\x08\x12\x0c\n\x04type\x18\x05 \x01(\t\"\xf7\x02\n\rRpcHttpCookie\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x1f\n\x06\x64omain\x18\x03 \x01(\x0b\x32\x0f.NullableString\x12\x1d\n\x04path\x18\x04 \x01(\x0b\x32\x0f.NullableString\x12#\n\x07\x65xpires\x18\x05 \x01(\x0b\x32\x12.NullableTimestamp\x12\x1d\n\x06secure\x18\x06 \x01(\x0b\x32\r.NullableBool\x12 \n\thttp_only\x18\x07 \x01(\x0b\x32\r.NullableBool\x12\x44\n\tsame_site\x18\x08 \x01(\x0e\x32\x31.AzureFunctionsRpcMessages.RpcHttpCookie.SameSite\x12 \n\x07max_age\x18\t \x01(\x0b\x32\x0f.NullableDouble\";\n\x08SameSite\x12\x08\n\x04None\x10\x00\x12\x07\n\x03Lax\x10\x01\x12\n\n\x06Strict\x10\x02\x12\x10\n\x0c\x45xplicitNone\x10\x03\"\xc5\x08\n\x07RpcHttp\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.AzureFunctionsRpcMessages.RpcHttp.HeadersEntry\x12\x32\n\x04\x62ody\x18\x04 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12>\n\x06params\x18\n \x03(\x0b\x32..AzureFunctionsRpcMessages.RpcHttp.ParamsEntry\x12\x13\n\x0bstatus_code\x18\x0c \x01(\t\x12<\n\x05query\x18\x0f \x03(\x0b\x32-.AzureFunctionsRpcMessages.RpcHttp.QueryEntry\x12\"\n\x1a\x65nable_content_negotiation\x18\x10 \x01(\x08\x12\x35\n\x07rawBody\x18\x11 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12&\n\nidentities\x18\x12 \x03(\x0b\x32\x12.RpcClaimsIdentity\x12\x39\n\x07\x63ookies\x18\x13 \x03(\x0b\x32(.AzureFunctionsRpcMessages.RpcHttpCookie\x12Q\n\x10nullable_headers\x18\x14 \x03(\x0b\x32\x37.AzureFunctionsRpcMessages.RpcHttp.NullableHeadersEntry\x12O\n\x0fnullable_params\x18\x15 \x03(\x0b\x32\x36.AzureFunctionsRpcMessages.RpcHttp.NullableParamsEntry\x12M\n\x0enullable_query\x18\x16 \x03(\x0b\x32\x35.AzureFunctionsRpcMessages.RpcHttp.NullableQueryEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x1a,\n\nQueryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aG\n\x14NullableHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\x1a\x46\n\x13NullableParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\x1a\x45\n\x12NullableQueryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\"Z\n\x10ModelBindingData\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x0e\n\x06source\x18\x02 \x01(\t\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\"e\n\x1a\x43ollectionModelBindingData\x12G\n\x12model_binding_data\x18\x01 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ModelBindingData\"\xd4\x02\n\x0fRpcRetryOptions\x12\x17\n\x0fmax_retry_count\x18\x02 \x01(\x05\x12\x31\n\x0e\x64\x65lay_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x33\n\x10minimum_interval\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x33\n\x10maximum_interval\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12P\n\x0eretry_strategy\x18\x06 \x01(\x0e\x32\x38.AzureFunctionsRpcMessages.RpcRetryOptions.RetryStrategy\"9\n\rRetryStrategy\x12\x17\n\x13\x65xponential_backoff\x10\x00\x12\x0f\n\x0b\x66ixed_delay\x10\x01*\xc1\x01\n\x0bRpcDataType\x12\x0b\n\x07unknown\x10\x00\x12\n\n\x06string\x10\x01\x12\x08\n\x04json\x10\x02\x12\t\n\x05\x62ytes\x10\x03\x12\n\n\x06stream\x10\x04\x12\x08\n\x04http\x10\x05\x12\x07\n\x03int\x10\x06\x12\n\n\x06\x64ouble\x10\x07\x12\x14\n\x10\x63ollection_bytes\x10\x08\x12\x15\n\x11\x63ollection_string\x10\t\x12\x15\n\x11\x63ollection_double\x10\n\x12\x15\n\x11\x63ollection_sint64\x10\x0b\x32|\n\x0b\x46unctionRpc\x12m\n\x0b\x45ventStream\x12+.AzureFunctionsRpcMessages.StreamingMessage\x1a+.AzureFunctionsRpcMessages.StreamingMessage\"\x00(\x01\x30\x01\x42\xa5\x01\n*com.microsoft.azure.functions.rpc.messagesB\rFunctionProtoP\x01Z7github.com/Azure/azure-functions-go-worker/internal/rpc\xaa\x02,Microsoft.Azure.WebJobs.Script.Grpc.Messagesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'FunctionRpc_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messagesB\rFunctionProtoP\001Z7github.com/Azure/azure-functions-go-worker/internal/rpc\252\002,Microsoft.Azure.WebJobs.Script.Grpc.Messages' + _WORKERINITREQUEST_CAPABILITIESENTRY._options = None + _WORKERINITREQUEST_CAPABILITIESENTRY._serialized_options = b'8\001' + _WORKERINITREQUEST_LOGCATEGORIESENTRY._options = None + _WORKERINITREQUEST_LOGCATEGORIESENTRY._serialized_options = b'8\001' + _WORKERINITRESPONSE_CAPABILITIESENTRY._options = None + _WORKERINITRESPONSE_CAPABILITIESENTRY._serialized_options = b'8\001' + _WORKERMETADATA_CUSTOMPROPERTIESENTRY._options = None + _WORKERMETADATA_CUSTOMPROPERTIESENTRY._serialized_options = b'8\001' + _FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY._options = None + _FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY._serialized_options = b'8\001' + _FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY._options = None + _FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY._serialized_options = b'8\001' + _CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY._options = None + 
_CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY._serialized_options = b'8\001' + _RPCFUNCTIONMETADATA_BINDINGSENTRY._options = None + _RPCFUNCTIONMETADATA_BINDINGSENTRY._serialized_options = b'8\001' + _RPCFUNCTIONMETADATA_PROPERTIESENTRY._options = None + _RPCFUNCTIONMETADATA_PROPERTIESENTRY._serialized_options = b'8\001' + _INVOCATIONREQUEST_TRIGGERMETADATAENTRY._options = None + _INVOCATIONREQUEST_TRIGGERMETADATAENTRY._serialized_options = b'8\001' + _RPCTRACECONTEXT_ATTRIBUTESENTRY._options = None + _RPCTRACECONTEXT_ATTRIBUTESENTRY._serialized_options = b'8\001' + _BINDINGINFO_PROPERTIESENTRY._options = None + _BINDINGINFO_PROPERTIESENTRY._serialized_options = b'8\001' + _RPCLOG_PROPERTIESMAPENTRY._options = None + _RPCLOG_PROPERTIESMAPENTRY._serialized_options = b'8\001' + _RPCHTTP_HEADERSENTRY._options = None + _RPCHTTP_HEADERSENTRY._serialized_options = b'8\001' + _RPCHTTP_PARAMSENTRY._options = None + _RPCHTTP_PARAMSENTRY._serialized_options = b'8\001' + _RPCHTTP_QUERYENTRY._options = None + _RPCHTTP_QUERYENTRY._serialized_options = b'8\001' + _RPCHTTP_NULLABLEHEADERSENTRY._options = None + _RPCHTTP_NULLABLEHEADERSENTRY._serialized_options = b'8\001' + _RPCHTTP_NULLABLEPARAMSENTRY._options = None + _RPCHTTP_NULLABLEPARAMSENTRY._serialized_options = b'8\001' + _RPCHTTP_NULLABLEQUERYENTRY._options = None + _RPCHTTP_NULLABLEQUERYENTRY._serialized_options = b'8\001' + _globals['_RPCDATATYPE']._serialized_start=11800 + _globals['_RPCDATATYPE']._serialized_end=11993 + _globals['_STREAMINGMESSAGE']._serialized_start=143 + _globals['_STREAMINGMESSAGE']._serialized_end=2331 + _globals['_STARTSTREAM']._serialized_start=2333 + _globals['_STARTSTREAM']._serialized_end=2365 + _globals['_WORKERINITREQUEST']._serialized_start=2368 + _globals['_WORKERINITREQUEST']._serialized_end=2790 + _globals['_WORKERINITREQUEST_CAPABILITIESENTRY']._serialized_start=2644 + _globals['_WORKERINITREQUEST_CAPABILITIESENTRY']._serialized_end=2695 + _globals['_WORKERINITREQUEST_LOGCATEGORIESENTRY']._serialized_start=2697 + _globals['_WORKERINITREQUEST_LOGCATEGORIESENTRY']._serialized_end=2790 + _globals['_WORKERINITRESPONSE']._serialized_start=2793 + _globals['_WORKERINITRESPONSE']._serialized_end=3102 + _globals['_WORKERINITRESPONSE_CAPABILITIESENTRY']._serialized_start=2644 + _globals['_WORKERINITRESPONSE_CAPABILITIESENTRY']._serialized_end=2695 + _globals['_WORKERMETADATA']._serialized_start=3105 + _globals['_WORKERMETADATA']._serialized_end=3365 + _globals['_WORKERMETADATA_CUSTOMPROPERTIESENTRY']._serialized_start=3310 + _globals['_WORKERMETADATA_CUSTOMPROPERTIESENTRY']._serialized_end=3365 + _globals['_STATUSRESULT']._serialized_start=3368 + _globals['_STATUSRESULT']._serialized_end=3622 + _globals['_STATUSRESULT_STATUS']._serialized_start=3573 + _globals['_STATUSRESULT_STATUS']._serialized_end=3622 + _globals['_WORKERHEARTBEAT']._serialized_start=3624 + _globals['_WORKERHEARTBEAT']._serialized_end=3641 + _globals['_WORKERTERMINATE']._serialized_start=3643 + _globals['_WORKERTERMINATE']._serialized_end=3709 + _globals['_FILECHANGEEVENTREQUEST']._serialized_start=3712 + _globals['_FILECHANGEEVENTREQUEST']._serialized_end=3921 + _globals['_FILECHANGEEVENTREQUEST_TYPE']._serialized_start=3841 + _globals['_FILECHANGEEVENTREQUEST_TYPE']._serialized_end=3921 + _globals['_WORKERACTIONRESPONSE']._serialized_start=3924 + _globals['_WORKERACTIONRESPONSE']._serialized_end=4069 + _globals['_WORKERACTIONRESPONSE_ACTION']._serialized_start=4036 + _globals['_WORKERACTIONRESPONSE_ACTION']._serialized_end=4069 + 
_globals['_WORKERSTATUSREQUEST']._serialized_start=4071 + _globals['_WORKERSTATUSREQUEST']._serialized_end=4092 + _globals['_WORKERSTATUSRESPONSE']._serialized_start=4094 + _globals['_WORKERSTATUSRESPONSE']._serialized_end=4116 + _globals['_FUNCTIONENVIRONMENTRELOADREQUEST']._serialized_start=4119 + _globals['_FUNCTIONENVIRONMENTRELOADREQUEST']._serialized_end=4364 + _globals['_FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY']._serialized_start=4305 + _globals['_FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY']._serialized_end=4364 + _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE']._serialized_start=4367 + _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE']._serialized_end=4682 + _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY']._serialized_start=2644 + _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY']._serialized_end=2695 + _globals['_CLOSESHAREDMEMORYRESOURCESREQUEST']._serialized_start=4684 + _globals['_CLOSESHAREDMEMORYRESOURCESREQUEST']._serialized_end=4738 + _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE']._serialized_start=4741 + _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE']._serialized_end=4944 + _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY']._serialized_start=4890 + _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY']._serialized_end=4944 + _globals['_FUNCTIONLOADREQUESTCOLLECTION']._serialized_start=4946 + _globals['_FUNCTIONLOADREQUESTCOLLECTION']._serialized_end=5057 + _globals['_FUNCTIONLOADRESPONSECOLLECTION']._serialized_start=5059 + _globals['_FUNCTIONLOADRESPONSECOLLECTION']._serialized_end=5173 + _globals['_FUNCTIONLOADREQUEST']._serialized_start=5176 + _globals['_FUNCTIONLOADREQUEST']._serialized_end=5320 + _globals['_FUNCTIONLOADRESPONSE']._serialized_start=5323 + _globals['_FUNCTIONLOADRESPONSE']._serialized_end=5457 + _globals['_RPCFUNCTIONMETADATA']._serialized_start=5460 + _globals['_RPCFUNCTIONMETADATA']._serialized_end=6099 + _globals['_RPCFUNCTIONMETADATA_BINDINGSENTRY']._serialized_start=5961 + _globals['_RPCFUNCTIONMETADATA_BINDINGSENTRY']._serialized_end=6048 + _globals['_RPCFUNCTIONMETADATA_PROPERTIESENTRY']._serialized_start=6050 + _globals['_RPCFUNCTIONMETADATA_PROPERTIESENTRY']._serialized_end=6099 + _globals['_FUNCTIONSMETADATAREQUEST']._serialized_start=6101 + _globals['_FUNCTIONSMETADATAREQUEST']._serialized_end=6159 + _globals['_FUNCTIONMETADATARESPONSE']._serialized_start=6162 + _globals['_FUNCTIONMETADATARESPONSE']._serialized_end=6367 + _globals['_INVOCATIONREQUEST']._serialized_start=6370 + _globals['_INVOCATIONREQUEST']._serialized_end=6816 + _globals['_INVOCATIONREQUEST_TRIGGERMETADATAENTRY']._serialized_start=6724 + _globals['_INVOCATIONREQUEST_TRIGGERMETADATAENTRY']._serialized_end=6816 + _globals['_RPCTRACECONTEXT']._serialized_start=6819 + _globals['_RPCTRACECONTEXT']._serialized_end=7010 + _globals['_RPCTRACECONTEXT_ATTRIBUTESENTRY']._serialized_start=6961 + _globals['_RPCTRACECONTEXT_ATTRIBUTESENTRY']._serialized_end=7010 + _globals['_RETRYCONTEXT']._serialized_start=7012 + _globals['_RETRYCONTEXT']._serialized_end=7132 + _globals['_INVOCATIONCANCEL']._serialized_start=7134 + _globals['_INVOCATIONCANCEL']._serialized_end=7224 + _globals['_INVOCATIONRESPONSE']._serialized_start=7227 + _globals['_INVOCATIONRESPONSE']._serialized_end=7453 + _globals['_WORKERWARMUPREQUEST']._serialized_start=7455 + _globals['_WORKERWARMUPREQUEST']._serialized_end=7502 + _globals['_WORKERWARMUPRESPONSE']._serialized_start=7504 + 
_globals['_WORKERWARMUPRESPONSE']._serialized_end=7583 + _globals['_TYPEDDATA']._serialized_start=7586 + _globals['_TYPEDDATA']._serialized_end=8224 + _globals['_RPCSHAREDMEMORY']._serialized_start=8226 + _globals['_RPCSHAREDMEMORY']._serialized_end=8342 + _globals['_COLLECTIONSTRING']._serialized_start=8344 + _globals['_COLLECTIONSTRING']._serialized_end=8378 + _globals['_COLLECTIONBYTES']._serialized_start=8380 + _globals['_COLLECTIONBYTES']._serialized_end=8412 + _globals['_COLLECTIONDOUBLE']._serialized_start=8414 + _globals['_COLLECTIONDOUBLE']._serialized_end=8448 + _globals['_COLLECTIONSINT64']._serialized_start=8450 + _globals['_COLLECTIONSINT64']._serialized_end=8484 + _globals['_PARAMETERBINDING']._serialized_start=8487 + _globals['_PARAMETERBINDING']._serialized_end=8658 + _globals['_BINDINGINFO']._serialized_start=8661 + _globals['_BINDINGINFO']._serialized_end=9056 + _globals['_BINDINGINFO_PROPERTIESENTRY']._serialized_start=6050 + _globals['_BINDINGINFO_PROPERTIESENTRY']._serialized_end=6099 + _globals['_BINDINGINFO_DIRECTION']._serialized_start=8954 + _globals['_BINDINGINFO_DIRECTION']._serialized_end=8993 + _globals['_BINDINGINFO_DATATYPE']._serialized_start=8995 + _globals['_BINDINGINFO_DATATYPE']._serialized_end=9056 + _globals['_RPCLOG']._serialized_start=9059 + _globals['_RPCLOG']._serialized_end=9674 + _globals['_RPCLOG_PROPERTIESMAPENTRY']._serialized_start=9430 + _globals['_RPCLOG_PROPERTIESMAPENTRY']._serialized_end=9520 + _globals['_RPCLOG_LEVEL']._serialized_start=9522 + _globals['_RPCLOG_LEVEL']._serialized_end=9616 + _globals['_RPCLOG_RPCLOGCATEGORY']._serialized_start=9618 + _globals['_RPCLOG_RPCLOGCATEGORY']._serialized_end=9674 + _globals['_RPCEXCEPTION']._serialized_start=9676 + _globals['_RPCEXCEPTION']._serialized_end=9785 + _globals['_RPCHTTPCOOKIE']._serialized_start=9788 + _globals['_RPCHTTPCOOKIE']._serialized_end=10163 + _globals['_RPCHTTPCOOKIE_SAMESITE']._serialized_start=10104 + _globals['_RPCHTTPCOOKIE_SAMESITE']._serialized_end=10163 + _globals['_RPCHTTP']._serialized_start=10166 + _globals['_RPCHTTP']._serialized_end=11259 + _globals['_RPCHTTP_HEADERSENTRY']._serialized_start=10904 + _globals['_RPCHTTP_HEADERSENTRY']._serialized_end=10950 + _globals['_RPCHTTP_PARAMSENTRY']._serialized_start=10952 + _globals['_RPCHTTP_PARAMSENTRY']._serialized_end=10997 + _globals['_RPCHTTP_QUERYENTRY']._serialized_start=10999 + _globals['_RPCHTTP_QUERYENTRY']._serialized_end=11043 + _globals['_RPCHTTP_NULLABLEHEADERSENTRY']._serialized_start=11045 + _globals['_RPCHTTP_NULLABLEHEADERSENTRY']._serialized_end=11116 + _globals['_RPCHTTP_NULLABLEPARAMSENTRY']._serialized_start=11118 + _globals['_RPCHTTP_NULLABLEPARAMSENTRY']._serialized_end=11188 + _globals['_RPCHTTP_NULLABLEQUERYENTRY']._serialized_start=11190 + _globals['_RPCHTTP_NULLABLEQUERYENTRY']._serialized_end=11259 + _globals['_MODELBINDINGDATA']._serialized_start=11261 + _globals['_MODELBINDINGDATA']._serialized_end=11351 + _globals['_COLLECTIONMODELBINDINGDATA']._serialized_start=11353 + _globals['_COLLECTIONMODELBINDINGDATA']._serialized_end=11454 + _globals['_RPCRETRYOPTIONS']._serialized_start=11457 + _globals['_RPCRETRYOPTIONS']._serialized_end=11797 + _globals['_RPCRETRYOPTIONS_RETRYSTRATEGY']._serialized_start=11740 + _globals['_RPCRETRYOPTIONS_RETRYSTRATEGY']._serialized_end=11797 + _globals['_FUNCTIONRPC']._serialized_start=11995 + _globals['_FUNCTIONRPC']._serialized_end=12119 +# @@protoc_insertion_point(module_scope) diff --git a/tests/protos/FunctionRpc_pb2_grpc.py 
b/tests/protos/FunctionRpc_pb2_grpc.py new file mode 100644 index 000000000..364658aa9 --- /dev/null +++ b/tests/protos/FunctionRpc_pb2_grpc.py @@ -0,0 +1,69 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import FunctionRpc_pb2 as FunctionRpc__pb2 + + +class FunctionRpcStub(object): + """Interface exported by the server. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.EventStream = channel.stream_stream( + '/AzureFunctionsRpcMessages.FunctionRpc/EventStream', + request_serializer=FunctionRpc__pb2.StreamingMessage.SerializeToString, + response_deserializer=FunctionRpc__pb2.StreamingMessage.FromString, + ) + + +class FunctionRpcServicer(object): + """Interface exported by the server. + """ + + def EventStream(self, request_iterator, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_FunctionRpcServicer_to_server(servicer, server): + rpc_method_handlers = { + 'EventStream': grpc.stream_stream_rpc_method_handler( + servicer.EventStream, + request_deserializer=FunctionRpc__pb2.StreamingMessage.FromString, + response_serializer=FunctionRpc__pb2.StreamingMessage.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'AzureFunctionsRpcMessages.FunctionRpc', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class FunctionRpc(object): + """Interface exported by the server. + """ + + @staticmethod + def EventStream(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream(request_iterator, target, '/AzureFunctionsRpcMessages.FunctionRpc/EventStream', + FunctionRpc__pb2.StreamingMessage.SerializeToString, + FunctionRpc__pb2.StreamingMessage.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/azure_functions_worker/protos/__init__.py b/tests/protos/__init__.py similarity index 100% rename from azure_functions_worker/protos/__init__.py rename to tests/protos/__init__.py diff --git a/tests/protos/identity/ClaimsIdentityRpc_pb2.py b/tests/protos/identity/ClaimsIdentityRpc_pb2.py new file mode 100644 index 000000000..e4a2be477 --- /dev/null +++ b/tests/protos/identity/ClaimsIdentityRpc_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: identity/ClaimsIdentityRpc.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from ..shared import NullableTypes_pb2 as shared_dot_NullableTypes__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n identity/ClaimsIdentityRpc.proto\x1a\x1ashared/NullableTypes.proto\"\xb0\x01\n\x11RpcClaimsIdentity\x12,\n\x13\x61uthentication_type\x18\x01 \x01(\x0b\x32\x0f.NullableString\x12(\n\x0fname_claim_type\x18\x02 \x01(\x0b\x32\x0f.NullableString\x12(\n\x0frole_claim_type\x18\x03 \x01(\x0b\x32\x0f.NullableString\x12\x19\n\x06\x63laims\x18\x04 \x03(\x0b\x32\t.RpcClaim\"\'\n\x08RpcClaim\x12\r\n\x05value\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\tB,\n*com.microsoft.azure.functions.rpc.messagesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'identity.ClaimsIdentityRpc_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messages' + _globals['_RPCCLAIMSIDENTITY']._serialized_start=65 + _globals['_RPCCLAIMSIDENTITY']._serialized_end=241 + _globals['_RPCCLAIM']._serialized_start=243 + _globals['_RPCCLAIM']._serialized_end=282 +# @@protoc_insertion_point(module_scope) diff --git a/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py b/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py new file mode 100644 index 000000000..2daafffeb --- /dev/null +++ b/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/azure_functions_worker/_thirdparty/__init__.py b/tests/protos/identity/__init__.py similarity index 100% rename from azure_functions_worker/_thirdparty/__init__.py rename to tests/protos/identity/__init__.py diff --git a/tests/protos/shared/NullableTypes_pb2.py b/tests/protos/shared/NullableTypes_pb2.py new file mode 100644 index 000000000..0b5b96bf1 --- /dev/null +++ b/tests/protos/shared/NullableTypes_pb2.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: shared/NullableTypes.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ashared/NullableTypes.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"+\n\x0eNullableString\x12\x0f\n\x05value\x18\x01 \x01(\tH\x00\x42\x08\n\x06string\"+\n\x0eNullableDouble\x12\x0f\n\x05value\x18\x01 \x01(\x01H\x00\x42\x08\n\x06\x64ouble\"\'\n\x0cNullableBool\x12\x0f\n\x05value\x18\x01 \x01(\x08H\x00\x42\x06\n\x04\x62ool\"M\n\x11NullableTimestamp\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x0b\n\ttimestampB,\n*com.microsoft.azure.functions.rpc.messagesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shared.NullableTypes_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messages' + _globals['_NULLABLESTRING']._serialized_start=63 + _globals['_NULLABLESTRING']._serialized_end=106 + _globals['_NULLABLEDOUBLE']._serialized_start=108 + _globals['_NULLABLEDOUBLE']._serialized_end=151 + _globals['_NULLABLEBOOL']._serialized_start=153 + _globals['_NULLABLEBOOL']._serialized_end=192 + _globals['_NULLABLETIMESTAMP']._serialized_start=194 + _globals['_NULLABLETIMESTAMP']._serialized_end=271 +# @@protoc_insertion_point(module_scope) diff --git a/tests/protos/shared/NullableTypes_pb2_grpc.py b/tests/protos/shared/NullableTypes_pb2_grpc.py new file mode 100644 index 000000000..2daafffeb --- /dev/null +++ b/tests/protos/shared/NullableTypes_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/azure_functions_worker/protos/identity/__init__.py b/tests/protos/shared/__init__.py similarity index 100% rename from azure_functions_worker/protos/identity/__init__.py rename to tests/protos/shared/__init__.py diff --git a/tests/test_setup.py b/tests/test_setup.py deleted file mode 100644 index fd6f0044e..000000000 --- a/tests/test_setup.py +++ /dev/null @@ -1,304 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -Usage: -This file defines tasks for building Protos, webhost and extensions - -To use these tasks, you can run the following commands: - -1. Build protos: - invoke -c test_setup build-protos - -2. Set up the Azure Functions Web Host: - invoke -c test_setup webhost - -3. 
Install WebJobs extensions:
-   invoke -c test_setup extensions
-"""
-
-import glob
-import json
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import urllib.request
-import zipfile
-from distutils import dir_util
-
-from invoke import task
-
-from utils.constants import EXTENSIONS_CSPROJ_TEMPLATE, NUGET_CONFIG
-
-ROOT_DIR = pathlib.Path(__file__).parent.parent
-BUILD_DIR = ROOT_DIR / 'build'
-WEBHOST_GITHUB_API = "https://api.github.com/repos/Azure/azure-functions-host"
-WEBHOST_GIT_REPO = "https://github.com/Azure/azure-functions-host/archive"
-WEBHOST_TAG_PREFIX = "v4."
-
-
-def get_webhost_version() -> str:
-    # Return the latest matched version (e.g. 4.39.1)
-    github_api_url = f"{WEBHOST_GITHUB_API}/tags?page=1&per_page=10"
-    print(f"Checking latest webhost version from {github_api_url}")
-    github_response = urllib.request.urlopen(github_api_url)
-    tags = json.loads(github_response.read())
-
-    # As tags are returned in descending time order, the latest v4
-    # tag should be the first occurrence that starts with the 'v4.' string
-    latest = [gt for gt in tags if gt["name"].startswith(WEBHOST_TAG_PREFIX)]
-    return latest[0]["name"].replace("v", "")
-
-
-def download_webhost_zip(version, branch):
-    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
-        if branch:
-            zip_url = f"{WEBHOST_GIT_REPO}/refs/heads/{branch}.zip"
-        else:
-            zip_url = f"{WEBHOST_GIT_REPO}/v{version}.zip"
-
-        print(f"Downloading Functions Host from {zip_url}")
-        try:
-            urllib.request.urlretrieve(zip_url, temp_file.name)
-        except Exception as e:
-            print(
-                f"Failed to download Functions Host source code from {zip_url}: {e}",
-                file=sys.stderr)
-            sys.exit(1)
-    return temp_file.name
-
-
-def create_webhost_folder(dest_folder):
-    if dest_folder.exists():
-        shutil.rmtree(dest_folder)
-    os.makedirs(dest_folder, exist_ok=True)
-    print(f"Functions Host folder is created in {dest_folder}")
-
-
-def extract_webhost_zip(version, src_zip, dest):
-    print(f"Extracting Functions Host from {src_zip}")
-    with zipfile.ZipFile(src_zip, 'r') as archive:
-        for archive_name in archive.namelist():
-            prefix = f"azure-functions-host-{version}/"
-            if archive_name.startswith(prefix):
-                sanitized_name = archive_name.replace("\\", os.sep).replace(
-                    prefix, "")
-                dest_filename = dest / sanitized_name
-                zipinfo = archive.getinfo(archive_name)
-                if not dest_filename.parent.exists():
-                    os.makedirs(dest_filename.parent, exist_ok=True)
-                if zipinfo.is_dir():
-                    os.makedirs(dest_filename, exist_ok=True)
-                else:
-                    with archive.open(archive_name) as src, open(dest_filename,
-                                                                 "wb") as dst:
-                        dst.write(src.read())
-    print(f"Functions Host is extracted into {dest}")
-
-
-def chmod_protobuf_generation_script(webhost_dir):
-    script_path = webhost_dir / "src" / "WebJobs.Script.Grpc" / "generate_protos.sh"
-    if sys.platform != "win32" and script_path.exists():
-        print("Change generate_protos.sh script permission")
-        os.chmod(script_path, 0o555)
-
-
-def compile_webhost(webhost_dir):
-    print(f"Compiling Functions Host from {webhost_dir}")
-    try:
-        subprocess.run(
-            ["dotnet", "build", "WebJobs.Script.sln", "-o", "bin",
-             "/p:TreatWarningsAsErrors=false"],
-            check=True,
-            cwd=str(webhost_dir),
-            stdout=sys.stdout,
-            stderr=sys.stderr,
-        )
-    except subprocess.CalledProcessError:
-        print(
-            f"Failed to compile webhost in {webhost_dir}. "
-            ".NET Core SDK is required to build the solution. 
" - "Please visit https://aka.ms/dotnet-download", - file=sys.stderr, - ) - sys.exit(1) - print("Functions Host is compiled successfully") - - -def gen_grpc(): - proto_root_dir = ROOT_DIR / "azure_functions_worker" / "protos" - proto_src_dir = proto_root_dir / "_src" / "src" / "proto" - staging_root_dir = BUILD_DIR / "protos" - staging_dir = staging_root_dir / "azure_functions_worker" / "protos" - built_protos_dir = BUILD_DIR / "built_protos" - - if os.path.exists(BUILD_DIR): - shutil.rmtree(BUILD_DIR) - - shutil.copytree(proto_src_dir, staging_dir) - os.makedirs(built_protos_dir) - - protos = [ - os.sep.join(("shared", "NullableTypes.proto")), - os.sep.join(("identity", "ClaimsIdentityRpc.proto")), - "FunctionRpc.proto", - ] - - for proto in protos: - subprocess.run( - [ - sys.executable, - "-m", - "grpc_tools.protoc", - "-I", - os.sep.join(("azure_functions_worker", "protos")), - "--python_out", - str(built_protos_dir), - "--grpc_python_out", - str(built_protos_dir), - os.sep.join(("azure_functions_worker", "protos", proto)), - ], - check=True, - stdout=sys.stdout, - stderr=sys.stderr, - cwd=staging_root_dir, - ) - - compiled_files = glob.glob( - str(built_protos_dir / "**" / "*.py"), recursive=True - ) - - if not compiled_files: - print("grpc_tools.protoc produced no Python files", file=sys.stderr) - sys.exit(1) - - # Needed to support absolute imports in files. See - # https://github.com/protocolbuffers/protobuf/issues/1491 - make_absolute_imports(compiled_files) - - dir_util.copy_tree(str(built_protos_dir), str(proto_root_dir)) - - -def make_absolute_imports(compiled_files): - for compiled in compiled_files: - with open(compiled, "r+") as f: - content = f.read() - f.seek(0) - # Convert lines of the form: - # import xxx_pb2 as xxx__pb2 to - # from azure_functions_worker.protos import xxx_pb2 as.. - p1 = re.sub( - r"\nimport (.*?_pb2)", - r"\nfrom azure_functions_worker.protos import \g<1>", - content, - ) - # Convert lines of the form: - # from identity import xxx_pb2 as.. to - # from azure_functions_worker.protos.identity import xxx_pb2.. - p2 = re.sub( - r"from ([a-z]*) (import.*_pb2)", - r"from azure_functions_worker.protos.\g<1> \g<2>", - p1, - ) - f.write(p2) - f.truncate() - - -def install_extensions(extensions_dir): - if not extensions_dir.exists(): - os.makedirs(extensions_dir, exist_ok=True) - - if not (extensions_dir / "host.json").exists(): - with open(extensions_dir / "host.json", "w") as f: - f.write("{}") - - if not (extensions_dir / "extensions.csproj").exists(): - with open(extensions_dir / "extensions.csproj", "w") as f: - f.write(EXTENSIONS_CSPROJ_TEMPLATE) - - with open(extensions_dir / "NuGet.config", "w") as f: - f.write(NUGET_CONFIG) - - env = os.environ.copy() - env["TERM"] = "xterm" # ncurses 6.1 workaround - try: - subprocess.run( - args=["dotnet", "build", "-o", "."], - check=True, - cwd=str(extensions_dir), - stdout=sys.stdout, - stderr=sys.stderr, - env=env, - ) - except subprocess.CalledProcessError: - print( - ".NET Core SDK is required to build the extensions. 
" - "Please visit https://aka.ms/dotnet-download" - ) - sys.exit(1) - - -@task -def extensions(c, clean=False, extensions_dir=None): - """Build extensions.""" - extensions_dir = extensions_dir or BUILD_DIR / "extensions" - if clean: - print(f"Deleting Extensions Directory: {extensions_dir}") - shutil.rmtree(extensions_dir, ignore_errors=True) - print("Deleted Extensions Directory") - return - - print("Installing Extensions") - install_extensions(extensions_dir) - print("Extensions installed successfully.") - - -@task -def build_protos(c, clean=False): - """Build gRPC bindings.""" - - if clean: - shutil.rmtree(BUILD_DIR / 'protos') - return - print("Generating gRPC bindings...") - gen_grpc() - print("gRPC bindings generated successfully.") - - -@task -def webhost(c, clean=False, webhost_version=None, webhost_dir=None, - branch_name=None): - """Builds the webhost""" - - if webhost_dir is None: - webhost_dir = BUILD_DIR / "webhost" - else: - webhost_dir = pathlib.Path(webhost_dir) - - if clean: - print("Deleting webhost dir") - shutil.rmtree(webhost_dir, ignore_errors=True) - print("Deleted webhost dir") - return - - if webhost_version is None: - webhost_version = get_webhost_version() - - zip_path = download_webhost_zip(webhost_version, branch_name) - create_webhost_folder(webhost_dir) - version = branch_name or webhost_version - extract_webhost_zip(version.replace("/", "-"), zip_path, webhost_dir) - chmod_protobuf_generation_script(webhost_dir) - compile_webhost(webhost_dir) - - -@task -def clean(c): - """Clean build directory.""" - - print("Deleting build directory") - shutil.rmtree(BUILD_DIR, ignore_errors=True) - print("Deleted build directory") diff --git a/tests/unittests/azure_namespace_import/azure_namespace_import.py b/tests/unittests/azure_namespace_import/azure_namespace_import.py deleted file mode 100644 index a7490cf50..000000000 --- a/tests/unittests/azure_namespace_import/azure_namespace_import.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import asyncio
-import os
-import shutil
-import sys
-
-from azure_functions_worker import protos
-
-from ...utils.testutils import UNIT_TESTS_ROOT, create_dummy_dispatcher
-
-
-async def verify_nested_namespace_import():
-    test_env = {}
-    request = protos.FunctionEnvironmentReloadRequest(
-        environment_variables=test_env)
-
-    request_msg = protos.StreamingMessage(
-        request_id='0',
-        function_environment_reload_request=request)
-
-    disp = create_dummy_dispatcher()
-
-    # Mock the interpreter starting in placeholder mode
-    import azure.module_a as mod_a  # noqa: F401
-
-    # Mock function specialization: load customer's libraries and function apps
-    ns_root = os.path.join(
-        UNIT_TESTS_ROOT,
-        'azure_namespace_import',
-        'namespace_location_b')
-    test_path = os.path.join(ns_root, 'azure', 'namespace_b', 'module_b')
-    test_mod_path = os.path.join(test_path, 'test_module.py')
-
-    os.makedirs(test_path)
-    with open(test_mod_path, 'w') as f:
-        f.write('MESSAGE = "module_b is imported"')
-
-    try:
-        # Mock a customer using test_module
-        if sys.argv[1].lower() == 'true':
-            await disp._handle__function_environment_reload_request(
-                request_msg)
-        from azure.namespace_b.module_b import test_module
-        print(test_module.MESSAGE)
-    except ModuleNotFoundError:
-        print('module_b fails to import')
-    finally:
-        # Cleanup
-        shutil.rmtree(ns_root)
-
-
-if __name__ == '__main__':
-    loop = asyncio.get_event_loop()
-    loop.run_until_complete(verify_nested_namespace_import())
-    loop.close()
diff --git a/tests/unittests/azure_namespace_import/namespace_location_a/azure/module_a/__init__.py b/tests/unittests/azure_namespace_import/namespace_location_a/azure/module_a/__init__.py
deleted file mode 100644
index 30adb862c..000000000
--- a/tests/unittests/azure_namespace_import/namespace_location_a/azure/module_a/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-MESSAGE = "module_a is imported"
diff --git a/tests/unittests/azure_namespace_import/test_azure_namespace_import.sh b/tests/unittests/azure_namespace_import/test_azure_namespace_import.sh
deleted file mode 100644
index b6afe032f..000000000
--- a/tests/unittests/azure_namespace_import/test_azure_namespace_import.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#! /bin/bash
-
-# $1 controls whether we allow module reload ("true" or "false")
-
-SCRIPT_DIR="$(dirname $0)"
-export PYTHONPATH="$SCRIPT_DIR/namespace_location_a:$SCRIPT_DIR/namespace_location_b"
-
-python $SCRIPT_DIR/azure_namespace_import.py $1
-
-unset PYTHONPATH
\ No newline at end of file
diff --git a/tests/endtoend/http_functions/http_functions_stein/file_name/main.py b/tests/unittests/basic_function/function_app.py
similarity index 100%
rename from tests/endtoend/http_functions/http_functions_stein/file_name/main.py
rename to tests/unittests/basic_function/function_app.py
diff --git a/tests/unittests/broken_functions/README.md b/tests/unittests/broken_functions/README.md
deleted file mode 100644
index 9601a892a..000000000
--- a/tests/unittests/broken_functions/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-Functions in this directory are purposefully "broken". They either have
-missing information in `function.json`, or invalid signatures, or even
-syntax errors. They are tested in "test_broken_functions.py".
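
For contrast with the intentionally broken fixtures that follow, a minimal well-formed pair (a sketch, not one of the fixtures in this diff) keeps `function.json` and the Python signature in agreement:

    {
        "scriptFile": "main.py",
        "bindings": [
            {"type": "httpTrigger", "direction": "in", "name": "req"},
            {"type": "http", "direction": "out", "name": "$return"}
        ]
    }

    # main.py
    import azure.functions as func

    def main(req: func.HttpRequest) -> func.HttpResponse:
        # "req" matches the trigger binding name; the return value
        # flows to the "$return" http output binding.
        return func.HttpResponse("OK")
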
diff --git a/tests/unittests/broken_functions/bad_out_annotation/function.json b/tests/unittests/broken_functions/bad_out_annotation/function.json deleted file mode 100644 index 736b93690..000000000 --- a/tests/unittests/broken_functions/bad_out_annotation/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "direction": "out", - "name": "foo", - "type": "int" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/bad_out_annotation/main.py b/tests/unittests/broken_functions/bad_out_annotation/main.py deleted file mode 100644 index 3c8cf73c4..000000000 --- a/tests/unittests/broken_functions/bad_out_annotation/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req, foo: azf.Out): - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/import_error/function.json b/tests/unittests/broken_functions/import_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/import_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/import_error/main.py b/tests/unittests/broken_functions/import_error/main.py deleted file mode 100644 index ade8ed183..000000000 --- a/tests/unittests/broken_functions/import_error/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from sys import __nonexistent # should raise ImportError - - -def main(req): - __nonexistent() diff --git a/tests/unittests/broken_functions/inout_param/function.json b/tests/unittests/broken_functions/inout_param/function.json deleted file mode 100644 index 6f5f71254..000000000 --- a/tests/unittests/broken_functions/inout_param/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "inout", - "name": "abc" - } - ] -} diff --git a/tests/unittests/broken_functions/inout_param/main.py b/tests/unittests/broken_functions/inout_param/main.py deleted file mode 100644 index 2ab233cef..000000000 --- a/tests/unittests/broken_functions/inout_param/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req, abc): - return 'trust me, it is OK!' 
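
The bad_out_annotation fixture above leaves `azf.Out` unparameterized; a loadable version (a sketch, assuming the azure.functions Out[T] generic) types the payload and writes it with set():

    import azure.functions as azf

    def main(req: azf.HttpRequest, foo: azf.Out[int]) -> str:
        # An "out"-direction binding maps to an azf.Out[T] parameter;
        # its value is produced via set(), not the return statement.
        foo.set(42)
        return 'trust me, it is OK!'
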
diff --git a/tests/unittests/broken_functions/invalid_app_stein/function_app.py b/tests/unittests/broken_functions/invalid_app_stein/function_app.py deleted file mode 100644 index 3454a59ed..000000000 --- a/tests/unittests/broken_functions/invalid_app_stein/function_app.py +++ /dev/null @@ -1,5 +0,0 @@ -import azure.functions as func - - -def main(req: func.HttpRequest): - pass diff --git a/tests/unittests/broken_functions/invalid_context_param/function.json b/tests/unittests/broken_functions/invalid_context_param/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/invalid_context_param/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_context_param/main.py b/tests/unittests/broken_functions/invalid_context_param/main.py deleted file mode 100644 index 290c270a7..000000000 --- a/tests/unittests/broken_functions/invalid_context_param/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req, context: int): - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/invalid_datatype/function.json b/tests/unittests/broken_functions/invalid_datatype/function.json deleted file mode 100644 index 247beea27..000000000 --- a/tests/unittests/broken_functions/invalid_datatype/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "dataType" : "string", - "name": "req" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_datatype/main.py b/tests/unittests/broken_functions/invalid_datatype/main.py deleted file mode 100644 index 0fbe6b520..000000000 --- a/tests/unittests/broken_functions/invalid_datatype/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpResponse): - return 'This function should fail!!' diff --git a/tests/unittests/broken_functions/invalid_http_trigger_anno/function.json b/tests/unittests/broken_functions/invalid_http_trigger_anno/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/invalid_http_trigger_anno/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_http_trigger_anno/main.py b/tests/unittests/broken_functions/invalid_http_trigger_anno/main.py deleted file mode 100644 index 6f25e6b41..000000000 --- a/tests/unittests/broken_functions/invalid_http_trigger_anno/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req: int): - return 'trust me, it is OK!' 
diff --git a/tests/unittests/broken_functions/invalid_in_anno/function.json b/tests/unittests/broken_functions/invalid_in_anno/function.json deleted file mode 100644 index da37649e4..000000000 --- a/tests/unittests/broken_functions/invalid_in_anno/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_in_anno/main.py b/tests/unittests/broken_functions/invalid_in_anno/main.py deleted file mode 100644 index fc0ae8ad4..000000000 --- a/tests/unittests/broken_functions/invalid_in_anno/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpResponse): # should be azf.HttpRequest - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/invalid_in_anno_non_type/function.json b/tests/unittests/broken_functions/invalid_in_anno_non_type/function.json deleted file mode 100644 index da37649e4..000000000 --- a/tests/unittests/broken_functions/invalid_in_anno_non_type/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_in_anno_non_type/main.py b/tests/unittests/broken_functions/invalid_in_anno_non_type/main.py deleted file mode 100644 index fa44422a1..000000000 --- a/tests/unittests/broken_functions/invalid_in_anno_non_type/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req: 123): # annotations must be types! - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/invalid_out_anno/function.json b/tests/unittests/broken_functions/invalid_out_anno/function.json deleted file mode 100644 index 0c06cc22f..000000000 --- a/tests/unittests/broken_functions/invalid_out_anno/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "ret" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_out_anno/main.py b/tests/unittests/broken_functions/invalid_out_anno/main.py deleted file mode 100644 index b50a8d536..000000000 --- a/tests/unittests/broken_functions/invalid_out_anno/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req, ret: azf.Out[azf.HttpRequest]): - return 'trust me, it is OK!' 
diff --git a/tests/unittests/broken_functions/invalid_return_anno/function.json b/tests/unittests/broken_functions/invalid_return_anno/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/invalid_return_anno/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_return_anno/main.py b/tests/unittests/broken_functions/invalid_return_anno/main.py deleted file mode 100644 index e15ef70d7..000000000 --- a/tests/unittests/broken_functions/invalid_return_anno/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req) -> int: - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/invalid_return_anno_non_type/function.json b/tests/unittests/broken_functions/invalid_return_anno_non_type/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/invalid_return_anno_non_type/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/invalid_return_anno_non_type/main.py b/tests/unittests/broken_functions/invalid_return_anno_non_type/main.py deleted file mode 100644 index b3fdb6842..000000000 --- a/tests/unittests/broken_functions/invalid_return_anno_non_type/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req) -> 123: - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/invalid_stein/function_app.py b/tests/unittests/broken_functions/invalid_stein/function_app.py deleted file mode 100644 index d6ddd39d9..000000000 --- a/tests/unittests/broken_functions/invalid_stein/function_app.py +++ /dev/null @@ -1,8 +0,0 @@ -import azure.functions as func - -app = func.FunctionApp() - - -@app.route() -def main(): - pass diff --git a/tests/unittests/broken_functions/missing_json_param/function.json b/tests/unittests/broken_functions/missing_json_param/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/missing_json_param/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/missing_json_param/main.py b/tests/unittests/broken_functions/missing_json_param/main.py deleted file mode 100644 index 110dfad1f..000000000 --- a/tests/unittests/broken_functions/missing_json_param/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req, spam): - return 'trust me, it is OK!' 
diff --git a/tests/unittests/broken_functions/missing_module/function.json b/tests/unittests/broken_functions/missing_module/function.json deleted file mode 100644 index 985453fcf..000000000 --- a/tests/unittests/broken_functions/missing_module/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } diff --git a/tests/unittests/broken_functions/missing_module/main.py b/tests/unittests/broken_functions/missing_module/main.py deleted file mode 100644 index 16e9e5d5f..000000000 --- a/tests/unittests/broken_functions/missing_module/main.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions -import does_not_exist # Noqa - -logger = logging.getLogger('my function') - - -def main(req: azure.functions.HttpRequest): - logger.info('Function should fail before hitting main') - return 'OK-async' diff --git a/tests/unittests/broken_functions/missing_py_param/function.json b/tests/unittests/broken_functions/missing_py_param/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/missing_py_param/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/missing_py_param/main.py b/tests/unittests/broken_functions/missing_py_param/main.py deleted file mode 100644 index 7ac88c6dc..000000000 --- a/tests/unittests/broken_functions/missing_py_param/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(): - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/module_not_found_error/function.json b/tests/unittests/broken_functions/module_not_found_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/module_not_found_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/module_not_found_error/main.py b/tests/unittests/broken_functions/module_not_found_error/main.py deleted file mode 100644 index 57f5f134f..000000000 --- a/tests/unittests/broken_functions/module_not_found_error/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-from __nonexistent import foo # should raise ModuleNotFoundError - - -def main(req): - foo() diff --git a/tests/unittests/broken_functions/return_param_in/function.json b/tests/unittests/broken_functions/return_param_in/function.json deleted file mode 100644 index 2d96d3cf5..000000000 --- a/tests/unittests/broken_functions/return_param_in/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "in", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/return_param_in/main.py b/tests/unittests/broken_functions/return_param_in/main.py deleted file mode 100644 index cc865f340..000000000 --- a/tests/unittests/broken_functions/return_param_in/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req): - return 'trust me, it is OK!' diff --git a/tests/unittests/broken_functions/syntax_error/function.json b/tests/unittests/broken_functions/syntax_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/broken_functions/syntax_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/syntax_error/main.py b/tests/unittests/broken_functions/syntax_error/main.py deleted file mode 100644 index 22df71a7a..000000000 --- a/tests/unittests/broken_functions/syntax_error/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req): - 1 / # noqa diff --git a/tests/unittests/broken_functions/wrong_binding_dir/function.json b/tests/unittests/broken_functions/wrong_binding_dir/function.json deleted file mode 100644 index 47ebf1791..000000000 --- a/tests/unittests/broken_functions/wrong_binding_dir/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "direction": "in", - "name": "foo", - "type": "int" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/wrong_binding_dir/main.py b/tests/unittests/broken_functions/wrong_binding_dir/main.py deleted file mode 100644 index ed51e46cf..000000000 --- a/tests/unittests/broken_functions/wrong_binding_dir/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req, foo: azf.Out[str]): - return 'trust me, it is OK!' 
diff --git a/tests/unittests/broken_functions/wrong_param_dir/function.json b/tests/unittests/broken_functions/wrong_param_dir/function.json deleted file mode 100644 index 736b93690..000000000 --- a/tests/unittests/broken_functions/wrong_param_dir/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "direction": "out", - "name": "foo", - "type": "int" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/broken_functions/wrong_param_dir/main.py b/tests/unittests/broken_functions/wrong_param_dir/main.py deleted file mode 100644 index e52d77d34..000000000 --- a/tests/unittests/broken_functions/wrong_param_dir/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req, foo: int): - return 'trust me, it is OK!' diff --git a/tests/unittests/dispatcher_functions/dispatcher_functions_stein/function_app.py b/tests/unittests/dispatcher_functions/dispatcher_functions_stein/function_app.py deleted file mode 100644 index fe9af2d32..000000000 --- a/tests/unittests/dispatcher_functions/dispatcher_functions_stein/function_app.py +++ /dev/null @@ -1,9 +0,0 @@ -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="http_trigger") -def http_trigger(req: func.HttpRequest) -> func.HttpResponse: - - return func.HttpResponse("Hello.") diff --git a/tests/unittests/dispatcher_functions/http_v2/fastapi/function_app.py b/tests/unittests/dispatcher_functions/http_v2/fastapi/function_app.py deleted file mode 100644 index 8a8982349..000000000 --- a/tests/unittests/dispatcher_functions/http_v2/fastapi/function_app.py +++ /dev/null @@ -1,9 +0,0 @@ -import azure.functions as func -from azurefunctions.extensions.http.fastapi import Request, Response - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.route(route="http_trigger") -def http_trigger(req: Request) -> Response: - return Response("ok") diff --git a/tests/unittests/dispatcher_functions/show_context/__init__.py b/tests/unittests/dispatcher_functions/show_context/__init__.py deleted file mode 100644 index 31f9766f9..000000000 --- a/tests/unittests/dispatcher_functions/show_context/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json - -import azure.functions as func - - -def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse: - result = { - 'function_directory': context.function_directory, - 'function_name': context.function_name - } - return func.HttpResponse(body=json.dumps(result), - mimetype='application/json') diff --git a/tests/unittests/dispatcher_functions/show_context/function.json b/tests/unittests/dispatcher_functions/show_context/function.json deleted file mode 100644 index 7239e0fcc..000000000 --- a/tests/unittests/dispatcher_functions/show_context/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/dispatcher_functions/show_context_async/__init__.py b/tests/unittests/dispatcher_functions/show_context_async/__init__.py deleted file mode 100644 index e43e8fa7d..000000000 --- a/tests/unittests/dispatcher_functions/show_context_async/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func - - -async def main(req: func.HttpRequest, - context: func.Context) -> func.HttpResponse: - result = { - 'function_directory': context.function_directory, - 'function_name': context.function_name - } - return func.HttpResponse(body=json.dumps(result), - mimetype='application/json') diff --git a/tests/unittests/dispatcher_functions/show_context_async/function.json b/tests/unittests/dispatcher_functions/show_context_async/function.json deleted file mode 100644 index 7239e0fcc..000000000 --- a/tests/unittests/dispatcher_functions/show_context_async/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/durable_functions/activity_trigger/function.json b/tests/unittests/durable_functions/activity_trigger/function.json deleted file mode 100644 index ebf8bfa62..000000000 --- a/tests/unittests/durable_functions/activity_trigger/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "activityTrigger", - "name": "input", - "direction": "in" - } - ] - } diff --git a/tests/unittests/durable_functions/activity_trigger/main.py b/tests/unittests/durable_functions/activity_trigger/main.py deleted file mode 100644 index b3fee32cc..000000000 --- a/tests/unittests/durable_functions/activity_trigger/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(input: str) -> str: - return input diff --git a/tests/unittests/durable_functions/activity_trigger_dict/function.json b/tests/unittests/durable_functions/activity_trigger_dict/function.json deleted file mode 100644 index cb44d98cc..000000000 --- a/tests/unittests/durable_functions/activity_trigger_dict/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "activityTrigger", - "name": "input", - "direction": "in" - } - ] -} diff --git a/tests/unittests/durable_functions/activity_trigger_dict/main.py b/tests/unittests/durable_functions/activity_trigger_dict/main.py deleted file mode 100644 index 0045c198c..000000000 --- a/tests/unittests/durable_functions/activity_trigger_dict/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from typing import Dict - - -def main(input: Dict[str, str]) -> Dict[str, str]: - result = input.copy() - if result.get('bird'): - result['bird'] = result['bird'][::-1] - - return result diff --git a/tests/unittests/durable_functions/activity_trigger_int_to_float/function.json b/tests/unittests/durable_functions/activity_trigger_int_to_float/function.json deleted file mode 100644 index cb44d98cc..000000000 --- a/tests/unittests/durable_functions/activity_trigger_int_to_float/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "activityTrigger", - "name": "input", - "direction": "in" - } - ] -} diff --git a/tests/unittests/durable_functions/activity_trigger_int_to_float/main.py b/tests/unittests/durable_functions/activity_trigger_int_to_float/main.py deleted file mode 100644 index 4faf3ef8b..000000000 --- a/tests/unittests/durable_functions/activity_trigger_int_to_float/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input: int) -> float: - return float(input) * (-1.1) diff --git a/tests/unittests/durable_functions/activity_trigger_no_anno/function.json b/tests/unittests/durable_functions/activity_trigger_no_anno/function.json deleted file mode 100644 index ebf8bfa62..000000000 --- a/tests/unittests/durable_functions/activity_trigger_no_anno/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "activityTrigger", - "name": "input", - "direction": "in" - } - ] - } diff --git a/tests/unittests/durable_functions/activity_trigger_no_anno/main.py b/tests/unittests/durable_functions/activity_trigger_no_anno/main.py deleted file mode 100644 index 6a7f9c971..000000000 --- a/tests/unittests/durable_functions/activity_trigger_no_anno/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(input): - return input diff --git a/tests/unittests/durable_functions/orchestration_trigger/function.json b/tests/unittests/durable_functions/orchestration_trigger/function.json deleted file mode 100644 index c8ef14a94..000000000 --- a/tests/unittests/durable_functions/orchestration_trigger/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "orchestrationTrigger", - "name": "context", - "direction": "in" - } - ] - } diff --git a/tests/unittests/durable_functions/orchestration_trigger/main.py b/tests/unittests/durable_functions/orchestration_trigger/main.py deleted file mode 100644 index 40b5919c4..000000000 --- a/tests/unittests/durable_functions/orchestration_trigger/main.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -# import azure.durable_functions as df - - -def generator_function(context): - final_result = yield context.call_activity('activity_trigger', 'foobar') - return final_result - - -def main(context): - # orchestrate = df.Orchestrator.create(generator_function) - # result = orchestrate(context) - # return result - return f'{context} :)' diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/__init__.py b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/__init__.py deleted file mode 100644 index eb75a0012..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from typing import List - -import azure.functions as func - - -# This is testing the function load feature for the multiple events annotation -def main(events: List[func.EventHubEvent]) -> str: - return 'OK_MANY' diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/function.json b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/function.json deleted file mode 100644 index 39d2d0059..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "events", - "direction": "in", - "eventHubName": "python-worker-iot-ci", - "connection": "AzureWebJobsEventHubConnectionString", - "cardinality": "many" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-iot-triggered.txt" - } - ] - } diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/__init__.py b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/__init__.py deleted file mode 100644 index ed0f9f118..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-from typing import List - - -# This is testing the function load feature for the multiple events annotation -# The event shouldn't be List[str] -def main(events: List[str]) -> str: - return 'BAD' diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/function.json b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/function.json deleted file mode 100644 index 39d2d0059..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_many_bad_anno/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "events", - "direction": "in", - "eventHubName": "python-worker-iot-ci", - "connection": "AzureWebJobsEventHubConnectionString", - "cardinality": "many" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-iot-triggered.txt" - } - ] - } diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/__init__.py b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/__init__.py deleted file mode 100644 index 0f6852a36..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -# This is testing the function load feature for the single event annotation -def main(event: func.EventHubEvent) -> str: - return 'OK_ONE' diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/function.json b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/function.json deleted file mode 100644 index 4c9ae1e74..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "event", - "direction": "in", - "eventHubName": "python-worker-iot-ci", - "connection": "AzureWebJobsEventHubConnectionString", - "cardinality": "one" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-iot-triggered.txt" - } - ] - } diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/__init__.py b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/__init__.py deleted file mode 100644 index 69b24e476..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -# This is testing the function load feature for the single event annotation -# The event shouldn't be int -def main(event: int) -> str: - return 'BAD' diff --git a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/function.json b/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/function.json deleted file mode 100644 index 4c9ae1e74..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_cardinality_one_bad_anno/function.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "event", - "direction": "in", - "eventHubName": "python-worker-iot-ci", - "connection": "AzureWebJobsEventHubConnectionString", - "cardinality": "one" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-iot-triggered.txt" - } - ] - } diff --git a/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py b/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py deleted file mode 100644 index 918548403..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions as func - - -def main(event: func.EventHubEvent) -> str: - return json.dumps(event.iothub_metadata) diff --git a/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/function.json b/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/function.json deleted file mode 100644 index c14ae132f..000000000 --- a/tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/function.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "scriptFile": "__init__.py", - - "bindings": [ - { - "type": "eventHubTrigger", - "name": "event", - "direction": "in", - "eventHubName": "python-worker-iot-ci", - "connection": "AzureWebJobsEventHubConnectionString" - }, - { - "type": "blob", - "direction": "out", - "name": "$return", - "connection": "AzureWebJobsStorage", - "path": "python-worker-tests/test-eventhub-iot-triggered.txt" - } - ] -} diff --git a/tests/unittests/file_name_functions/default_file_name/function_app.py b/tests/unittests/file_name_functions/default_file_name/function_app.py deleted file mode 100644 index 7eeb55331..000000000 --- a/tests/unittests/file_name_functions/default_file_name/function_app.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import azure.functions as func - -app = func.FunctionApp() - - -@app.route(route="return_str") -def return_str(req: func.HttpRequest) -> str: - return 'Hello World!' diff --git a/tests/unittests/file_name_functions/invalid_file_name/main b/tests/unittests/file_name_functions/invalid_file_name/main deleted file mode 100644 index 7eeb55331..000000000 --- a/tests/unittests/file_name_functions/invalid_file_name/main +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import azure.functions as func - -app = func.FunctionApp() - - -@app.route(route="return_str") -def return_str(req: func.HttpRequest) -> str: - return 'Hello World!' 
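
The eventhub_mock_functions fixtures above were load-time checks: with "cardinality": "many" the worker accepts only a List[func.EventHubEvent] annotation, and with "cardinality": "one" only func.EventHubEvent; the *_bad_anno variants assert that List[str] and int fail to load. A rough decorator-model sketch of the "many" case is below — the event_hub_message_trigger name and parameters are assumptions for illustration, not taken from this diff:

    # Sketch only: decorator-model counterpart of the cardinality-"many"
    # fixture; decorator name and parameters are assumed, not from this diff.
    from typing import List

    import azure.functions as func

    app = func.FunctionApp()


    @app.event_hub_message_trigger(
        arg_name="events",
        event_hub_name="python-worker-iot-ci",
        connection="AzureWebJobsEventHubConnectionString",
        cardinality="many")
    def eventhub_cardinality_many(events: List[func.EventHubEvent]) -> str:
        # Annotating events as List[str] instead should fail function load,
        # mirroring the *_bad_anno fixtures above.
        return 'OK_MANY'
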
diff --git a/tests/unittests/file_name_functions/new_file_name/test.py b/tests/unittests/file_name_functions/new_file_name/test.py deleted file mode 100644 index 7eeb55331..000000000 --- a/tests/unittests/file_name_functions/new_file_name/test.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import azure.functions as func - -app = func.FunctionApp() - - -@app.route(route="return_str") -def return_str(req: func.HttpRequest) -> str: - return 'Hello World!' diff --git a/tests/unittests/generic_functions/foobar_as_bytes/function.json b/tests/unittests/generic_functions/foobar_as_bytes/function.json deleted file mode 100644 index f0117f606..000000000 --- a/tests/unittests/generic_functions/foobar_as_bytes/function.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "binary" - }, - { - "direction": "out", - "name": "$return", - "type": "foobar", - "dataType": "binary" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_as_bytes/main.py b/tests/unittests/generic_functions/foobar_as_bytes/main.py deleted file mode 100644 index e4c9d11a9..000000000 --- a/tests/unittests/generic_functions/foobar_as_bytes/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input: bytes) -> bytes: - return input diff --git a/tests/unittests/generic_functions/foobar_as_bytes_no_anno/function.json b/tests/unittests/generic_functions/foobar_as_bytes_no_anno/function.json deleted file mode 100644 index f0117f606..000000000 --- a/tests/unittests/generic_functions/foobar_as_bytes_no_anno/function.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "binary" - }, - { - "direction": "out", - "name": "$return", - "type": "foobar", - "dataType": "binary" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_as_bytes_no_anno/main.py b/tests/unittests/generic_functions/foobar_as_bytes_no_anno/main.py deleted file mode 100644 index c03a4f0fb..000000000 --- a/tests/unittests/generic_functions/foobar_as_bytes_no_anno/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -# Input as bytes, without annotation - - -def main(input): - return input diff --git a/tests/unittests/generic_functions/foobar_as_none/function.json b/tests/unittests/generic_functions/foobar_as_none/function.json deleted file mode 100644 index 7a458eb7f..000000000 --- a/tests/unittests/generic_functions/foobar_as_none/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "direction": "out", - "name": "$return", - "type": "foobar", - "dataType": "binary" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_as_none/main.py b/tests/unittests/generic_functions/foobar_as_none/main.py deleted file mode 100644 index b7acadcdd..000000000 --- a/tests/unittests/generic_functions/foobar_as_none/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(): - return "hello" diff --git a/tests/unittests/generic_functions/foobar_as_str/function.json b/tests/unittests/generic_functions/foobar_as_str/function.json deleted file mode 100644 index 144593c6a..000000000 --- a/tests/unittests/generic_functions/foobar_as_str/function.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - }, - { - "direction": "out", - "name": "$return", - "type": "foobar", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_as_str/main.py b/tests/unittests/generic_functions/foobar_as_str/main.py deleted file mode 100644 index b3fee32cc..000000000 --- a/tests/unittests/generic_functions/foobar_as_str/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input: str) -> str: - return input diff --git a/tests/unittests/generic_functions/foobar_as_str_no_anno/function.json b/tests/unittests/generic_functions/foobar_as_str_no_anno/function.json deleted file mode 100644 index 144593c6a..000000000 --- a/tests/unittests/generic_functions/foobar_as_str_no_anno/function.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - }, - { - "direction": "out", - "name": "$return", - "type": "foobar", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_as_str_no_anno/main.py b/tests/unittests/generic_functions/foobar_as_str_no_anno/main.py deleted file mode 100644 index 9626e2aef..000000000 --- a/tests/unittests/generic_functions/foobar_as_str_no_anno/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -# Input as string, without annotation - - -def main(input): - return input diff --git a/tests/unittests/generic_functions/foobar_implicit_output/function.json b/tests/unittests/generic_functions/foobar_implicit_output/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_implicit_output/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_implicit_output/main.py b/tests/unittests/generic_functions/foobar_implicit_output/main.py deleted file mode 100644 index 53124993e..000000000 --- a/tests/unittests/generic_functions/foobar_implicit_output/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-# Input as string, without annotation - - -def main(input: str): - return input diff --git a/tests/unittests/generic_functions/foobar_implicit_output_exemption/function.json b/tests/unittests/generic_functions/foobar_implicit_output_exemption/function.json deleted file mode 100644 index 82a015bbb..000000000 --- a/tests/unittests/generic_functions/foobar_implicit_output_exemption/function.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "durableClient", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] - } - \ No newline at end of file diff --git a/tests/unittests/generic_functions/foobar_implicit_output_exemption/main.py b/tests/unittests/generic_functions/foobar_implicit_output_exemption/main.py deleted file mode 100644 index 53124993e..000000000 --- a/tests/unittests/generic_functions/foobar_implicit_output_exemption/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -# Input as string, without annotation - - -def main(input: str): - return input diff --git a/tests/unittests/generic_functions/foobar_nil_data/function.json b/tests/unittests/generic_functions/foobar_nil_data/function.json deleted file mode 100644 index 4cced7c56..000000000 --- a/tests/unittests/generic_functions/foobar_nil_data/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "generic", - "name": "input", - "direction": "in" - } - ] - } - \ No newline at end of file diff --git a/tests/unittests/generic_functions/foobar_nil_data/main.py b/tests/unittests/generic_functions/foobar_nil_data/main.py deleted file mode 100644 index a41823ddc..000000000 --- a/tests/unittests/generic_functions/foobar_nil_data/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - - -def main(input) -> None: - logging.info("Hello World") diff --git a/tests/unittests/generic_functions/foobar_return_bool/function.json b/tests/unittests/generic_functions/foobar_return_bool/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_return_bool/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_return_bool/main.py b/tests/unittests/generic_functions/foobar_return_bool/main.py deleted file mode 100644 index 4fadd2bff..000000000 --- a/tests/unittests/generic_functions/foobar_return_bool/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(input): - return True diff --git a/tests/unittests/generic_functions/foobar_return_dict/function.json b/tests/unittests/generic_functions/foobar_return_dict/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_return_dict/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_return_dict/main.py b/tests/unittests/generic_functions/foobar_return_dict/main.py deleted file mode 100644 index c8aef81a3..000000000 --- a/tests/unittests/generic_functions/foobar_return_dict/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input): - return {"hello": "world"} diff --git a/tests/unittests/generic_functions/foobar_return_double/function.json b/tests/unittests/generic_functions/foobar_return_double/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_return_double/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_return_double/main.py b/tests/unittests/generic_functions/foobar_return_double/main.py deleted file mode 100644 index 42aac3fc0..000000000 --- a/tests/unittests/generic_functions/foobar_return_double/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input): - return 12.34 diff --git a/tests/unittests/generic_functions/foobar_return_int/function.json b/tests/unittests/generic_functions/foobar_return_int/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_return_int/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_return_int/main.py b/tests/unittests/generic_functions/foobar_return_int/main.py deleted file mode 100644 index 8beb85606..000000000 --- a/tests/unittests/generic_functions/foobar_return_int/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input): - return 12 diff --git a/tests/unittests/generic_functions/foobar_return_list/function.json b/tests/unittests/generic_functions/foobar_return_list/function.json deleted file mode 100644 index 6f8a83ec0..000000000 --- a/tests/unittests/generic_functions/foobar_return_list/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in", - "dataType": "string" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_return_list/main.py b/tests/unittests/generic_functions/foobar_return_list/main.py deleted file mode 100644 index 1d1a4a5ea..000000000 --- a/tests/unittests/generic_functions/foobar_return_list/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(input): - return [1, 2, 3] diff --git a/tests/unittests/generic_functions/foobar_with_no_datatype/function.json b/tests/unittests/generic_functions/foobar_with_no_datatype/function.json deleted file mode 100644 index 4e49f1942..000000000 --- a/tests/unittests/generic_functions/foobar_with_no_datatype/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "foobar", - "name": "input", - "direction": "in" - } - ] -} diff --git a/tests/unittests/generic_functions/foobar_with_no_datatype/main.py b/tests/unittests/generic_functions/foobar_with_no_datatype/main.py deleted file mode 100644 index b3fee32cc..000000000 --- a/tests/unittests/generic_functions/foobar_with_no_datatype/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(input: str) -> str: - return input diff --git a/tests/unittests/http_functions/accept_json/function.json b/tests/unittests/http_functions/accept_json/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/accept_json/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/accept_json/main.py b/tests/unittests/http_functions/accept_json/main.py deleted file mode 100644 index 368a33741..000000000 --- a/tests/unittests/http_functions/accept_json/main.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions - - -def main(req: azure.functions.HttpRequest): - return json.dumps({ - 'method': req.method, - 'url': req.url, - 'headers': dict(req.headers), - 'params': dict(req.params), - 'get_body': req.get_body().decode(), - 'get_json': req.get_json() - }) diff --git a/tests/unittests/http_functions/async_logging/function.json b/tests/unittests/http_functions/async_logging/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/async_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/async_logging/main.py b/tests/unittests/http_functions/async_logging/main.py deleted file mode 100644 index e30b88660..000000000 --- a/tests/unittests/http_functions/async_logging/main.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import logging - -import azure.functions - -logger = logging.getLogger('my function') - - -async def main(req: azure.functions.HttpRequest): - logger.info('hello %s', 'info') - - await asyncio.sleep(0.1) - - # Create a nested task to check if invocation_id is still - # logged correctly. 
- await asyncio.ensure_future(nested()) - - await asyncio.sleep(0.1) - - return 'OK-async' - - -async def nested(): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('and another error', exc_info=True) diff --git a/tests/unittests/http_functions/async_return_str/function.json b/tests/unittests/http_functions/async_return_str/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/async_return_str/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/async_return_str/main.py b/tests/unittests/http_functions/async_return_str/main.py deleted file mode 100644 index a64f811f7..000000000 --- a/tests/unittests/http_functions/async_return_str/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio - -import azure.functions - - -async def main(req: azure.functions.HttpRequest, context): - await asyncio.sleep(0.1) - return 'Hello Async World!' diff --git a/tests/unittests/http_functions/create_task_with_context/function.json b/tests/unittests/http_functions/create_task_with_context/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/create_task_with_context/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/create_task_with_context/main.py b/tests/unittests/http_functions/create_task_with_context/main.py deleted file mode 100644 index f603acd1b..000000000 --- a/tests/unittests/http_functions/create_task_with_context/main.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import asyncio -import contextvars - -import azure.functions - -num = contextvars.ContextVar('num') - - -async def count(name: str): - # The number of times the loop is executed - # depends on the val set in context - val = num.get() - for i in range(val): - await asyncio.sleep(0.5) - return f"Finished {name} in {val}" - - -async def main(req: azure.functions.HttpRequest): - # Create first task with context num = 5 - num.set(5) - first_ctx = contextvars.copy_context() - first_count_task = asyncio.create_task(count("Hello World"), context=first_ctx) - - # Create second task with context num = 10 - num.set(10) - second_ctx = contextvars.copy_context() - second_count_task = asyncio.create_task(count("Hello World"), context=second_ctx) - - # Execute tasks - first_count_val = await first_count_task - second_count_val = await second_count_task - - return f'{first_count_val + " | " + second_count_val}' diff --git a/tests/unittests/http_functions/create_task_without_context/function.json b/tests/unittests/http_functions/create_task_without_context/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/create_task_without_context/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/create_task_without_context/main.py b/tests/unittests/http_functions/create_task_without_context/main.py deleted file mode 100644 index c7ee21f7b..000000000 --- a/tests/unittests/http_functions/create_task_without_context/main.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio - -import azure.functions - - -async def count(name: str, num: int): - # The number of times the loop executes is decided by a - # user-defined param - for i in range(num): - await asyncio.sleep(0.5) - return f"Finished {name} in {num}" - - -async def main(req: azure.functions.HttpRequest): - # No context is being sent into asyncio.create_task - count_task = asyncio.create_task(count("Hello World", 5)) - count_val = await count_task - return f'{count_val}' diff --git a/tests/unittests/http_functions/debug_logging/function.json b/tests/unittests/http_functions/debug_logging/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/debug_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/debug_logging/main.py b/tests/unittests/http_functions/debug_logging/main.py deleted file mode 100644 index 628896cbc..000000000 --- a/tests/unittests/http_functions/debug_logging/main.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import logging - -import azure.functions - - -def main(req: azure.functions.HttpRequest): - logging.critical('logging critical', exc_info=True) - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - return 'OK-debug' diff --git a/tests/unittests/http_functions/hijack_current_event_loop/function.json b/tests/unittests/http_functions/hijack_current_event_loop/function.json deleted file mode 100644 index 059791f31..000000000 --- a/tests/unittests/http_functions/hijack_current_event_loop/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } diff --git a/tests/unittests/http_functions/hijack_current_event_loop/main.py b/tests/unittests/http_functions/hijack_current_event_loop/main.py deleted file mode 100644 index 10856ca46..000000000 --- a/tests/unittests/http_functions/hijack_current_event_loop/main.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import logging -import sys - -import azure.functions as func - -logger = logging.getLogger('custom_logger') - -# Attempt to log info into system log from customer code -disguised_logger = logging.getLogger('azure_functions_worker') - - -async def parallelly_print(): - await asyncio.sleep(0.1) - print('parallelly_print') - - -async def parallelly_log_info(): - await asyncio.sleep(0.2) - logging.info('parallelly_log_info at root logger') - - -async def parallelly_log_warning(): - await asyncio.sleep(0.3) - logging.warning('parallelly_log_warning at root logger') - - -async def parallelly_log_error(): - await asyncio.sleep(0.4) - logging.error('parallelly_log_error at root logger') - - -async def parallelly_log_exception(): - await asyncio.sleep(0.5) - try: - raise Exception('custom exception') - except Exception: - logging.exception('parallelly_log_exception at root logger', - exc_info=sys.exc_info()) - - -async def parallelly_log_custom(): - await asyncio.sleep(0.6) - logger.info('parallelly_log_custom at custom_logger') - - -async def parallelly_log_system(): - await asyncio.sleep(0.7) - disguised_logger.info('parallelly_log_system at disguised_logger') - - -async def main(req: func.HttpRequest) -> func.HttpResponse: - loop = asyncio.get_event_loop() - - # Create multiple tasks and schedule it into one asyncio.wait blocker - task_print: asyncio.Task = loop.create_task(parallelly_print()) - task_info: asyncio.Task = loop.create_task(parallelly_log_info()) - task_warning: asyncio.Task = loop.create_task(parallelly_log_warning()) - task_error: asyncio.Task = loop.create_task(parallelly_log_error()) - task_exception: asyncio.Task = loop.create_task(parallelly_log_exception()) - task_custom: asyncio.Task = loop.create_task(parallelly_log_custom()) - task_disguise: asyncio.Task = loop.create_task(parallelly_log_system()) - - # Create an awaitable future and occupy the current event loop resource - future = loop.create_future() - loop.call_soon_threadsafe(future.set_result, 'callsoon_log') - - # WaitAll - await asyncio.wait([task_print, task_info, task_warning, task_error, - task_exception, task_custom, task_disguise, future]) - - # Log asyncio low-level future result - 
logging.info(future.result()) - - return 'OK-hijack-current-event-loop' diff --git a/tests/unittests/http_functions/http_functions_stein/function_app.py b/tests/unittests/http_functions/http_functions_stein/function_app.py deleted file mode 100644 index 112813de9..000000000 --- a/tests/unittests/http_functions/http_functions_stein/function_app.py +++ /dev/null @@ -1,455 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import contextvars -import hashlib -import json -import logging -import sys -import time -from urllib.request import urlopen - -import azure.functions as func - -app = func.FunctionApp() - -logger = logging.getLogger("my-function") - -num = contextvars.ContextVar('num') - - -async def count_with_context(name: str): - # The number of times the loop is executed - # depends on the val set in context - val = num.get() - for i in range(val): - await asyncio.sleep(0.5) - return f"Finished {name} in {val}" - - -async def count_without_context(name: str, number: int): - # The number of times the loop executes is decided by a - # user-defined param - for i in range(number): - await asyncio.sleep(0.5) - return f"Finished {name} in {number}" - - -@app.route(route="return_str") -def return_str(req: func.HttpRequest) -> str: - return 'Hello World!' - - -@app.route(route="accept_json") -def accept_json(req: func.HttpRequest): - return json.dumps({ - 'method': req.method, - 'url': req.url, - 'headers': dict(req.headers), - 'params': dict(req.params), - 'get_body': req.get_body().decode(), - 'get_json': req.get_json() - }) - - -async def nested(): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('and another error', exc_info=True) - - -@app.route(route="async_logging") -async def async_logging(req: func.HttpRequest): - logger.info('hello %s', 'info') - - await asyncio.sleep(0.1) - - # Create a nested task to check if invocation_id is still - # logged correctly. - await asyncio.ensure_future(nested()) - - await asyncio.sleep(0.1) - - return 'OK-async' - - -@app.route(route="async_return_str") -async def async_return_str(req: func.HttpRequest): - await asyncio.sleep(0.1) - return 'Hello Async World!' 
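
The create_task_with_context fixture earlier in this diff (and its decorator-model copy later in this file) relies on the context keyword that asyncio.create_task gained in Python 3.11: each task resumes inside the contextvars.Context it was handed, so two tasks can observe different values of the same ContextVar. A minimal standalone sketch of that mechanism, assuming Python 3.11+:

    # Standalone sketch of the per-task contextvars pattern exercised by the
    # create_task_with_context fixtures (requires Python 3.11+ for the
    # `context` keyword on asyncio.create_task).
    import asyncio
    import contextvars

    num = contextvars.ContextVar('num')


    async def report(name: str) -> str:
        # Each task reads the value captured in the context it started with.
        return f'{name} sees num={num.get()}'


    async def main() -> None:
        num.set(5)
        task_a = asyncio.create_task(report('task_a'),
                                     context=contextvars.copy_context())
        num.set(10)
        task_b = asyncio.create_task(report('task_b'),
                                     context=contextvars.copy_context())
        print(await task_a)  # task_a sees num=5
        print(await task_b)  # task_b sees num=10


    asyncio.run(main())
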
- - -@app.route(route="debug_logging") -def debug_logging(req: func.HttpRequest): - logging.critical('logging critical', exc_info=True) - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - return 'OK-debug' - - -@app.route(route="debug_user_logging") -def debug_user_logging(req: func.HttpRequest): - logger.setLevel(logging.DEBUG) - - logging.critical('logging critical', exc_info=True) - logger.info('logging info', exc_info=True) - logger.warning('logging warning', exc_info=True) - logger.debug('logging debug', exc_info=True) - logger.error('logging error', exc_info=True) - return 'OK-user-debug' - - -# Attempt to log info into system log from customer code -disguised_logger = logging.getLogger('azure_functions_worker') - - -async def parallelly_print(): - await asyncio.sleep(0.1) - print('parallelly_print') - - -async def parallelly_log_info(): - await asyncio.sleep(0.2) - logging.info('parallelly_log_info at root logger') - - -async def parallelly_log_warning(): - await asyncio.sleep(0.3) - logging.warning('parallelly_log_warning at root logger') - - -async def parallelly_log_error(): - await asyncio.sleep(0.4) - logging.error('parallelly_log_error at root logger') - - -async def parallelly_log_exception(): - await asyncio.sleep(0.5) - try: - raise Exception('custom exception') - except Exception: - logging.exception('parallelly_log_exception at root logger', - exc_info=sys.exc_info()) - - -async def parallelly_log_custom(): - await asyncio.sleep(0.6) - logger.info('parallelly_log_custom at custom_logger') - - -async def parallelly_log_system(): - await asyncio.sleep(0.7) - disguised_logger.info('parallelly_log_system at disguised_logger') - - -@app.route(route="hijack_current_event_loop") -async def hijack_current_event_loop(req: func.HttpRequest) -> func.HttpResponse: - loop = asyncio.get_event_loop() - - # Create multiple tasks and schedule it into one asyncio.wait blocker - task_print: asyncio.Task = loop.create_task(parallelly_print()) - task_info: asyncio.Task = loop.create_task(parallelly_log_info()) - task_warning: asyncio.Task = loop.create_task(parallelly_log_warning()) - task_error: asyncio.Task = loop.create_task(parallelly_log_error()) - task_exception: asyncio.Task = loop.create_task(parallelly_log_exception()) - task_custom: asyncio.Task = loop.create_task(parallelly_log_custom()) - task_disguise: asyncio.Task = loop.create_task(parallelly_log_system()) - - # Create an awaitable future and occupy the current event loop resource - future = loop.create_future() - loop.call_soon_threadsafe(future.set_result, 'callsoon_log') - - # WaitAll - await asyncio.wait([task_print, task_info, task_warning, task_error, - task_exception, task_custom, task_disguise, future]) - - # Log asyncio low-level future result - logging.info(future.result()) - - return 'OK-hijack-current-event-loop' - - -@app.route(route="no_return") -def no_return(req: func.HttpRequest): - logger.info('hi') - - -@app.route(route="no_return_returns") -def no_return_returns(req): - return 'ABC' - - -@app.route(route="print_logging") -def print_logging(req: func.HttpRequest): - flush_required = False - is_console_log = False - is_stderr = False - message = req.params.get('message', '') - - if req.params.get('flush') == 'true': - flush_required = True - if req.params.get('console') == 'true': - is_console_log = True - if req.params.get('is_stderr') == 'true': - is_stderr = True - 
- # Adding LanguageWorkerConsoleLog will make function host to treat - # this as system log and will be propagated to kusto - prefix = 'LanguageWorkerConsoleLog' if is_console_log else '' - print(f'{prefix} {message}'.strip(), - file=sys.stderr if is_stderr else sys.stdout, - flush=flush_required) - - return 'OK-print-logging' - - -@app.route(route="raw_body_bytes") -def raw_body_bytes(req: func.HttpRequest) -> func.HttpResponse: - body = req.get_body() - body_len = str(len(body)) - - headers = {'body-len': body_len} - return func.HttpResponse(body=body, status_code=200, headers=headers) - - -@app.route(route="remapped_context") -def remapped_context(req: func.HttpRequest): - return req.method - - -@app.route(route="return_bytes") -def return_bytes(req: func.HttpRequest): - # This function will fail, as we don't auto-convert "bytes" to "http". - return b'Hello World!' - - -@app.route(route="return_context") -def return_context(req: func.HttpRequest, context: func.Context): - return json.dumps({ - 'method': req.method, - 'ctx_func_name': context.function_name, - 'ctx_func_dir': context.function_directory, - 'ctx_invocation_id': context.invocation_id, - 'ctx_trace_context_Traceparent': context.trace_context.Traceparent, - 'ctx_trace_context_Tracestate': context.trace_context.Tracestate, - }) - - -@app.route(route="return_http") -def return_http(req: func.HttpRequest): - return func.HttpResponse('
<h1>Hello World™</h1>',
-                             mimetype='text/html')
-
-
-@app.route(route="return_http_404")
-def return_http_404(req: func.HttpRequest):
-    return func.HttpResponse('bye', status_code=404)
-
-
-@app.route(route="return_http_auth_admin", auth_level=func.AuthLevel.ADMIN)
-def return_http_auth_admin(req: func.HttpRequest):
-    return func.HttpResponse('<h1>Hello World™</h1>
', - mimetype='text/html') - - -@app.route(route="return_http_no_body") -def return_http_no_body(req: func.HttpRequest): - return func.HttpResponse() - - -@app.route(route="return_http_redirect") -def return_http_redirect(req: func.HttpRequest): - location = 'return_http?code={}'.format(req.params['code']) - return func.HttpResponse( - status_code=302, - headers={'location': location}) - - -@app.route(route="return_out", binding_arg_name="foo") -def return_out(req: func.HttpRequest, foo: func.Out[func.HttpResponse]): - foo.set(func.HttpResponse(body='hello', status_code=201)) - - -@app.route(route="return_request") -def return_request(req: func.HttpRequest): - params = dict(req.params) - params.pop('code', None) - body = req.get_body() - return json.dumps({ - 'method': req.method, - 'url': req.url, - 'headers': dict(req.headers), - 'params': params, - 'get_body': body.decode(), - 'body_hash': hashlib.sha256(body).hexdigest(), - }) - - -@app.route(route="return_route_params/{param1}/{param2}") -def return_route_params(req: func.HttpRequest) -> str: - return json.dumps(dict(req.route_params)) - - -@app.route(route="sync_logging") -def main(req: func.HttpRequest): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('a gracefully handled error', exc_info=True) - logger.error('a gracefully handled critical error', exc_info=True) - time.sleep(0.05) - return 'OK-sync' - - -@app.route(route="unhandled_error") -def unhandled_error(req: func.HttpRequest): - 1 / 0 - - -@app.route(route="unhandled_urllib_error") -def unhandled_urllib_error(req: func.HttpRequest) -> str: - image_url = req.params.get('img') - urlopen(image_url).read() - - -class UnserializableException(Exception): - def __str__(self): - raise RuntimeError('cannot serialize me') - - -@app.route(route="unhandled_unserializable_error") -def unhandled_unserializable_error(req: func.HttpRequest) -> str: - raise UnserializableException('foo') - - -async def try_log(): - logger.info("try_log") - - -@app.route(route="user_event_loop") -def user_event_loop(req: func.HttpRequest) -> func.HttpResponse: - loop = asyncio.SelectorEventLoop() - asyncio.set_event_loop(loop) - - # This line should throws an asyncio RuntimeError exception - loop.run_until_complete(try_log()) - loop.close() - return 'OK-user-event-loop' - - -@app.route(route="multiple_set_cookie_resp_headers") -def multiple_set_cookie_resp_headers( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; Secure; ' - 'HttpOnly') - resp.headers.add("Set-Cookie", - 'foo3=43; Domain=example.com; Expires=Thu, 12-Jan-2018 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; Secure; ' - 'HttpOnly') - resp.headers.add("HELLO", 'world') - - return resp - - -@app.route(route="response_cookie_header_nullable_bool_err") -def response_cookie_header_nullable_bool_err( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; SecureFalse; ' - 'HttpOnly') - - return resp - - -@app.route(route="response_cookie_header_nullable_double_err") -def 
response_cookie_header_nullable_double_err( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=Dummy; SecureFalse; ' - 'HttpOnly') - - return resp - - -@app.route(route="response_cookie_header_nullable_timestamp_err") -def response_cookie_header_nullable_timestamp_err( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", 'foo=bar; Domain=123; Expires=Dummy') - - return resp - - -@app.route(route="set_cookie_resp_header_default_values") -def set_cookie_resp_header_default_values( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", 'foo=bar') - - return resp - - -@app.route(route="set_cookie_resp_header_empty") -def set_cookie_resp_header_empty( - req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", '') - - return resp - - -@app.route('create_task_with_context') -async def create_task_with_context(req: func.HttpRequest): - # Create first task with context num = 5 - num.set(5) - first_ctx = contextvars.copy_context() - first_count_task = asyncio.create_task( - count_with_context("Hello World"), context=first_ctx) - - # Create second task with context num = 10 - num.set(10) - second_ctx = contextvars.copy_context() - second_count_task = asyncio.create_task( - count_with_context("Hello World"), context=second_ctx) - - # Execute tasks - first_count_val = await first_count_task - second_count_val = await second_count_task - - return f'{first_count_val + " | " + second_count_val}' - - -@app.route('create_task_without_context') -async def create_task_without_context(req: func.HttpRequest): - # No context is being sent into asyncio.create_task - count_task = asyncio.create_task(count_without_context("Hello World", 5)) - count_val = await count_task - return f'{count_val}' diff --git a/tests/unittests/http_functions/http_v2_functions/fastapi/function_app.py b/tests/unittests/http_functions/http_v2_functions/fastapi/function_app.py deleted file mode 100644 index c10bcb8ec..000000000 --- a/tests/unittests/http_functions/http_v2_functions/fastapi/function_app.py +++ /dev/null @@ -1,438 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import asyncio -import hashlib -import logging -import sys -import time -from urllib.request import urlopen - -import azure.functions as func -from azurefunctions.extensions.http.fastapi import ( - HTMLResponse, - RedirectResponse, - Request, - Response, -) -from pydantic import BaseModel - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - -logger = logging.getLogger("my-function") - - -class Item(BaseModel): - name: str - description: str - - -@app.route(route="no_type_hint") -def no_type_hint(req): - return 'no_type_hint' - - -@app.route(route="return_int") -def return_int(req) -> int: - return 1000 - - -@app.route(route="return_float") -def return_float(req) -> float: - return 1000.0 - - -@app.route(route="return_bool") -def return_bool(req) -> bool: - return True - - -@app.route(route="return_dict") -def return_dict(req) -> dict: - return {"key": "value"} - - -@app.route(route="return_list") -def return_list(req): - return ["value1", "value2"] - - -@app.route(route="return_pydantic_model") -def return_pydantic_model(req) -> Item: - item = Item(name="item1", description="description1") - return item - - -@app.route(route="return_pydantic_model_with_missing_fields") -def return_pydantic_model_with_missing_fields(req) -> Item: - item = Item(name="item1") - return item - - -@app.route(route="accept_json") -async def accept_json(req: Request): - return await req.json() - - -async def nested(): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('and another error', exc_info=True) - - -@app.route(route="async_logging") -async def async_logging(req: Request): - logger.info('hello %s', 'info') - - await asyncio.sleep(0.1) - - # Create a nested task to check if invocation_id is still - # logged correctly. - await asyncio.ensure_future(nested()) - - await asyncio.sleep(0.1) - - return 'OK-async' - - -@app.route(route="async_return_str") -async def async_return_str(req: Request): - await asyncio.sleep(0.1) - return 'Hello Async World!' 
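
These http_v2 fixtures exercise the FastAPI-based HTTP extension, where handlers take a Request instead of func.HttpRequest and body access is asynchronous. A minimal sketch of that pattern, mirroring the raw_body_bytes fixture further down (the route name here is illustrative only):

    # Minimal sketch of the HTTP v2 (FastAPI extension) request/response
    # flow; imports and types are the ones used by the fixtures in this diff.
    import azure.functions as func
    from azurefunctions.extensions.http.fastapi import Request, Response

    app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)


    @app.route(route="echo_len")
    async def echo_len(req: Request) -> Response:
        # Unlike func.HttpRequest.get_body(), Request.body() must be awaited.
        body = await req.body()
        return Response(content=body, headers={'body-len': str(len(body))})
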
- - -@app.route(route="debug_logging") -def debug_logging(req: Request): - logging.critical('logging critical', exc_info=True) - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - return 'OK-debug' - - -@app.route(route="debug_user_logging") -def debug_user_logging(req: Request): - logger.setLevel(logging.DEBUG) - - logging.critical('logging critical', exc_info=True) - logger.info('logging info', exc_info=True) - logger.warning('logging warning', exc_info=True) - logger.debug('logging debug', exc_info=True) - logger.error('logging error', exc_info=True) - return 'OK-user-debug' - - -# Attempt to log info into system log from customer code -disguised_logger = logging.getLogger('azure_functions_worker') - - -async def parallelly_print(): - await asyncio.sleep(0.1) - print('parallelly_print') - - -async def parallelly_log_info(): - await asyncio.sleep(0.2) - logging.info('parallelly_log_info at root logger') - - -async def parallelly_log_warning(): - await asyncio.sleep(0.3) - logging.warning('parallelly_log_warning at root logger') - - -async def parallelly_log_error(): - await asyncio.sleep(0.4) - logging.error('parallelly_log_error at root logger') - - -async def parallelly_log_exception(): - await asyncio.sleep(0.5) - try: - raise Exception('custom exception') - except Exception: - logging.exception('parallelly_log_exception at root logger', - exc_info=sys.exc_info()) - - -async def parallelly_log_custom(): - await asyncio.sleep(0.6) - logger.info('parallelly_log_custom at custom_logger') - - -async def parallelly_log_system(): - await asyncio.sleep(0.7) - disguised_logger.info('parallelly_log_system at disguised_logger') - - -@app.route(route="hijack_current_event_loop") -async def hijack_current_event_loop(req: Request) -> Response: - loop = asyncio.get_event_loop() - - # Create multiple tasks and schedule it into one asyncio.wait blocker - task_print: asyncio.Task = loop.create_task(parallelly_print()) - task_info: asyncio.Task = loop.create_task(parallelly_log_info()) - task_warning: asyncio.Task = loop.create_task(parallelly_log_warning()) - task_error: asyncio.Task = loop.create_task(parallelly_log_error()) - task_exception: asyncio.Task = loop.create_task(parallelly_log_exception()) - task_custom: asyncio.Task = loop.create_task(parallelly_log_custom()) - task_disguise: asyncio.Task = loop.create_task(parallelly_log_system()) - - # Create an awaitable future and occupy the current event loop resource - future = loop.create_future() - loop.call_soon_threadsafe(future.set_result, 'callsoon_log') - - # WaitAll - await asyncio.wait([task_print, task_info, task_warning, task_error, - task_exception, task_custom, task_disguise, future]) - - # Log asyncio low-level future result - logging.info(future.result()) - - return 'OK-hijack-current-event-loop' - - -@app.route(route="print_logging") -def print_logging(req: Request): - flush_required = False - is_console_log = False - is_stderr = False - message = req.query_params.get('message', '') - - if req.query_params.get('flush') == 'true': - flush_required = True - if req.query_params.get('console') == 'true': - is_console_log = True - if req.query_params.get('is_stderr') == 'true': - is_stderr = True - - # Adding LanguageWorkerConsoleLog will make function host to treat - # this as system log and will be propagated to kusto - prefix = 'LanguageWorkerConsoleLog' if is_console_log else '' - 
print(f'{prefix} {message}'.strip(), - file=sys.stderr if is_stderr else sys.stdout, - flush=flush_required) - - return 'OK-print-logging' - - -@app.route(route="raw_body_bytes") -async def raw_body_bytes(req: Request) -> Response: - body = await req.body() - body_len = str(len(body)) - - headers = {'body-len': body_len} - return Response(content=body, status_code=200, headers=headers) - - -@app.route(route="remapped_context") -def remapped_context(req: Request): - return req.method - - -@app.route(route="return_bytes") -def return_bytes(req: Request): - return b"Hello World" - - -@app.route(route="return_context") -def return_context(req: Request, context: func.Context): - return { - 'method': req.method, - 'ctx_func_name': context.function_name, - 'ctx_func_dir': context.function_directory, - 'ctx_invocation_id': context.invocation_id, - 'ctx_trace_context_Traceparent': context.trace_context.Traceparent, - 'ctx_trace_context_Tracestate': context.trace_context.Tracestate, - } - - -@app.route(route="return_http") -def return_http(req: Request) -> HTMLResponse: - return HTMLResponse('
<h1>Hello World™</h1>')
-
-
-@app.route(route="return_http_404")
-def return_http_404(req: Request):
-    return Response('bye', status_code=404)
-
-
-@app.route(route="return_http_auth_admin", auth_level=func.AuthLevel.ADMIN)
-def return_http_auth_admin(req: Request) -> HTMLResponse:
-    return HTMLResponse('<h1>Hello World™</h1>
') - - -@app.route(route="return_http_no_body") -def return_http_no_body(req: Request): - return Response() - - -@app.route(route="return_http_redirect") -def return_http_redirect(req: Request): - return RedirectResponse(url='/api/return_http', status_code=302) - - -@app.route(route="return_request") -async def return_request(req: Request): - params = dict(req.query_params) - params.pop('code', None) # Remove 'code' parameter if present - - # Get the body content and calculate its hash - body = await req.body() - body_hash = hashlib.sha256(body).hexdigest() if body else None - - # Return a dictionary containing request information - return { - 'method': req.method, - 'url': str(req.url), - 'headers': dict(req.headers), - 'params': params, - 'body': body.decode() if body else None, - 'body_hash': body_hash, - } - - -@app.route(route="return_route_params/{param1}/{param2}") -def return_route_params(req: Request) -> str: - # log type of req - logger.info(f"req type: {type(req)}") - # log req path params - logger.info(f"req path params: {req.path_params}") - return req.path_params - - -@app.route(route="sync_logging") -def main(req: Request): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('a gracefully handled error', exc_info=True) - logger.error('a gracefully handled critical error', exc_info=True) - time.sleep(0.05) - return 'OK-sync' - - -@app.route(route="unhandled_error") -def unhandled_error(req: Request): - 1 / 0 - - -@app.route(route="unhandled_urllib_error") -def unhandled_urllib_error(req: Request) -> str: - image_url = req.params.get('img') - urlopen(image_url).read() - - -class UnserializableException(Exception): - def __str__(self): - raise RuntimeError('cannot serialize me') - - -@app.route(route="unhandled_unserializable_error") -def unhandled_unserializable_error(req: Request) -> str: - raise UnserializableException('foo') - - -async def try_log(): - logger.info("try_log") - - -@app.route(route="user_event_loop") -def user_event_loop(req: Request) -> Response: - loop = asyncio.SelectorEventLoop() - asyncio.set_event_loop(loop) - - # This line should throws an asyncio RuntimeError exception - loop.run_until_complete(try_log()) - loop.close() - return 'OK-user-event-loop' - - -@app.route(route="multiple_set_cookie_resp_headers") -async def multiple_set_cookie_resp_headers(req: Request): - logging.info('Python HTTP trigger function processed a request.') - resp = Response( - "This HTTP triggered function executed successfully.") - - expires_1 = "Thu, 12 Jan 2017 13:55:08 GMT" - expires_2 = "Fri, 12 Jan 2018 13:55:08 GMT" - - resp.set_cookie( - key='foo3', - value='42', - domain='example.com', - expires=expires_1, - path='/', - max_age=10000000, - secure=True, - httponly=True, - samesite='Lax' - ) - - resp.set_cookie( - key='foo3', - value='43', - domain='example.com', - expires=expires_2, - path='/', - max_age=10000000, - secure=True, - httponly=True, - samesite='Lax' - ) - - return resp - - -@app.route(route="response_cookie_header_nullable_bool_err") -def response_cookie_header_nullable_bool_err( - req: Request) -> Response: - logging.info('Python HTTP trigger function processed a request.') - resp = Response( - "This HTTP triggered function executed successfully.") - - # Set the cookie with Secure attribute set to False - resp.set_cookie( - key='foo3', - value='42', - domain='example.com', - expires='Thu, 12-Jan-2017 13:55:08 GMT', - path='/', - max_age=10000000, - secure=False, - httponly=True - ) - - return resp - - 
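
Unlike the azure.functions version of these tests, which appends raw Set-Cookie strings via resp.headers.add, the FastAPI-extension fixtures go through Response.set_cookie, and each call emits its own Set-Cookie header. A small sketch of that behavior, assuming Starlette's set_cookie signature and raw_headers attribute:

    # Sketch: each set_cookie() call appends a separate Set-Cookie header,
    # which is what the multiple_set_cookie_resp_headers fixture asserts.
    # Starlette Response semantics are assumed here.
    from azurefunctions.extensions.http.fastapi import Response

    resp = Response("This HTTP triggered function executed successfully.")
    resp.set_cookie(key='foo3', value='42', domain='example.com',
                    path='/', max_age=10000000, secure=True, httponly=True)
    resp.set_cookie(key='foo3', value='43', domain='example.com',
                    path='/', max_age=10000000, secure=True, httponly=True)
    # resp.raw_headers now holds two independent 'set-cookie' entries.
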
-@app.route(route="response_cookie_header_nullable_timestamp_err") -def response_cookie_header_nullable_timestamp_err( - req: Request) -> Response: - logging.info('Python HTTP trigger function processed a request.') - resp = Response( - "This HTTP triggered function executed successfully.") - - resp.set_cookie( - key='foo3', - value='42', - domain='example.com' - ) - - return resp - - -@app.route(route="set_cookie_resp_header_default_values") -def set_cookie_resp_header_default_values( - req: Request) -> Response: - logging.info('Python HTTP trigger function processed a request.') - resp = Response( - "This HTTP triggered function executed successfully.") - - resp.set_cookie( - key='foo3', - value='42' - ) - - return resp diff --git a/tests/unittests/http_functions/multiple_set_cookie_resp_headers/function.json b/tests/unittests/http_functions/multiple_set_cookie_resp_headers/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/multiple_set_cookie_resp_headers/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/multiple_set_cookie_resp_headers/main.py b/tests/unittests/http_functions/multiple_set_cookie_resp_headers/main.py deleted file mode 100644 index 450496fb4..000000000 --- a/tests/unittests/http_functions/multiple_set_cookie_resp_headers/main.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; Secure; ' - 'HttpOnly') - resp.headers.add("Set-Cookie", - 'foo3=43; Domain=example.com; Expires=Thu, 12-Jan-2018 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; Secure; ' - 'HttpOnly') - resp.headers.add("HELLO", 'world') - - return resp diff --git a/tests/unittests/http_functions/no_return/function.json b/tests/unittests/http_functions/no_return/function.json deleted file mode 100644 index da37649e4..000000000 --- a/tests/unittests/http_functions/no_return/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - } - ] -} diff --git a/tests/unittests/http_functions/no_return/main.py b/tests/unittests/http_functions/no_return/main.py deleted file mode 100644 index 1e9c228fc..000000000 --- a/tests/unittests/http_functions/no_return/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import logging - -logger = logging.getLogger('test') - - -def main(req): - logger.error('hi') diff --git a/tests/unittests/http_functions/no_return_returns/function.json b/tests/unittests/http_functions/no_return_returns/function.json deleted file mode 100644 index da37649e4..000000000 --- a/tests/unittests/http_functions/no_return_returns/function.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - } - ] -} diff --git a/tests/unittests/http_functions/no_return_returns/main.py b/tests/unittests/http_functions/no_return_returns/main.py deleted file mode 100644 index f6722b5f8..000000000 --- a/tests/unittests/http_functions/no_return_returns/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req): - return 'ABC' diff --git a/tests/unittests/http_functions/print_logging/function.json b/tests/unittests/http_functions/print_logging/function.json deleted file mode 100644 index 985453fcf..000000000 --- a/tests/unittests/http_functions/print_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] - } diff --git a/tests/unittests/http_functions/print_logging/main.py b/tests/unittests/http_functions/print_logging/main.py deleted file mode 100644 index 87fd693f6..000000000 --- a/tests/unittests/http_functions/print_logging/main.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import sys - -import azure.functions - - -def main(req: azure.functions.HttpRequest): - flush_required = False - is_console_log = False - is_stderr = False - message = req.params.get('message', '') - - if req.params.get('flush') == 'true': - flush_required = True - if req.params.get('console') == 'true': - is_console_log = True - if req.params.get('is_stderr') == 'true': - is_stderr = True - - # Adding the LanguageWorkerConsoleLog prefix makes the function host - # treat this as a system log, which is propagated to Kusto - prefix = 'LanguageWorkerConsoleLog' if is_console_log else '' - print(f'{prefix} {message}'.strip(), - file=sys.stderr if is_stderr else sys.stdout, - flush=flush_required) - - return 'OK-print-logging' diff --git a/tests/unittests/http_functions/raw_body_bytes/function.json b/tests/unittests/http_functions/raw_body_bytes/function.json deleted file mode 100644 index c8d851b10..000000000 --- a/tests/unittests/http_functions/raw_body_bytes/function.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "post" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/raw_body_bytes/main.py b/tests/unittests/http_functions/raw_body_bytes/main.py deleted file mode 100644 index 02dc72d8f..000000000 --- a/tests/unittests/http_functions/raw_body_bytes/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
-import azure.functions as func - - -def main(req: func.HttpRequest) -> func.HttpResponse: - body = req.get_body() - body_len = str(len(body)) - - headers = {'body-len': body_len} - return func.HttpResponse(body=body, status_code=200, headers=headers) diff --git a/tests/unittests/http_functions/remapped_context/function.json b/tests/unittests/http_functions/remapped_context/function.json deleted file mode 100644 index f6f7bc7e5..000000000 --- a/tests/unittests/http_functions/remapped_context/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "context" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/remapped_context/main.py b/tests/unittests/http_functions/remapped_context/main.py deleted file mode 100644 index 11faca410..000000000 --- a/tests/unittests/http_functions/remapped_context/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(context): - return context.method diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/function.json b/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/main.py b/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/main.py deleted file mode 100644 index 630a33dff..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_bool_err/main.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=10000000; SecureFalse; ' - 'HttpOnly') - - return resp diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_double_err/function.json b/tests/unittests/http_functions/response_cookie_header_nullable_double_err/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_double_err/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_double_err/main.py b/tests/unittests/http_functions/response_cookie_header_nullable_double_err/main.py deleted file mode 100644 index 81601b8b9..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_double_err/main.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", - 'foo3=42; Domain=example.com; Expires=Thu, 12-Jan-2017 ' - '13:55:08 GMT; Path=/; Max-Age=Dummy; SecureFalse; ' - 'HttpOnly') - - return resp diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/function.json b/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/main.py b/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/main.py deleted file mode 100644 index 6a7c8cfef..000000000 --- a/tests/unittests/http_functions/response_cookie_header_nullable_timestamp_err/main.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", 'foo=bar; Domain=123; Expires=Dummy') - - return resp diff --git a/tests/unittests/http_functions/return_bytes/function.json b/tests/unittests/http_functions/return_bytes/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_bytes/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_bytes/main.py b/tests/unittests/http_functions/return_bytes/main.py deleted file mode 100644 index bf2eda7cf..000000000 --- a/tests/unittests/http_functions/return_bytes/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req): - # This function will fail, as we don't auto-convert "bytes" to "http". - return b'Hello World!' diff --git a/tests/unittests/http_functions/return_context/function.json b/tests/unittests/http_functions/return_context/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_context/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_context/main.py b/tests/unittests/http_functions/return_context/main.py deleted file mode 100644 index 2b17ef301..000000000 --- a/tests/unittests/http_functions/return_context/main.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. -import json - -import azure.functions - - -def main(req: azure.functions.HttpRequest, context: azure.functions.Context): - return json.dumps({ - 'method': req.method, - 'ctx_func_name': context.function_name, - 'ctx_func_dir': context.function_directory, - 'ctx_invocation_id': context.invocation_id, - 'ctx_trace_context_Traceparent': context.trace_context.Traceparent, - 'ctx_trace_context_Tracestate': context.trace_context.Tracestate, - }) diff --git a/tests/unittests/http_functions/return_http/function.json b/tests/unittests/http_functions/return_http/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_http/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_http/main.py b/tests/unittests/http_functions/return_http/main.py deleted file mode 100644 index d2abfb7b4..000000000 --- a/tests/unittests/http_functions/return_http/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - return azf.HttpResponse('
<html><body>Hello World™</body></html>
', - mimetype='text/html') diff --git a/tests/unittests/http_functions/return_http_404/function.json b/tests/unittests/http_functions/return_http_404/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_http_404/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_http_404/main.py b/tests/unittests/http_functions/return_http_404/main.py deleted file mode 100644 index bd4e254c2..000000000 --- a/tests/unittests/http_functions/return_http_404/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - return azf.HttpResponse('bye', status_code=404) diff --git a/tests/unittests/http_functions/return_http_auth_admin/function.json b/tests/unittests/http_functions/return_http_auth_admin/function.json deleted file mode 100644 index 1baa699c4..000000000 --- a/tests/unittests/http_functions/return_http_auth_admin/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "authLevel": "admin", - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_http_auth_admin/main.py b/tests/unittests/http_functions/return_http_auth_admin/main.py deleted file mode 100644 index d2abfb7b4..000000000 --- a/tests/unittests/http_functions/return_http_auth_admin/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - return azf.HttpResponse('
<html><body>Hello World™</body></html>
', - mimetype='text/html') diff --git a/tests/unittests/http_functions/return_http_no_body/function.json b/tests/unittests/http_functions/return_http_no_body/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_http_no_body/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_http_no_body/main.py b/tests/unittests/http_functions/return_http_no_body/main.py deleted file mode 100644 index 8ac6c4a50..000000000 --- a/tests/unittests/http_functions/return_http_no_body/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - return azf.HttpResponse() diff --git a/tests/unittests/http_functions/return_http_redirect/function.json b/tests/unittests/http_functions/return_http_redirect/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_http_redirect/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_http_redirect/main.py b/tests/unittests/http_functions/return_http_redirect/main.py deleted file mode 100644 index 54fb8bbed..000000000 --- a/tests/unittests/http_functions/return_http_redirect/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - location = 'return_http?code={}'.format(req.params['code']) - return azf.HttpResponse( - status_code=302, - headers={'location': location}) diff --git a/tests/unittests/http_functions/return_out/function.json b/tests/unittests/http_functions/return_out/function.json deleted file mode 100644 index 1cbac7ad1..000000000 --- a/tests/unittests/http_functions/return_out/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "direction": "out", - "name": "foo", - "type": "http" - } - ] -} diff --git a/tests/unittests/http_functions/return_out/main.py b/tests/unittests/http_functions/return_out/main.py deleted file mode 100644 index 53e8cbb5d..000000000 --- a/tests/unittests/http_functions/return_out/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import azure.functions as azf - - -def main(req: azf.HttpRequest, foo: azf.Out[azf.HttpResponse]): - foo.set(azf.HttpResponse(body='hello', status_code=201)) diff --git a/tests/unittests/http_functions/return_request/function.json b/tests/unittests/http_functions/return_request/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_request/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_request/main.py b/tests/unittests/http_functions/return_request/main.py deleted file mode 100644 index 842e18581..000000000 --- a/tests/unittests/http_functions/return_request/main.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import hashlib -import json - -import azure.functions - - -def main(req: azure.functions.HttpRequest): - params = dict(req.params) - params.pop('code', None) - body = req.get_body() - return json.dumps({ - 'method': req.method, - 'url': req.url, - 'headers': dict(req.headers), - 'params': params, - 'get_body': body.decode(), - 'body_hash': hashlib.sha256(body).hexdigest(), - }) diff --git a/tests/unittests/http_functions/return_route_params/function.json b/tests/unittests/http_functions/return_route_params/function.json deleted file mode 100644 index 2ca2c9196..000000000 --- a/tests/unittests/http_functions/return_route_params/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req", - "route": "return_route_params/{param1}/{param2}" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_route_params/main.py b/tests/unittests/http_functions/return_route_params/main.py deleted file mode 100644 index 1cc7f0740..000000000 --- a/tests/unittests/http_functions/return_route_params/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -import azure.functions - - -def main(req: azure.functions.HttpRequest) -> str: - return json.dumps(dict(req.route_params)) diff --git a/tests/unittests/http_functions/return_str/function.json b/tests/unittests/http_functions/return_str/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/return_str/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/return_str/main.py b/tests/unittests/http_functions/return_str/main.py deleted file mode 100644 index 9fa7e56cc..000000000 --- a/tests/unittests/http_functions/return_str/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions - - -def main(req: azure.functions.HttpRequest, context) -> str: - return 'Hello World!' 
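The return_request fixtures above (both the deleted legacy main.py and the decorator version earlier in this diff) echo a sha256 digest of the raw body next to the decoded body, so a test can validate large or binary payloads by digest instead of comparing the full bytes. A self-contained sketch of that check (the helper name is illustrative, not part of the worker):

import hashlib


def body_digest(body: bytes) -> str:
    # Same digest the fixtures compute before echoing the request back.
    return hashlib.sha256(body).hexdigest()


# Known-answer check for sha256(b'abc').
assert body_digest(b'abc') == (
    'ba7816bf8f01cfea414140de5dae2223'
    'b00361a396177a9cb410ff61f20015ad'
)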
diff --git a/tests/unittests/http_functions/set_cookie_resp_header_default_values/function.json b/tests/unittests/http_functions/set_cookie_resp_header_default_values/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/set_cookie_resp_header_default_values/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/set_cookie_resp_header_default_values/main.py b/tests/unittests/http_functions/set_cookie_resp_header_default_values/main.py deleted file mode 100644 index a29b693b9..000000000 --- a/tests/unittests/http_functions/set_cookie_resp_header_default_values/main.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", 'foo=bar') - - return resp diff --git a/tests/unittests/http_functions/set_cookie_resp_header_empty/function.json b/tests/unittests/http_functions/set_cookie_resp_header_empty/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/set_cookie_resp_header_empty/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/set_cookie_resp_header_empty/main.py b/tests/unittests/http_functions/set_cookie_resp_header_empty/main.py deleted file mode 100644 index 3e33bc8e4..000000000 --- a/tests/unittests/http_functions/set_cookie_resp_header_empty/main.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - - -def main(req: func.HttpRequest): - logging.info('Python HTTP trigger function processed a request.') - resp = func.HttpResponse( - "This HTTP triggered function executed successfully.") - - resp.headers.add("Set-Cookie", '') - - return resp diff --git a/tests/unittests/http_functions/sync_logging/function.json b/tests/unittests/http_functions/sync_logging/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/sync_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/sync_logging/main.py b/tests/unittests/http_functions/sync_logging/main.py deleted file mode 100644 index 9b4d89634..000000000 --- a/tests/unittests/http_functions/sync_logging/main.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import logging -import time - -import azure.functions - -logger = logging.getLogger('my function') - - -def main(req: azure.functions.HttpRequest): - try: - 1 / 0 - except ZeroDivisionError: - logger.error('a gracefully handled error', exc_info=True) - logger.error('a gracefully handled critical error', exc_info=True) - time.sleep(0.05) - return 'OK-sync' diff --git a/tests/unittests/http_functions/unhandled_error/function.json b/tests/unittests/http_functions/unhandled_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/unhandled_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/unhandled_error/main.py b/tests/unittests/http_functions/unhandled_error/main.py deleted file mode 100644 index 9fb1153c7..000000000 --- a/tests/unittests/http_functions/unhandled_error/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(req: azf.HttpRequest): - 1 / 0 diff --git a/tests/unittests/http_functions/unhandled_unserializable_error/function.json b/tests/unittests/http_functions/unhandled_unserializable_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/unhandled_unserializable_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/unhandled_unserializable_error/main.py b/tests/unittests/http_functions/unhandled_unserializable_error/main.py deleted file mode 100644 index aafa850f6..000000000 --- a/tests/unittests/http_functions/unhandled_unserializable_error/main.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as func - - -class UnserializableException(Exception): - def __str__(self): - raise RuntimeError('cannot serialize me') - - -def main(req: func.HttpRequest) -> str: - raise UnserializableException('foo') diff --git a/tests/unittests/http_functions/unhandled_urllib_error/function.json b/tests/unittests/http_functions/unhandled_urllib_error/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/http_functions/unhandled_urllib_error/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/unhandled_urllib_error/main.py b/tests/unittests/http_functions/unhandled_urllib_error/main.py deleted file mode 100644 index 6835fd631..000000000 --- a/tests/unittests/http_functions/unhandled_urllib_error/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-from urllib.request import urlopen - -import azure.functions as func - - -def main(req: func.HttpRequest) -> str: - image_url = req.params.get('img') - urlopen(image_url).read() diff --git a/tests/unittests/http_functions/user_event_loop/function.json b/tests/unittests/http_functions/user_event_loop/function.json deleted file mode 100644 index 91360208a..000000000 --- a/tests/unittests/http_functions/user_event_loop/function.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": [ - "get" - ] - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/http_functions/user_event_loop/main.py b/tests/unittests/http_functions/user_event_loop/main.py deleted file mode 100644 index 879b1cf86..000000000 --- a/tests/unittests/http_functions/user_event_loop/main.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import logging - -import azure.functions as func - -logger = logging.getLogger('my function') - - -async def try_log(): - logger.info("try_log") - - -def main(req: func.HttpRequest) -> func.HttpResponse: - loop = asyncio.SelectorEventLoop() - asyncio.set_event_loop(loop) - - # This line should throw an asyncio RuntimeError exception - loop.run_until_complete(try_log()) - loop.close() - return 'OK-user-event-loop' diff --git a/tests/endtoend/http_functions/http_functions_stein/generic/function_app.py b/tests/unittests/indexing_exception_function/function_app.py similarity index 78% rename from tests/endtoend/http_functions/http_functions_stein/generic/function_app.py rename to tests/unittests/indexing_exception_function/function_app.py index 17e715a89..c4f22a38b 100644 --- a/tests/endtoend/http_functions/http_functions_stein/generic/function_app.py +++ b/tests/unittests/indexing_exception_function/function_app.py @@ -1,38 +1,35 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import logging - -import azure.functions as func - -app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) - - -@app.function_name(name="default_template") -@app.generic_trigger(arg_name="req", - type="httpTrigger", - route="default_template") -@app.generic_output_binding(arg_name="$return", type="http") -def default_template(req: func.HttpRequest) -> func.HttpResponse: - logging.info('Python HTTP trigger function processed a request.') - - name = req.params.get('name') - if not name: - try: - req_body = req.get_json() - except ValueError: - pass - else: - name = req_body.get('name') - - if name: - return func.HttpResponse( - f"Hello, {name}. This HTTP triggered function " - f"executed successfully.") - else: - return func.HttpResponse( - "This HTTP triggered function executed successfully. " - "Pass a name in the query string or in the request body for a" - " personalized response.", - status_code=200 - ) +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License.
+ +import logging +import nonexistent.package # noqa + +import azure.functions as func + +app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS) + + +@app.route(route="default_template") +def default_template(req: func.HttpRequest) -> func.HttpResponse: + logging.info('Python HTTP trigger function processed a request.') + + name = req.params.get('name') + if not name: + try: + req_body = req.get_json() + except ValueError: + pass + else: + name = req_body.get('name') + + if name: + return func.HttpResponse( + f"Hello, {name}. This HTTP triggered function " + f"executed successfully.") + else: + return func.HttpResponse( + "This HTTP triggered function executed successfully. " + "Pass a name in the query string or in the request body for a" + " personalized response.", + status_code=200 + ) diff --git a/tests/unittests/load_functions/absolute_thirdparty/function.json b/tests/unittests/load_functions/absolute_thirdparty/function.json deleted file mode 100644 index cb4469e61..000000000 --- a/tests/unittests/load_functions/absolute_thirdparty/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "main", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/absolute_thirdparty/main.py b/tests/unittests/load_functions/absolute_thirdparty/main.py deleted file mode 100644 index e00bf2f3f..000000000 --- a/tests/unittests/load_functions/absolute_thirdparty/main.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Import a module from thirdparty package azure-eventhub -import azure.eventhub as eh - - -def main(req) -> str: - return f'eh = {eh.__name__}' diff --git a/tests/unittests/load_functions/entrypoint/function.json b/tests/unittests/load_functions/entrypoint/function.json deleted file mode 100644 index cb426f2f4..000000000 --- a/tests/unittests/load_functions/entrypoint/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "customentry", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/entrypoint/main.py b/tests/unittests/load_functions/entrypoint/main.py deleted file mode 100644 index 4bbdf7b3d..000000000 --- a/tests/unittests/load_functions/entrypoint/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def customentry(req) -> str: - return __name__ diff --git a/tests/unittests/load_functions/implicit_import/function.json b/tests/unittests/load_functions/implicit_import/function.json deleted file mode 100644 index ec10c6de0..000000000 --- a/tests/unittests/load_functions/implicit_import/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "implicitinmport", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/implicit_import/main.py b/tests/unittests/load_functions/implicit_import/main.py deleted file mode 100644 index 96b929ab9..000000000 --- a/tests/unittests/load_functions/implicit_import/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Import simple module with implicit statement should now be acceptable -# since sys.path is now appended with function script root -from simple.main import main as s_main - - -def implicitinmport(req) -> str: - return f's_main = {s_main(req)}' diff --git a/tests/unittests/load_functions/load_outside_main/function.json b/tests/unittests/load_functions/load_outside_main/function.json deleted file mode 100644 index 96d44a67a..000000000 --- a/tests/unittests/load_functions/load_outside_main/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/load_functions/load_outside_main/main.py b/tests/unittests/load_functions/load_outside_main/main.py deleted file mode 100644 index 3e5a5e132..000000000 --- a/tests/unittests/load_functions/load_outside_main/main.py +++ /dev/null @@ -1,23 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -import azure.functions as func - - -def main(req: func.HttpRequest): - if req.params['from'] == 'init': - # Ensure the module can still be loaded from package.__init__ - from __app__.stub_http_trigger.__init__ import main # NoQA - - from ..stub_http_trigger.__init__ import main # NoQA - - elif req.params['from'] == 'package': - # Ensure the module can still be loaded from package - from __app__.stub_http_trigger import main # NoQA - - # Ensure submodules can also be imported - from __app__.stub_http_trigger.stub_tools import FOO # NoQA - - from ..stub_http_trigger.stub_tools import FOO # NoQA - - return 'OK' diff --git a/tests/unittests/load_functions/module_not_found/function.json b/tests/unittests/load_functions/module_not_found/function.json deleted file mode 100644 index 77ad6ecb1..000000000 --- a/tests/unittests/load_functions/module_not_found/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "modulenotfound", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/module_not_found/main.py b/tests/unittests/load_functions/module_not_found/main.py deleted file mode 100644 index 82e43227b..000000000 --- a/tests/unittests/load_functions/module_not_found/main.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. -# Importing a module that does not exist should raise ModuleNotFoundError -import notfound - - -def modulenotfound(req) -> str: - return f'notfound = {notfound.__name__}' diff --git a/tests/unittests/load_functions/name_collision/function.json b/tests/unittests/load_functions/name_collision/function.json deleted file mode 100644 index cb4469e61..000000000 --- a/tests/unittests/load_functions/name_collision/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "main", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/name_collision/main.py b/tests/unittests/load_functions/name_collision/main.py deleted file mode 100644 index 41924a4c7..000000000 --- a/tests/unittests/load_functions/name_collision/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Both the customer code and a third-party package have the same name, pytest. -# The worker should pick the pytest from the third-party package. -import pytest as pt - - -def main(req) -> str: - return f'pt.__version__ = {pt.__version__}' diff --git a/tests/unittests/load_functions/name_collision_app_import/function.json b/tests/unittests/load_functions/name_collision_app_import/function.json deleted file mode 100644 index cb4469e61..000000000 --- a/tests/unittests/load_functions/name_collision_app_import/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "entryPoint": "main", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/name_collision_app_import/main.py b/tests/unittests/load_functions/name_collision_app_import/main.py deleted file mode 100644 index 4767a1c7d..000000000 --- a/tests/unittests/load_functions/name_collision_app_import/main.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -# Both the customer code and a third-party package have the same name, pytest. -# When using an absolute import, the worker should pick the customer's package. -import __app__.pytest as pt - - -def main(req) -> str: - return f'pt.__version__ = {pt.__version__}' diff --git a/tests/unittests/load_functions/no_script_file/function.json b/tests/unittests/load_functions/no_script_file/function.json deleted file mode 100644 index af28e6613..000000000 --- a/tests/unittests/load_functions/no_script_file/function.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/no_script_file/main.py b/tests/unittests/load_functions/no_script_file/main.py deleted file mode 100644 index d0e930835..000000000 --- a/tests/unittests/load_functions/no_script_file/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License.
- - -def main(req) -> str: - return __name__ diff --git a/tests/unittests/load_functions/outside_main_code_in_init/__init__.py b/tests/unittests/load_functions/outside_main_code_in_init/__init__.py deleted file mode 100644 index d1046410c..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_init/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -from .count import get_invoke_count, invoke, reset_count - -invoke() - - -def main(req): - count = get_invoke_count() - reset_count() - return f'executed count = {count}' diff --git a/tests/unittests/load_functions/outside_main_code_in_init/count.py b/tests/unittests/load_functions/outside_main_code_in_init/count.py deleted file mode 100644 index 4183ebd4d..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_init/count.py +++ /dev/null @@ -1,19 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -_INVOCATION_COUNT: int = 0 - - -def invoke(): - global _INVOCATION_COUNT - _INVOCATION_COUNT += 1 - - -def get_invoke_count() -> int: - global _INVOCATION_COUNT - return _INVOCATION_COUNT - - -def reset_count(): - global _INVOCATION_COUNT - _INVOCATION_COUNT = 0 diff --git a/tests/unittests/load_functions/outside_main_code_in_init/function.json b/tests/unittests/load_functions/outside_main_code_in_init/function.json deleted file mode 100644 index 7239e0fcc..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_init/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/load_functions/outside_main_code_in_main/count.py b/tests/unittests/load_functions/outside_main_code_in_main/count.py deleted file mode 100644 index 4183ebd4d..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_main/count.py +++ /dev/null @@ -1,19 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -_INVOCATION_COUNT: int = 0 - - -def invoke(): - global _INVOCATION_COUNT - _INVOCATION_COUNT += 1 - - -def get_invoke_count() -> int: - global _INVOCATION_COUNT - return _INVOCATION_COUNT - - -def reset_count(): - global _INVOCATION_COUNT - _INVOCATION_COUNT = 0 diff --git a/tests/unittests/load_functions/outside_main_code_in_main/function.json b/tests/unittests/load_functions/outside_main_code_in_main/function.json deleted file mode 100644 index 96d44a67a..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_main/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/load_functions/outside_main_code_in_main/main.py b/tests/unittests/load_functions/outside_main_code_in_main/main.py deleted file mode 100644 index d1046410c..000000000 --- a/tests/unittests/load_functions/outside_main_code_in_main/main.py +++ /dev/null @@ -1,12 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -from .count import get_invoke_count, invoke, reset_count 
- -invoke() - - -def main(req): - count = get_invoke_count() - reset_count() - return f'executed count = {count}' diff --git a/tests/unittests/load_functions/parentmodule/function.json b/tests/unittests/load_functions/parentmodule/function.json deleted file mode 100644 index 12ff3cc01..000000000 --- a/tests/unittests/load_functions/parentmodule/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "sub_module/main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/parentmodule/module.py b/tests/unittests/load_functions/parentmodule/module.py deleted file mode 100644 index 0cf4eb5be..000000000 --- a/tests/unittests/load_functions/parentmodule/module.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -MODULE_NAME = 'PARENTMODULE' diff --git a/tests/unittests/load_functions/parentmodule/sub_module/__init__.py b/tests/unittests/load_functions/parentmodule/sub_module/__init__.py deleted file mode 100644 index 5b7f7a925..000000000 --- a/tests/unittests/load_functions/parentmodule/sub_module/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. diff --git a/tests/unittests/load_functions/parentmodule/sub_module/main.py b/tests/unittests/load_functions/parentmodule/sub_module/main.py deleted file mode 100644 index 46afb8d76..000000000 --- a/tests/unittests/load_functions/parentmodule/sub_module/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from .. import module - - -def main(req) -> str: - return module.__name__ diff --git a/tests/unittests/load_functions/pytest/__init__.py b/tests/unittests/load_functions/pytest/__init__.py deleted file mode 100644 index ab7de2090..000000000 --- a/tests/unittests/load_functions/pytest/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -"""This module pytest is provided inside customer's code, -used for checking module name collision""" -__version__ = 'from.customer.code' diff --git a/tests/unittests/load_functions/relimport/function.json b/tests/unittests/load_functions/relimport/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/load_functions/relimport/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/relimport/main.py b/tests/unittests/load_functions/relimport/main.py deleted file mode 100644 index a83fa4696..000000000 --- a/tests/unittests/load_functions/relimport/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from . import relative - - -def main(req) -> str: - return relative.__name__ diff --git a/tests/unittests/load_functions/relimport/relative.py b/tests/unittests/load_functions/relimport/relative.py deleted file mode 100644 index 5b7f7a925..000000000 --- a/tests/unittests/load_functions/relimport/relative.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. diff --git a/tests/unittests/load_functions/simple/function.json b/tests/unittests/load_functions/simple/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/load_functions/simple/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/simple/main.py b/tests/unittests/load_functions/simple/main.py deleted file mode 100644 index d0e930835..000000000 --- a/tests/unittests/load_functions/simple/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - - -def main(req) -> str: - return __name__ diff --git a/tests/unittests/load_functions/stub_http_trigger/__init__.py b/tests/unittests/load_functions/stub_http_trigger/__init__.py deleted file mode 100644 index bccd1fe4a..000000000 --- a/tests/unittests/load_functions/stub_http_trigger/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - - -def main(req): - return 'OK' diff --git a/tests/unittests/load_functions/stub_http_trigger/function.json b/tests/unittests/load_functions/stub_http_trigger/function.json deleted file mode 100644 index 7239e0fcc..000000000 --- a/tests/unittests/load_functions/stub_http_trigger/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} \ No newline at end of file diff --git a/tests/unittests/load_functions/stub_http_trigger/stub_tools.py b/tests/unittests/load_functions/stub_http_trigger/stub_tools.py deleted file mode 100644 index 3be88dd3a..000000000 --- a/tests/unittests/load_functions/stub_http_trigger/stub_tools.py +++ /dev/null @@ -1,4 +0,0 @@ -# This function app is to ensure the code outside main() function -# should only get loaded once in __init__.py - -FOO = 'BAR' diff --git a/tests/unittests/load_functions/subdir/function.json b/tests/unittests/load_functions/subdir/function.json deleted file mode 100644 index 245cdb9c8..000000000 --- a/tests/unittests/load_functions/subdir/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "sub/main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/subdir/sub/main.py b/tests/unittests/load_functions/subdir/sub/main.py deleted file mode 100644 index d0e930835..000000000 --- a/tests/unittests/load_functions/subdir/sub/main.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- - -def main(req) -> str: - return __name__ diff --git a/tests/unittests/load_functions/submodule/function.json b/tests/unittests/load_functions/submodule/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/load_functions/submodule/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/load_functions/submodule/main.py b/tests/unittests/load_functions/submodule/main.py deleted file mode 100644 index 0dc4ecb60..000000000 --- a/tests/unittests/load_functions/submodule/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from .sub_module import module - - -def main(req) -> str: - return module.__name__ diff --git a/tests/unittests/load_functions/submodule/sub_module/__init__.py b/tests/unittests/load_functions/submodule/sub_module/__init__.py deleted file mode 100644 index 5b7f7a925..000000000 --- a/tests/unittests/load_functions/submodule/sub_module/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. diff --git a/tests/unittests/load_functions/submodule/sub_module/module.py b/tests/unittests/load_functions/submodule/sub_module/module.py deleted file mode 100644 index bd01f6df8..000000000 --- a/tests/unittests/load_functions/submodule/sub_module/module.py +++ /dev/null @@ -1,3 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -MODULE_NAME = 'SUB_MODULE' diff --git a/tests/unittests/log_filtering_functions/debug_logging/function.json b/tests/unittests/log_filtering_functions/debug_logging/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/log_filtering_functions/debug_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/log_filtering_functions/debug_logging/main.py b/tests/unittests/log_filtering_functions/debug_logging/main.py deleted file mode 100644 index be3e2d506..000000000 --- a/tests/unittests/log_filtering_functions/debug_logging/main.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import logging - -import azure.functions - - -def main(req: azure.functions.HttpRequest): - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - return 'OK-debug' diff --git a/tests/unittests/log_filtering_functions/debug_user_logging/function.json b/tests/unittests/log_filtering_functions/debug_user_logging/function.json deleted file mode 100644 index 5d4d8285f..000000000 --- a/tests/unittests/log_filtering_functions/debug_user_logging/function.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] -} diff --git a/tests/unittests/log_filtering_functions/debug_user_logging/main.py b/tests/unittests/log_filtering_functions/debug_user_logging/main.py deleted file mode 100644 index 1f669b236..000000000 --- a/tests/unittests/log_filtering_functions/debug_user_logging/main.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions - -logger = logging.getLogger('my function') - - -def main(req: azure.functions.HttpRequest): - logger.info('logging info', exc_info=True) - logger.warning('logging warning', exc_info=True) - logger.debug('logging debug', exc_info=True) - logger.error('logging error', exc_info=True) - return 'OK-user-debug' diff --git a/tests/unittests/log_filtering_functions/sdk_logging/__init__.py b/tests/unittests/log_filtering_functions/sdk_logging/__init__.py deleted file mode 100644 index b45da85a5..000000000 --- a/tests/unittests/log_filtering_functions/sdk_logging/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import logging - -import azure.functions as func - -sdk_logger = logging.getLogger('azure.functions') - - -def main(req: func.HttpRequest): - sdk_logger.info('sdk_logger info') - sdk_logger.warning('sdk_logger warning') - sdk_logger.debug('sdk_logger debug') - sdk_logger.error('sdk_logger error', exc_info=True) - return 'OK-sdk-logger' diff --git a/tests/unittests/log_filtering_functions/sdk_logging/function.json b/tests/unittests/log_filtering_functions/sdk_logging/function.json deleted file mode 100644 index 9f7c2ae61..000000000 --- a/tests/unittests/log_filtering_functions/sdk_logging/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "main" -} diff --git a/tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py b/tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py deleted file mode 100644 index 5950d0432..000000000 --- a/tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import logging - -import azure.functions as func - -sdk_submodule_logger = logging.getLogger('azure.functions.submodule') - - -def main(req: func.HttpRequest): - sdk_submodule_logger.info('sdk_submodule_logger info') - sdk_submodule_logger.warning('sdk_submodule_logger warning') - sdk_submodule_logger.debug('sdk_submodule_logger debug') - sdk_submodule_logger.error('sdk_submodule_logger error', exc_info=True) - return 'OK-sdk-submodule-logging' diff --git a/tests/unittests/log_filtering_functions/sdk_submodule_logging/function.json b/tests/unittests/log_filtering_functions/sdk_submodule_logging/function.json deleted file mode 100644 index 9f7c2ae61..000000000 --- a/tests/unittests/log_filtering_functions/sdk_submodule_logging/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "__init__.py", - "bindings": [ - { - "type": "httpTrigger", - "direction": "in", - "name": "req" - }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ], - "entryPoint": "main" -} diff --git a/tests/unittests/path_import/path_import.py b/tests/unittests/path_import/path_import.py deleted file mode 100644 index bc22de775..000000000 --- a/tests/unittests/path_import/path_import.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import os -import shutil -import sys - -from tests.utils import testutils - -from azure_functions_worker import protos - - -async def verify_path_imports(): - test_env = {} - request = protos.FunctionEnvironmentReloadRequest( - environment_variables=test_env) - - request_msg = protos.StreamingMessage( - request_id='0', - function_environment_reload_request=request) - - disp = testutils.create_dummy_dispatcher() - - test_path = 'test_module_dir' - test_mod_path = os.path.join(test_path, 'test_module.py') - - os.mkdir(test_path) - with open(test_mod_path, 'w') as f: - f.write('CONSTANT = "This module was imported!"') - - if (sys.argv[1] == 'success'): - await disp._handle__function_environment_reload_request(request_msg) - - try: - import test_module - print(test_module.CONSTANT) - finally: - # Cleanup - shutil.rmtree(test_path) - - -if __name__ == '__main__': - loop = asyncio.get_event_loop() - loop.run_until_complete(verify_path_imports()) - loop.close() diff --git a/tests/unittests/path_import/test_path_import.sh b/tests/unittests/path_import/test_path_import.sh deleted file mode 100644 index 881b4de02..000000000 --- a/tests/unittests/path_import/test_path_import.sh +++ /dev/null @@ -1,9 +0,0 @@ -#! /bin/bash - -# $2 is sys.path from caller -export PYTHONPATH="test_module_dir:$2" -SCRIPT_DIR="$(dirname $0)" - -python $SCRIPT_DIR/path_import.py $1 - -unset PYTHONPATH \ No newline at end of file diff --git a/tests/unittests/resources/customer_deps_path/azure/__init__.py b/tests/unittests/resources/customer_deps_path/azure/__init__.py deleted file mode 100644 index 649cbaa5f..000000000 --- a/tests/unittests/resources/customer_deps_path/azure/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
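path_import.py above checks that a module directory placed on the import path (via the PYTHONPATH export in test_path_import.sh) becomes importable after the worker's environment-reload request. The underlying mechanism is ordinary sys.path resolution; a self-contained sketch using a throwaway temp directory, with no worker involved:

```python
import os
import shutil
import sys
import tempfile

# Create a throwaway module directory and put it at the front of
# sys.path -- the same effect the shell script achieves by exporting
# PYTHONPATH before launching Python.
scratch = tempfile.mkdtemp()
with open(os.path.join(scratch, "test_module.py"), "w") as f:
    f.write('CONSTANT = "This module was imported!"\n')

sys.path.insert(0, scratch)
try:
    import test_module
    print(test_module.CONSTANT)  # -> This module was imported!
finally:
    sys.path.remove(scratch)
    shutil.rmtree(scratch)
```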
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tests/unittests/resources/customer_deps_path/azure/functions/__init__.py b/tests/unittests/resources/customer_deps_path/azure/functions/__init__.py
deleted file mode 100644
index e5e3779b1..000000000
--- a/tests/unittests/resources/customer_deps_path/azure/functions/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__: str = 'customer'
-
-import os
-
-# ./tests/unittests/resources/customer_deps_path/common_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/customer_deps_path/common_module/__init__.py b/tests/unittests/resources/customer_deps_path/common_module/__init__.py
deleted file mode 100644
index e5e3779b1..000000000
--- a/tests/unittests/resources/customer_deps_path/common_module/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__: str = 'customer'
-
-import os
-
-# ./tests/unittests/resources/customer_deps_path/common_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/customer_deps_path/common_namespace/__init__.py b/tests/unittests/resources/customer_deps_path/common_namespace/__init__.py
deleted file mode 100644
index 649cbaa5f..000000000
--- a/tests/unittests/resources/customer_deps_path/common_namespace/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tests/unittests/resources/customer_deps_path/common_namespace/nested_module/__init__.py b/tests/unittests/resources/customer_deps_path/common_namespace/nested_module/__init__.py
deleted file mode 100644
index 07afaec18..000000000
--- a/tests/unittests/resources/customer_deps_path/common_namespace/nested_module/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__: str = 'customer'
-
-import os
-
-# ./tests/unittests/resources/customer_deps_path/common_namespace/nested_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/customer_deps_path/readme.md b/tests/unittests/resources/customer_deps_path/readme.md
deleted file mode 100644
index fc7905c7f..000000000
--- a/tests/unittests/resources/customer_deps_path/readme.md
+++ /dev/null
@@ -1,9 +0,0 @@
-This folder contains a common_module among the customer dependencies.
-
-It is used for testing import behavior against worker_deps_path.
-
-Add this folder to sys.path and import common_module; printing
-common_module.__version__ shows which copy of the module was loaded.
-
-To check that the namespace is reloaded properly, print
-common_namespace.nested_module.__version__ to see which namespace was loaded.
diff --git a/tests/unittests/resources/customer_func_path/HttpTrigger/__init__.py b/tests/unittests/resources/customer_func_path/HttpTrigger/__init__.py
deleted file mode 100644
index d5d57e3eb..000000000
--- a/tests/unittests/resources/customer_func_path/HttpTrigger/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
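As the customer_deps_path readme above explains, the worker_deps_path and customer_deps_path resource folders each ship a common_module reporting a different __version__, and sys.path order decides which one an import binds to. A sketch of that probe, assuming it is run from the repository root while both resource folders still exist:

```python
import sys

# Paths as they existed before this change; both folders provide a
# package named 'common_module' with a different __version__.
worker_deps = "tests/unittests/resources/worker_deps_path"
customer_deps = "tests/unittests/resources/customer_deps_path"

# Whichever directory sits earlier on sys.path shadows the other.
sys.path.insert(0, worker_deps)
sys.path.insert(0, customer_deps)

import common_module
print(common_module.__version__)  # -> 'customer'

# After reordering sys.path, the cached module must be evicted from
# sys.modules before the other copy becomes visible.
sys.path.remove(customer_deps)
del sys.modules["common_module"]

import common_module
print(common_module.__version__)  # -> 'worker'
```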
-
-import os
-
-import azure.functions as func  # NoQA
-
-
-def main():
-    return os.path.abspath(os.path.dirname(func.__file__))
diff --git a/tests/unittests/resources/customer_func_path/HttpTrigger/function.json b/tests/unittests/resources/customer_func_path/HttpTrigger/function.json
deleted file mode 100644
index 4667f0aca..000000000
--- a/tests/unittests/resources/customer_func_path/HttpTrigger/function.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "scriptFile": "__init__.py",
-  "bindings": [
-    {
-      "authLevel": "anonymous",
-      "type": "httpTrigger",
-      "direction": "in",
-      "name": "req",
-      "methods": [
-        "get",
-        "post"
-      ]
-    },
-    {
-      "type": "http",
-      "direction": "out",
-      "name": "$return"
-    }
-  ]
-}
\ No newline at end of file
diff --git a/tests/unittests/resources/customer_func_path/common_module/__init__.py b/tests/unittests/resources/customer_func_path/common_module/__init__.py
deleted file mode 100644
index 0784c9b6f..000000000
--- a/tests/unittests/resources/customer_func_path/common_module/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import os
-
-FUNCTION_APP = "function_app"
-__version__: str = FUNCTION_APP
-
-# This module should be shadowed from customer_deps_path/common_module
-# ./tests/unittests/resources/customer_func_path/common_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/customer_func_path/func_specific_module/__init__.py b/tests/unittests/resources/customer_func_path/func_specific_module/__init__.py
deleted file mode 100644
index ed832cbbc..000000000
--- a/tests/unittests/resources/customer_func_path/func_specific_module/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import os
-
-FUNCTION_APP = "function_app"
-__version__: str = FUNCTION_APP
-
-# ./tests/unittests/resources/customer_func_path/func_specific_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/customer_func_path/host.json b/tests/unittests/resources/customer_func_path/host.json
deleted file mode 100644
index 05291ed43..000000000
--- a/tests/unittests/resources/customer_func_path/host.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-  "version": "2.0",
-  "logging": {
-    "applicationInsights": {
-      "samplingSettings": {
-        "isEnabled": true,
-        "excludedTypes": "Request"
-      }
-    }
-  },
-  "extensionBundle": {
-    "id": "Microsoft.Azure.Functions.ExtensionBundle",
-    "version": "[1.*, 2.0.0)"
-  }
-}
\ No newline at end of file
diff --git a/tests/unittests/resources/customer_func_path/requirements.txt b/tests/unittests/resources/customer_func_path/requirements.txt
deleted file mode 100644
index f86a15a6a..000000000
--- a/tests/unittests/resources/customer_func_path/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-azure-functions
\ No newline at end of file
diff --git a/tests/unittests/resources/functions.png b/tests/unittests/resources/functions.png
deleted file mode 100644
index 42ea4bbf5..000000000
Binary files a/tests/unittests/resources/functions.png and /dev/null differ
diff --git a/tests/unittests/resources/mock_azure_functions/azure/__init__.py b/tests/unittests/resources/mock_azure_functions/azure/__init__.py
deleted file mode 100644
index 649cbaa5f..000000000
--- a/tests/unittests/resources/mock_azure_functions/azure/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tests/unittests/resources/mock_azure_functions/azure/functions/__init__.py b/tests/unittests/resources/mock_azure_functions/azure/functions/__init__.py
deleted file mode 100644
index 9f561659c..000000000
--- a/tests/unittests/resources/mock_azure_functions/azure/functions/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__ = "dummy"
diff --git a/tests/unittests/resources/mock_azure_functions/readme.md b/tests/unittests/resources/mock_azure_functions/readme.md
deleted file mode 100644
index c40015fb4..000000000
--- a/tests/unittests/resources/mock_azure_functions/readme.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Instruction
-
-This is a dummy azure.functions SDK used for testing backward compatibility
\ No newline at end of file
diff --git a/tests/unittests/resources/worker_deps_path/azure/__init__.py b/tests/unittests/resources/worker_deps_path/azure/__init__.py
deleted file mode 100644
index 649cbaa5f..000000000
--- a/tests/unittests/resources/worker_deps_path/azure/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tests/unittests/resources/worker_deps_path/azure/functions/__init__.py b/tests/unittests/resources/worker_deps_path/azure/functions/__init__.py
deleted file mode 100644
index abdc27afc..000000000
--- a/tests/unittests/resources/worker_deps_path/azure/functions/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__: str = 'worker'
-
-import os
-
-# ./tests/unittests/resources/worker_deps_path/common_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/worker_deps_path/common_module/__init__.py b/tests/unittests/resources/worker_deps_path/common_module/__init__.py
deleted file mode 100644
index abdc27afc..000000000
--- a/tests/unittests/resources/worker_deps_path/common_module/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__version__: str = 'worker'
-
-import os
-
-# ./tests/unittests/resources/worker_deps_path/common_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/worker_deps_path/common_namespace/__init__.py b/tests/unittests/resources/worker_deps_path/common_namespace/__init__.py
deleted file mode 100644
index 649cbaa5f..000000000
--- a/tests/unittests/resources/worker_deps_path/common_namespace/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tests/unittests/resources/worker_deps_path/common_namespace/nested_module/__init__.py b/tests/unittests/resources/worker_deps_path/common_namespace/nested_module/__init__.py
deleted file mode 100644
index 3211c9534..000000000
--- a/tests/unittests/resources/worker_deps_path/common_namespace/nested_module/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
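The one-line `__path__ = __import__('pkgutil').extend_path(__path__, __name__)` bodies repeated above make `azure` and `common_namespace` old-style namespace packages: pkgutil.extend_path appends every same-named directory found on sys.path to the package's __path__, so azure.functions can resolve from one root while sibling subpackages resolve from another. A runnable illustration with two hypothetical roots built in temp directories:

```python
import os
import sys
import tempfile

NS_INIT = "__path__ = __import__('pkgutil').extend_path(__path__, __name__)\n"

# Two independent sys.path roots, each holding half of the same 'ns'
# namespace package (mirroring worker_deps_path vs customer_deps_path).
for sub in ("alpha", "beta"):
    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "ns")
    os.makedirs(os.path.join(pkg, sub))
    with open(os.path.join(pkg, "__init__.py"), "w") as f:
        f.write(NS_INIT)
    with open(os.path.join(pkg, sub, "__init__.py"), "w") as f:
        f.write(f"WHO = {sub!r}\n")
    sys.path.insert(0, root)

# extend_path stitches both directories into ns.__path__, so each
# submodule resolves even though they live under different roots.
from ns import alpha, beta
print(alpha.WHO, beta.WHO)  # -> alpha beta
```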
-
-__version__: str = 'worker'
-
-import os
-
-# ./tests/unittests/resources/worker_deps_path/common_namespace/nested_module
-package_location: str = os.path.dirname(__file__)
diff --git a/tests/unittests/resources/worker_deps_path/readme.md b/tests/unittests/resources/worker_deps_path/readme.md
deleted file mode 100644
index a8e3154f9..000000000
--- a/tests/unittests/resources/worker_deps_path/readme.md
+++ /dev/null
@@ -1,9 +0,0 @@
-This folder contains a common_module among the worker dependencies.
-
-It is used for testing import behavior against customer_deps_path.
-
-Add this folder to sys.path and import common_module; printing
-common_module.__version__ shows which copy of the module was loaded.
-
-To check that the namespace is reloaded properly, print
-common_namespace.nested_module.__version__ to see which namespace was loaded.
diff --git a/tests/extension_tests/http_v2_tests/http_functions_v2/fastapi/function_app.py b/tests/unittests/streaming_function/function_app.py
similarity index 100%
rename from tests/extension_tests/http_v2_tests/http_functions_v2/fastapi/function_app.py
rename to tests/unittests/streaming_function/function_app.py
diff --git a/tests/unittests/test-binding/foo/__init__.py b/tests/unittests/test-binding/foo/__init__.py
deleted file mode 100644
index 5b7f7a925..000000000
--- a/tests/unittests/test-binding/foo/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
diff --git a/tests/unittests/test-binding/foo/binding.py b/tests/unittests/test-binding/foo/binding.py
deleted file mode 100644
index 68b0d80af..000000000
--- a/tests/unittests/test-binding/foo/binding.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-from azure.functions import meta
-
-
-class Binding(meta.InConverter, meta.OutConverter,
-              binding='fooType'):
-    pass
diff --git a/tests/unittests/test-binding/functions/foo/function.json b/tests/unittests/test-binding/functions/foo/function.json
deleted file mode 100644
index fb00b1207..000000000
--- a/tests/unittests/test-binding/functions/foo/function.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "scriptFile": "main.py",
-
-    "bindings": [
-        {
-            "type": "fooType",
-            "direction": "in",
-            "name": "req"
-        }
-    ]
-}
diff --git a/tests/unittests/test-binding/functions/foo/main.py b/tests/unittests/test-binding/functions/foo/main.py
deleted file mode 100644
index 160eb0bd8..000000000
--- a/tests/unittests/test-binding/functions/foo/main.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-
-
-def main(req):
-    pass
diff --git a/tests/unittests/test-binding/setup.py b/tests/unittests/test-binding/setup.py
deleted file mode 100644
index e0e248693..000000000
--- a/tests/unittests/test-binding/setup.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-from setuptools import setup
-
-setup(
-    name='foo-binding',
-    version='1.0',
-    packages=['foo'],
-    entry_points={
-        'azure.functions.bindings': [
-            'foo=foo.binding:Binding',
-        ]
-    },
-)
diff --git a/tests/unittests/test_app_setting_manager.py b/tests/unittests/test_app_setting_manager.py
deleted file mode 100644
index d203704f9..000000000
--- a/tests/unittests/test_app_setting_manager.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. -import collections as col -import os -from unittest.mock import patch - -from tests.utils import testutils - -from azure_functions_worker.constants import ( - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_THREADPOOL_THREAD_COUNT, -) -from azure_functions_worker.utils.app_setting_manager import get_python_appsetting_state - -SysVersionInfo = col.namedtuple("VersionInfo", ["major", "minor", "micro", - "releaselevel", "serial"]) -DISPATCHER_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / 'dispatcher_functions' -DISPATCHER_STEIN_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'dispatcher_functions' / \ - 'dispatcher_functions_stein' -DISPATCHER_STEIN_INVALID_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'broken_functions' / \ - 'invalid_stein' - - -class TestDefaultAppSettingsLogs(testutils.AsyncTestCase): - """Tests for default app settings logs.""" - - @classmethod - def setUpClass(cls): - cls._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_FUNCTIONS_DIR) - os_environ = os.environ.copy() - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - cls._patch_environ.stop() - - async def test_initialize_worker_logging(self): - """Test if the dispatcher's log can be flushed out during worker - initialization - """ - async with self._ctrl as host: - r = await host.init_worker('3.0.12345') - self.assertTrue('App Settings state: ' in log for log in r.logs) - self.assertTrue('PYTHON_ENABLE_WORKER_EXTENSIONS: ' - in log for log in r.logs) - - def test_get_python_appsetting_state(self): - app_setting_state = get_python_appsetting_state() - expected_string = "PYTHON_ENABLE_WORKER_EXTENSIONS: " - self.assertIn(expected_string, app_setting_state) - - -class TestNonDefaultAppSettingsLogs(testutils.AsyncTestCase): - """Tests for non-default app settings logs.""" - - @classmethod - def setUpClass(cls): - cls._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_FUNCTIONS_DIR) - os_environ = os.environ.copy() - os_environ[PYTHON_THREADPOOL_THREAD_COUNT] = '20' - os_environ[PYTHON_ENABLE_DEBUG_LOGGING] = '1' - os_environ[PYTHON_ENABLE_INIT_INDEXING] = '1' - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - cls._patch_environ.stop() - - async def test_initialize_worker_logging(self): - """Test if the dispatcher's log can be flushed out during worker - initialization - """ - async with self._ctrl as host: - r = await host.init_worker('3.0.12345') - self.assertTrue('App Settings state: ' in log for log in r.logs) - self.assertTrue('PYTHON_THREADPOOL_THREAD_COUNT: ' - in log for log in r.logs) - self.assertTrue('PYTHON_ENABLE_DEBUG_LOGGING: ' - in log for log in r.logs) - self.assertTrue('PYTHON_ENABLE_INIT_INDEXING: ' - in log for log in r.logs) - - def test_get_python_appsetting_state(self): - app_setting_state = get_python_appsetting_state() - self.assertIn("PYTHON_THREADPOOL_THREAD_COUNT: 20 | ", - app_setting_state) - self.assertIn("PYTHON_ENABLE_DEBUG_LOGGING: 1 | ", app_setting_state) - self.assertIn("PYTHON_ENABLE_WORKER_EXTENSIONS: ", app_setting_state) diff --git a/tests/unittests/test_broken_functions.py b/tests/unittests/test_broken_functions.py index 508122c92..53c73286d 100644 --- a/tests/unittests/test_broken_functions.py +++ b/tests/unittests/test_broken_functions.py @@ 
-2,7 +2,7 @@
 # Licensed under the MIT License.
 from tests.utils import testutils
 
-from azure_functions_worker import protos
+from tests import protos
 
 
 class TestMockHost(testutils.AsyncTestCase):
diff --git a/tests/unittests/test_code_quality.py b/tests/unittests/test_code_quality.py
index 54d1cc725..ee16bc370 100644
--- a/tests/unittests/test_code_quality.py
+++ b/tests/unittests/test_code_quality.py
@@ -17,7 +17,7 @@ def test_mypy(self):
         try:
             subprocess.run(
-                [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker'],
+                [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker_v2'],
                 check=True,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE,
@@ -29,7 +29,7 @@ def test_mypy(self):
                 from ex
             output = ex.output.decode()
             raise AssertionError(
-                f'mypy validation failed:\n{output}') from None
+                'mypy validation failed:\n%s' % output) from None
 
     def test_flake8(self):
         try:
@@ -51,4 +51,4 @@ def test_flake8(self):
         except subprocess.CalledProcessError as ex:
             output = ex.output.decode()
             raise AssertionError(
-                f'flake8 validation failed:\n{output}') from None
+                'flake8 validation failed:\n%s' % output) from None
diff --git a/tests/unittests/test_datumref.py b/tests/unittests/test_datumref.py
deleted file mode 100644
index 3db946467..000000000
--- a/tests/unittests/test_datumref.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import sys
-import unittest
-from http.cookies import SimpleCookie
-from unittest import skipIf
-
-from dateutil import parser
-from dateutil.parser import ParserError
-
-from azure_functions_worker import protos
-from azure_functions_worker.bindings.datumdef import (
-    Datum,
-    parse_cookie_attr_expires,
-    parse_cookie_attr_same_site,
-    parse_to_rpc_http_cookie_list,
-)
-from azure_functions_worker.bindings.nullable_converters import (
-    to_nullable_bool,
-    to_nullable_double,
-    to_nullable_string,
-    to_nullable_timestamp,
-)
-from azure_functions_worker.protos import RpcHttpCookie
-
-
-class TestDatumRef(unittest.TestCase):
-    def test_parse_cookie_attr_expires_none(self):
-        self.assertEqual(parse_cookie_attr_expires({"expires": None}), None)
-
-    def test_parse_cookie_attr_expires_zero_length(self):
-        self.assertEqual(parse_cookie_attr_expires({"expires": ""}), None)
-
-    def test_parse_cookie_attr_expires_valid(self):
-        self.assertEqual(parse_cookie_attr_expires(
-            {"expires": "Thu, 12-Jan-2017 13:55:08 GMT"}),
-            parser.parse("Thu, 12-Jan-2017 13:55:08 GMT"))
-
-    def test_parse_cookie_attr_expires_parse_error(self):
-        with self.assertRaises(ParserError):
-            parse_cookie_attr_expires(
-                {"expires": "Thu, 12-Jan-2017 13:550:08 GMT"})
-
-    def test_parse_cookie_attr_expires_overflow_error(self):
-        with self.assertRaises(OverflowError):
-            parse_cookie_attr_expires(
-                {"expires": "Thu, 12-Jan-9999999999999999 13:550:08 GMT"})
-
-    def test_parse_cookie_attr_same_site_default(self):
-        self.assertEqual(parse_cookie_attr_same_site(
-            {}),
-            getattr(protos.RpcHttpCookie.SameSite, "None"))
-
-    def test_parse_cookie_attr_same_site_lax(self):
-        self.assertEqual(parse_cookie_attr_same_site(
-            {'samesite': 'lax'}),
-            getattr(protos.RpcHttpCookie.SameSite, "Lax"))
-
-    def test_parse_cookie_attr_same_site_strict(self):
-        self.assertEqual(parse_cookie_attr_same_site(
-            {'samesite': 'strict'}),
-            getattr(protos.RpcHttpCookie.SameSite, "Strict"))
-
-    def test_parse_cookie_attr_same_site_explicit_none(self):
-        self.assertEqual(parse_cookie_attr_same_site(
-            {'samesite': 'none'}),
-            getattr(protos.RpcHttpCookie.SameSite, "ExplicitNone"))
-
-    def test_parse_to_rpc_http_cookie_list_none(self):
-
self.assertEqual(parse_to_rpc_http_cookie_list(None), None) - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_parse_to_rpc_http_cookie_list_valid(self): - headers = [ - 'foo=bar; Path=/some/path; Secure; HttpOnly; Domain=123; ' - 'SameSite=Lax; Max-Age=12345; Expires=Thu, 12-Jan-2017 13:55:08 ' - 'GMT;', - 'foo2=bar; Path=/some/path2; Secure; HttpOnly; Domain=123; ' - 'SameSite=Lax; Max-Age=12345; Expires=Thu, 12-Jan-2017 13:55:08 ' - 'GMT;'] - - cookies = SimpleCookie('\r\n'.join(headers)) - - cookie1 = RpcHttpCookie(name="foo", - value="bar", - domain=to_nullable_string("123", - "cookie.domain"), - path=to_nullable_string("/some/path", - "cookie.path"), - expires=to_nullable_timestamp( - parse_cookie_attr_expires( - { - "expires": "Thu, " - "12-Jan-2017 13:55:08" - " GMT"}), - 'cookie.expires'), - secure=to_nullable_bool( - bool("True"), - 'cookie.secure'), - http_only=to_nullable_bool( - bool("True"), - 'cookie.httpOnly'), - same_site=parse_cookie_attr_same_site( - {"samesite": "Lax"}), - max_age=to_nullable_double( - 12345, - 'cookie.maxAge')) - - cookie2 = RpcHttpCookie(name="foo2", - value="bar", - domain=to_nullable_string("123", - "cookie.domain"), - path=to_nullable_string("/some/path2", - "cookie.path"), - expires=to_nullable_timestamp( - parse_cookie_attr_expires( - { - "expires": "Thu, " - "12-Jan-2017 13:55:08" - " GMT"}), - 'cookie.expires'), - secure=to_nullable_bool( - bool("True"), - 'cookie.secure'), - http_only=to_nullable_bool( - bool("True"), - 'cookie.httpOnly'), - same_site=parse_cookie_attr_same_site( - {"samesite": "Lax"}), - max_age=to_nullable_double( - 12345, - 'cookie.maxAge')) - - rpc_cookies = parse_to_rpc_http_cookie_list([cookies]) - self.assertEqual(cookie1, rpc_cookies[0]) - self.assertEqual(cookie2, rpc_cookies[1]) - - def test_parse_to_rpc_http_cookie_list_no_cookie(self): - datum = Datum( - type='http', - value=dict( - status_code=None, - headers=None, - body=None, - ) - ) - - self.assertIsNone( - parse_to_rpc_http_cookie_list(datum.value.get('cookies'))) diff --git a/tests/unittests/test_dispatcher.py b/tests/unittests/test_dispatcher.py deleted file mode 100644 index ebaac2ced..000000000 --- a/tests/unittests/test_dispatcher.py +++ /dev/null @@ -1,1127 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
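The TestThreadPoolSettings classes in the deleted test_dispatcher.py below pin sys.version_info per Python version because the worker's default sync thread-pool size follows CPython's own ThreadPoolExecutor default, which on 3.8 through 3.12 is min(32, (os.cpu_count() or 1) + 4) when max_workers is None; that is why the 3.9+ variants mock os.cpu_count() to 2 and expect 6 default workers. A quick check of that arithmetic:

```python
import concurrent.futures
import os
from unittest.mock import patch

def default_workers() -> int:
    # ThreadPoolExecutor's default max_workers on CPython 3.8-3.12.
    return min(32, (os.cpu_count() or 1) + 4)

with patch("os.cpu_count", return_value=2):
    print(default_workers())  # -> 6, the value the mocked tests expect
    tp = concurrent.futures.ThreadPoolExecutor()
    # _max_workers is a private attribute, inspected here only to show
    # the executor computed the same default.
    print(tp._max_workers)    # -> 6
    tp.shutdown()
```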
-import asyncio -import collections as col -import contextvars -import os -import sys -import unittest -from typing import Optional, Tuple -from unittest.mock import patch - -from tests.utils import testutils -from tests.utils.testutils import UNIT_TESTS_ROOT - -from azure_functions_worker import protos -from azure_functions_worker.constants import ( - HTTP_URI, - METADATA_PROPERTIES_WORKER_INDEXED, - PYTHON_ENABLE_DEBUG_LOGGING, - PYTHON_ENABLE_INIT_INDEXING, - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT, - PYTHON_THREADPOOL_THREAD_COUNT_MAX_37, - PYTHON_THREADPOOL_THREAD_COUNT_MIN, - REQUIRES_ROUTE_PARAMETERS -) -from azure_functions_worker.dispatcher import Dispatcher, ContextEnabledTask -from azure_functions_worker.version import VERSION - -SysVersionInfo = col.namedtuple("VersionInfo", ["major", "minor", "micro", - "releaselevel", "serial"]) -DISPATCHER_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / 'dispatcher_functions' -DISPATCHER_STEIN_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'dispatcher_functions' / \ - 'dispatcher_functions_stein' -FUNCTION_APP_DIRECTORY = UNIT_TESTS_ROOT / 'dispatcher_functions' / \ - 'dispatcher_functions_stein' -HTTPV2_FUNCTION_APP_DIRECTORY = UNIT_TESTS_ROOT / 'dispatcher_functions' / \ - 'http_v2' / 'fastapi' - - -class TestThreadPoolSettingsPython37(testutils.AsyncTestCase): - """Base test class for testing thread pool settings for sync threadpool - worker count. This class specifically sets sys.version_info to return as - Python 3.7 and extended classes change this value and other platform - specific values to test the behavior across the different python versions. - - Ref: - NEW_TYPING = sys.version_info[:3] >= (3, 7, 0) # PEP 560 - """ - - def setUp(self, version=SysVersionInfo(3, 7, 0, 'final', 0)): - self._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_FUNCTIONS_DIR) - self._default_workers: Optional[ - int] = PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT - self._over_max_workers: int = 10000 - self._allowed_max_workers: int = PYTHON_THREADPOOL_THREAD_COUNT_MAX_37 - self._pre_env = dict(os.environ) - self.mock_version_info = patch( - 'azure_functions_worker.dispatcher.sys.version_info', - version) - self.mock_version_info.start() - - def tearDown(self): - os.environ.clear() - os.environ.update(self._pre_env) - self.mock_version_info.stop() - - async def test_dispatcher_initialize_worker(self): - """Test if the dispatcher can be initialized worker successfully - """ - async with self._ctrl as host: - r = await host.init_worker('3.0.12345') - self.assertIsInstance(r.response, protos.WorkerInitResponse) - self.assertIsInstance(r.response.worker_metadata, - protos.WorkerMetadata) - self.assertEqual(r.response.worker_metadata.runtime_name, - "python") - self.assertEqual(r.response.worker_metadata.worker_version, - VERSION) - - async def test_dispatcher_environment_reload(self): - """Test function environment reload response - """ - async with self._ctrl as host: - # Reload environment variable on specialization - r = await host.reload_environment(environment={}) - self.assertIsInstance(r.response, - protos.FunctionEnvironmentReloadResponse) - self.assertIsInstance(r.response.worker_metadata, - protos.WorkerMetadata) - self.assertEqual(r.response.worker_metadata.runtime_name, - "python") - self.assertEqual(r.response.worker_metadata.worker_version, - VERSION) - - async def test_dispatcher_initialize_worker_logging(self): - """Test if the dispatcher's log can be flushed out during worker - initialization - """ - async 
with self._ctrl as host: - r = await host.init_worker('3.0.12345') - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - 'Received WorkerInitRequest' - )]), - 1 - ) - - async def test_dispatcher_initialize_worker_settings_logs(self): - """Test if the dispatcher's log can be flushed out during worker - initialization - """ - async with self._ctrl as host: - r = await host.init_worker('3.0.12345') - self.assertTrue('PYTHON_ENABLE_WORKER_EXTENSIONS: ' - in log for log in r.logs) - - async def test_dispatcher_environment_reload_logging(self): - """Test if the sync threadpool will pick up app setting in placeholder - mode (Linux Consumption) - """ - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - r = await host.reload_environment(environment={}) - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - 'Received FunctionEnvironmentReloadRequest' - )]), - 1 - ) - - async def test_dispatcher_environment_reload_settings_logs(self): - """Test if the sync threadpool will pick up app setting in placeholder - mode (Linux Consumption) - """ - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - r = await host.reload_environment(environment={}) - self.assertTrue('PYTHON_ENABLE_WORKER_EXTENSIONS: ' - in log for log in r.logs) - - async def test_dispatcher_send_worker_request(self): - """Test if the worker status response will be sent correctly when - a worker status request is received - """ - async with self._ctrl as host: - r = await host.get_worker_status() - self.assertIsInstance(r.response, protos.WorkerStatusResponse) - - async def test_dispatcher_sync_threadpool_default_worker(self): - """Test if the sync threadpool has maximum worker count set the - correct default value - """ - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - await self._assert_workers_threadpool(self._ctrl, host, - self._default_workers) - - async def test_dispatcher_sync_threadpool_set_worker(self): - """Test if the sync threadpool maximum worker can be set - """ - # Configure thread pool max worker - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: - f'{self._allowed_max_workers}'}) - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - await self._assert_workers_threadpool(self._ctrl, host, - self._allowed_max_workers) - - async def test_dispatcher_sync_threadpool_invalid_worker_count(self): - """Test when sync threadpool maximum worker is set to an invalid value, - the host should fallback to default value - """ - # The @patch decorator does not work as expected and will suppress - # any assertion failures in the async test cases. 
- # Thus we're moving the patch() method to use the with syntax - - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - # Configure thread pool max worker to an invalid value - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: 'invalid'}) - - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - await self._assert_workers_threadpool(self._ctrl, host, - self._default_workers) - mock_logger.warning.assert_any_call( - '%s must be an integer', PYTHON_THREADPOOL_THREAD_COUNT) - - async def test_dispatcher_sync_threadpool_below_min_setting(self): - """Test if the sync threadpool will pick up default value when the - setting is below minimum - """ - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - # Configure thread pool max worker to an invalid value - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: '0'}) - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - await self._assert_workers_threadpool(self._ctrl, host, - self._default_workers) - mock_logger.warning.assert_any_call( - '%s must be set to a value between %s and sys.maxint. ' - 'Reverting to default value for max_workers', - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_THREADPOOL_THREAD_COUNT_MIN) - - async def test_dispatcher_sync_threadpool_exceed_max_setting(self): - """Test if the sync threadpool will pick up default max value when the - setting is above maximum - """ - with patch('azure_functions_worker.dispatcher.logger'): - # Configure thread pool max worker to an invalid value - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: - f'{self._over_max_workers}'}) - async with self._ctrl as host: - await host.init_worker('4.15.1') - await self._check_if_function_is_ok(host) - - # Ensure the dispatcher sync threadpool should fallback to max - await self._assert_workers_threadpool(self._ctrl, host, - self._allowed_max_workers) - - async def test_dispatcher_sync_threadpool_in_placeholder(self): - """Test if the sync threadpool will pick up app setting in placeholder - mode (Linux Consumption) - """ - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: f'{self._allowed_max_workers}' - }) - await self._assert_workers_threadpool(self._ctrl, host, - self._allowed_max_workers) - - async def test_dispatcher_sync_threadpool_in_placeholder_invalid(self): - """Test if the sync threadpool will use the default setting when the - app setting is invalid - """ - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: 'invalid' - }) - await self._assert_workers_threadpool(self._ctrl, host, - self._default_workers) - - # Check warning message - mock_logger.warning.assert_any_call( - '%s must be an integer', PYTHON_THREADPOOL_THREAD_COUNT) - - async def test_dispatcher_sync_threadpool_in_placeholder_above_max(self): - """Test if the sync threadpool will use the default max setting when - the app setting is above maximum. - - Note: This is designed for Linux Consumption. 
- """ - with patch('azure_functions_worker.dispatcher.logger'): - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: f'{self._over_max_workers}' - }) - await self._assert_workers_threadpool(self._ctrl, host, - self._allowed_max_workers) - - async def test_dispatcher_sync_threadpool_in_placeholder_below_min(self): - """Test if the sync threadpool will use the default setting when the - app setting is below minimum - """ - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.init_worker() - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: '0' - }) - - await self._assert_workers_threadpool(self._ctrl, host, - self._default_workers) - - mock_logger.warning.assert_any_call( - '%s must be set to a value between %s and sys.maxint. ' - 'Reverting to default value for max_workers', - PYTHON_THREADPOOL_THREAD_COUNT, - PYTHON_THREADPOOL_THREAD_COUNT_MIN) - - async def test_sync_invocation_request_log(self): - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.init_worker() - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: sync, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6}), ' - 'sync threadpool max workers: ' - f'{self._default_workers}' - ) - - async def test_async_invocation_request_log(self): - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.init_worker() - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_async_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: async, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6})' - ) - - async def test_sync_invocation_request_log_threads(self): - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: '5'}) - - async with self._ctrl as host: - await host.init_worker() - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: sync, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6}), ' - 'sync threadpool max workers: 5' - ) - - async def test_async_invocation_request_log_threads(self): - with patch('azure_functions_worker.dispatcher.logger') as 
mock_logger: - os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: '4'}) - - async with self._ctrl as host: - await host.init_worker() - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_async_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: async, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6})' - ) - - async def test_sync_invocation_request_log_in_placeholder_threads(self): - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: '5' - }) - - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: sync, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6}), ' - 'sync threadpool max workers: 5' - ) - - async def test_async_invocation_request_log_in_placeholder_threads(self): - with patch('azure_functions_worker.dispatcher.logger') as mock_logger: - async with self._ctrl as host: - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: '5' - }) - - request_id: str = self._ctrl._worker._request_id - func_id, invoke_id, func_name = ( - await self._check_if_async_function_is_ok(host) - ) - - logs, _ = mock_logger.info.call_args - self.assertRegex(logs[0], - 'Received FunctionInvocationRequest, ' - f'request ID: {request_id}, ' - f'function ID: {func_id}, ' - f'function name: {func_name}, ' - f'invocation ID: {invoke_id}, ' - 'function type: async, ' - r'timestamp \(UTC\): ' - r'(\d{4}-\d{2}-\d{2} ' - r'\d{2}:\d{2}:\d{2}.\d{6})' - ) - - async def _assert_workers_threadpool(self, ctrl, host, - expected_worker_count): - self.assertIsNotNone(ctrl._worker._sync_call_tp) - self.assertEqual(ctrl._worker.get_sync_tp_workers_set(), - expected_worker_count) - # Check if the dispatcher still function - await self._check_if_function_is_ok(host) - - async def _check_if_function_is_ok(self, host) -> Tuple[str, str, str]: - # Ensure the function can be properly loaded - function_name = "show_context" - func_id, load_r = await host.load_function(function_name) - self.assertEqual(load_r.response.function_id, func_id) - ex = load_r.response.result.exception - self.assertEqual(load_r.response.result.status, - protos.StatusResult.Success, msg=ex) - - # Ensure the function can be properly invoked - invoke_id, call_r = await host.invoke_function( - 'show_context', [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET' - ) - ) - ) - ]) - self.assertIsNotNone(invoke_id) - self.assertEqual(call_r.response.result.status, - protos.StatusResult.Success) - - return func_id, invoke_id, function_name - - async def _check_if_async_function_is_ok(self, host) -> Tuple[str, str]: - # Ensure the function can be properly loaded - function_name = "show_context_async" - func_id, load_r = await host.load_function('show_context_async') - 
self.assertEqual(load_r.response.function_id, func_id) - self.assertEqual(load_r.response.result.status, - protos.StatusResult.Success) - - # Ensure the function can be properly invoked - invoke_id, call_r = await host.invoke_function( - 'show_context_async', [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET' - ) - ) - ) - ]) - self.assertIsNotNone(invoke_id) - self.assertEqual(call_r.response.result.status, - protos.StatusResult.Success) - - return func_id, invoke_id, function_name - - -@unittest.skipIf(sys.version_info.minor != 8, - "Run the tests only for Python 3.8. In other platforms, " - "as the default passed is None, the cpu_count determines the " - "number of max_workers and we cannot mock the os.cpu_count() " - "in the concurrent.futures.ThreadPoolExecutor") -class TestThreadPoolSettingsPython38(TestThreadPoolSettingsPython37): - def setUp(self, version=SysVersionInfo(3, 8, 0, 'final', 0)): - super(TestThreadPoolSettingsPython38, self).setUp(version) - self._allowed_max_workers: int = self._over_max_workers - - def tearDown(self): - super(TestThreadPoolSettingsPython38, self).tearDown() - - async def test_dispatcher_sync_threadpool_in_placeholder_above_max(self): - """Test if the sync threadpool will use any value and there isn't any - artificial max value set. - """ - with patch('azure_functions_worker.dispatcher.logger'): - async with self._ctrl as host: - await self._check_if_function_is_ok(host) - - # Reload environment variable on specialization - await host.reload_environment(environment={ - PYTHON_THREADPOOL_THREAD_COUNT: f'{self._over_max_workers}' - }) - await self._assert_workers_threadpool(self._ctrl, host, - self._allowed_max_workers) - self.assertNotEqual( - self._ctrl._worker.get_sync_tp_workers_set(), - self._default_workers) - - -@unittest.skipIf(sys.version_info.minor != 9, - "Run the tests only for Python 3.9. In other platforms, " - "as the default passed is None, the cpu_count determines the " - "number of max_workers and we cannot mock the os.cpu_count() " - "in the concurrent.futures.ThreadPoolExecutor") -class TestThreadPoolSettingsPython39(TestThreadPoolSettingsPython37): - def setUp(self, version=SysVersionInfo(3, 9, 0, 'final', 0)): - super(TestThreadPoolSettingsPython39, self).setUp(version) - self.mock_os_cpu = patch( - 'os.cpu_count', return_value=2) - # 6 - based on 2 cores - min(32, (os.cpu_count() or 1) + 4) - 2 + 4 - self._default_workers: Optional[int] = 6 - self.mock_os_cpu.start() - self._allowed_max_workers: int = self._over_max_workers - - def tearDown(self): - self.mock_os_cpu.stop() - super(TestThreadPoolSettingsPython39, self).tearDown() - - -@unittest.skipIf(sys.version_info.minor != 10, - "Run the tests only for Python 3.10. 
In other platforms, " - "as the default passed is None, the cpu_count determines the " - "number of max_workers and we cannot mock the os.cpu_count() " - "in the concurrent.futures.ThreadPoolExecutor") -class TestThreadPoolSettingsPython310(TestThreadPoolSettingsPython37): - def setUp(self, version=SysVersionInfo(3, 10, 0, 'final', 0)): - super(TestThreadPoolSettingsPython310, self).setUp(version) - self._allowed_max_workers: int = self._over_max_workers - self.mock_os_cpu = patch( - 'os.cpu_count', return_value=2) - # 6 - based on 2 cores - min(32, (os.cpu_count() or 1) + 4) - 2 + 4 - self._default_workers: Optional[int] = 6 - self.mock_os_cpu.start() - self._allowed_max_workers: int = self._over_max_workers - - def tearDown(self): - self.mock_os_cpu.stop() - super(TestThreadPoolSettingsPython310, self).tearDown() - - -@unittest.skipIf(sys.version_info.minor != 11, - "Run the tests only for Python 3.11. In other platforms, " - "as the default passed is None, the cpu_count determines the " - "number of max_workers and we cannot mock the os.cpu_count() " - "in the concurrent.futures.ThreadPoolExecutor") -class TestThreadPoolSettingsPython311(TestThreadPoolSettingsPython37): - def setUp(self, version=SysVersionInfo(3, 11, 0, 'final', 0)): - super(TestThreadPoolSettingsPython311, self).setUp(version) - self._allowed_max_workers: int = self._over_max_workers - self.mock_os_cpu = patch( - 'os.cpu_count', return_value=2) - # 6 - based on 2 cores - min(32, (os.cpu_count() or 1) + 4) - 2 + 4 - self._default_workers: Optional[int] = 6 - self.mock_os_cpu.start() - self._allowed_max_workers: int = self._over_max_workers - - def tearDown(self): - self.mock_os_cpu.stop() - super(TestThreadPoolSettingsPython311, self).tearDown() - - -@unittest.skipIf(sys.version_info.minor != 12, - "Run the tests only for Python 3.12. 
In other platforms, " - "as the default passed is None, the cpu_count determines the " - "number of max_workers and we cannot mock the os.cpu_count() " - "in the concurrent.futures.ThreadPoolExecutor") -class TestThreadPoolSettingsPython312(TestThreadPoolSettingsPython37): - def setUp(self, version=SysVersionInfo(3, 12, 0, 'final', 0)): - super(TestThreadPoolSettingsPython312, self).setUp(version) - self._allowed_max_workers: int = self._over_max_workers - self.mock_os_cpu = patch( - 'os.cpu_count', return_value=2) - # 6 - based on 2 cores - min(32, (os.cpu_count() or 1) + 4) - 2 + 4 - self._default_workers: Optional[int] = 6 - self.mock_os_cpu.start() - self._allowed_max_workers: int = self._over_max_workers - - def tearDown(self): - self.mock_os_cpu.stop() - super(TestThreadPoolSettingsPython312, self).tearDown() - - -class TestDispatcherStein(testutils.AsyncTestCase): - - def setUp(self): - self._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_STEIN_FUNCTIONS_DIR) - - async def test_dispatcher_functions_metadata_request(self): - """Test if the functions metadata response will be sent correctly - when a functions metadata request is received - """ - async with self._ctrl as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertFalse(r.response.use_default_metadata_indexing) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - del sys.modules['function_app'] - - async def test_dispatcher_functions_metadata_request_with_retry(self): - """Test if the functions metadata response will be sent correctly - when a functions metadata request is received - """ - async with self._ctrl as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertFalse(r.response.use_default_metadata_indexing) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - del sys.modules['function_app'] - - -class TestDispatcherSteinLegacyFallback(testutils.AsyncTestCase): - - def setUp(self): - self._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_FUNCTIONS_DIR) - self._pre_env = dict(os.environ) - self.mock_version_info = patch( - 'azure_functions_worker.dispatcher.sys.version_info', - SysVersionInfo(3, 9, 0, 'final', 0)) - self.mock_version_info.start() - - def tearDown(self): - os.environ.clear() - os.environ.update(self._pre_env) - self.mock_version_info.stop() - - async def test_dispatcher_functions_metadata_request_legacy_fallback(self): - """Test if the functions metadata response will be sent correctly - when a functions metadata request is received - """ - async with self._ctrl as host: - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertTrue(r.response.use_default_metadata_indexing) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - -class TestDispatcherInitRequest(testutils.AsyncTestCase): - - def setUp(self): - self._ctrl = testutils.start_mockhost( - script_root=DISPATCHER_FUNCTIONS_DIR) - self._pre_env = dict(os.environ) - self.mock_version_info = patch( - 'azure_functions_worker.dispatcher.sys.version_info', - SysVersionInfo(3, 9, 0, 'final', 0)) - self.mock_version_info.start() - - def tearDown(self): - os.environ.clear() - os.environ.update(self._pre_env) - self.mock_version_info.stop() - - async def test_dispatcher_load_azfunc_in_init(self): 
- """Test if azure functions is loaded during init - """ - async with self._ctrl as host: - r = await host.init_worker() - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - 'Received WorkerInitRequest' - )]), - 1 - ) - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - "Received WorkerMetadataRequest from " - "_handle__worker_init_request" - )]), - 0 - ) - self.assertIn("azure.functions", sys.modules) - - async def test_dispatcher_indexing_in_init_request(self): - """Test if azure functions is loaded during init - """ - env = {PYTHON_ENABLE_INIT_INDEXING: "1", - PYTHON_ENABLE_DEBUG_LOGGING: "1"} - with patch.dict(os.environ, env): - async with self._ctrl as host: - r = await host.init_worker() - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - "Received WorkerInitRequest" - )]), - 1 - ) - - self.assertEqual( - len([log for log in r.logs if log.message.startswith( - "Received load metadata request from " - "worker_init_request" - )]), - 1 - ) - - async def test_dispatcher_load_modules_dedicated_app(self): - """Test modules are loaded in dedicated apps - """ - os.environ["PYTHON_ISOLATE_WORKER_DEPENDENCIES"] = "1" - - # Dedicated Apps where placeholder mode is not set - async with self._ctrl as host: - r = await host.init_worker() - logs = [log.message for log in r.logs] - self.assertIn( - "Applying prioritize_customer_dependencies: " - "worker_dependencies_path: , customer_dependencies_path: , " - "working_directory: , Linux Consumption: False," - " Placeholder: False", logs - ) - - async def test_dispatcher_load_modules_con_placeholder_enabled(self): - """Test modules are loaded in consumption apps with placeholder mode - enabled. - """ - # Consumption apps with placeholder mode enabled - os.environ["PYTHON_ISOLATE_WORKER_DEPENDENCIES"] = "1" - os.environ["CONTAINER_NAME"] = "test" - os.environ["WEBSITE_PLACEHOLDER_MODE"] = "1" - async with self._ctrl as host: - r = await host.init_worker() - logs = [log.message for log in r.logs] - self.assertNotIn( - "Applying prioritize_customer_dependencies: " - "worker_dependencies_path: , customer_dependencies_path: , " - "working_directory: , Linux Consumption: True,", logs) - - async def test_dispatcher_load_modules_con_app_placeholder_disabled(self): - """Test modules are loaded in consumption apps with placeholder mode - disabled. - """ - # Consumption apps with placeholder mode disabled i.e. 
worker - # is specialized - os.environ["PYTHON_ISOLATE_WORKER_DEPENDENCIES"] = "1" - os.environ["WEBSITE_PLACEHOLDER_MODE"] = "0" - os.environ["CONTAINER_NAME"] = "test" - async with self._ctrl as host: - r = await host.init_worker() - logs = [log.message for log in r.logs] - self.assertIn( - "Applying prioritize_customer_dependencies: " - "worker_dependencies_path: , customer_dependencies_path: , " - "working_directory: , Linux Consumption: True," - " Placeholder: False", logs) - - -class TestDispatcherIndexingInInit(unittest.TestCase): - - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - self.dispatcher = testutils.create_dummy_dispatcher() - sys.path.append(str(FUNCTION_APP_DIRECTORY)) - sys.path.append(str(HTTPV2_FUNCTION_APP_DIRECTORY)) - - def tearDown(self): - self.loop.close() - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - def test_worker_init_request_with_indexing_enabled(self): - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request)) - - self.assertIsNotNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - del sys.modules['function_app'] - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'false'}) - def test_worker_init_request_with_indexing_disabled(self): - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request)) - - self.assertIsNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - @patch.object(Dispatcher, 'index_functions') - def test_worker_init_request_with_indexing_exception(self, - mock_index_functions): - mock_index_functions.side_effect = Exception("Mocked Exception") - - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request)) - - self.assertIsNone(self.dispatcher._function_metadata_result) - self.assertIsNotNone(self.dispatcher._function_metadata_exception) - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - def test_functions_metadata_request_with_init_indexing_enabled(self): - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - metadata_request = protos.StreamingMessage( - functions_metadata_request=protos.FunctionsMetadataRequest( - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - metadata_response = self.loop.run_until_complete( - self.dispatcher._handle__functions_metadata_request( - metadata_request)) - - self.assertEqual( - metadata_response.function_metadata_response.result.status, - protos.StatusResult.Success) - 
self.assertIsNotNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - del sys.modules['function_app'] - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'false'}) - def test_functions_metadata_request_with_init_indexing_disabled(self): - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - metadata_request = protos.StreamingMessage( - functions_metadata_request=protos.FunctionsMetadataRequest( - function_app_directory=str(str(FUNCTION_APP_DIRECTORY)) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - self.assertIsNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - metadata_response = self.loop.run_until_complete( - self.dispatcher._handle__functions_metadata_request( - metadata_request)) - - self.assertEqual( - metadata_response.function_metadata_response.result.status, - protos.StatusResult.Success) - self.assertIsNotNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - del sys.modules['function_app'] - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - @patch.object(Dispatcher, 'index_functions') - def test_functions_metadata_request_with_indexing_exception( - self, - mock_index_functions): - mock_index_functions.side_effect = Exception("Mocked Exception") - - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - metadata_request = protos.StreamingMessage( - functions_metadata_request=protos.FunctionsMetadataRequest( - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request)) - - self.assertIsNone(self.dispatcher._function_metadata_result) - self.assertIsNotNone(self.dispatcher._function_metadata_exception) - - metadata_response = self.loop.run_until_complete( - self.dispatcher._handle__functions_metadata_request( - metadata_request)) - - self.assertEqual( - metadata_response.function_metadata_response.result.status, - protos.StatusResult.Failure) - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'false'}) - def test_dispatcher_indexing_in_load_request(self): - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertIsNone(self.dispatcher._function_metadata_result) - - load_request = protos.StreamingMessage( - function_load_request=protos.FunctionLoadRequest( - function_id="http_trigger", - metadata=protos.RpcFunctionMetadata( - directory=str(FUNCTION_APP_DIRECTORY), - properties={METADATA_PROPERTIES_WORKER_INDEXED: "True"} - ))) - - self.loop.run_until_complete( - self.dispatcher._handle__function_load_request(load_request)) - - self.assertIsNotNone(self.dispatcher._function_metadata_result) - self.assertIsNone(self.dispatcher._function_metadata_exception) - - del sys.modules['function_app'] - - @patch.dict(os.environ, 
{PYTHON_ENABLE_INIT_INDEXING: 'true'}) - @patch.object(Dispatcher, 'index_functions') - def test_dispatcher_indexing_in_load_request_with_exception( - self, - mock_index_functions): - # This is the case when the second worker has an exception in indexing. - # In this case, we save the error in _function_metadata_exception in - # the init request and throw the error when load request is called. - - mock_index_functions.side_effect = Exception("Mocked Exception") - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertIsNone(self.dispatcher._function_metadata_result) - - load_request = protos.StreamingMessage( - function_load_request=protos.FunctionLoadRequest( - function_id="http_trigger", - metadata=protos.RpcFunctionMetadata( - directory=str(FUNCTION_APP_DIRECTORY), - properties={METADATA_PROPERTIES_WORKER_INDEXED: "True"} - ))) - - response = self.loop.run_until_complete( - self.dispatcher._handle__function_load_request(load_request)) - - self.assertIsNotNone(self.dispatcher._function_metadata_exception) - self.assertEqual( - response.function_load_response.result.exception.message, - "Exception: Mocked Exception") - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - @patch("azure_functions_worker.http_v2.HttpV2Registry.http_v2_enabled", - return_value=True) - def test_dispatcher_http_v2_init_request_fail(self, mock_http_v2_enabled): - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(HTTPV2_FUNCTION_APP_DIRECTORY) - ) - ) - - resp = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request) - ) - - mock_http_v2_enabled.assert_called_once() - self.assertIsNotNone(self.dispatcher._function_metadata_exception) - - capabilities = resp.worker_init_response.capabilities - self.assertNotIn(HTTP_URI, capabilities) - self.assertNotIn(REQUIRES_ROUTE_PARAMETERS, capabilities) - - # Cleanup - del sys.modules['function_app'] - - @patch.dict(os.environ, {PYTHON_ENABLE_INIT_INDEXING: 'true'}) - @patch("azure_functions_worker.http_v2.HttpV2Registry.http_v2_enabled", - return_value=True) - @patch("azure_functions_worker.dispatcher.initialize_http_server", - return_value="http://localhost:8080") - @patch("azure_functions_worker.dispatcher.Dispatcher" - ".load_function_metadata") - def test_dispatcher_http_v2_init_request_pass(self, mock_http_v2_enabled, - mock_init_http_server, - mock_load_func_metadata): - request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(HTTPV2_FUNCTION_APP_DIRECTORY) - ) - ) - - resp = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(request) - ) - - mock_http_v2_enabled.assert_called_once() - mock_init_http_server.assert_called_once() - mock_load_func_metadata.assert_called_once() - self.assertIsNone(self.dispatcher._function_metadata_exception) - - capabilities = resp.worker_init_response.capabilities - self.assertIn(HTTP_URI, capabilities) - self.assertEqual(capabilities[HTTP_URI], "http://localhost:8080") - self.assertIn(REQUIRES_ROUTE_PARAMETERS, capabilities) - self.assertEqual(capabilities[REQUIRES_ROUTE_PARAMETERS], "true") - - -class TestContextEnabledTask(unittest.TestCase): - def setUp(self): - self.loop = 
asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - - def test_init_with_context(self): - # Since ContextEnabledTask accepts the context param, - # no errors will be thrown here - num = contextvars.ContextVar('num') - num.set(5) - ctx = contextvars.copy_context() - exception_raised = False - try: - self.loop.set_task_factory( - lambda loop, coro, context=None: ContextEnabledTask( - coro, loop=loop, context=ctx)) - except TypeError: - exception_raised = True - self.assertFalse(exception_raised) - - async def test_init_without_context(self): - # If the context param is not defined, - # no errors will be thrown for backwards compatibility - exception_raised = False - try: - self.loop.set_task_factory( - lambda loop, coro: ContextEnabledTask( - coro, loop=loop)) - except TypeError: - exception_raised = True - self.assertFalse(exception_raised) diff --git a/tests/unittests/test_enable_debug_logging_functions.py b/tests/unittests/test_enable_debug_logging_functions.py deleted file mode 100644 index c39e7b60e..000000000 --- a/tests/unittests/test_enable_debug_logging_functions.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os -import typing - -from tests.utils import testutils -from tests.utils.testutils import TESTS_ROOT, remove_path - -from azure_functions_worker.constants import PYTHON_ENABLE_DEBUG_LOGGING - -HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO = """\ -{ - "version": "2.0", - "logging": { - "logLevel": { - "default": "Information" - } - }, - "functionTimeout": "00:05:00" -} -""" - - -@testutils.retryable_test(4, 5) -class TestDebugLoggingEnabledFunctions(testutils.WebHostTestCase): - """ - Tests for cx debug logging enabled case. - """ - @classmethod - def setUpClass(cls): - os.environ["PYTHON_ENABLE_DEBUG_LOGGING"] = "1" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.pop(PYTHON_ENABLE_DEBUG_LOGGING) - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'log_filtering_functions' - - def test_debug_logging_enabled(self): - """ - Verify when cx debug logging is enabled, cx function debug logs - are recorded in host logs. - """ - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging_enabled(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging debug', host_out) - self.assertIn('logging error', host_out) - - -class TestDebugLoggingDisabledFunctions(testutils.WebHostTestCase): - """ - Tests for cx debug logging disabled case. - """ - @classmethod - def setUpClass(cls): - os.environ["PYTHON_ENABLE_DEBUG_LOGGING"] = "0" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.pop(PYTHON_ENABLE_DEBUG_LOGGING) - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'log_filtering_functions' - - def test_debug_logging_disabled(self): - """ - Verify when cx debug logging is disabled, cx function debug logs - are not written to host logs. 
- """ - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging_disabled(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - self.assertNotIn('logging debug', host_out) - - -class TestDebugLogEnabledHostFilteringFunctions(testutils.WebHostTestCase): - """ - Tests for enable debug logging flag enabled and host log level is - Information case. - """ - @classmethod - def setUpClass(cls): - host_json = TESTS_ROOT / cls.get_script_dir() / 'host.json' - - with open(host_json, 'w+') as f: - f.write(HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO) - - os.environ["PYTHON_ENABLE_DEBUG_LOGGING"] = "1" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - host_json = TESTS_ROOT / cls.get_script_dir() / 'host.json' - remove_path(host_json) - - os.environ.pop(PYTHON_ENABLE_DEBUG_LOGGING) - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'log_filtering_functions' - - def test_debug_logging_filtered(self): - """ - Verify when cx debug logging is enabled and host logging level - is Information, cx function debug logs are not written to host logs. - """ - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging_filtered(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertNotIn('logging debug', host_out) - self.assertIn('logging error', host_out) diff --git a/tests/unittests/test_extension.py b/tests/unittests/test_extension.py deleted file mode 100644 index 62569fefd..000000000 --- a/tests/unittests/test_extension.py +++ /dev/null @@ -1,864 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import asyncio -import importlib -import logging -import os -import pathlib -import sys -import unittest -from importlib import import_module -from unittest.mock import Mock, call, patch - -from azure_functions_worker.constants import ( - CUSTOMER_PACKAGES_PATH, - PYTHON_ENABLE_WORKER_EXTENSIONS, -) -from azure_functions_worker.extension import ( - APP_EXT_POST_FUNCTION_LOAD, - APP_EXT_POST_INVOCATION, - APP_EXT_PRE_INVOCATION, - FUNC_EXT_POST_FUNCTION_LOAD, - FUNC_EXT_POST_INVOCATION, - FUNC_EXT_PRE_INVOCATION, - ExtensionManager, -) -from azure_functions_worker.utils.common import get_sdk_from_sys_path - - -class MockContext: - def __init__(self, function_name: str, function_directory: str): - self.function_name = function_name - self.function_directory = function_directory - - -class TestExtension(unittest.TestCase): - - def setUp(self): - # Patch sys.modules and sys.path to avoid pollution between tests - self.mock_environ = patch.dict('os.environ', os.environ.copy()) - self.mock_sys_module = patch.dict('sys.modules', sys.modules.copy()) - self.mock_sys_path = patch('sys.path', sys.path.copy()) - self.mock_environ.start() - self.mock_sys_module.start() - self.mock_sys_path.start() - - # Initialize Extension Manager Instance - self._instance = ExtensionManager - self._instance._is_sdk_detected = False - self._instance._extension_enabled_sdk = None - - # Initialize Azure Functions SDK and clear cache - self._sdk = import_module('azure.functions') - self._sdk.ExtensionMeta._func_exts = {} - self._sdk.ExtensionMeta._app_exts = None - self._sdk.ExtensionMeta._info = {} - sys.modules.pop('azure.functions') - sys.modules.pop('azure') - - # Derived dummy SDK Python system path - self._dummy_sdk_sys_path = os.path.join( - os.path.dirname(__file__), - 'resources', - 'mock_azure_functions' - ) - self._dummy_sdk = Mock(__file__="test") - - # Initialize mock context - self._mock_arguments = {'req': 'request'} - self._mock_func_name = 'HttpTrigger' - self._mock_func_dir = '/home/site/wwwroot/HttpTrigger' - self._mock_context = MockContext( - function_name=self._mock_func_name, - function_directory=self._mock_func_dir - ) - - # Set feature flag to on - os.environ[PYTHON_ENABLE_WORKER_EXTENSIONS] = 'true' - - def tearDown(self) -> None: - os.environ.pop(PYTHON_ENABLE_WORKER_EXTENSIONS) - - self.mock_sys_path.stop() - self.mock_sys_module.stop() - self.mock_environ.stop() - - def test_extension_is_supported_by_latest_sdk(self): - """Test that the extension interface support check works as expected - on a new version of the azure.functions SDK - """ - module = get_sdk_from_sys_path() - sdk_enabled = self._instance._is_extension_enabled_in_sdk(module) - self.assertTrue(sdk_enabled) - - def test_extension_is_not_supported_by_mock_sdk(self): - """Test if the detection works when an azure.functions SDK does not - support extension management. - """ - sys.path.insert(0, self._dummy_sdk_sys_path) - module = get_sdk_from_sys_path() - sdk_enabled = self._instance._is_extension_enabled_in_sdk(module) - self.assertFalse(sdk_enabled) - - def test_extension_in_worker(self): - """Test if worker contains support for extensions - """ - sys.path.insert(0, pathlib.Path.home()) - module = importlib.import_module('azure.functions') - sdk_enabled = self._instance._is_extension_enabled_in_sdk(module) - self.assertTrue(sdk_enabled) - - def test_extension_if_sdk_not_in_path(self): - """Test that the azure.functions SDK is loaded from the customer - packages path when it is not already in sys.path.
- """ - - module = get_sdk_from_sys_path() - self.assertIn(CUSTOMER_PACKAGES_PATH, sys.path) - sdk_enabled = self._instance._is_extension_enabled_in_sdk(module) - self.assertTrue(sdk_enabled) - - @patch('azure_functions_worker.extension.get_sdk_from_sys_path', - return_value=importlib.import_module('azure.functions')) - def test_function_load_extension_enable_when_feature_flag_is_on( - self, - get_sdk_from_sys_path_mock: Mock - ): - """When turning off the feature flag PYTHON_ENABLE_WORKER_EXTENSIONS, - the post_function_load extension should be disabled - """ - self._instance.function_load_extension( - func_name=self._mock_func_name, - func_directory=self._mock_func_dir - ) - get_sdk_from_sys_path_mock.assert_called_once() - - @patch('azure_functions_worker.extension.get_sdk_from_sys_path') - def test_function_load_extension_disable_when_feature_flag_is_off( - self, - get_sdk_from_sys_path_mock: Mock - ): - """When turning off the feature flag PYTHON_ENABLE_WORKER_EXTENSIONS, - the post_function_load extension should be disabled - """ - os.environ[PYTHON_ENABLE_WORKER_EXTENSIONS] = 'false' - self._instance.function_load_extension( - func_name=self._mock_func_name, - func_directory=self._mock_func_dir - ) - get_sdk_from_sys_path_mock.assert_not_called() - - @patch('azure_functions_worker.extension.ExtensionManager.' - '_warn_sdk_not_support_extension') - def test_function_load_extension_warns_when_sdk_does_not_support( - self, - _warn_sdk_not_support_extension_mock: Mock - ): - """When customer is using an old version of sdk which does not have - extension support and turning on the feature flag, we should warn them - """ - sys.path.insert(0, self._dummy_sdk_sys_path) - self._instance.function_load_extension( - func_name=self._mock_func_name, - func_directory=self._mock_func_dir - ) - _warn_sdk_not_support_extension_mock.assert_called_once() - - @patch('azure_functions_worker.extension.ExtensionManager.' 
- '_safe_execute_function_load_hooks') - def test_function_load_extension_should_invoke_extension_call( - self, - safe_execute_function_load_hooks_mock: Mock - ): - """Should invoke extension if SDK supports extension interface - """ - self._instance.function_load_extension( - func_name=self._mock_func_name, - func_directory=self._mock_func_dir - ) - # No registered hooks - safe_execute_function_load_hooks_mock.assert_has_calls( - calls=[ - call( - None, APP_EXT_POST_FUNCTION_LOAD, - self._mock_func_name, self._mock_func_dir - ), - call( - None, FUNC_EXT_POST_FUNCTION_LOAD, - self._mock_func_name, self._mock_func_dir - ) - ], - any_order=True - ) - - @patch('azure_functions_worker.extension.get_sdk_from_sys_path', - return_value=importlib.import_module('azure.functions')) - def test_invocation_extension_enable_when_feature_flag_is_on( - self, - get_sdk_from_sys_path_mock: Mock - ): - """When the feature flag PYTHON_ENABLE_WORKER_EXTENSIONS is turned on, - the pre_invocation and post_invocation extensions should be enabled - """ - self._instance._invocation_extension( - ctx=self._mock_context, - hook_name=FUNC_EXT_PRE_INVOCATION, - func_args=[], - func_ret=None - ) - get_sdk_from_sys_path_mock.assert_called_once() - - @patch('azure_functions_worker.extension.get_sdk_from_sys_path') - def test_invocation_extension_extension_disable_when_feature_flag_is_off( - self, - get_sdk_from_sys_path_mock: Mock - ): - """When turning off the feature flag PYTHON_ENABLE_WORKER_EXTENSIONS, - the pre_invocation and post_invocation extension should be disabled - """ - os.environ[PYTHON_ENABLE_WORKER_EXTENSIONS] = 'false' - self._instance._invocation_extension( - ctx=self._mock_context, - hook_name=FUNC_EXT_PRE_INVOCATION, - func_args=[], - func_ret=None - ) - get_sdk_from_sys_path_mock.assert_not_called() - - @patch('azure_functions_worker.extension.ExtensionManager.' - '_warn_sdk_not_support_extension') - def test_invocation_extension_warns_when_sdk_does_not_support( - self, - _warn_sdk_not_support_extension_mock: Mock - ): - """When a customer is using an old version of the sdk which does not - have extension support and the feature flag is turned on, we should - warn them - """ - sys.path.insert(0, self._dummy_sdk_sys_path) - self._instance._invocation_extension( - ctx=self._mock_context, - hook_name=FUNC_EXT_PRE_INVOCATION, - func_args=[], - func_ret=None - ) - _warn_sdk_not_support_extension_mock.assert_called_once() - - @patch('azure_functions_worker.extension.ExtensionManager.' - '_safe_execute_invocation_hooks') - def test_invocation_extension_should_invoke_extension_call( - self, - safe_execute_invocation_hooks_mock: Mock - ): - """Should invoke extension if SDK supports extension interface - """ - for hook_name in (APP_EXT_PRE_INVOCATION, FUNC_EXT_PRE_INVOCATION, - APP_EXT_POST_INVOCATION, FUNC_EXT_POST_INVOCATION): - self._instance._invocation_extension( - ctx=self._mock_context, - hook_name=hook_name, - func_args=[], - func_ret=None - ) - - safe_execute_invocation_hooks_mock.assert_has_calls( - calls=[ - call( - None, hook_name, self._mock_context, - [], None - ) - ], - any_order=True - ) - - @patch('azure_functions_worker.extension.ExtensionManager.' - '_is_pre_invocation_hook') - @patch('azure_functions_worker.extension.ExtensionManager.'
- '_is_post_invocation_hook') - def test_empty_hooks_should_not_receive_any_invocation( - self, - _is_post_invocation_hook_mock: Mock, - _is_pre_invocation_hook_mock: Mock - ): - """If there are no life-cycle hooks implemented under a function, - then we should skip it - """ - for hook_name in (APP_EXT_PRE_INVOCATION, FUNC_EXT_PRE_INVOCATION, - APP_EXT_POST_INVOCATION, FUNC_EXT_POST_INVOCATION): - self._instance._safe_execute_invocation_hooks( - hooks=[], - hook_name=hook_name, - ctx=self._mock_context, - fargs=[], - fret=None - ) - _is_pre_invocation_hook_mock.assert_not_called() - _is_post_invocation_hook_mock.assert_not_called() - - def test_invocation_hooks_should_be_executed(self): - """If an extension implements the pre_invocation and - post_invocation life-cycle hooks, they should be invoked in - safe_execute_invocation_hooks - """ - FuncExtClass = self._generate_new_func_extension_class( - base=self._sdk.FuncExtensionBase, - trigger=self._mock_func_name - ) - func_ext_instance = FuncExtClass() - hook_instances = ( - self._sdk.ExtensionMeta.get_function_hooks(self._mock_func_name) - ) - for hook_name in (FUNC_EXT_PRE_INVOCATION, FUNC_EXT_POST_INVOCATION): - self._instance._safe_execute_invocation_hooks( - hooks=hook_instances, - hook_name=hook_name, - ctx=self._mock_context, - fargs=[], - fret=None - ) - self.assertFalse(func_ext_instance._post_function_load_executed) - self.assertTrue(func_ext_instance._pre_invocation_executed) - self.assertTrue(func_ext_instance._post_invocation_executed) - - def test_post_function_load_hook_should_be_executed(self): - """If an extension implements the post_function_load - life-cycle hook, it should be invoked in safe_execute_function_load_hooks - """ - FuncExtClass = self._generate_new_func_extension_class( - base=self._sdk.FuncExtensionBase, - trigger=self._mock_func_name - ) - func_ext_instance = FuncExtClass() - hook_instances = ( - self._sdk.ExtensionMeta.get_function_hooks(self._mock_func_name) - ) - for hook_name in (FUNC_EXT_POST_FUNCTION_LOAD,): - self._instance._safe_execute_function_load_hooks( - hooks=hook_instances, - hook_name=hook_name, - fname=self._mock_func_name, - fdir=self._mock_func_dir - ) - self.assertTrue(func_ext_instance._post_function_load_executed) - self.assertFalse(func_ext_instance._pre_invocation_executed) - self.assertFalse(func_ext_instance._post_invocation_executed) - - def test_invocation_hooks_app_level_should_be_executed(self): - """If an extension implements the pre_invocation and - post_invocation life-cycle hooks, they should be invoked in - safe_execute_invocation_hooks - """ - AppExtClass = self._generate_new_app_extension( - base=self._sdk.AppExtensionBase - ) - hook_instances = ( - self._sdk.ExtensionMeta.get_application_hooks() - ) - for hook_name in (APP_EXT_PRE_INVOCATION, APP_EXT_POST_INVOCATION): - self._instance._safe_execute_invocation_hooks( - hooks=hook_instances, - hook_name=hook_name, - ctx=self._mock_context, - fargs=[], - fret=None - ) - self.assertFalse(AppExtClass._post_function_load_app_level_executed) - self.assertTrue(AppExtClass._pre_invocation_app_level_executed) - self.assertTrue(AppExtClass._post_invocation_app_level_executed) - - def test_post_function_load_app_level_hook_should_be_executed(self): - """If an extension implements the post_function_load - life-cycle hook, it should be invoked in safe_execute_function_load_hooks - """ - AppExtClass = self._generate_new_app_extension( - base=self._sdk.AppExtensionBase - ) - hook_instances = ( -
self._sdk.ExtensionMeta.get_application_hooks() - ) - for hook_name in (APP_EXT_POST_FUNCTION_LOAD,): - self._instance._safe_execute_function_load_hooks( - hooks=hook_instances, - hook_name=hook_name, - fname=self._mock_func_name, - fdir=self._mock_func_dir - ) - self.assertTrue(AppExtClass._post_function_load_app_level_executed) - self.assertFalse(AppExtClass._pre_invocation_app_level_executed) - self.assertFalse(AppExtClass._post_invocation_app_level_executed) - - def test_raw_invocation_wrapper(self): - """This wrapper should automatically invoke all invocation extensions - """ - # Instantiate extensions - AppExtClass = self._generate_new_app_extension( - base=self._sdk.AppExtensionBase - ) - FuncExtClass = self._generate_new_func_extension_class( - base=self._sdk.FuncExtensionBase, - trigger=self._mock_func_name - ) - func_ext_instance = FuncExtClass() - - # Invoke with wrapper - self._instance._raw_invocation_wrapper( - self._mock_context, self._mock_function_main, self._mock_arguments - ) - - # Assert: invocation hooks should be executed - self.assertTrue(func_ext_instance._pre_invocation_executed) - self.assertTrue(func_ext_instance._post_invocation_executed) - self.assertTrue(AppExtClass._pre_invocation_app_level_executed) - self.assertTrue(AppExtClass._post_invocation_app_level_executed) - - # Assert: arguments should be passed into the extension - comparisons = ( - func_ext_instance._pre_invocation_executed_fargs, - func_ext_instance._post_invocation_executed_fargs, - AppExtClass._pre_invocation_app_level_executed_fargs, - AppExtClass._post_invocation_app_level_executed_fargs - ) - for current_argument in comparisons: - self.assertEqual(current_argument, self._mock_arguments) - - # Assert: returns should be passed into the extension - comparisons = ( - func_ext_instance._post_invocation_executed_fret, - AppExtClass._post_invocation_app_level_executed_fret - ) - for current_return in comparisons: - self.assertEqual(current_return, 'request_ok') - - @patch('azure_functions_worker.extension.logger.error') - def test_exception_handling_in_post_function_load_app_level( - self, - error_mock: Mock - ): - """When a link in the extension chain breaks, it should not - pause other executions. For post_function_load_app_level, because the - logger is not fully initialized, the exception will be suppressed.
- """ - # Create an customized exception - expt = Exception('Exception in post_function_load_app_level') - - # Register an application extension - class BadAppExtension(self._sdk.AppExtensionBase): - post_function_load_app_level_executed = False - - @classmethod - def post_function_load_app_level(cls, - function_name, - function_directory, - *args, - **kwargs): - cls.post_function_load_app_level_executed = True - raise expt - - # Execute function with a broken extension - hooks = self._sdk.ExtensionMeta.get_application_hooks() - self._instance._safe_execute_function_load_hooks( - hooks=hooks, - hook_name=APP_EXT_POST_FUNCTION_LOAD, - fname=self._mock_func_name, - fdir=self._mock_func_dir - ) - - # Ensure the extension is executed, but the exception shouldn't surface - self.assertTrue(BadAppExtension.post_function_load_app_level_executed) - - # Ensure errors are reported from system logger - error_mock.assert_called_with(expt, exc_info=True) - - def test_exception_handling_in_pre_invocation_app_level(self): - """When there's a chain breaks in the extension chain, it should not - pause other executions, but report with a system logger, so that the - error is accessible to customers and ours. - """ - # Create an customized exception - expt = Exception('Exception in pre_invocation_app_level') - - # Register an application extension - class BadAppExtension(self._sdk.AppExtensionBase): - @classmethod - def pre_invocation_app_level(cls, logger, context, func_args, - *args, **kwargs): - raise expt - - # Create a mocked customer_function - wrapped = self._instance.get_sync_invocation_wrapper( - self._mock_context, - self._mock_function_main - ) - - # Mock logger - ext_logger = logging.getLogger( - 'azure_functions_worker.extension.BadAppExtension' - ) - ext_logger_error_mock = Mock() - ext_logger.error = ext_logger_error_mock - - # Invocation with arguments. This will throw an exception, but should - # not break the execution chain. - result = wrapped(self._mock_arguments) - - # Ensure the customer's function is executed - self.assertEqual(result, 'request_ok') - - # Ensure the error is reported - ext_logger_error_mock.assert_called_with(expt, exc_info=True) - - def test_get_sync_invocation_wrapper_no_extension(self): - """The wrapper is using functools.partial() to expose the arguments - for synchronous execution in dispatcher. - """ - # Create a mocked customer_function - wrapped = self._instance.get_sync_invocation_wrapper( - self._mock_context, - self._mock_function_main - ) - - # Invocation with arguments - result = wrapped(self._mock_arguments) - - # Ensure the return value matches the function method - self.assertEqual(result, 'request_ok') - - def test_get_sync_invocation_wrapper_with_func_extension(self): - """The wrapper is using functools.partial() to expose the arguments. - Ensure the func extension can be executed along with customer's funcs. 
- """ - # Register a function extension - FuncExtClass = self._generate_new_func_extension_class( - self._sdk.FuncExtensionBase, - self._mock_func_name - ) - _func_ext_instance = FuncExtClass() - - # Create a mocked customer_function - wrapped = self._instance.get_sync_invocation_wrapper( - self._mock_context, - self._mock_function_main - ) - - # Invocation via wrapper with arguments - result = wrapped(self._mock_arguments) - - # Ensure the extension is executed - self.assertTrue(_func_ext_instance._pre_invocation_executed) - - # Ensure the customer's function is executed - self.assertEqual(result, 'request_ok') - - def test_get_sync_invocation_wrapper_disabled_with_flag(self): - """The wrapper should still exist, customer's functions should still - be executed, but not the extension - """ - # Turn off feature flag - os.environ[PYTHON_ENABLE_WORKER_EXTENSIONS] = 'false' - - # Register a function extension - FuncExtClass = self._generate_new_func_extension_class( - self._sdk.FuncExtensionBase, - self._mock_func_name - ) - _func_ext_instance = FuncExtClass() - - # Create a mocked customer_function - wrapped = self._instance.get_sync_invocation_wrapper( - self._mock_context, - self._mock_function_main - ) - - # Invocation via wrapper with arguments - result = wrapped(self._mock_arguments) - - # The extension SHOULD NOT be executed, since the feature flag is off - self.assertFalse(_func_ext_instance._pre_invocation_executed) - - # Ensure the customer's function is executed - self.assertEqual(result, 'request_ok') - - def test_get_async_invocation_wrapper_no_extension(self): - """The async wrapper will wrap an asynchronous function with a - coroutine interface. When there is no extension, it should only invoke - the customer's function. - """ - # Create a mocked customer_function with async wrapper - result = asyncio.run( - self._instance.get_async_invocation_wrapper( - self._mock_context, - self._mock_function_main_async, - self._mock_arguments - ) - ) - - # Ensure the return value matches the function method - self.assertEqual(result, 'request_ok') - - def test_get_async_invocation_wrapper_with_func_extension(self): - """The async wrapper will wrap an asynchronous function with a - coroutine interface. When there is registered extension, it should - execute the extension as well. - """ - # Register a function extension - FuncExtClass = self._generate_new_func_extension_class( - self._sdk.FuncExtensionBase, - self._mock_func_name - ) - _func_ext_instance = FuncExtClass() - - # Create a mocked customer_function with async wrapper - result = asyncio.run( - self._instance.get_async_invocation_wrapper( - self._mock_context, - self._mock_function_main_async, - self._mock_arguments - ) - ) - - # Ensure the extension is executed - self.assertTrue(_func_ext_instance._pre_invocation_executed) - - # Ensure the customer's function is executed - self.assertEqual(result, 'request_ok') - - def test_get_invocation_async_disabled_with_flag(self): - """The async wrapper will only execute customer's function. This - should not execute the extension. 
- """ - # Turn off feature flag - os.environ[PYTHON_ENABLE_WORKER_EXTENSIONS] = 'false' - - # Register a function extension - FuncExtClass = self._generate_new_func_extension_class( - self._sdk.FuncExtensionBase, - self._mock_func_name - ) - _func_ext_instance = FuncExtClass() - - # Create a mocked customer_function with async wrapper - result = asyncio.run( - self._instance.get_async_invocation_wrapper( - self._mock_context, - self._mock_function_main_async, - self._mock_arguments - ) - ) - - # The extension SHOULD NOT be executed - self.assertFalse(_func_ext_instance._pre_invocation_executed) - - # Ensure the customer's function is executed - self.assertEqual(result, 'request_ok') - - def test_is_pre_invocation_hook(self): - for name in (FUNC_EXT_PRE_INVOCATION, APP_EXT_PRE_INVOCATION): - self.assertTrue( - self._instance._is_pre_invocation_hook(name) - ) - - def test_is_pre_invocation_hook_negative(self): - for name in (FUNC_EXT_POST_INVOCATION, APP_EXT_POST_INVOCATION, - FUNC_EXT_POST_FUNCTION_LOAD, APP_EXT_POST_FUNCTION_LOAD): - self.assertFalse( - self._instance._is_pre_invocation_hook(name) - ) - - def test_is_post_invocation_hook(self): - for name in (FUNC_EXT_POST_INVOCATION, APP_EXT_POST_INVOCATION): - self.assertTrue( - self._instance._is_post_invocation_hook(name) - ) - - def test_is_post_invocation_hook_negative(self): - for name in (FUNC_EXT_PRE_INVOCATION, APP_EXT_PRE_INVOCATION, - FUNC_EXT_POST_FUNCTION_LOAD, APP_EXT_POST_FUNCTION_LOAD): - self.assertFalse( - self._instance._is_post_invocation_hook(name) - ) - - @patch('azure_functions_worker.extension.' - 'ExtensionManager._info_extension_is_enabled') - def test_try_get_sdk_with_extension_enabled_should_execute_once( - self, - info_extension_is_enabled_mock: Mock - ): - """The result of an extension enabled SDK should be cached. No need - to be derived multiple times. - """ - # Call twice the function - self._instance._try_get_sdk_with_extension_enabled() - sdk = self._instance._try_get_sdk_with_extension_enabled() - - # The actual execution will only process once (e.g. list extensions) - info_extension_is_enabled_mock.assert_called_once() - - # Ensure the SDK is returned correctly - self.assertIsNotNone(sdk) - - @patch('azure_functions_worker.extension.' - 'ExtensionManager._warn_sdk_not_support_extension') - def test_try_get_sdk_with_extension_disabled_should_execute_once( - self, - warn_sdk_not_support_extension_mock: Mock - ): - """When SDK does not support extension interface, it should return - None and throw a warning. - """ - # Point to dummy SDK - sys.path.insert(0, self._dummy_sdk_sys_path) - - # Call twice the function - self._instance._try_get_sdk_with_extension_enabled() - sdk = self._instance._try_get_sdk_with_extension_enabled() - - # The actual execution will only process once (e.g. warning) - warn_sdk_not_support_extension_mock.assert_called_once() - - # The SDK does not support Extension Interface, should be None - self.assertIsNone(sdk) - - @patch('azure_functions_worker.extension.logger.info') - def test_info_extension_is_enabled(self, info_mock: Mock): - # Get SDK from sys.path - sdk = get_sdk_from_sys_path() - - # Check logs - self._instance._info_extension_is_enabled(sdk) - info_mock.assert_called_once_with( - 'Python Worker Extension is enabled in azure.functions ' - '(%s). 
Sdk path: %s', sdk.__version__, sdk.__file__ - ) - - @patch('azure_functions_worker.extension.logger.info') - def test_info_discover_extension_list_func_ext(self, info_mock: Mock): - # Get SDK from sys.path - sdk = get_sdk_from_sys_path() - - # Register a function extension class - FuncExtClass = self._generate_new_func_extension_class( - sdk.FuncExtensionBase, - self._mock_func_name - ) - - # Instantiate a function extension - FuncExtClass() - - # Check logs - self._instance._info_discover_extension_list(self._mock_func_name, sdk) - info_mock.assert_called_once_with( - 'Python Worker Extension Manager is loading %s, ' - 'current registered extensions: %s', 'HttpTrigger', - '{"FuncExtension": {"HttpTrigger": ["NewFuncExtension"]}}' - ) - - @patch('azure_functions_worker.extension.logger.info') - def test_info_discover_extension_list_app_ext(self, info_mock: Mock): - # Get SDK from sys.path - sdk = get_sdk_from_sys_path() - - # Register an application extension class - self._generate_new_app_extension(sdk.AppExtensionBase) - - # Check logs - self._instance._info_discover_extension_list(self._mock_func_name, sdk) - info_mock.assert_called_once_with( - 'Python Worker Extension Manager is loading %s, current ' - 'registered extensions: %s', - 'HttpTrigger', '{"AppExtension": ["NewAppExtension"]}' - ) - - @patch('azure_functions_worker.extension.logger.warning') - def test_warn_sdk_not_support_extension(self, warning_mock: Mock): - # Get SDK from dummy - sys.path.insert(0, self._dummy_sdk_sys_path) - sdk = get_sdk_from_sys_path() - - # Check logs - self._instance._warn_sdk_not_support_extension(sdk) - warning_mock.assert_called_once_with( - 'The azure.functions (%s) does not support Python worker ' - 'extensions. If you believe extensions are correctly installed, ' - 'please set the %s and %s to "true"', - 'dummy', 'PYTHON_ISOLATE_WORKER_DEPENDENCIES', - 'PYTHON_ENABLE_WORKER_EXTENSIONS' - ) - - def _generate_new_func_extension_class(self, base: type, trigger: str): - class NewFuncExtension(base): - def __init__(self): - self._trigger_name = trigger - self._post_function_load_executed = False - self._pre_invocation_executed = False - self._post_invocation_executed = False - - self._pre_invocation_executed_fargs = {} - self._post_invocation_executed_fargs = {} - self._post_invocation_executed_fret = None - - def post_function_load(self, - function_name, - function_directory, - *args, - **kwargs): - self._post_function_load_executed = True - - def pre_invocation(self, logger, context, fargs, - *args, **kwargs): - self._pre_invocation_executed = True - self._pre_invocation_executed_fargs = fargs - - def post_invocation(self, logger, context, fargs, fret, - *args, **kwargs): - self._post_invocation_executed = True - self._post_invocation_executed_fargs = fargs - self._post_invocation_executed_fret = fret - - return NewFuncExtension - - def _generate_new_app_extension(self, base: type): - class NewAppExtension(base): - _init_executed = False - - _post_function_load_app_level_executed = False - _pre_invocation_app_level_executed = False - _post_invocation_app_level_executed = False - - _pre_invocation_app_level_executed_fargs = {} - _post_invocation_app_level_executed_fargs = {} - _post_invocation_app_level_executed_fret = None - - @classmethod - def init(cls): - cls._init_executed = True - - @classmethod - def post_function_load_app_level(cls, - function_name, - function_directory, - *args, - **kwargs): - cls._post_function_load_app_level_executed = True - - @classmethod - def
pre_invocation_app_level(cls, logger, context, func_args, - *args, **kwargs): - cls._pre_invocation_app_level_executed = True - cls._pre_invocation_app_level_executed_fargs = func_args - - @classmethod - def post_invocation_app_level(cls, logger, context, - func_args, func_ret, - *args, **kwargs): - cls._post_invocation_app_level_executed = True - cls._post_invocation_app_level_executed_fargs = func_args - cls._post_invocation_app_level_executed_fret = func_ret - - return NewAppExtension - - def _mock_function_main(self, req): - assert req == 'request' - return req + '_ok' - - async def _mock_function_main_async(self, req): - assert req == 'request' - return req + '_ok' diff --git a/tests/unittests/test_file_accessor.py b/tests/unittests/test_file_accessor.py deleted file mode 100644 index 97381229f..000000000 --- a/tests/unittests/test_file_accessor.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import os -import sys -import unittest -from unittest import skipIf - -from tests.utils import testutils - -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryException, -) - - -@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the ' - 'shared memory filesystems and thus skipping' - ' these tests for the time being') -class TestFileAccessor(testutils.SharedMemoryTestCase): - """ - Tests for FileAccessor. - """ - def test_create_and_delete_mem_map(self): - """ - Verify that memory maps can be created and deleted. - """ - for mem_map_size in [1, 10, 1024, 2 * 1024 * 1024, 10 * 1024 * 1024]: - mem_map_name = self.get_new_mem_map_name() - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - self.assertIsNotNone(mem_map) - delete_status = self.file_accessor.delete_mem_map(mem_map_name, - mem_map) - self.assertTrue(delete_status) - - def test_create_mem_map_invalid_inputs(self): - """ - Attempt to create memory maps with invalid inputs (size and name) and - verify that a SharedMemoryException is raised. - """ - mem_map_name = self.get_new_mem_map_name() - inv_mem_map_size = 0 - with self.assertRaisesRegex(SharedMemoryException, 'Invalid size'): - self.file_accessor.create_mem_map(mem_map_name, inv_mem_map_size) - inv_mem_map_name = None - mem_map_size = 1024 - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - self.file_accessor.create_mem_map(inv_mem_map_name, mem_map_size) - inv_mem_map_name = '' - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - self.file_accessor.create_mem_map(inv_mem_map_name, mem_map_size) - - def test_open_existing_mem_map(self): - """ - Verify that an existing memory map can be opened. - """ - mem_map_size = 1024 - mem_map_name = self.get_new_mem_map_name() - mem_map = self.file_accessor.create_mem_map(mem_map_name, mem_map_size) - o_mem_map = self.file_accessor.open_mem_map(mem_map_name, mem_map_size) - self.assertIsNotNone(o_mem_map) - o_mem_map.close() - delete_status = self.file_accessor.delete_mem_map(mem_map_name, mem_map) - self.assertTrue(delete_status) - - def test_open_mem_map_invalid_inputs(self): - """ - Attempt to open a memory map with invalid inputs (size and name) and - verify that a SharedMemoryException is raised.
- """ - mem_map_name = self.get_new_mem_map_name() - inv_mem_map_size = -1 - with self.assertRaisesRegex(SharedMemoryException, 'Invalid size'): - self.file_accessor.open_mem_map(mem_map_name, inv_mem_map_size) - inv_mem_map_name = None - mem_map_size = 1024 - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - self.file_accessor.open_mem_map(inv_mem_map_name, mem_map_size) - inv_mem_map_name = '' - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - self.file_accessor.open_mem_map(inv_mem_map_name, mem_map_size) - - @unittest.skipIf(os.name == 'nt', - 'Windows will create an mmap if one does not exist') - def test_open_deleted_mem_map(self): - """ - Attempt to open a deleted memory map and verify that it fails. - Note: Windows creates a new memory map if one does not exist when - opening a memory map, so we skip this test on Windows. - """ - mem_map_size = 1024 - mem_map_name = self.get_new_mem_map_name() - mem_map = self.file_accessor.create_mem_map(mem_map_name, mem_map_size) - o_mem_map = self.file_accessor.open_mem_map(mem_map_name, mem_map_size) - self.assertIsNotNone(o_mem_map) - o_mem_map.close() - delete_status = self.file_accessor.delete_mem_map(mem_map_name, mem_map) - self.assertTrue(delete_status) - d_mem_map = self.file_accessor.open_mem_map(mem_map_name, mem_map_size) - self.assertIsNone(d_mem_map) diff --git a/tests/unittests/test_file_accessor_factory.py b/tests/unittests/test_file_accessor_factory.py deleted file mode 100644 index 13f8fbada..000000000 --- a/tests/unittests/test_file_accessor_factory.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import os -import sys -import unittest -from unittest.mock import patch - -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - FileAccessorFactory, -) -from azure_functions_worker.bindings.shared_memory_data_transfer.file_accessor_unix import ( # NoQA - FileAccessorUnix, -) -from azure_functions_worker.bindings.shared_memory_data_transfer.file_accessor_windows import ( # NoQA - FileAccessorWindows, -) - - -class TestFileAccessorFactory(unittest.TestCase): - """ - Tests for FileAccessorFactory. - """ - def setUp(self): - env = os.environ.copy() - env['FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED'] = "true" - self.mock_environ = patch.dict('os.environ', env) - self.mock_environ.start() - - def tearDown(self): - self.mock_environ.stop() - - @unittest.skipIf(os.name != 'nt', - 'FileAccessorWindows is only valid on Windows') - def test_file_accessor_windows_created(self): - """ - Verify that FileAccessorWindows was created when running on Windows. - """ - file_accessor = FileAccessorFactory.create_file_accessor() - self.assertTrue(type(file_accessor) is FileAccessorWindows) - - @unittest.skipIf(os.name == 'nt' or sys.platform == 'darwin', - 'FileAccessorUnix is only valid on Unix') - def test_file_accessor_unix_created(self): - """ - Verify that FileAccessorUnix was created when running on Windows. - """ - file_accessor = FileAccessorFactory.create_file_accessor() - self.assertTrue(type(file_accessor) is FileAccessorUnix) diff --git a/tests/unittests/test_functions_registry.py b/tests/unittests/test_functions_registry.py deleted file mode 100644 index 4f28541d0..000000000 --- a/tests/unittests/test_functions_registry.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -from azure.functions import Function -from azure.functions.decorators.blob import BlobInput -from azure.functions.decorators.http import HttpTrigger -from tests.utils import testutils - -from azure_functions_worker import functions -from azure_functions_worker.functions import FunctionLoadError - - -class TestFunctionsRegistry(testutils.AsyncTestCase): - - def setUp(self): - def dummy(): - return "test" - - self.dummy = dummy - self.func = Function(self.dummy, "test.py") - self.function_registry = functions.Registry() - - async def test_add_indexed_function_invalid_direction(self): - # Ensures that azure-functions is loaded and BINDING_REGISTRY - # is not None - async with testutils.start_mockhost() as host: - await host.init_worker() - - trigger1 = HttpTrigger(name="req1", route="test") - binding = BlobInput(name="$return", path="testpath", - connection="testconnection") - self.func.add_trigger(trigger=trigger1) - self.func.add_binding(binding=binding) - - with self.assertRaises(FunctionLoadError) as ex: - self.function_registry.add_indexed_function(function=self.func) - - self.assertEqual(str(ex.exception), - 'cannot load the dummy function: \"$return\" ' - 'binding must have direction set to \"out\"') diff --git a/tests/unittests/test_handle_event.py b/tests/unittests/test_handle_event.py new file mode 100644 index 000000000..b5c766eba --- /dev/null +++ b/tests/unittests/test_handle_event.py @@ -0,0 +1,155 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import os +from typing import Any +from unittest.mock import patch + +from azure_functions_worker_v2.utils.constants import PYTHON_ENABLE_INIT_INDEXING +from azure_functions_worker_v2.handle_event import (worker_init_request, +                                                    functions_metadata_request, +                                                    function_environment_reload_request) +from tests.utils import testutils + +import tests.protos as protos + +BASIC_FUNCTION_DIRECTORY = "tests\\unittests\\basic_function" +STREAMING_FUNCTION_DIRECTORY = "tests\\unittests\\streaming_function" +INDEXING_EXCEPTION_FUNCTION_DIRECTORY = "tests\\unittests\\indexing_exception_function" + + +# This represents the top level protos request sent from the host +class WorkerRequest: + def __init__(self, name: str, request: Any, properties: dict): + self.name = name + self.request = request + self.properties = properties + + +# This represents the inner request +class Request: + def __init__(self, name: Any): + self.worker_init_request = name + self.function_environment_reload_request = name + + +# This represents the Function Init/Metadata/Load/Invocation request. +# function_app_directory defaults to None because the environment reload +# test constructs this request without a directory. +class FunctionRequest: + def __init__(self, capabilities: Any, function_app_directory: Any = None): + self.capabilities = capabilities + self.function_app_directory = function_app_directory + + +class TestHandleEvent(testutils.AsyncTestCase): + async def test_worker_init_request(self): + worker_request = WorkerRequest(name='worker_init_request', + request=Request(FunctionRequest( + 'hello', + BASIC_FUNCTION_DIRECTORY)), + properties={'host': '123', + 'protos': protos}) + result = await worker_init_request(worker_request) + self.assertEqual(result.capabilities, {'WorkerStatus': 'true', + 'RpcHttpBodyOnly': 'true', + 'SharedMemoryDataTransfer': 'true', + 'RpcHttpTriggerMetadataRemoved': 'true', + 'RawHttpBodyBytes': 'true', + 'TypedDataCollection': 'true'}) +
self.assertEqual(result.worker_metadata.runtime_name, "python") + self.assertIsNotNone(result.worker_metadata.runtime_version) + self.assertIsNotNone(result.worker_metadata.worker_version) + self.assertIsNotNone(result.worker_metadata.worker_bitness) + self.assertEqual(result.result.status, 1) + + async def test_worker_init_request_with_streaming(self): + worker_request = WorkerRequest(name='worker_init_request', + request=Request(FunctionRequest( + 'hello', + STREAMING_FUNCTION_DIRECTORY)), + properties={'host': '123', + 'protos': protos}) + result = await worker_init_request(worker_request) + self.assertNotEqual(result.capabilities, {'WorkerStatus': 'true', + 'RpcHttpBodyOnly': 'true', + 'SharedMemoryDataTransfer': 'true', + 'RpcHttpTriggerMetadataRemoved': 'true', + 'RawHttpBodyBytes': 'true', + 'TypedDataCollection': 'true'}) + self.assertEqual(result.worker_metadata.runtime_name, "python") + self.assertIsNotNone(result.worker_metadata.runtime_version) + self.assertIsNotNone(result.worker_metadata.worker_version) + self.assertIsNotNone(result.worker_metadata.worker_bitness) + self.assertEqual(result.result.status, 1) + + async def test_worker_init_request_with_exception(self): + # Even if an exception happens during indexing, + # we still return the WorkerInitResponse + worker_request = WorkerRequest(name='worker_init_request', + request=Request(FunctionRequest( + 'hello', + INDEXING_EXCEPTION_FUNCTION_DIRECTORY)), + properties={'host': '123', + 'protos': protos}) + result = await worker_init_request(worker_request) + self.assertEqual(result.capabilities, {'WorkerStatus': 'true', + 'RpcHttpBodyOnly': 'true', + 'SharedMemoryDataTransfer': 'true', + 'RpcHttpTriggerMetadataRemoved': 'true', + 'RawHttpBodyBytes': 'true', + 'TypedDataCollection': 'true'}) + self.assertEqual(result.worker_metadata.runtime_name, "python") + self.assertIsNotNone(result.worker_metadata.runtime_version) + self.assertIsNotNone(result.worker_metadata.worker_version) + self.assertIsNotNone(result.worker_metadata.worker_bitness) + self.assertEqual(result.result.status, 1) + + async def test_functions_metadata_request(self): + result = await self.run_init_then_meta() + self.assertEqual(result.use_default_metadata_indexing, False) + self.assertIsNotNone(result.function_metadata_results) + self.assertEqual(result.result.status, 1) + + async def run_init_then_meta(self): + worker_request = WorkerRequest(name='worker_init_request', + request=Request(FunctionRequest('hello', BASIC_FUNCTION_DIRECTORY)), + properties={'host': '123', + 'protos': protos}) + _ = await worker_init_request(worker_request) + result = await functions_metadata_request(worker_request) + return result + + def test_functions_metadata_request_with_exception(self): + pass + + def test_invocation_request_sync(self): + pass + + def test_invocation_request_async(self): + pass + + def test_invocation_request_with_exception(self): + pass + + async def test_function_environment_reload_request(self): + worker_request = WorkerRequest(name='function_environment_reload_request', + request=Request(FunctionRequest('hello')), + properties={'host': '123', + 'protos': protos}) + result = await function_environment_reload_request(worker_request) + self.assertEqual(result.capabilities, {}) + self.assertEqual(result.worker_metadata.runtime_name, "python") + self.assertIsNotNone(result.worker_metadata.runtime_version) + self.assertIsNotNone(result.worker_metadata.worker_version) + self.assertIsNotNone(result.worker_metadata.worker_bitness) + 
self.assertEqual(result.result.status, 1) + + def test_function_environment_reload_request_with_streaming(self): + pass + + def test_function_environment_reload_request_with_exception(self): + pass + + def test_load_function_metadata(self): + pass + + def test_index_functions(self): + pass diff --git a/tests/unittests/test_http_functions.py b/tests/unittests/test_http_functions.py deleted file mode 100644 index cfa46be57..000000000 --- a/tests/unittests/test_http_functions.py +++ /dev/null @@ -1,480 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import filecmp -import hashlib -import os -import pathlib -import sys -import typing -from unittest import skipIf - -from tests.utils import testutils - - -class TestHttpFunctions(testutils.WebHostTestCase): - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'http_functions' - - def test_return_str(self): - r = self.webhost.request('GET', 'return_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'Hello World!') - self.assertTrue(r.headers['content-type'].startswith('text/plain')) - - def test_return_out(self): - r = self.webhost.request('GET', 'return_out') - self.assertEqual(r.status_code, 201) - self.assertEqual(r.text, 'hello') - self.assertTrue(r.headers['content-type'].startswith('text/plain')) - - def test_return_bytes(self): - r = self.webhost.request('GET', 'return_bytes') - self.assertEqual(r.status_code, 500) - # https://github.com/Azure/azure-functions-host/issues/2706 - # self.assertRegex( - # r.text, r'.*unsupported type .*http.* for Python type .*bytes.*') - - def test_return_http_200(self): - r = self.webhost.request('GET', 'return_http') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.headers['content-type'], 'text/html; charset=utf-8') - - def test_return_http_no_body(self): - r = self.webhost.request('GET', 'return_http_no_body') - self.assertEqual(r.text, '') - self.assertEqual(r.status_code, 200) - - def test_return_http_auth_level_admin(self): - r = self.webhost.request('GET', 'return_http_auth_admin', - params={'code': 'testMasterKey'}) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.headers['content-type'], 'text/html; charset=utf-8') - - def test_return_http_404(self): - r = self.webhost.request('GET', 'return_http_404') - self.assertEqual(r.status_code, 404) - self.assertEqual(r.text, 'bye') - self.assertEqual(r.headers['content-type'], - 'text/plain; charset=utf-8') - - def test_return_http_redirect(self): - r = self.webhost.request('GET', 'return_http_redirect') - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.status_code, 200) - - r = self.webhost.request('GET', 'return_http_redirect', - allow_redirects=False) - self.assertEqual(r.status_code, 302) - - def test_no_return(self): - r = self.webhost.request('GET', 'no_return') - self.assertEqual(r.status_code, 204) - - def test_no_return_returns(self): - r = self.webhost.request('GET', 'no_return_returns') - self.assertEqual(r.status_code, 500) - # https://github.com/Azure/azure-functions-host/issues/2706 - # self.assertRegex(r.text, - # r'.*function .+no_return_returns.+ without a ' - # r'\$return binding returned a non-None value.*') - - def test_async_return_str(self): - r = self.webhost.request('GET', 'async_return_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'Hello Async World!') - - def test_async_logging(self): - # Test that logging doesn't *break* things. - r = self.webhost.request('GET', 'async_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-async') - - def check_log_async_logging(self, host_out: typing.List[str]): - # Host out only contains user logs - self.assertIn('hello info', host_out) - self.assertIn('and another error', host_out) - - def test_debug_logging(self): - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - self.assertNotIn('logging debug', host_out) - - def check_log_debug_with_user_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging debug', host_out) - self.assertIn('logging error', host_out) - - def test_sync_logging(self): - # Test that logging doesn't *break* things. 
- r = self.webhost.request('GET', 'sync_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-sync') - - def check_log_sync_logging(self, host_out: typing.List[str]): - # Host out only contains user logs - self.assertIn('a gracefully handled error', host_out) - - def test_return_context(self): - r = self.webhost.request('GET', 'return_context') - self.assertEqual(r.status_code, 200) - - data = r.json() - - self.assertEqual(data['method'], 'GET') - self.assertEqual(data['ctx_func_name'], 'return_context') - self.assertIn('ctx_invocation_id', data) - self.assertIn('ctx_trace_context_Tracestate', data) - self.assertIn('ctx_trace_context_Traceparent', data) - - def test_remapped_context(self): - r = self.webhost.request('GET', 'remapped_context') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'GET') - - def test_return_request(self): - r = self.webhost.request( - 'GET', 'return_request', - params={'a': 1, 'b': ':%)'}, - headers={'xxx': 'zzz', 'Max-Forwards': '10'}) - - self.assertEqual(r.status_code, 200) - - req = r.json() - - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['params'], {'a': '1', 'b': ':%)'}) - self.assertEqual(req['headers']['xxx'], 'zzz') - self.assertEqual(req['headers']['max-forwards'], '10') - - self.assertIn('return_request', req['url']) - - def test_post_return_request(self): - r = self.webhost.request( - 'POST', 'return_request', - params={'a': 1, 'b': ':%)'}, - headers={'xxx': 'zzz'}, - data={'key': 'value'}) - - self.assertEqual(r.status_code, 200) - - req = r.json() - - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['params'], {'a': '1', 'b': ':%)'}) - self.assertEqual(req['headers']['xxx'], 'zzz') - - self.assertIn('return_request', req['url']) - - self.assertEqual(req['get_body'], 'key=value') - - def test_post_json_request_is_untouched(self): - body = b'{"foo": "bar", "two": 4}' - body_hash = hashlib.sha256(body).hexdigest() - r = self.webhost.request( - 'POST', 'return_request', - headers={'Content-Type': 'application/json'}, - data=body) - - self.assertEqual(r.status_code, 200) - req = r.json() - self.assertEqual(req['body_hash'], body_hash) - - def test_accept_json(self): - r = self.webhost.request( - 'POST', 'accept_json', - json={'a': 'abc', 'd': 42}) - - req = r.json() - - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['get_json'], {'a': 'abc', 'd': 42}) - - self.assertIn('accept_json', req['url']) - - def test_unhandled_error(self): - r = self.webhost.request('GET', 'unhandled_error') - self.assertEqual(r.status_code, 500) - # https://github.com/Azure/azure-functions-host/issues/2706 - # self.assertIn('Exception: ZeroDivisionError', r.text) - - def check_log_unhandled_error(self, - host_out: typing.List[str]): - self.assertIn('Exception: ZeroDivisionError: division by zero', - host_out) - - def test_unhandled_urllib_error(self): - r = self.webhost.request( - 'GET', 'unhandled_urllib_error', - params={'img': 'http://example.com/nonexistent.jpg'}) - self.assertEqual(r.status_code, 500) - - def test_unhandled_unserializable_error(self): - r = self.webhost.request( - 'GET', 'unhandled_unserializable_error') - self.assertEqual(r.status_code, 500) - - def test_return_route_params(self): - r = self.webhost.request('GET', 'return_route_params/foo/bar') - self.assertEqual(r.status_code, 200) - resp = r.json() - self.assertEqual(resp, {'param1': 'foo', 'param2': 'bar'}) - - def test_raw_body_bytes(self): - parent_dir = pathlib.Path(__file__).parent - 
image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request('POST', 'raw_body_bytes', data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_image_png_content_type(self): - parent_dir = pathlib.Path(__file__).parent - image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request( - 'POST', 'raw_body_bytes', - headers={'Content-Type': 'image/png'}, - data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_application_octet_stream_content_type(self): - parent_dir = pathlib.Path(__file__).parent - image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request( - 'POST', 'raw_body_bytes', - headers={'Content-Type': 'application/octet-stream'}, - data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_user_event_loop_error(self): - # User event loop is not supported in HTTP trigger - r = self.webhost.request('GET', 'user_event_loop/') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-user-event-loop') - - def check_log_user_event_loop_error(self, host_out: typing.List[str]): - self.assertIn('try_log', host_out) - - def check_log_import_module_troubleshooting_url(self, - host_out: typing.List[str]): - passed = False - exception_message = "Exception: ModuleNotFoundError: "\ - "No module named 'does_not_exist'. "\ - "Cannot find module. "\ - "Please check the requirements.txt file for the "\ - "missing module. For more info, please refer the "\ - "troubleshooting guide: "\ - "https://aka.ms/functions-modulenotfound. 
"\ - "Current sys.path: " - for log in host_out: - if exception_message in log: - passed = True - self.assertTrue(passed) - - @testutils.retryable_test(3, 5) - def test_print_logging_no_flush(self): - r = self.webhost.request('GET', 'print_logging?message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - @testutils.retryable_test(3, 5) - def check_log_print_logging_no_flush(self, host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - @testutils.retryable_test(3, 5) - def test_print_logging_with_flush(self): - r = self.webhost.request('GET', - 'print_logging?flush=true&message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - @testutils.retryable_test(3, 5) - def check_log_print_logging_with_flush(self, host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - def test_print_to_console_stdout(self): - r = self.webhost.request('GET', - 'print_logging?console=true&message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_multiple_cookie_header_in_response(self): - r = self.webhost.request('GET', 'multiple_set_cookie_resp_headers') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers.get( - 'Set-Cookie'), - "foo3=42; expires=Thu, 12 Jan 2017 13:55:08 GMT; " - "max-age=10000000; domain=example.com; path=/; secure; httponly, " - "foo3=43; expires=Fri, 12 Jan 2018 13:55:08 GMT; " - "max-age=10000000; domain=example.com; path=/; secure; httponly") - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_set_cookie_header_in_response_empty_value(self): - r = self.webhost.request('GET', 'set_cookie_resp_header_empty') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers.get('Set-Cookie'), None) - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_set_cookie_header_in_response_default_value(self): - r = self.webhost.request('GET', - 'set_cookie_resp_header_default_values') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers.get('Set-Cookie'), - 'foo=bar; domain=; path=') - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_response_cookie_header_nullable_timestamp_err(self): - r = self.webhost.request( - 'GET', - 'response_cookie_header_nullable_timestamp_err') - self.assertEqual(r.status_code, 500) - - def check_log_response_cookie_header_nullable_timestamp_err(self, - host_out: - typing.List[ - str]): - self.assertIn( - "Can not parse value Dummy of expires in the cookie due to " - "invalid format.", - host_out) - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_response_cookie_header_nullable_bool_err(self): - r = self.webhost.request( - 'GET', - 'response_cookie_header_nullable_bool_err') - self.assertEqual(r.status_code, 200) - self.assertFalse("Set-Cookie" in r.headers) - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_response_cookie_header_nullable_double_err(self): - r = self.webhost.request( - 'GET', - 'response_cookie_header_nullable_double_err') - self.assertEqual(r.status_code, 200) - self.assertFalse("Set-Cookie" in r.headers) - - def check_log_print_to_console_stdout(self, host_out: typing.List[str]): - # System logs stdout should exist in host_out 
- self.assertIn('Secret42', host_out) - - @skipIf(sys.version_info < (3, 9, 0), - "Skip the tests for Python 3.8 and below") - def test_print_to_console_stderr(self): - r = self.webhost.request('GET', 'print_logging?console=true' - '&message=Secret42&is_stderr=true') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - def check_log_print_to_console_stderr(self, host_out: typing.List[str], ): - # System logs stderr should exist in host_out - self.assertIn('Secret42', host_out) - - def test_hijack_current_event_loop(self): - r = self.webhost.request('GET', 'hijack_current_event_loop/') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-hijack-current-event-loop') - - def check_log_hijack_current_event_loop(self, host_out: typing.List[str]): - # User logs should exist in host_out - self.assertIn('parallelly_print', host_out) - self.assertIn('parallelly_log_info at root logger', host_out) - self.assertIn('parallelly_log_warning at root logger', host_out) - self.assertIn('parallelly_log_error at root logger', host_out) - self.assertIn('parallelly_log_exception at root logger', host_out) - self.assertIn('parallelly_log_custom at custom_logger', host_out) - self.assertIn('callsoon_log', host_out) - - # System logs should exist in host_out - self.assertIn('parallelly_log_system at disguised_logger', host_out) - - @skipIf(sys.version_info.minor < 11, - "The context param is only available for 3.11+") - def test_create_task_with_context(self): - r = self.webhost.request('GET', 'create_task_with_context') - - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'Finished Hello World in 5' - ' | Finished Hello World in 10') - - def test_create_task_without_context(self): - r = self.webhost.request('GET', 'create_task_without_context') - - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'Finished Hello World in 5') - - -class TestHttpFunctionsStein(TestHttpFunctions): - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_stein' - - def test_no_return(self): - r = self.webhost.request('GET', 'no_return') - self.assertEqual(r.status_code, 500) - - def test_no_return_returns(self): - r = self.webhost.request('GET', 'no_return_returns') - self.assertEqual(r.status_code, 200) diff --git a/tests/unittests/test_http_functions_v2.py b/tests/unittests/test_http_functions_v2.py deleted file mode 100644 index b7e456671..000000000 --- a/tests/unittests/test_http_functions_v2.py +++ /dev/null @@ -1,472 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import filecmp -import hashlib -import os -import pathlib -import sys -import typing -import unittest -from unittest import skipIf -from unittest.mock import patch - -from tests.utils import testutils - -from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING - - -@unittest.skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7") -class TestHttpFunctionsV2FastApi(testutils.WebHostTestCase): - @classmethod - def setUpClass(cls): - cls._pre_env = dict(os.environ) - os_environ = os.environ.copy() - # Turn on feature flag - os_environ[PYTHON_ENABLE_INIT_INDEXING] = '1' - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - - super().setUpClass() - - @classmethod - def tearDownClass(cls): - os.environ.clear() - os.environ.update(cls._pre_env) - cls._patch_environ.stop() - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'http_functions' / \ - 'http_v2_functions' / \ - 'fastapi' - - def test_return_bytes(self): - r = self.webhost.request('GET', 'return_bytes') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.content, b'"Hello World"') - self.assertEqual(r.headers['content-type'], 'application/json') - - def test_return_http_200(self): - r = self.webhost.request('GET', 'return_http') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.headers['content-type'], 'text/html; charset=utf-8') - - def test_return_http_no_body(self): - r = self.webhost.request('GET', 'return_http_no_body') - self.assertEqual(r.text, '') - self.assertEqual(r.status_code, 200) - - def test_return_http_auth_level_admin(self): - r = self.webhost.request('GET', 'return_http_auth_admin', - params={'code': 'testMasterKey'}) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.headers['content-type'], 'text/html; charset=utf-8') - - def test_return_http_404(self): - r = self.webhost.request('GET', 'return_http_404') - self.assertEqual(r.status_code, 404) - self.assertEqual(r.text, 'bye') - - def test_return_http_redirect(self): - r = self.webhost.request('GET', 'return_http_redirect') - self.assertEqual(r.text, '
<h1>Hello World™</h1>
') - self.assertEqual(r.status_code, 200) - - r = self.webhost.request('GET', 'return_http_redirect', - allow_redirects=False) - self.assertEqual(r.status_code, 302) - - def test_async_return_str(self): - r = self.webhost.request('GET', 'async_return_str') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"Hello Async World!"') - - def test_async_logging(self): - # Test that logging doesn't *break* things. - r = self.webhost.request('GET', 'async_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-async"') - - def check_log_async_logging(self, host_out: typing.List[str]): - # Host out only contains user logs - self.assertIn('hello info', host_out) - self.assertIn('and another error', host_out) - - @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO") - def test_debug_logging(self): - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-debug"') - - def check_log_debug_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - self.assertNotIn('logging debug', host_out) - - @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO") - def test_debug_with_user_logging(self): - r = self.webhost.request('GET', 'debug_user_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-user-debug"') - - def check_log_debug_with_user_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging debug', host_out) - self.assertIn('logging error', host_out) - - def test_sync_logging(self): - # Test that logging doesn't *break* things. 
- r = self.webhost.request('GET', 'sync_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-sync"') - - def check_log_sync_logging(self, host_out: typing.List[str]): - # Host out only contains user logs - self.assertIn('a gracefully handled error', host_out) - - def test_return_context(self): - r = self.webhost.request('GET', 'return_context') - self.assertEqual(r.status_code, 200) - - data = r.json() - - self.assertEqual(data['method'], 'GET') - self.assertEqual(data['ctx_func_name'], 'return_context') - self.assertIn('ctx_invocation_id', data) - self.assertIn('ctx_trace_context_Tracestate', data) - self.assertIn('ctx_trace_context_Traceparent', data) - - def test_remapped_context(self): - r = self.webhost.request('GET', 'remapped_context') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"GET"') - - def test_return_request(self): - r = self.webhost.request( - 'GET', 'return_request', - params={'a': 1, 'b': ':%)'}, - headers={'xxx': 'zzz', 'Max-Forwards': '10'}) - - self.assertEqual(r.status_code, 200) - - req = r.json() - - self.assertEqual(req['method'], 'GET') - self.assertEqual(req['params'], {'a': '1', 'b': ':%)'}) - self.assertEqual(req['headers']['xxx'], 'zzz') - self.assertEqual(req['headers']['max-forwards'], '10') - - self.assertIn('return_request', req['url']) - - def test_post_return_request(self): - r = self.webhost.request( - 'POST', 'return_request', - params={'a': 1, 'b': ':%)'}, - headers={'xxx': 'zzz'}, - data={'key': 'value'}) - - self.assertEqual(r.status_code, 200) - - req = r.json() - - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['params'], {'a': '1', 'b': ':%)'}) - self.assertEqual(req['headers']['xxx'], 'zzz') - - self.assertIn('return_request', req['url']) - - self.assertEqual(req['body'], 'key=value') - - def test_post_json_request_is_untouched(self): - body = b'{"foo": "bar", "two": 4}' - body_hash = hashlib.sha256(body).hexdigest() - r = self.webhost.request( - 'POST', 'return_request', - headers={'Content-Type': 'application/json'}, - data=body) - - self.assertEqual(r.status_code, 200) - req = r.json() - self.assertEqual(req['body_hash'], body_hash) - - def test_accept_json(self): - r = self.webhost.request( - 'GET', 'accept_json', - json={'a': 'abc', 'd': 42}) - - self.assertEqual(r.status_code, 200) - r_json = r.json() - self.assertEqual(r_json, {'a': 'abc', 'd': 42}) - self.assertEqual(r.headers['content-type'], 'application/json') - - @testutils.retryable_test(3, 5) - def test_unhandled_error(self): - r = self.webhost.request('GET', 'unhandled_error') - self.assertEqual(r.status_code, 500) - # https://github.com/Azure/azure-functions-host/issues/2706 - # self.assertIn('Exception: ZeroDivisionError', r.text) - - def check_log_unhandled_error(self, - host_out: typing.List[str]): - error_substring = 'ZeroDivisionError: division by zero' - for item in host_out: - if error_substring in item: - break - else: - self.fail( - f"{error_substring}' not found in host log.") - - def test_unhandled_urllib_error(self): - r = self.webhost.request( - 'GET', 'unhandled_urllib_error', - params={'img': 'http://example.com/nonexistent.jpg'}) - self.assertEqual(r.status_code, 500) - - def test_unhandled_unserializable_error(self): - r = self.webhost.request( - 'GET', 'unhandled_unserializable_error') - self.assertEqual(r.status_code, 500) - - def test_return_route_params(self): - r = self.webhost.request('GET', 'return_route_params/foo/bar') - self.assertEqual(r.status_code, 200) - resp = 
r.json() - self.assertEqual(resp, {'param1': 'foo', 'param2': 'bar'}) - - def test_raw_body_bytes(self): - parent_dir = pathlib.Path(__file__).parent - image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request('POST', 'raw_body_bytes', data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_image_png_content_type(self): - parent_dir = pathlib.Path(__file__).parent - image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request( - 'POST', 'raw_body_bytes', - headers={'Content-Type': 'image/png'}, - data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_application_octet_stream_content_type(self): - parent_dir = pathlib.Path(__file__).parent - image_file = parent_dir / 'resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - img_len = len(img) - r = self.webhost.request( - 'POST', 'raw_body_bytes', - headers={'Content-Type': 'application/octet-stream'}, - data=img) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, img_len) - - body = r.content - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - def test_user_event_loop_error(self): - # User event loop is not supported in HTTP trigger - r = self.webhost.request('GET', 'user_event_loop/') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-user-event-loop"') - - def check_log_user_event_loop_error(self, host_out: typing.List[str]): - self.assertIn('try_log', host_out) - - def check_log_import_module_troubleshooting_url(self, - host_out: typing.List[str]): - passed = False - exception_message = "Exception: ModuleNotFoundError: "\ - "No module named 'does_not_exist'. "\ - "Cannot find module. "\ - "Please check the requirements.txt file for the "\ - "missing module. For more info, please refer the "\ - "troubleshooting guide: "\ - "https://aka.ms/functions-modulenotfound. 
"\ - "Current sys.path: " - for log in host_out: - if exception_message in log: - passed = True - self.assertTrue(passed) - - @testutils.retryable_test(3, 5) - def test_print_logging_no_flush(self): - r = self.webhost.request('GET', 'print_logging?message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-print-logging"') - - def check_log_print_logging_no_flush(self, host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - def test_print_logging_with_flush(self): - r = self.webhost.request('GET', - 'print_logging?flush=true&message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-print-logging"') - - def check_log_print_logging_with_flush(self, host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - def test_print_to_console_stdout(self): - r = self.webhost.request('GET', - 'print_logging?console=true&message=Secret42') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-print-logging"') - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_multiple_cookie_header_in_response(self): - r = self.webhost.request('GET', 'multiple_set_cookie_resp_headers') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers.get( - 'Set-Cookie'), - "foo3=42; Domain=example.com; expires=Thu, 12 Jan 2017 13:55:08" - " GMT; HttpOnly; Max-Age=10000000; Path=/; SameSite=Lax; Secure," - " foo3=43; Domain=example.com; expires=Fri, 12 Jan 2018 13:55:08" - " GMT; HttpOnly; Max-Age=10000000; Path=/; SameSite=Lax; Secure") - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_set_cookie_header_in_response_default_value(self): - r = self.webhost.request('GET', - 'set_cookie_resp_header_default_values') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers.get('Set-Cookie'), - 'foo3=42; Path=/; SameSite=lax') - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_response_cookie_header_nullable_timestamp_err(self): - r = self.webhost.request( - 'GET', - 'response_cookie_header_nullable_timestamp_err') - self.assertEqual(r.status_code, 200) - - @skipIf(sys.version_info < (3, 8, 0), - "Skip the tests for Python 3.7 and below") - def test_response_cookie_header_nullable_bool_err(self): - r = self.webhost.request( - 'GET', - 'response_cookie_header_nullable_bool_err') - self.assertEqual(r.status_code, 200) - self.assertTrue("Set-Cookie" in r.headers) - - @skipIf(sys.version_info < (3, 9, 0), - "Skip the tests for Python 3.8 and below") - def test_print_to_console_stderr(self): - r = self.webhost.request('GET', 'print_logging?console=true' - '&message=Secret42&is_stderr=true') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-print-logging"') - - def check_log_print_to_console_stderr(self, host_out: typing.List[str], ): - # System logs stderr now exist in host_out - self.assertIn('Secret42', host_out) - - def test_hijack_current_event_loop(self): - r = self.webhost.request('GET', 'hijack_current_event_loop/') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"OK-hijack-current-event-loop"') - - def check_log_hijack_current_event_loop(self, host_out: typing.List[str]): - # User logs should exist in host_out - self.assertIn('parallelly_print', host_out) - self.assertIn('parallelly_log_info at root logger', host_out) - self.assertIn('parallelly_log_warning at root logger', host_out) - self.assertIn('parallelly_log_error at root 
logger', host_out) - self.assertIn('parallelly_log_exception at root logger', host_out) - self.assertIn('parallelly_log_custom at custom_logger', host_out) - self.assertIn('callsoon_log', host_out) - - # System logs now exist in host_out - self.assertIn('parallelly_log_system at disguised_logger', host_out) - - def test_no_type_hint(self): - r = self.webhost.request('GET', 'no_type_hint') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '"no_type_hint"') - - def test_return_int(self): - r = self.webhost.request('GET', 'return_int') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '1000') - - def test_return_float(self): - r = self.webhost.request('GET', 'return_float') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '1000.0') - - def test_return_bool(self): - r = self.webhost.request('GET', 'return_bool') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'true') - - def test_return_dict(self): - r = self.webhost.request('GET', 'return_dict') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), {'key': 'value'}) - - def test_return_list(self): - r = self.webhost.request('GET', 'return_list') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), ["value1", "value2"]) - - def test_return_pydantic_model(self): - r = self.webhost.request('GET', 'return_pydantic_model') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), {'description': 'description1', - 'name': 'item1'}) - - def test_return_pydantic_model_with_missing_fields(self): - r = self.webhost.request('GET', - 'return_pydantic_model_with_missing_fields') - self.assertEqual(r.status_code, 500) - - def check_return_pydantic_model_with_missing_fields(self, - host_out: - typing.List[str]): - self.assertIn("Field required [type=missing, input_value={'name': " - "'item1'}, input_type=dict]", host_out) diff --git a/tests/unittests/test_http_v2.py b/tests/unittests/test_http_v2.py deleted file mode 100644 index b2b1852fd..000000000 --- a/tests/unittests/test_http_v2.py +++ /dev/null @@ -1,253 +0,0 @@ -import asyncio -import socket -import sys -import unittest -from unittest.mock import MagicMock, patch - -from azure_functions_worker.http_v2 import ( - AsyncContextReference, - SingletonMeta, - get_unused_tcp_port, - http_coordinator, -) - - -class MockHttpRequest: - pass - - -class MockHttpResponse: - pass - - -@unittest.skipIf(sys.version_info <= (3, 7), "Skipping tests if <= Python 3.7") -class TestHttpCoordinator(unittest.TestCase): - def setUp(self): - self.invoc_id = "test_invocation" - self.http_request = MockHttpRequest() - self.http_response = MockHttpResponse() - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self) -> None: - http_coordinator._context_references.clear() - self.loop.close() - - def test_set_http_request_new_invocation(self): - # Test setting a new HTTP request - http_coordinator.set_http_request(self.invoc_id, self.http_request) - context_ref = http_coordinator._context_references.get(self.invoc_id) - self.assertIsNotNone(context_ref) - self.assertEqual(context_ref.http_request, self.http_request) - - def test_set_http_request_existing_invocation(self): - # Test updating an existing HTTP request - new_http_request = MagicMock() - http_coordinator.set_http_request(self.invoc_id, new_http_request) - context_ref = http_coordinator._context_references.get(self.invoc_id) - self.assertIsNotNone(context_ref) - self.assertEqual(context_ref.http_request, new_http_request) - - 
def test_set_http_response_context_ref_null(self): - with self.assertRaises(Exception) as cm: - http_coordinator.set_http_response(self.invoc_id, - self.http_response) - self.assertEqual(cm.exception.args[0], - "No context reference found for invocation " - f"{self.invoc_id}") - - def test_set_http_response(self): - http_coordinator.set_http_request(self.invoc_id, self.http_request) - http_coordinator.set_http_response(self.invoc_id, self.http_response) - context_ref = http_coordinator._context_references[self.invoc_id] - self.assertEqual(context_ref.http_response, self.http_response) - - def test_get_http_request_async_existing_invocation(self): - # Test retrieving an existing HTTP request - http_coordinator.set_http_request(self.invoc_id, - self.http_request) - retrieved_request = self.loop.run_until_complete( - http_coordinator.get_http_request_async(self.invoc_id)) - self.assertEqual(retrieved_request, self.http_request) - - def test_get_http_request_async_wait_forever(self): - # Test handling error when invoc_id is not found - invalid_invoc_id = "invalid_invocation" - - with self.assertRaises(asyncio.TimeoutError): - self.loop.run_until_complete( - asyncio.wait_for( - http_coordinator.get_http_request_async( - invalid_invoc_id), - timeout=1 - ) - ) - - def test_await_http_response_async_valid_invocation(self): - invoc_id = "valid_invocation" - expected_response = self.http_response - - context_ref = AsyncContextReference(http_response=expected_response) - - # Add the mock context reference to the coordinator - http_coordinator._context_references[invoc_id] = context_ref - - http_coordinator.set_http_response(invoc_id, expected_response) - - # Call the method and verify the returned response - response = self.loop.run_until_complete( - http_coordinator.await_http_response_async(invoc_id)) - self.assertEqual(response, expected_response) - self.assertTrue( - http_coordinator._context_references.get( - invoc_id).http_response is None) - - def test_await_http_response_async_invalid_invocation(self): - # Test handling error when invoc_id is not found - invalid_invoc_id = "invalid_invocation" - with self.assertRaises(Exception) as context: - self.loop.run_until_complete( - http_coordinator.await_http_response_async(invalid_invoc_id)) - self.assertEqual(str(context.exception), - f"'No context reference found for invocation " - f"{invalid_invoc_id}'") - - def test_await_http_response_async_response_not_set(self): - invoc_id = "invocation_with_no_response" - # Set up a mock context reference without setting the response - context_ref = AsyncContextReference() - - # Add the mock context reference to the coordinator - http_coordinator._context_references[invoc_id] = context_ref - - http_coordinator.set_http_response(invoc_id, None) - # Call the method and verify that it raises an exception - with self.assertRaises(Exception) as context: - self.loop.run_until_complete( - http_coordinator.await_http_response_async(invoc_id)) - self.assertEqual(str(context.exception), - f"No http response found for invocation {invoc_id}") - - -@unittest.skipIf(sys.version_info <= (3, 7), "Skipping tests if <= Python 3.7") -class TestAsyncContextReference(unittest.TestCase): - - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self) -> None: - self.loop.close() - - def test_init(self): - ref = AsyncContextReference() - self.assertIsInstance(ref, AsyncContextReference) - self.assertTrue(ref.is_async) - - def test_http_request_property(self): - ref = 
AsyncContextReference() - ref.http_request = object() - self.assertIsNotNone(ref.http_request) - - def test_http_response_property(self): - ref = AsyncContextReference() - ref.http_response = object() - self.assertIsNotNone(ref.http_response) - - def test_function_property(self): - ref = AsyncContextReference() - ref.function = object() - self.assertIsNotNone(ref.function) - - def test_fi_context_property(self): - ref = AsyncContextReference() - ref.fi_context = object() - self.assertIsNotNone(ref.fi_context) - - def test_http_trigger_param_name_property(self): - ref = AsyncContextReference() - ref.http_trigger_param_name = object() - self.assertIsNotNone(ref.http_trigger_param_name) - - def test_args_property(self): - ref = AsyncContextReference() - ref.args = object() - self.assertIsNotNone(ref.args) - - def test_http_request_available_event_property(self): - ref = AsyncContextReference() - self.assertIsNotNone(ref.http_request_available_event) - - def test_http_response_available_event_property(self): - ref = AsyncContextReference() - self.assertIsNotNone(ref.http_response_available_event) - - def test_full_args(self): - ref = AsyncContextReference(http_request=object(), - http_response=object(), - function=object(), - fi_context=object(), - args=object()) - self.assertIsNotNone(ref.http_request) - self.assertIsNotNone(ref.http_response) - self.assertIsNotNone(ref.function) - self.assertIsNotNone(ref.fi_context) - self.assertIsNotNone(ref.args) - - -class TestSingletonMeta(unittest.TestCase): - - def test_singleton_instance(self): - class TestClass(metaclass=SingletonMeta): - pass - - obj1 = TestClass() - obj2 = TestClass() - - self.assertIs(obj1, obj2) - - def test_singleton_with_arguments(self): - class TestClass(metaclass=SingletonMeta): - def __init__(self, arg): - self.arg = arg - - obj1 = TestClass(1) - obj2 = TestClass(2) - - self.assertEqual(obj1.arg, 1) - self.assertEqual(obj2.arg, - 1) # Should still refer to the same instance - - def test_singleton_with_kwargs(self): - class TestClass(metaclass=SingletonMeta): - def __init__(self, **kwargs): - self.kwargs = kwargs - - obj1 = TestClass(a=1) - obj2 = TestClass(b=2) - - self.assertEqual(obj1.kwargs, {'a': 1}) - self.assertEqual(obj2.kwargs, - {'a': 1}) # Should still refer to the same instance - - -class TestGetUnusedTCPPort(unittest.TestCase): - - @patch('socket.socket') - def test_get_unused_tcp_port(self, mock_socket): - # Mock the socket object and its methods - mock_socket_instance = mock_socket.return_value - mock_socket_instance.getsockname.return_value = ('localhost', 12345) - - # Call the function - port = get_unused_tcp_port() - - # Assert that socket.socket was called with the correct arguments - mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM) - - # Assert that bind and close methods were called on the socket instance - mock_socket_instance.bind.assert_called_once_with(('', 0)) - mock_socket_instance.close.assert_called_once() - - # Assert that the returned port matches the expected value - self.assertEqual(port, 12345) diff --git a/tests/unittests/test_invalid_stein.py b/tests/unittests/test_invalid_stein.py deleted file mode 100644 index 1f65389d4..000000000 --- a/tests/unittests/test_invalid_stein.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-from tests.utils import testutils - -from azure_functions_worker import protos - -STEIN_INVALID_APP_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'broken_functions' / \ - 'invalid_app_stein' -STEIN_INVALID_FUNCTIONS_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'broken_functions' / \ - 'invalid_stein' - - -class TestInvalidAppStein(testutils.AsyncTestCase): - - @testutils.retryable_test(4, 5) - async def test_indexing_not_app(self): - """Test if the functions metadata status will be - Failure when an invalid app is provided - """ - async with testutils.start_mockhost( - script_root=STEIN_INVALID_APP_FUNCTIONS_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertEqual(r.response.result.status, - protos.StatusResult.Failure) - self.assertIsNotNone(r.response.result.exception.message) - - @testutils.retryable_test(4, 5) - async def test_indexing_invalid_app(self): - """Test if the functions metadata status will be - Failure when an invalid app is provided - """ - async with testutils.start_mockhost( - script_root=STEIN_INVALID_FUNCTIONS_DIR) as host: - await host.init_worker() - r = await host.get_functions_metadata() - self.assertIsInstance(r.response, protos.FunctionMetadataResponse) - self.assertEqual(r.response.result.status, - protos.StatusResult.Failure) - self.assertIsNotNone(r.response.result.exception.message) diff --git a/tests/unittests/test_loader.py b/tests/unittests/test_loader.py deleted file mode 100644 index a6af8faa5..000000000 --- a/tests/unittests/test_loader.py +++ /dev/null @@ -1,281 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import os -import pathlib -import subprocess -import sys -import textwrap -from unittest import skipIf -from unittest.mock import Mock, patch - -from azure.functions import Function -from azure.functions.decorators.retry_policy import RetryPolicy -from azure.functions.decorators.timer import TimerTrigger -from tests.utils import testutils - -from azure_functions_worker import functions -from azure_functions_worker.constants import ( - PYTHON_SCRIPT_FILE_NAME, - PYTHON_SCRIPT_FILE_NAME_DEFAULT, -) -from azure_functions_worker.loader import build_retry_protos - - -class TestLoader(testutils.WebHostTestCase): - - def setUp(self) -> None: - def test_function(): - return "Test" - - self.test_function = test_function - self.func = Function(self.test_function, script_file="test.py") - self.function_registry = functions.Registry() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'load_functions' - - def test_loader_building_fixed_retry_protos(self): - trigger = TimerTrigger(schedule="*/1 * * * * *", arg_name="mytimer", - name="mytimer") - self.func.add_trigger(trigger=trigger) - setting = RetryPolicy(strategy="fixed_delay", max_retry_count="1", - delay_interval="00:02:00") - self.func.add_setting(setting=setting) - - protos = build_retry_protos(self.func) - self.assertEqual(protos.max_retry_count, 1) - self.assertEqual(protos.retry_strategy, 1) # 1 enum for fixed delay - self.assertEqual(protos.delay_interval.seconds, 120) - - def test_loader_building_exponential_retry_protos(self): - trigger = TimerTrigger(schedule="*/1 * * * * *", arg_name="mytimer", - name="mytimer") - self.func.add_trigger(trigger=trigger) - setting = RetryPolicy(strategy="exponential_backoff", - max_retry_count="1", - minimum_interval="00:01:00", - 
maximum_interval="00:02:00") - self.func.add_setting(setting=setting) - - protos = build_retry_protos(self.func) - self.assertEqual(protos.max_retry_count, 1) - self.assertEqual(protos.retry_strategy, - 0) # 0 enum for exponential backoff - self.assertEqual(protos.minimum_interval.seconds, 60) - self.assertEqual(protos.maximum_interval.seconds, 120) - - @patch('azure_functions_worker.logging.logger.warning') - def test_loader_retry_policy_attribute_error(self, mock_logger): - self.func = Mock() - self.func.get_settings_dict.side_effect = AttributeError('DummyError') - - result = build_retry_protos(self.func) - self.assertIsNone(result) - - # Check if the logged message starts with the expected string - logged_message = mock_logger.call_args[0][ - 0] # Get the first argument of the logger.warning call - self.assertTrue(logged_message.startswith( - 'AttributeError while loading retry policy.')) - - def test_loader_simple(self): - r = self.webhost.request('GET', 'simple') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.simple.main') - - def test_loader_custom_entrypoint(self): - r = self.webhost.request('GET', 'entrypoint') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.entrypoint.main') - - def test_loader_no_script_file(self): - r = self.webhost.request('GET', 'no_script_file') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.no_script_file.main') - - def test_loader_subdir(self): - r = self.webhost.request('GET', 'subdir') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.subdir.sub.main') - - def test_loader_relimport(self): - r = self.webhost.request('GET', 'relimport') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.relimport.relative') - - def test_loader_submodule(self): - r = self.webhost.request('GET', 'submodule') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.submodule.sub_module.module') - - def test_loader_parentmodule(self): - r = self.webhost.request('GET', 'parentmodule') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '__app__.parentmodule.module') - - def test_loader_absolute_thirdparty(self): - """Allow third-party package import from .python_packages - and worker_venv - """ - - r = self.webhost.request('GET', 'absolute_thirdparty') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'eh = azure.eventhub') - - def test_loader_prioritize_customer_module(self): - """When a module in customer code has the same name with a third-party - package, the worker should prioritize third-party package - """ - - r = self.webhost.request('GET', 'name_collision') - self.assertEqual(r.status_code, 200) - self.assertRegex(r.text, r'pt.__version__ = \d+.\d+.\d+') - - def test_loader_fix_customer_module_with_app_import(self): - """When a module in customer code has the same name with a third-party - package, if customer uses "import __app__." statement, - the worker should load customer package - """ - - r = self.webhost.request('GET', 'name_collision_app_import') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'pt.__version__ = from.customer.code') - - def test_loader_implicit_import(self): - """Since sys.path is now fixed with script root appended, - implicit import statement is now acceptable. 
- """ - - r = self.webhost.request('GET', 'implicit_import') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 's_main = simple.main') - - def test_loader_module_not_found(self): - """If a module cannot be found, should throw an exception with - trouble shooting link https://aka.ms/functions-modulenotfound - """ - r = self.webhost.request('GET', 'module_not_found') - self.assertEqual(r.status_code, 500) - - def test_loader_init_should_only_invoke_outside_main_once(self): - """Check if the code in __init__.py outside of main() function - is only executed once - """ - r = self.webhost.request('GET', 'outside_main_code_in_init') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'executed count = 1') - - def test_loader_main_should_only_invoke_outside_main_once(self): - """Check if the code in main.py outside of main() function - is only executed once - """ - r = self.webhost.request('GET', 'outside_main_code_in_main') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'executed count = 1') - - def test_loader_outside_main_package_should_be_loaded_from_init(self): - """Check if the package can still be loaded from __init__ module - """ - r = self.webhost.request('GET', 'load_outside_main?from=init') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - def test_loader_outside_main_package_should_be_loaded_from_package(self): - """Check if the package can still be loaded from package - """ - r = self.webhost.request('GET', - 'load_outside_main?from=package') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK') - - def check_log_loader_module_not_found(self, host_out): - passed = False - exception_message = "Exception: ModuleNotFoundError: "\ - "No module named 'notfound'. "\ - "Cannot find module. "\ - "Please check the requirements.txt file for the "\ - "missing module. For more info, please refer the "\ - "troubleshooting guide: "\ - "https://aka.ms/functions-modulenotfound. "\ - "Current sys.path: " - for log in host_out: - if exception_message in log: - passed = True - self.assertTrue(passed) - - -class TestPluginLoader(testutils.AsyncTestCase): - - @skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7") - async def test_entry_point_plugin(self): - test_binding = pathlib.Path(__file__).parent / 'test-binding' - subprocess.run([ - sys.executable, '-m', 'pip', - '--disable-pip-version-check', - 'install', '--quiet', - '-e', test_binding - ], check=True) - - # This test must be run in a subprocess so that - # pkg_resources picks up the newly installed package. 
- code = textwrap.dedent(''' -import asyncio -from azure_functions_worker import protos -from tests.utils import testutils - -async def _runner(): - async with testutils.start_mockhost( - script_root='unittests/test-binding/functions') as host: - await host.init_worker() - func_id, r = await host.load_function('foo') - - print(r.response.function_id == func_id) - print(r.response.result.status == protos.StatusResult.Success) - -asyncio.get_event_loop().run_until_complete(_runner()) -''') - - try: - proc = await asyncio.create_subprocess_exec( - sys.executable, '-c', code, - stdout=asyncio.subprocess.PIPE) - - stdout, stderr = await proc.communicate() - - # Trimming off carriage return charater when testing on Windows - stdout_lines = [ - line.replace(b'\r', b'') for line in stdout.strip().split(b'\n') - ] - self.assertEqual(stdout_lines, [b'True', b'True']) - - finally: - subprocess.run([ - sys.executable, '-m', 'pip', - '--disable-pip-version-check', - 'uninstall', '-y', '--quiet', 'foo-binding' - ], check=True) - - -class TestConfigurableFileName(testutils.WebHostTestCase): - - def setUp(self) -> None: - def test_function(): - return "Test" - - self.file_name = PYTHON_SCRIPT_FILE_NAME_DEFAULT - self.test_function = test_function - self.func = Function(self.test_function, script_file="function_app.py") - self.function_registry = functions.Registry() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_stein' - - def test_correct_file_name(self): - os.environ.update({PYTHON_SCRIPT_FILE_NAME: self.file_name}) - self.assertIsNotNone(os.environ.get(PYTHON_SCRIPT_FILE_NAME)) - self.assertEqual(os.environ.get(PYTHON_SCRIPT_FILE_NAME), - 'function_app.py') diff --git a/tests/unittests/test_log_filtering_functions.py b/tests/unittests/test_log_filtering_functions.py deleted file mode 100644 index 3d074316b..000000000 --- a/tests/unittests/test_log_filtering_functions.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import typing - -from tests.utils import testutils - -HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO = """\ -{ - "version": "2.0", - "logging": { - "logLevel": { - "default": "Information" - } - }, - "functionTimeout": "00:05:00" -} -""" - - -class TestLogFilteringFunctions(testutils.WebHostTestCase): - """This class is for testing the logger behavior in Python Worker when - dealing with customer's log and system's log. 
Here's a list of expected - behaviors: - local_console customer_app_insight functions_kusto_table - system_log false false true - customer_log true true false - - Please ensure the following unit test cases align with the expectations - """ - - @classmethod - def setUpClass(cls): - host_json = testutils.TESTS_ROOT / cls.get_script_dir() / 'host.json' - - with open(host_json, 'w+') as f: - f.write(HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO) - - super(TestLogFilteringFunctions, cls).setUpClass() - - @classmethod - def tearDownClass(cls): - host_json = testutils.TESTS_ROOT / cls.get_script_dir() / 'host.json' - testutils.remove_path(host_json) - - super(TestLogFilteringFunctions, cls).tearDownClass() - - @classmethod - def get_script_dir(cls): - return testutils.UNIT_TESTS_FOLDER / 'log_filtering_functions' - - def test_debug_logging(self): - r = self.webhost.request('GET', 'debug_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - # See HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO, debug log is disabled - self.assertNotIn('logging debug', host_out) - - def test_debug_with_user_logging(self): - r = self.webhost.request('GET', 'debug_user_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-user-debug') - - def check_log_debug_with_user_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - # See HOST_JSON_TEMPLATE_WITH_LOGLEVEL_INFO, debug log is disabled - self.assertNotIn('logging debug', host_out) - - def test_info_with_sdk_logging(self): - """Invoke a HttpTrigger sdk_logging which contains logging invocation - via the azure.functions logger. This should be treated as system logs, - which means the log should not be displayed in local console. - """ - r = self.webhost.request('GET', 'sdk_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-sdk-logger') - - def check_log_info_with_sdk_logging(self, host_out: typing.List[str]): - # See TestLogFilteringFunctions docstring - # System log should be captured in console - self.assertIn('sdk_logger info', host_out) - self.assertIn('sdk_logger warning', host_out) - self.assertIn('sdk_logger error', host_out) - self.assertNotIn('sdk_logger debug', host_out) - - def test_info_with_sdk_submodule_logging(self): - """Invoke a HttpTrigger sdk_submodule_logging which contains logging - invocation via the azure.functions logger. This should be treated as - system logs. - """ - r = self.webhost.request('GET', 'sdk_submodule_logging') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-sdk-submodule-logging') - - def check_log_info_with_sdk_submodule_logging(self, - host_out: typing.List[str]): - # See TestLogFilteringFunctions docstring - # System log should be captured in console - self.assertIn('sdk_submodule_logger info', host_out) - self.assertIn('sdk_submodule_logger warning', host_out) - self.assertIn('sdk_submodule_logger error', host_out) - self.assertNotIn('sdk_submodule_logger debug', host_out) diff --git a/tests/unittests/test_logging.py b/tests/unittests/test_logging.py deleted file mode 100644 index b7c4f5f4a..000000000 --- a/tests/unittests/test_logging.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. -import unittest - -from azure_functions_worker import logging as flog -from azure_functions_worker.logging import format_exception - - -class TestLogging(unittest.TestCase): - """This class is for testing the grpc logging behavior in Python Worker. - Here's a list of expected behaviors: - local_console customer_app_insight functions_kusto_table - system_log false false true - customer_log true true false - - Please ensure the following unit test cases align with the expectations - """ - - def test_system_log_namespace(self): - """Ensure the following list is part of the system's log - """ - self.assertTrue(flog.is_system_log_category('azure_functions_worker')) - self.assertTrue( - flog.is_system_log_category('azure_functions_worker_error') - ) - self.assertTrue(flog.is_system_log_category('azure.functions')) - self.assertTrue(flog.is_system_log_category('azure.functions.module')) - - def test_customer_log_namespace(self): - """Ensure the following list is part of the customer's log - """ - self.assertFalse(flog.is_system_log_category('customer_logger')) - self.assertFalse(flog.is_system_log_category('azure')) - self.assertFalse(flog.is_system_log_category('protobuf')) - self.assertFalse(flog.is_system_log_category('root')) - self.assertFalse(flog.is_system_log_category('')) - - def test_format_exception(self): - def call0(fn): - call1(fn) - - def call1(fn): - call2(fn) - - def call2(fn): - fn() - - def raising_function(): - raise ValueError("Value error being raised.", ) - - try: - call0(raising_function) - except ValueError as e: - processed_exception = format_exception(e) - self.assertIn("call0", processed_exception) - self.assertIn("call1", processed_exception) - self.assertIn("call2", processed_exception) - self.assertIn("f", processed_exception) - self.assertRegex(processed_exception, - r".*tests\\unittests\\test_logging.py.*") diff --git a/tests/unittests/test_main.py b/tests/unittests/test_main.py deleted file mode 100644 index 688b4b0c1..000000000 --- a/tests/unittests/test_main.py +++ /dev/null @@ -1,80 +0,0 @@ -import sys -import unittest -from unittest.mock import patch - -from azure_functions_worker.main import parse_args - - -class TestMain(unittest.TestCase): - - @patch.object(sys, 'argv', - ['xxx', '--host', '127.0.0.1', - '--port', '50821', - '--workerId', 'e9efd817-47a1-45dc-9e20-e6f975d7a025', - '--requestId', 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51', - '--grpcMaxMessageLength', '2147483647', - '--functions-uri', 'http://127.0.0.1:50821', - '--functions-worker-id', - 'e9efd817-47a1-45dc-9e20-e6f975d7a025', - '--functions-request-id', - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51', - '--functions-grpc-max-message-length', '2147483647']) - def test_all_args(self): - args = parse_args() - self.assertEqual(args.host, '127.0.0.1') - self.assertEqual(args.port, 50821) - self.assertEqual(args.worker_id, - 'e9efd817-47a1-45dc-9e20-e6f975d7a025') - self.assertEqual(args.request_id, - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51') - self.assertEqual(args.grpc_max_msg_len, 2147483647) - self.assertEqual(args.functions_uri, 'http://127.0.0.1:50821') - self.assertEqual(args.functions_worker_id, - 'e9efd817-47a1-45dc-9e20-e6f975d7a025') - self.assertEqual(args.functions_request_id, - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51') - self.assertEqual(args.functions_grpc_max_msg_len, 2147483647) - - @patch.object(sys, 'argv', - ['xxx', '--host', '127.0.0.1', - '--port', '50821', - '--workerId', 'e9efd817-47a1-45dc-9e20-e6f975d7a025', - '--requestId', 
'cbef5957-cdb3-4462-9ee7-ac9f91be0a51', - '--grpcMaxMessageLength', '2147483647']) - def test_old_args(self): - args = parse_args() - self.assertEqual(args.host, '127.0.0.1') - self.assertEqual(args.port, 50821) - self.assertEqual(args.worker_id, - 'e9efd817-47a1-45dc-9e20-e6f975d7a025') - self.assertEqual(args.request_id, - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51') - self.assertEqual(args.grpc_max_msg_len, 2147483647) - self.assertIsNone(args.functions_uri) - self.assertIsNone(args.functions_worker_id) - self.assertIsNone(args.functions_request_id) - self.assertIsNone(args.functions_grpc_max_msg_len) - - @patch.object(sys, 'argv', - ['xxx', '--functions-uri', 'http://127.0.0.1:50821', - '--functions-worker-id', - 'e9efd817-47a1-45dc-9e20-e6f975d7a025', - '--functions-request-id', - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51', - '--functions-grpc-max-message-length', '2147483647']) - def test_new_args(self): - args = parse_args() - self.assertEqual(args.functions_uri, 'http://127.0.0.1:50821') - self.assertEqual(args.functions_worker_id, - 'e9efd817-47a1-45dc-9e20-e6f975d7a025') - self.assertEqual(args.functions_request_id, - 'cbef5957-cdb3-4462-9ee7-ac9f91be0a51') - self.assertEqual(args.functions_grpc_max_msg_len, 2147483647) - - @patch.object(sys, 'argv', ['xxx', '--host', 'dummy_host', - '--port', '12345', - '--invalid-arg', 'invalid_value']) - def test_invalid_args(self): - with self.assertRaises(SystemExit) as context: - parse_args() - self.assertEqual(context.exception.code, 2) diff --git a/tests/unittests/test_mock_blob_shared_memory_functions.py b/tests/unittests/test_mock_blob_shared_memory_functions.py deleted file mode 100644 index 63b06ca12..000000000 --- a/tests/unittests/test_mock_blob_shared_memory_functions.py +++ /dev/null @@ -1,620 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import hashlib -import json -import sys -import time -from unittest import skipIf - -from tests.utils import testutils - -from azure_functions_worker import protos -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import SharedMemoryMap - - -@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' - 'shared memory filesystems and thus skipping' - ' these tests for the time being') -class TestMockBlobSharedMemoryFunctions(testutils.SharedMemoryTestCase, - testutils.AsyncTestCase): - """ - Test the use of shared memory to transfer input and output data to and from - the host/worker. - """ - def setUp(self): - super().setUp() - self.blob_funcs_dir = testutils.EMULATOR_TESTS_FOLDER / 'blob_functions' - - async def test_binary_blob_read_as_bytes_function(self): - """ - Read a blob with binary input that was transferred between the host and - worker over shared memory. - The function's input data type will be bytes. - """ - func_name = 'get_blob_as_bytes_return_http_response' - await self._test_binary_blob_read_function(func_name) - - async def test_binary_blob_read_as_stream_function(self): - """ - Read a blob with binary input that was transferred between the host and - worker over shared memory. - The function's input data type will be InputStream. 
- """ - func_name = 'get_blob_as_bytes_stream_return_http_response' - await self._test_binary_blob_read_function(func_name) - - async def test_binary_blob_write_function(self): - """ - Write a blob with binary output that was transferred between the worker - and host over shared memory. - """ - func_name = 'put_blob_as_bytes_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - http_params = {'content_size': str(content_size)} - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET', - query=http_params))), - ]) - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - # The function responds back in the HTTP body with the sha256 digest of - # the output it created along with its size - response_bytes = response_msg.response.return_value.http.body.bytes - json_response = json.loads(response_bytes) - func_created_content_size = json_response['content_size'] - func_created_content_sha256 = json_response['content_sha256'] - - # Verify if the worker produced an output blob which was written - # in shared memory - output_data = response_msg.response.output_data - self.assertEqual(1, len(output_data)) - - output_binding = output_data[0] - binding_type = output_binding.WhichOneof('rpc_data') - self.assertEqual('rpc_shared_memory', binding_type) - - # Get the information about the shared memory region in which the - # worker wrote the function's output blob - shmem = output_binding.rpc_shared_memory - mem_map_name = shmem.name - offset = shmem.offset - count = shmem.count - data_type = shmem.type - - # Verify if the shared memory region's information is valid - self.assertTrue(self.is_valid_uuid(mem_map_name)) - self.assertEqual(0, offset) - self.assertEqual(func_created_content_size, count) - self.assertEqual(protos.RpcDataType.bytes, data_type) - - # Read data from the shared memory region - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + count - mem_map = self.file_accessor.open_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - read_content = shared_mem_map.get_bytes() - - # Dispose the shared memory map since we have read the function's - # output now - shared_mem_map.dispose() - - # Verify if we were able to read the correct output that the - # function has produced - read_content_sha256 = hashlib.sha256(read_content).hexdigest() - self.assertEqual(func_created_content_sha256, read_content_sha256) - self.assertEqual(len(read_content), func_created_content_size) - - async def test_str_blob_read_function(self): - """ - Read a blob with binary input that was transferred between the host and - worker over shared memory. - The function's input data type will be str. 
- """ - func_name = 'get_blob_as_str_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - # Write binary content into shared memory - mem_map_name = self.get_new_mem_map_name() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = int(content_size / consts.SIZE_OF_CHAR_BYTES) - content = self.get_random_string(num_chars) - content_bytes = content.encode('utf-8') - content_sha256 = hashlib.sha256(content_bytes).hexdigest() - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_size - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - num_bytes_written = shared_mem_map.put_bytes(content_bytes) - - # Create a message to send to the worker containing info about the - # shared memory region to read input from - value = protos.RpcSharedMemory( - name=mem_map_name, - offset=0, - count=num_bytes_written, - type=protos.RpcDataType.string - ) - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET'))), - protos.ParameterBinding( - name='file', - rpc_shared_memory=value - ) - ]) - - # Dispose the shared memory map since the function is done using it - shared_mem_map.dispose() - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - response_bytes = response_msg.response.return_value.http.body.bytes - json_response = json.loads(response_bytes) - func_received_num_chars = json_response['num_chars'] - func_received_content_sha256 = json_response['content_sha256'] - - # Check the function response to ensure that it read the complete - # input that we provided and the sha256 matches - self.assertEqual(num_chars, func_received_num_chars) - self.assertEqual(content_sha256, func_received_content_sha256) - - async def test_str_blob_write_function(self): - """ - Write a blob with string output that was transferred between the worker - and host over shared memory. 
- """ - func_name = 'put_blob_as_str_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = int(content_size / consts.SIZE_OF_CHAR_BYTES) - http_params = {'num_chars': str(num_chars)} - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET', - query=http_params))), - ]) - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - # The function responds back in the HTTP body with the sha256 digest of - # the output it created along with its size - response_bytes = response_msg.response.return_value.http.body.bytes - json_response = json.loads(response_bytes) - func_created_num_chars = json_response['num_chars'] - func_created_content_sha256 = json_response['content_sha256'] - - # Verify if the worker produced an output blob which was written - # in shared memory - output_data = response_msg.response.output_data - self.assertEqual(1, len(output_data)) - - output_binding = output_data[0] - binding_type = output_binding.WhichOneof('rpc_data') - self.assertEqual('rpc_shared_memory', binding_type) - - # Get the information about the shared memory region in which the - # worker wrote the function's output blob - shmem = output_binding.rpc_shared_memory - mem_map_name = shmem.name - offset = shmem.offset - count = shmem.count - data_type = shmem.type - - # Verify if the shared memory region's information is valid - self.assertTrue(self.is_valid_uuid(mem_map_name)) - self.assertEqual(0, offset) - self.assertEqual(func_created_num_chars, count) - self.assertEqual(protos.RpcDataType.string, data_type) - - # Read data from the shared memory region - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + count - mem_map = self.file_accessor.open_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - read_content_bytes = shared_mem_map.get_bytes() - - # Dispose the shared memory map since we have read the function's - # output now - shared_mem_map.dispose() - - # Verify if we were able to read the correct output that the - # function has produced - read_content_sha256 = hashlib.sha256(read_content_bytes).hexdigest() - self.assertEqual(func_created_content_sha256, read_content_sha256) - read_content = read_content_bytes.decode('utf-8') - self.assertEqual(len(read_content), func_created_num_chars) - - async def test_close_shared_memory_maps(self): - """ - Close the shared memory maps created by the worker to transfer output - blob to the host after the host is done processing the response. 
- """ - func_name = 'put_blob_as_bytes_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - http_params = {'content_size': str(content_size)} - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET', - query=http_params))), - ]) - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - # Verify if the worker produced an output blob which was written - # in shared memory - output_data = response_msg.response.output_data - output_binding = output_data[0] - - # Get the information about the shared memory region in which the - # worker wrote the function's output blob - shmem = output_binding.rpc_shared_memory - mem_map_name = shmem.name - - # Request the worker to close the memory maps - mem_map_names = [mem_map_name] - response_msg = \ - await host.close_shared_memory_resources(mem_map_names) - - # Verify that the worker responds with a successful status after - # closing the requested memory map - mem_map_statuses = response_msg.response.close_map_results - self.assertEqual(len(mem_map_names), len(mem_map_statuses.keys())) - for mem_map_name in mem_map_names: - self.assertTrue(mem_map_name in mem_map_statuses) - status = mem_map_statuses[mem_map_name] - self.assertTrue(status) - - async def test_shared_memory_not_used_with_small_output(self): - """ - Even though shared memory is enabled, small inputs will not be - transferred over shared memory (in this case from the worker to the - host.) - """ - func_name = 'put_blob_as_bytes_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER - 10 - http_params = {'content_size': str(content_size)} - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET', - query=http_params))), - ]) - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - # Verify if the worker produced an output blob which was sent over - # RPC instead of shared memory - output_data = response_msg.response.output_data - self.assertEqual(1, len(output_data)) - - output_binding = output_data[0] - binding_type = output_binding.WhichOneof('rpc_data') - self.assertEqual('data', binding_type) - - async def test_multiple_input_output_blobs(self): - """ - Read two blobs and write two blobs, all over shared memory. 
- """ - func_name = 'put_get_multiple_blobs_as_bytes_return_http_response' - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - # Input 1 - # Write binary content into shared memory - mem_map_name_1 = self.get_new_mem_map_name() - input_content_size_1 = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - input_content_1 = self.get_random_bytes(input_content_size_1) - input_content_sha256_1 = hashlib.sha256(input_content_1).hexdigest() - input_mem_map_size_1 = \ - consts.CONTENT_HEADER_TOTAL_BYTES + input_content_size_1 - input_mem_map_1 = \ - self.file_accessor.create_mem_map(mem_map_name_1, - input_mem_map_size_1) - input_shared_mem_map_1 = \ - SharedMemoryMap(self.file_accessor, mem_map_name_1, - input_mem_map_1) - input_num_bytes_written_1 = \ - input_shared_mem_map_1.put_bytes(input_content_1) - - # Create a message to send to the worker containing info about the - # shared memory region to read input from - input_value_1 = protos.RpcSharedMemory( - name=mem_map_name_1, - offset=0, - count=input_num_bytes_written_1, - type=protos.RpcDataType.bytes - ) - - # Input 2 - # Write binary content into shared memory - mem_map_name_2 = self.get_new_mem_map_name() - input_content_size_2 = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 20 - input_content_2 = self.get_random_bytes(input_content_size_2) - input_content_sha256_2 = hashlib.sha256(input_content_2).hexdigest() - input_mem_map_size_2 = \ - consts.CONTENT_HEADER_TOTAL_BYTES + input_content_size_2 - input_mem_map_2 = \ - self.file_accessor.create_mem_map(mem_map_name_2, - input_mem_map_size_2) - input_shared_mem_map_2 = \ - SharedMemoryMap(self.file_accessor, mem_map_name_2, - input_mem_map_2) - input_num_bytes_written_2 = \ - input_shared_mem_map_2.put_bytes(input_content_2) - - # Outputs - output_content_size_1 = \ - consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 11 - output_content_size_2 = \ - consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 22 - http_params = { - 'output_content_size_1': str(output_content_size_1), - 'output_content_size_2': str(output_content_size_2)} - - # Create a message to send to the worker containing info about the - # shared memory region to read input from - input_value_2 = protos.RpcSharedMemory( - name=mem_map_name_2, - offset=0, - count=input_num_bytes_written_2, - type=protos.RpcDataType.bytes - ) - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET', - query=http_params))), - protos.ParameterBinding( - name='inputfile1', - rpc_shared_memory=input_value_1 - ), - protos.ParameterBinding( - name='inputfile2', - rpc_shared_memory=input_value_2 - ) - ]) - time.sleep(1) - - # Dispose the shared memory map since the function is done using it - input_shared_mem_map_1.dispose() - input_shared_mem_map_2.dispose() - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - response_bytes = response_msg.response.return_value.http.body.bytes - json_response = json.loads(response_bytes) - - func_received_content_size_1 = json_response['input_content_size_1'] - func_received_content_sha256_1 = json_response['input_content_sha256_1'] - func_received_content_size_2 = 
json_response['input_content_size_2'] - func_received_content_sha256_2 = json_response['input_content_sha256_2'] - func_created_content_size_1 = json_response['output_content_size_1'] - func_created_content_size_2 = json_response['output_content_size_2'] - func_created_content_sha256_1 = json_response['output_content_sha256_1'] - func_created_content_sha256_2 = json_response['output_content_sha256_2'] - - # Check the function response to ensure that it read the complete - # input that we provided and the sha256 matches - self.assertEqual(input_content_size_1, func_received_content_size_1) - self.assertEqual(input_content_sha256_1, func_received_content_sha256_1) - self.assertEqual(input_content_size_2, func_received_content_size_2) - self.assertEqual(input_content_sha256_2, func_received_content_sha256_2) - - # Verify if the worker produced two output blobs which were written - # in shared memory - output_data = response_msg.response.output_data - self.assertEqual(2, len(output_data)) - - # Output 1 - output_binding_1 = output_data[0] - binding_type = output_binding_1.WhichOneof('rpc_data') - self.assertEqual('rpc_shared_memory', binding_type) - - shmem_1 = output_binding_1.rpc_shared_memory - self._verify_function_output(shmem_1, func_created_content_size_1, - func_created_content_sha256_1) - - # Output 2 - output_binding_2 = output_data[1] - binding_type = output_binding_2.WhichOneof('rpc_data') - self.assertEqual('rpc_shared_memory', binding_type) - - shmem_2 = output_binding_2.rpc_shared_memory - self._verify_function_output(shmem_2, func_created_content_size_2, - func_created_content_sha256_2) - - async def _test_binary_blob_read_function(self, func_name): - """ - Verify that the function executed successfully when the worker received - inputs for the function over shared memory. 
- """ - async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \ - as host: - await host.init_worker("4.17.1") - await host.load_function(func_name) - - # Write binary content into shared memory - mem_map_name = self.get_new_mem_map_name() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - content_sha256 = hashlib.sha256(content).hexdigest() - mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_size - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - num_bytes_written = shared_mem_map.put_bytes(content) - - # Create a message to send to the worker containing info about the - # shared memory region to read input from - value = protos.RpcSharedMemory( - name=mem_map_name, - offset=0, - count=num_bytes_written, - type=protos.RpcDataType.bytes - ) - - # Invoke the function; it should read the input blob from shared - # memory and respond back in the HTTP body with the number of bytes - # it read in the input - _, response_msg = await host.invoke_function( - func_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET'))), - protos.ParameterBinding( - name='file', - rpc_shared_memory=value - ) - ]) - - # Dispose the shared memory map since the function is done using it - shared_mem_map.dispose() - - # Verify if the function executed successfully - self.assertEqual(protos.StatusResult.Success, - response_msg.response.result.status) - - response_bytes = response_msg.response.return_value.http.body.bytes - json_response = json.loads(response_bytes) - func_received_content_size = json_response['content_size'] - func_received_content_sha256 = json_response['content_sha256'] - - # Check the function response to ensure that it read the complete - # input that we provided and the sha256 matches - self.assertEqual(content_size, func_received_content_size) - self.assertEqual(content_sha256, func_received_content_sha256) - - def _verify_function_output( - self, - shmem: protos.RpcSharedMemory, - expected_size: int, - expected_sha256: str): - """ - Verify if the output produced by the worker is what we expect it to be - based on the size and MD5 digest. 
- """ - output_mem_map_name = shmem.name - output_offset = shmem.offset - output_count = shmem.count - output_data_type = shmem.type - - # Verify if the shared memory region's information is valid - self.assertTrue(self.is_valid_uuid(output_mem_map_name)) - self.assertEqual(0, output_offset) - self.assertEqual(expected_size, output_count) - self.assertEqual(protos.RpcDataType.bytes, output_data_type) - - # Read data from the shared memory region - output_mem_map_size = \ - consts.CONTENT_HEADER_TOTAL_BYTES + output_count - output_mem_map = \ - self.file_accessor.open_mem_map(output_mem_map_name, - output_mem_map_size) - output_shared_mem_map = \ - SharedMemoryMap(self.file_accessor, output_mem_map_name, - output_mem_map) - output_read_content = output_shared_mem_map.get_bytes() - - # Dispose the shared memory map since we have read the function's - # output now - output_shared_mem_map.dispose() - - # Verify if we were able to read the correct output that the - # function has produced - output_read_content_sha256 = hashlib.sha256(output_read_content).hexdigest() - self.assertEqual(expected_sha256, output_read_content_sha256) - self.assertEqual(len(output_read_content), expected_size) diff --git a/tests/unittests/test_mock_durable_functions.py b/tests/unittests/test_mock_durable_functions.py deleted file mode 100644 index ce19c613f..000000000 --- a/tests/unittests/test_mock_durable_functions.py +++ /dev/null @@ -1,158 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestDurableFunctions(testutils.AsyncTestCase): - durable_functions_dir = testutils.UNIT_TESTS_FOLDER / 'durable_functions' - - async def test_mock_activity_trigger(self): - async with testutils.start_mockhost( - script_root=self.durable_functions_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('activity_trigger') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'activity_trigger', [ - # According to Durable Python - # Activity Trigger's input must be json serializable - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test single_word' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json='"test single_word"') - ) - - async def test_mock_activity_trigger_no_anno(self): - async with testutils.start_mockhost( - script_root=self.durable_functions_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('activity_trigger_no_anno') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'activity_trigger_no_anno', [ - # According to Durable Python - # Activity Trigger's input must be json serializable - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test multiple words' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json='"test multiple words"') - ) - - async def test_mock_activity_trigger_dict(self): - async with testutils.start_mockhost( - script_root=self.durable_functions_dir) as host: - - await 
host.init_worker("4.17.1") - func_id, r = await host.load_function('activity_trigger_dict') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'activity_trigger_dict', [ - # According to Durable Python - # Activity Trigger's input must be json serializable - protos.ParameterBinding( - name='input', - data=protos.TypedData( - json='{"bird": "Crane"}' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json='{"bird": "enarC"}') - ) - - async def test_mock_activity_trigger_int_to_float(self): - async with testutils.start_mockhost( - script_root=self.durable_functions_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function( - 'activity_trigger_int_to_float') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'activity_trigger_int_to_float', [ - # According to Durable Python - # Activity Trigger's input must be json serializable - protos.ParameterBinding( - name='input', - data=protos.TypedData( - json=str(int(10)) - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json='-11.0') - ) - - async def test_mock_orchestration_trigger(self): - async with testutils.start_mockhost( - script_root=self.durable_functions_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('orchestration_trigger') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'orchestration_trigger', [ - protos.ParameterBinding( - name='context', - data=protos.TypedData( - string='Durable functions coming soon' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json='Durable functions coming soon :)') - ) diff --git a/tests/unittests/test_mock_eventhub_functions.py b/tests/unittests/test_mock_eventhub_functions.py deleted file mode 100644 index f93dfa994..000000000 --- a/tests/unittests/test_mock_eventhub_functions.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import json - -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestEventHubMockFunctions(testutils.AsyncTestCase): - mock_funcs_dir = testutils.UNIT_TESTS_FOLDER / 'eventhub_mock_functions' - - async def test_mock_eventhub_trigger_iot(self): - async with testutils.start_mockhost( - script_root=self.mock_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('eventhub_trigger_iot') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - async def call_and_check(): - _, r = await host.invoke_function( - 'eventhub_trigger_iot', - [ - protos.ParameterBinding( - name='event', - data=protos.TypedData( - json=json.dumps({ - 'id': 'foo' - }) - ), - ), - ], - metadata={ - 'SystemProperties': protos.TypedData(json=json.dumps({ - 'iothub-device-id': 'mock-iothub-device-id', - 'iothub-auth-data': 'mock-iothub-auth-data', - 'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z' - })) - } - ) - - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - res_json_string = r.response.return_value.string - self.assertIn('device-id', res_json_string) - self.assertIn('mock-iothub-device-id', res_json_string) - self.assertIn('auth-data', res_json_string) - self.assertIn('mock-iothub-auth-data', res_json_string) - - await call_and_check() - - async def test_mock_eventhub_cardinality_one(self): - async with testutils.start_mockhost( - script_root=self.mock_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('eventhub_cardinality_one') - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'eventhub_cardinality_one', - [ - protos.ParameterBinding( - name='event', - data=protos.TypedData( - json=json.dumps({ - 'id': 'cardinality_one' - }) - ), - ), - ], - metadata={ - 'SystemProperties': protos.TypedData(json=json.dumps({ - 'iothub-device-id': 'mock-iothub-device-id', - 'iothub-auth-data': 'mock-iothub-auth-data', - 'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z' - })) - } - ) - - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual(r.response.return_value.string, 'OK_ONE') - - async def test_mock_eventhub_cardinality_one_bad_annotation(self): - async with testutils.start_mockhost( - script_root=self.mock_funcs_dir) as host: - - await host.init_worker("4.17.1") - # This is supposed to fail since the event should not be int - func_id, r = await host.load_function( - 'eventhub_cardinality_one_bad_anno' - ) - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Failure) - - async def test_mock_eventhub_cardinality_many(self): - async with testutils.start_mockhost( - script_root=self.mock_funcs_dir) as host: - - await host.init_worker("4.17.1") - - func_id, r = await host.load_function('eventhub_cardinality_many') - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'eventhub_cardinality_many', - [ - protos.ParameterBinding( - name='events', - data=protos.TypedData( - json=json.dumps([{ - 'id': 'cardinality_many' - }]) - ), - ), - ], - metadata={ - 'SystemPropertiesArray': protos.TypedData(json=json.dumps([ - { - 'iothub-device-id': 'mock-iothub-device-id', - 
'iothub-auth-data': 'mock-iothub-auth-data', - 'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z' - } - ])) - } - ) - - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual(r.response.return_value.string, 'OK_MANY') - - async def test_mock_eventhub_cardinality_many_bad_annotation(self): - async with testutils.start_mockhost( - script_root=self.mock_funcs_dir) as host: - - # This is supposed to fail since the event should not be List[str] - await host.init_worker("4.17.1") - - func_id, r = await host.load_function( - 'eventhub_cardinality_many_bad_anno' - ) - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Failure) diff --git a/tests/unittests/test_mock_generic_functions.py b/tests/unittests/test_mock_generic_functions.py deleted file mode 100644 index 5ae199bbf..000000000 --- a/tests/unittests/test_mock_generic_functions.py +++ /dev/null @@ -1,390 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestGenericFunctions(testutils.AsyncTestCase): - generic_funcs_dir = testutils.UNIT_TESTS_FOLDER / 'generic_functions' - - async def test_mock_generic_as_str(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_as_str') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_as_str', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(string='test') - ) - - async def test_mock_generic_as_bytes(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_as_bytes') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_as_bytes', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - bytes=b'\x00\x01' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(bytes=b'\x00\x01') - ) - - async def test_mock_generic_as_str_no_anno(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_as_str_no_anno') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_as_str_no_anno', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(string='test') - ) - - async def test_mock_generic_as_bytes_no_anno(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await 
host.load_function('foobar_as_bytes_no_anno') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_as_bytes_no_anno', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - bytes=b'\x00\x01' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(bytes=b'\x00\x01') - ) - - async def test_mock_generic_should_support_implicit_output(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_implicit_output') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_implicit_output', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - bytes=b'\x00\x01' - ) - ) - ] - ) - # It passes now as we are enabling generic binding to return output - # implicitly - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(bytes=b'\x00\x01')) - - async def test_mock_generic_should_support_without_datatype(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_with_no_datatype') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_with_no_datatype', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - bytes=b'\x00\x01' - ) - ) - ] - ) - # It passes now as we are enabling generic binding to return output - # implicitly - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(bytes=b'\x00\x01')) - - async def test_mock_generic_implicit_output_exemption(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - await host.init_worker("4.17.1") - func_id, r = await host.load_function( - 'foobar_implicit_output_exemption') - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_implicit_output_exemption', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - bytes=b'\x00\x01' - ) - ) - ] - ) - # It should fail here, since implicit output is False - # For the Durable Functions durableClient case - self.assertEqual(r.response.result.status, - protos.StatusResult.Failure) - - async def test_mock_generic_as_nil_data(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_nil_data') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_nil_data', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData() - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData()) - - async 
def test_mock_generic_as_none(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_as_none') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_as_none', [ - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(string="hello")) - - async def test_mock_generic_return_dict(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_return_dict') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_return_dict', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json="{\"hello\": \"world\"}") - ) - - async def test_mock_generic_return_list(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_return_list') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_return_list', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(json="[1, 2, 3]") - ) - - async def test_mock_generic_return_int(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_return_int') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_return_int', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(int=12) - ) - - async def test_mock_generic_return_double(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('foobar_return_double') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_return_double', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(double=12.34) - ) - - async def test_mock_generic_return_bool(self): - async with testutils.start_mockhost( - script_root=self.generic_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await 
host.load_function('foobar_return_bool') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - _, r = await host.invoke_function( - 'foobar_return_bool', [ - protos.ParameterBinding( - name='input', - data=protos.TypedData( - string='test' - ) - ) - ] - ) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - r.response.return_value, - protos.TypedData(int=1) - ) diff --git a/tests/unittests/test_mock_http_functions.py b/tests/unittests/test_mock_http_functions.py deleted file mode 100644 index 849134038..000000000 --- a/tests/unittests/test_mock_http_functions.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestMockHost(testutils.AsyncTestCase): - - async def test_call_sync_function_check_logs(self): - async with testutils.start_mockhost() as host: - - await host.init_worker("4.17.1") - await host.load_function('sync_logging') - - invoke_id, r = await host.invoke_function( - 'sync_logging', [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET'))) - ]) - - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - user_logs = [line for line in r.logs - if line.category == 'my function'] - # 2 log statements added (critical and error) in sync_logging - self.assertEqual(len(user_logs), 2) - - log = user_logs[0] - self.assertEqual(log.invocation_id, invoke_id) - self.assertTrue(log.message.startswith( - 'a gracefully handled error')) - - self.assertEqual(r.response.return_value.string, 'OK-sync') - - async def test_call_async_function_check_logs(self): - async with testutils.start_mockhost() as host: - - await host.init_worker("4.17.1") - await host.load_function('async_logging') - - invoke_id, r = await host.invoke_function( - 'async_logging', [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp( - method='GET'))) - ]) - - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - user_logs = [line for line in r.logs if - line.category == 'my function'] - self.assertEqual(len(user_logs), 2) - - first_msg = user_logs[0] - self.assertEqual(first_msg.invocation_id, invoke_id) - self.assertEqual(first_msg.message, 'hello info') - self.assertEqual(first_msg.level, protos.RpcLog.Information) - - second_msg = user_logs[1] - self.assertEqual(second_msg.invocation_id, invoke_id) - self.assertTrue(second_msg.message.startswith('and another error')) - self.assertEqual(second_msg.level, protos.RpcLog.Error) - - self.assertEqual(r.response.return_value.string, 'OK-async') - - async def test_handles_unsupported_messages_gracefully(self): - async with testutils.start_mockhost() as host: - # Intentionally send a message to worker that isn't - # going to be ever supported by it. The idea is that - # workers should survive such messages and continue - # their operation. If anything, the host can always - # terminate the worker. 
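Taken together, the generic-binding tests above pin the worker's Python-to-TypedData encoding for return values: str maps to string, bytes to bytes, dict and list to json, int to int, float to double, and bool to int. The sketch below is one reading of that mapping, assuming `protos` from azure_functions_worker as imported in these tests; the worker's real converter handles more cases.

```python
# Sketch of the return-value encoding the generic-binding tests pin down;
# the real converter in the worker is richer than this.
import json

from azure_functions_worker import protos


def to_typed_data(value):
    if value is None:
        return protos.TypedData()
    if isinstance(value, bool):
        # Checked before int (bool subclasses int); encoded as int per
        # test_mock_generic_return_bool.
        return protos.TypedData(int=int(value))
    if isinstance(value, int):
        return protos.TypedData(int=value)
    if isinstance(value, float):
        return protos.TypedData(double=value)
    if isinstance(value, str):
        return protos.TypedData(string=value)
    if isinstance(value, bytes):
        return protos.TypedData(bytes=value)
    if isinstance(value, (dict, list)):
        return protos.TypedData(json=json.dumps(value))
    raise TypeError(f'unsupported return type: {type(value)}')
```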
- await host.send( - protos.StreamingMessage( - worker_heartbeat=protos.WorkerHeartbeat())) - - await host.init_worker("4.17.1") - _, r = await host.load_function('return_out') - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) diff --git a/tests/unittests/test_mock_log_filtering_functions.py b/tests/unittests/test_mock_log_filtering_functions.py deleted file mode 100644 index 022499502..000000000 --- a/tests/unittests/test_mock_log_filtering_functions.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -from unittest.mock import call, patch - -from tests.utils import testutils - -from azure_functions_worker import protos -from azure_functions_worker.logging import is_system_log_category - - -class TestMockLogFilteringFunctions(testutils.AsyncTestCase): - dir = testutils.UNIT_TESTS_FOLDER / 'log_filtering_functions' - - async def test_root_logger_should_be_customer_log(self): - """When customer use the root logger to send logs, the 'root' namespace - should be treated as customer log, only sending to our customers. - """ - with patch( - 'azure_functions_worker.dispatcher.is_system_log_category' - ) as islc_mock: - async with testutils.start_mockhost(script_root=self.dir) as host: - await host.init_worker("4.17.1") - await host.load_function('debug_logging') - await self._invoke_function(host, 'debug_logging') - - self.assertIn(call('root'), islc_mock.call_args_list) - self.assertFalse(is_system_log_category('root')) - - async def test_customer_logging_should_not_be_system_log(self): - """When sdk uses the 'azure' logger to send logs - (e.g. 'azure.servicebus'), the namespace should be treated as customer - log, only sends to our customers. - """ - with patch( - 'azure_functions_worker.dispatcher.is_system_log_category' - ) as islc_mock: - async with testutils.start_mockhost(script_root=self.dir) as host: - await host.init_worker("4.17.1") - await host.load_function('debug_user_logging') - await self._invoke_function(host, 'debug_user_logging') - - self.assertIn(call('my function'), islc_mock.call_args_list) - self.assertFalse(is_system_log_category('my function')) - - async def test_sdk_logger_should_be_system_log(self): - """When sdk uses the 'azure.functions' logger to send logs, the - namespace should be treated as system log, sending to our customers and - our kusto table. - """ - with patch( - 'azure_functions_worker.dispatcher.is_system_log_category' - ) as islc_mock: - async with testutils.start_mockhost(script_root=self.dir) as host: - await host.init_worker("4.17.1") - await host.load_function('sdk_logging') - await self._invoke_function(host, 'sdk_logging') - - self.assertIn( - call('azure.functions'), islc_mock.call_args_list - ) - self.assertTrue(is_system_log_category('azure.functions')) - - async def test_sdk_submodule_logger_should_be_system_log(self): - """When sdk uses the 'azure.functions.submodule' logger to send logs, - the namespace should be treated as system log, sending to our customers - and our kusto table. 
- """ - with patch( - 'azure_functions_worker.dispatcher.is_system_log_category' - ) as islc_mock: - async with testutils.start_mockhost(script_root=self.dir) as host: - await host.init_worker("4.17.1") - await host.load_function('sdk_submodule_logging') - await self._invoke_function(host, 'sdk_submodule_logging') - - self.assertIn( - call('azure.functions.submodule'), islc_mock.call_args_list - ) - self.assertTrue( - is_system_log_category('azure.functions.submodule') - ) - - async def _invoke_function(self, - host: testutils._MockWebHost, - function_name: str): - _, r = await host.invoke_function( - function_name, [ - protos.ParameterBinding( - name='req', - data=protos.TypedData( - http=protos.RpcHttp(method='GET') - ) - ) - ] - ) - - self.assertEqual(r.response.result.status, protos.StatusResult.Success) diff --git a/tests/unittests/test_mock_timer_functions.py b/tests/unittests/test_mock_timer_functions.py deleted file mode 100644 index d4f11e644..000000000 --- a/tests/unittests/test_mock_timer_functions.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import json - -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestTimerFunctions(testutils.AsyncTestCase): - timer_funcs_dir = testutils.UNIT_TESTS_FOLDER / 'timer_functions' - - async def test_mock_timer__return_pastdue(self): - async with testutils.start_mockhost( - script_root=self.timer_funcs_dir) as host: - - await host.init_worker("4.17.1") - func_id, r = await host.load_function('return_pastdue') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - async def call_and_check(due: bool): - _, r = await host.invoke_function( - 'return_pastdue', [ - protos.ParameterBinding( - name='timer', - data=protos.TypedData( - json=json.dumps({ - 'IsPastDue': due - }))) - ]) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - self.assertEqual( - list(r.response.output_data), [ - protos.ParameterBinding( - name='pastdue', - data=protos.TypedData(string=str(due))) - ]) - - await call_and_check(True) - await call_and_check(False) - - async def test_mock_timer__user_event_loop(self): - async with testutils.start_mockhost( - script_root=self.timer_funcs_dir) as host: - await host.init_worker("4.17.1") - func_id, r = await host.load_function('user_event_loop_timer') - - self.assertEqual(r.response.function_id, func_id) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - async def call_and_check(): - _, r = await host.invoke_function( - 'user_event_loop_timer', [ - protos.ParameterBinding( - name='timer', - data=protos.TypedData( - json=json.dumps({ - 'IsPastDue': False - }))) - ]) - self.assertEqual(r.response.result.status, - protos.StatusResult.Success) - - await call_and_check() diff --git a/tests/unittests/test_nullable_converters.py b/tests/unittests/test_nullable_converters.py deleted file mode 100644 index 913c80fb7..000000000 --- a/tests/unittests/test_nullable_converters.py +++ /dev/null @@ -1,110 +0,0 @@ -import datetime -import unittest - -import pytest -from google.protobuf.timestamp_pb2 import Timestamp - -from azure_functions_worker import protos -from azure_functions_worker.bindings.nullable_converters import ( - to_nullable_bool, - to_nullable_double, - to_nullable_string, - to_nullable_timestamp, -) - -try: - from http.cookies import SimpleCookie -except ImportError: - from 
Cookie import SimpleCookie - -headers = ['foo=bar; Path=/some/path; Secure', - 'foo2=42; Domain=123; Expires=Thu, 12-Jan-2017 13:55:08 GMT; ' - 'Path=/; Max-Age=dd;'] - -cookies = SimpleCookie('\r\n'.join(headers)) - - -class TestNullableConverters(unittest.TestCase): - def test_to_nullable_string_none(self): - self.assertEqual(to_nullable_string(None, "name"), None) - - def test_to_nullable_string_valid(self): - self.assertEqual(to_nullable_string("dummy", "name"), - protos.NullableString(value="dummy")) - - def test_to_nullable_string_wrong_type(self): - with pytest.raises(Exception) as e: - self.assertEqual(to_nullable_string(123, "name"), - protos.NullableString(value="dummy")) - self.assertEqual(type(e), TypeError) - - def test_to_nullable_bool_none(self): - self.assertEqual(to_nullable_bool(None, "name"), None) - - def test_to_nullable_bool_valid(self): - self.assertEqual(to_nullable_bool(True, "name"), - protos.NullableBool(value=True)) - - def test_to_nullable_bool_wrong_type(self): - with pytest.raises(Exception) as e: - to_nullable_bool("True", "name") - - self.assertEqual(e.type, TypeError) - self.assertEqual(e.value.args[0], - "A 'bool' type was expected instead of a '<class 'str'>' type. " - "Cannot parse value True of 'name'.") - - def test_to_nullable_double_str(self): - self.assertEqual(to_nullable_double("12", "name"), - protos.NullableDouble(value=12)) - - def test_to_nullable_double_empty_str(self): - self.assertEqual(to_nullable_double("", "name"), None) - - def test_to_nullable_double_invalid_str(self): - with pytest.raises(TypeError) as e: - to_nullable_double("222d", "name") - - self.assertEqual(e.type, TypeError) - self.assertEqual(e.value.args[0], - "Cannot parse value 222d of 'name' to float.") - - def test_to_nullable_double_int(self): - self.assertEqual(to_nullable_double(12, "name"), - protos.NullableDouble(value=12)) - - def test_to_nullable_double_float(self): - self.assertEqual(to_nullable_double(12.0, "name"), - protos.NullableDouble(value=12)) - - def test_to_nullable_double_none(self): - self.assertEqual(to_nullable_double(None, "name"), None) - - def test_to_nullable_double_wrong_type(self): - with pytest.raises(Exception) as e: - to_nullable_double(object(), "name") - - self.assertIn( - "A 'int' or 'float' type was expected instead of a '<class 'object'>' type", - e.value.args[0]) - self.assertEqual(e.type, TypeError) - - def test_to_nullable_timestamp_int(self): - self.assertEqual(to_nullable_timestamp(1000, "datetime"), - protos.NullableTimestamp( - value=Timestamp(seconds=int(1000)))) - - def test_to_nullable_timestamp_datetime(self): - now = datetime.datetime.now() - self.assertEqual(to_nullable_timestamp(now, "datetime"), - protos.NullableTimestamp( - value=Timestamp(seconds=int(now.timestamp())))) - - def test_to_nullable_timestamp_wrong_type(self): - with self.assertRaises(TypeError): - to_nullable_timestamp("now", "datetime") - - def test_to_nullable_timestamp_none(self): - self.assertEqual(to_nullable_timestamp(None, "timestamp"), None) diff --git a/tests/unittests/test_opentelemetry.py b/tests/unittests/test_opentelemetry.py deleted file mode 100644 index b26334bdf..000000000 --- a/tests/unittests/test_opentelemetry.py +++ /dev/null @@ -1,110 +0,0 @@ -import asyncio -import os -import unittest -from unittest.mock import MagicMock, patch - -from tests.unittests.test_dispatcher import FUNCTION_APP_DIRECTORY -from tests.utils import testutils - -from azure_functions_worker import protos - - -class TestOpenTelemetry(unittest.TestCase): - - def setUp(self): - self.loop = 
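The to_nullable_double cases above encode the converter's contract: numeric strings and numbers become NullableDouble, the empty string and None become None, and anything else raises a TypeError naming the offending type. A minimal sketch under those assumptions (the real converter in bindings.nullable_converters may differ):

```python
# Minimal sketch of the to_nullable_double contract pinned by the tests
# above; this is illustrative, not the worker's actual implementation.
from azure_functions_worker import protos


def to_nullable_double(nullable, property_name: str):
    if isinstance(nullable, str):
        if not nullable:
            return None
        try:
            return protos.NullableDouble(value=float(nullable))
        except ValueError:
            raise TypeError(f"Cannot parse value {nullable} of "
                            f"'{property_name}' to float.")
    if isinstance(nullable, (int, float)):
        return protos.NullableDouble(value=float(nullable))
    if nullable is None:
        return None
    raise TypeError(f"A 'int' or 'float' type was expected instead of a "
                    f"'{type(nullable)}' type. Cannot parse value "
                    f"{nullable} of '{property_name}'.")
```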
asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - self.dispatcher = testutils.create_dummy_dispatcher() - - def tearDown(self): - self.loop.close() - - def test_update_opentelemetry_status_import_error(self): - # Patch the built-in import mechanism - with patch('builtins.__import__', side_effect=ImportError): - self.dispatcher.update_opentelemetry_status() - # Verify that _azure_monitor_available is set to False due to ImportError - self.assertFalse(self.dispatcher._azure_monitor_available) - - @patch('builtins.__import__') - def test_update_opentelemetry_status_success( - self, mock_imports): - mock_imports.return_value = MagicMock() - self.dispatcher.update_opentelemetry_status() - self.assertIsNotNone(self.dispatcher._context_api) - self.assertIsNotNone(self.dispatcher._trace_context_propagator) - - @patch('builtins.__import__') - @patch("azure_functions_worker.dispatcher.Dispatcher.update_opentelemetry_status") - def test_initialize_azure_monitor_success( - self, - mock_update_ot, - mock_imports, - ): - mock_imports.return_value = MagicMock() - self.dispatcher.initialize_azure_monitor() - mock_update_ot.assert_called_once() - self.assertTrue(self.dispatcher._azure_monitor_available) - - @patch("azure_functions_worker.dispatcher.Dispatcher.update_opentelemetry_status") - def test_initialize_azure_monitor_import_error( - self, - mock_update_ot, - ): - with patch('builtins.__import__', side_effect=ImportError): - self.dispatcher.initialize_azure_monitor() - mock_update_ot.assert_called_once() - # Verify that _azure_monitor_available is set to False due to ImportError - self.assertFalse(self.dispatcher._azure_monitor_available) - - @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'}) - @patch('builtins.__import__') - def test_init_request_otel_capability_enabled_app_setting( - self, - mock_imports, - ): - mock_imports.return_value = MagicMock() - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Verify that the WorkerOpenTelemetryEnabled capability is set to "true" - capabilities = init_response.worker_init_response.capabilities - self.assertIn("WorkerOpenTelemetryEnabled", capabilities) - self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true") - - @patch("azure_functions_worker.dispatcher.Dispatcher.initialize_azure_monitor") - def test_init_request_otel_capability_disabled_app_setting( - self, - mock_initialize_azmon, - ): - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Azure monitor initialization not called - mock_initialize_azmon.assert_not_called() - - # Verify that the WorkerOpenTelemetryEnabled capability is not set - capabilities = init_response.worker_init_response.capabilities - self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) diff --git a/tests/unittests/test_rpc_messages.py b/tests/unittests/test_rpc_messages.py deleted file mode 100644 index 4e3a6c23d..000000000 --- 
a/tests/unittests/test_rpc_messages.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os -import subprocess -import sys -import tempfile -import typing -import unittest - -from tests.utils import testutils - -from azure_functions_worker import protos -from azure_functions_worker.utils.common import is_python_version - - -class TestGRPC(testutils.AsyncTestCase): - pre_test_env = os.environ.copy() - pre_test_cwd = os.getcwd() - - def _reset_environ(self): - for key, value in self.pre_test_env.items(): - os.environ[key] = value - os.chdir(self.pre_test_cwd) - - async def _verify_environment_reloaded( - self, - test_env: typing.Dict[str, str] = {}, - test_cwd: str = os.getcwd()): - request = protos.FunctionEnvironmentReloadRequest( - environment_variables=test_env, - function_app_directory=test_cwd) - - request_msg = protos.StreamingMessage( - request_id='0', - function_environment_reload_request=request) - - disp = testutils.create_dummy_dispatcher() - - try: - r = await disp._handle__function_environment_reload_request( - request_msg) - status = r.function_environment_reload_response.result.status - exp = r.function_environment_reload_response.result.exception - self.assertEqual(status, protos.StatusResult.Success, - f"Exception in Reload request: {exp}") - - environ_dict = os.environ.copy() - self.assertDictEqual(environ_dict, test_env) - self.assertEqual(os.getcwd(), test_cwd) - - finally: - self._reset_environ() - - async def test_multiple_env_vars_load(self): - test_env = {'TEST_KEY': 'foo', 'HELLO': 'world'} - await self._verify_environment_reloaded(test_env=test_env) - - async def test_empty_env_vars_load(self): - test_env = {} - await self._verify_environment_reloaded(test_env=test_env) - - @unittest.skipIf(sys.platform == 'darwin', - 'MacOS creates the processes specific var folder in ' - '/private filesystem and not in /var like in linux ' - 'systems.') - async def test_changing_current_working_directory(self): - test_cwd = tempfile.gettempdir() - await self._verify_environment_reloaded(test_cwd=test_cwd) - - @unittest.skipIf(sys.platform == 'darwin', - 'MacOS creates the processes specific var folder in ' - '/private filesystem and not in /var like in linux ' - 'systems.') - async def test_reload_env_message(self): - test_env = {'TEST_KEY': 'foo', 'HELLO': 'world'} - test_cwd = tempfile.gettempdir() - await self._verify_environment_reloaded(test_env, test_cwd) - - def _verify_sys_path_import(self, result, expected_output): - path_import_script = os.path.join(testutils.UNIT_TESTS_ROOT, - 'path_import', 'test_path_import.sh') - try: - subprocess.run(['chmod +x ' + path_import_script], shell=True) - - exported_path = ":".join(sys.path) - output = subprocess.check_output( - [path_import_script, result, exported_path], - stderr=subprocess.STDOUT) - decoded_output = output.decode(sys.stdout.encoding).strip() - self.assertTrue(expected_output in decoded_output) - finally: - subprocess.run(['chmod -x ' + path_import_script], shell=True) - self._reset_environ() - - @unittest.skipIf(sys.platform == 'win32', - 'Linux .sh script only works on Linux') - def test_failed_sys_path_import(self): - self._verify_sys_path_import( - 'fail', - "No module named 'test_module'") - - @unittest.skipIf(sys.platform == 'win32', - 'Linux .sh script only works on Linux') - def test_successful_sys_path_import(self): - self._verify_sys_path_import( - 'success', - 'This module was imported!') - - def 
_verify_azure_namespace_import(self, result, expected_output): - print(os.getcwd()) - path_import_script = os.path.join(testutils.UNIT_TESTS_ROOT, - 'azure_namespace_import', - 'test_azure_namespace_import.sh') - try: - subprocess.run(['chmod +x ' + path_import_script], shell=True) - - output = subprocess.check_output( - [path_import_script, result], - stderr=subprocess.STDOUT) - decoded_output = output.decode(sys.stdout.encoding).strip() - self.assertTrue(expected_output in decoded_output, - f"Decoded Output: {decoded_output}") # DNM - finally: - subprocess.run(['chmod -x ' + path_import_script], shell=True) - self._reset_environ() - - @unittest.skipIf(sys.platform == 'win32', - 'Linux .sh script only works on Linux') - @unittest.skip("TODO: fix these tests. Failing with ImportError.") - def test_failed_azure_namespace_import(self): - self._verify_azure_namespace_import( - 'false', - 'module_b fails to import') - - @unittest.skipIf(sys.platform == 'win32', - 'Linux .sh script only works on Linux') - @unittest.skipIf( - is_python_version('3.10'), - 'In Python 3.10, isolate worker dependencies is turned on by default.' - ' Reloading all customer dependencies on specialization is a must.' - ' This partially reloading namespace feature is no longer needed.' - ) - @unittest.skip("TODO: fix these tests. Failing with ImportError.") - def test_successful_azure_namespace_import(self): - self._verify_azure_namespace_import( - 'true', - 'module_b is imported') diff --git a/tests/unittests/test_script_file_name.py b/tests/unittests/test_script_file_name.py deleted file mode 100644 index 24327249a..000000000 --- a/tests/unittests/test_script_file_name.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import os - -from tests.utils import testutils - -from azure_functions_worker.constants import ( - PYTHON_SCRIPT_FILE_NAME, - PYTHON_SCRIPT_FILE_NAME_DEFAULT, -) - -DEFAULT_SCRIPT_FILE_NAME_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'file_name_functions' / \ - 'default_file_name' - -NEW_SCRIPT_FILE_NAME_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'file_name_functions' / \ - 'new_file_name' - -INVALID_SCRIPT_FILE_NAME_DIR = testutils.UNIT_TESTS_FOLDER / \ - 'file_name_functions' / \ - 'invalid_file_name' - - -class TestDefaultScriptFileName(testutils.WebHostTestCase): - """ - Tests for default file name - """ - - @classmethod - def setUpClass(cls): - os.environ["PYTHON_SCRIPT_FILE_NAME"] = "function_app.py" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - # Remove the PYTHON_SCRIPT_FILE_NAME environment variable - os.environ.pop('PYTHON_SCRIPT_FILE_NAME') - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return DEFAULT_SCRIPT_FILE_NAME_DIR - - def test_default_file_name(self): - """ - Test the default file name - """ - self.assertIsNotNone(os.environ.get(PYTHON_SCRIPT_FILE_NAME)) - self.assertEqual(os.environ.get(PYTHON_SCRIPT_FILE_NAME), - PYTHON_SCRIPT_FILE_NAME_DEFAULT) - - -class TestNewScriptFileName(testutils.WebHostTestCase): - """ - Tests for changed file name - """ - - @classmethod - def setUpClass(cls): - os.environ["PYTHON_SCRIPT_FILE_NAME"] = "test.py" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - # Remove the PYTHON_SCRIPT_FILE_NAME environment variable - os.environ.pop('PYTHON_SCRIPT_FILE_NAME') - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return NEW_SCRIPT_FILE_NAME_DIR - - def test_new_file_name(self): - """ - Test the new file name - """ - self.assertIsNotNone(os.environ.get(PYTHON_SCRIPT_FILE_NAME)) - self.assertEqual(os.environ.get(PYTHON_SCRIPT_FILE_NAME), - 'test.py') - - -class TestInvalidScriptFileName(testutils.WebHostTestCase): - """ - Tests for invalid file name - """ - - @classmethod - def setUpClass(cls): - os.environ["PYTHON_SCRIPT_FILE_NAME"] = "main" - super().setUpClass() - - @classmethod - def tearDownClass(cls): - # Remove the PYTHON_SCRIPT_FILE_NAME environment variable - os.environ.pop('PYTHON_SCRIPT_FILE_NAME') - super().tearDownClass() - - @classmethod - def get_script_dir(cls): - return INVALID_SCRIPT_FILE_NAME_DIR - - def test_invalid_file_name(self): - """ - Test the invalid file name - """ - self.assertIsNotNone(os.environ.get(PYTHON_SCRIPT_FILE_NAME)) - self.assertEqual(os.environ.get(PYTHON_SCRIPT_FILE_NAME), - 'main') diff --git a/tests/unittests/test_shared_memory_manager.py b/tests/unittests/test_shared_memory_manager.py deleted file mode 100644 index ca3cb6088..000000000 --- a/tests/unittests/test_shared_memory_manager.py +++ /dev/null @@ -1,394 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -import json -import math -import os -import sys -from unittest import skipIf -from unittest.mock import patch - -from azure.functions import meta as bind_meta -from tests.utils import testutils - -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryManager, -) -from azure_functions_worker.constants import ( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, -) -from azure_functions_worker.utils.common import is_envvar_true - - -@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the ' - 'shared memory filesystems and thus skipping' - ' these tests for the time being') -class TestSharedMemoryManager(testutils.SharedMemoryTestCase): - """ - Tests for SharedMemoryManager. - """ - def setUp(self): - env = os.environ.copy() - env['FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED'] = "true" - self.mock_environ = patch.dict('os.environ', env) - self.mock_sys_module = patch.dict('sys.modules', sys.modules.copy()) - self.mock_sys_path = patch('sys.path', sys.path.copy()) - self.mock_environ.start() - self.mock_sys_module.start() - self.mock_sys_path.start() - - def tearDown(self): - self.mock_sys_path.stop() - self.mock_sys_module.stop() - self.mock_environ.stop() - - def test_is_enabled(self): - """ - Verify that when the AppSetting is enabled, SharedMemoryManager is - enabled. - """ - - # Make sure shared memory data transfer is enabled - was_shmem_env_true = is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - manager = SharedMemoryManager() - self.assertTrue(manager.is_enabled()) - # Restore the env variable to original value - if not was_shmem_env_true: - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - - def test_is_disabled(self): - """ - Verify that when the AppSetting is disabled, SharedMemoryManager is - disabled. - """ - # Make sure shared memory data transfer is disabled - was_shmem_env_true = is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - manager = SharedMemoryManager() - self.assertFalse(manager.is_enabled()) - # Restore the env variable to original value - if was_shmem_env_true: - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - - def test_bytes_input_support(self): - """ - Verify that the given input is supported by SharedMemoryManager to be - transferred over shared memory. - The input is bytes. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - bytes_datum = bind_meta.Datum(type='bytes', value=content) - is_supported = manager.is_supported(bytes_datum) - self.assertTrue(is_supported) - - def test_string_input_support(self): - """ - Verify that the given input is supported by SharedMemoryManager to be - transferred over shared memory. - The input is string. 
- """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = math.floor(content_size / consts.SIZE_OF_CHAR_BYTES) - content = self.get_random_string(num_chars) - bytes_datum = bind_meta.Datum(type='string', value=content) - is_supported = manager.is_supported(bytes_datum) - self.assertTrue(is_supported) - - def test_int_input_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is int. - """ - manager = SharedMemoryManager() - datum = bind_meta.Datum(type='int', value=1) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_double_input_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is double. - """ - manager = SharedMemoryManager() - datum = bind_meta.Datum(type='double', value=1.0) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_json_input_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is json. - """ - manager = SharedMemoryManager() - content = { - 'name': 'foo', - 'val': 'bar' - } - datum = bind_meta.Datum(type='json', value=json.dumps(content)) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_collection_string_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is collection_string. - """ - manager = SharedMemoryManager() - content = ['foo', 'bar'] - datum = bind_meta.Datum(type='collection_string', value=content) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_collection_bytes_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is collection_bytes. - """ - manager = SharedMemoryManager() - content = [b'x01', b'x02'] - datum = bind_meta.Datum(type='collection_bytes', value=content) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_collection_double_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is collection_double. - """ - manager = SharedMemoryManager() - content = [1.0, 2.0] - datum = bind_meta.Datum(type='collection_double', value=content) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_collection_sint64_unsupported(self): - """ - Verify that the given input is unsupported by SharedMemoryManager. - This input is collection_sint64. - """ - manager = SharedMemoryManager() - content = [1, 2] - datum = bind_meta.Datum(type='collection_sint64', value=content) - is_supported = manager.is_supported(datum) - self.assertFalse(is_supported) - - def test_large_invalid_bytes_input_support(self): - """ - Verify that the given input is NOT supported by SharedMemoryManager to - be transfered over shared memory. - The input is bytes of larger than the allowed size. 
- """ - manager = SharedMemoryManager() - content_size = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - # Not using get_random_bytes to avoid slowing down for creating a large - # random input - content = b'x01' * content_size - bytes_datum = bind_meta.Datum(type='bytes', value=content) - is_supported = manager.is_supported(bytes_datum) - self.assertFalse(is_supported) - - def test_small_invalid_bytes_input_support(self): - """ - Verify that the given input is NOT supported by SharedMemoryManager to - be transfered over shared memory. - The input is bytes of smaller than the allowed size. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER - 10 - content = self.get_random_bytes(content_size) - bytes_datum = bind_meta.Datum(type='bytes', value=content) - is_supported = manager.is_supported(bytes_datum) - self.assertFalse(is_supported) - - def test_large_invalid_string_input_support(self): - """ - Verify that the given input is NOT supported by SharedMemoryManager to - be transfered over shared memory. - The input is string of larger than the allowed size. - """ - manager = SharedMemoryManager() - content_size = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = math.floor(content_size / consts.SIZE_OF_CHAR_BYTES) - # Not using get_random_string to avoid slowing down for creating a large - # random input - content = 'a' * num_chars - string_datum = bind_meta.Datum(type='string', value=content) - is_supported = manager.is_supported(string_datum) - self.assertFalse(is_supported) - - def test_small_invalid_string_input_support(self): - """ - Verify that the given input is NOT supported by SharedMemoryManager to - be transfered over shared memory. - The input is string of smaller than the allowed size. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER - 10 - num_chars = math.floor(content_size / consts.SIZE_OF_CHAR_BYTES) - content = self.get_random_string(num_chars) - string_datum = bind_meta.Datum(type='string', value=content) - is_supported = manager.is_supported(string_datum) - self.assertFalse(is_supported) - - def test_put_bytes(self): - """ - Verify that the given input was successfully put into shared memory. - The input is bytes. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - shared_mem_meta = manager.put_bytes(content) - self.assertIsNotNone(shared_mem_meta) - self.assertTrue(self.is_valid_uuid(shared_mem_meta.mem_map_name)) - self.assertEqual(content_size, shared_mem_meta.count_bytes) - free_success = manager.free_mem_map(shared_mem_meta.mem_map_name) - self.assertTrue(free_success) - - def test_invalid_put_bytes(self): - """ - Attempt to put bytes using an invalid input and verify that it fails. - """ - manager = SharedMemoryManager() - shared_mem_meta = manager.put_bytes(None) - self.assertIsNone(shared_mem_meta) - - def test_get_bytes(self): - """ - Verify that the output object was successfully gotten from shared - memory. - The output is bytes. 
- """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - shared_mem_meta = manager.put_bytes(content) - mem_map_name = shared_mem_meta.mem_map_name - num_bytes_written = shared_mem_meta.count_bytes - read_content = manager.get_bytes(mem_map_name, offset=0, - count=num_bytes_written) - self.assertEqual(content, read_content) - free_success = manager.free_mem_map(mem_map_name) - self.assertTrue(free_success) - - def test_put_string(self): - """ - Verify that the given input was successfully put into shared memory. - The input is string. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = math.floor(content_size / consts.SIZE_OF_CHAR_BYTES) - content = self.get_random_string(num_chars) - expected_size = len(content.encode('utf-8')) - shared_mem_meta = manager.put_string(content) - self.assertIsNotNone(shared_mem_meta) - self.assertTrue(self.is_valid_uuid(shared_mem_meta.mem_map_name)) - self.assertEqual(expected_size, shared_mem_meta.count_bytes) - free_success = manager.free_mem_map(shared_mem_meta.mem_map_name) - self.assertTrue(free_success) - - def test_invalid_put_string(self): - """ - Attempt to put a string using an invalid input and verify that it fails. - """ - manager = SharedMemoryManager() - shared_mem_meta = manager.put_string(None) - self.assertIsNone(shared_mem_meta) - - def test_get_string(self): - """ - Verify that the output object was successfully gotten from shared - memory. - The output is string. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - num_chars = math.floor(content_size / consts.SIZE_OF_CHAR_BYTES) - content = self.get_random_string(num_chars) - shared_mem_meta = manager.put_string(content) - mem_map_name = shared_mem_meta.mem_map_name - num_bytes_written = shared_mem_meta.count_bytes - read_content = manager.get_string(mem_map_name, offset=0, - count=num_bytes_written) - self.assertEqual(content, read_content) - free_success = manager.free_mem_map(mem_map_name) - self.assertTrue(free_success) - - def test_allocated_mem_maps(self): - """ - Verify that the SharedMemoryManager is tracking the shared memory maps - it has allocated after put operations. - Verify that those shared memory maps are freed and no longer tracked - after attempting to free them. - """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - shared_mem_meta = manager.put_bytes(content) - self.assertIsNotNone(shared_mem_meta) - mem_map_name = shared_mem_meta.mem_map_name - is_mem_map_found = mem_map_name in manager.allocated_mem_maps - self.assertTrue(is_mem_map_found) - self.assertEqual(1, len(manager.allocated_mem_maps.keys())) - free_success = manager.free_mem_map(mem_map_name) - self.assertTrue(free_success) - is_mem_map_found = mem_map_name in manager.allocated_mem_maps - self.assertFalse(is_mem_map_found) - self.assertEqual(0, len(manager.allocated_mem_maps.keys())) - - def test_do_not_free_resources_on_dispose(self): - """ - Verify that when the allocated shared memory maps are freed, - their backing resources are not freed. - Note: The shared memory map should no longer be tracked by the - SharedMemoryManager, though. 
- """ - manager = SharedMemoryManager() - content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10 - content = self.get_random_bytes(content_size) - shared_mem_meta = manager.put_bytes(content) - self.assertIsNotNone(shared_mem_meta) - mem_map_name = shared_mem_meta.mem_map_name - is_mem_map_found = mem_map_name in manager.allocated_mem_maps - self.assertTrue(is_mem_map_found) - self.assertEqual(1, len(manager.allocated_mem_maps.keys())) - free_success = manager.free_mem_map(mem_map_name, False) - self.assertTrue(free_success) - is_mem_map_found = mem_map_name in manager.allocated_mem_maps - self.assertFalse(is_mem_map_found) - self.assertEqual(0, len(manager.allocated_mem_maps.keys())) - - def test_invalid_put_allocated_mem_maps(self): - """ - Verify that after an invalid put operation, no shared memory maps were - added to the list of allocated/tracked shared memory maps. - i.e. no resources were leaked for invalid operations. - """ - manager = SharedMemoryManager() - shared_mem_meta = manager.put_bytes(None) - self.assertIsNone(shared_mem_meta) - self.assertEqual(0, len(manager.allocated_mem_maps.keys())) - - def test_invalid_free_mem_map(self): - """ - Attempt to free a shared memory map that does not exist in the list of - allocated/tracked shared memory maps and verify that it fails. - """ - manager = SharedMemoryManager() - mem_map_name = self.get_new_mem_map_name() - free_success = manager.free_mem_map(mem_map_name) - self.assertFalse(free_success) diff --git a/tests/unittests/test_shared_memory_map.py b/tests/unittests/test_shared_memory_map.py deleted file mode 100644 index ecaeaacc0..000000000 --- a/tests/unittests/test_shared_memory_map.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -import os -import sys -import unittest -from unittest import skipIf - -from tests.utils import testutils - -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryException, - SharedMemoryMap, -) - - -@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' - 'shared memory filesystems and thus skipping' - ' these tests for the time being') -class TestSharedMemoryMap(testutils.SharedMemoryTestCase): - """ - Tests for SharedMemoryMap. - """ - def test_init(self): - """ - Verify the initialization of a SharedMemoryMap. - """ - mem_map_name = self.get_new_mem_map_name() - mem_map_size = 1024 - mem_map = self.file_accessor.create_mem_map(mem_map_name, mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - self.assertIsNotNone(shared_mem_map) - dispose_status = shared_mem_map.dispose() - self.assertTrue(dispose_status) - - def test_init_with_invalid_inputs(self): - """ - Attempt to initialize a SharedMemoryMap from invalid inputs (name and - mmap) and verify that an SharedMemoryException is raised. 
- """ - inv_mem_map_name = None - mem_map_name = self.get_new_mem_map_name() - mem_map_size = 1024 - mem_map = self.file_accessor.create_mem_map(mem_map_name, mem_map_size) - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - SharedMemoryMap(self.file_accessor, inv_mem_map_name, mem_map) - inv_mem_map_name = '' - with self.assertRaisesRegex(SharedMemoryException, 'Invalid name'): - SharedMemoryMap(self.file_accessor, inv_mem_map_name, mem_map) - with self.assertRaisesRegex(SharedMemoryException, - 'Invalid memory map'): - SharedMemoryMap(self.file_accessor, mem_map_name, None) - - def test_put_bytes(self): - """ - Create a SharedMemoryMap and write bytes to it. - """ - for content_size in [1, 10, 1024, 2 * 1024 * 1024, 20 * 1024 * 1024]: - mem_map_name = self.get_new_mem_map_name() - mem_map_size = content_size + consts.CONTENT_HEADER_TOTAL_BYTES - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - content = self.get_random_bytes(content_size) - num_bytes_written = shared_mem_map.put_bytes(content) - self.assertEqual(content_size, num_bytes_written) - dispose_status = shared_mem_map.dispose() - self.assertTrue(dispose_status) - - def test_get_bytes(self): - """ - Create a SharedMemoryMap, write bytes to it and then read them back. - Verify that the bytes written and read match. - """ - for content_size in [1, 10, 1024, 2 * 1024 * 1024, 20 * 1024 * 1024]: - mem_map_name = self.get_new_mem_map_name() - mem_map_size = content_size + consts.CONTENT_HEADER_TOTAL_BYTES - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - content = self.get_random_bytes(content_size) - num_bytes_written = shared_mem_map.put_bytes(content) - self.assertEqual(content_size, num_bytes_written) - read_content = shared_mem_map.get_bytes() - self.assertEqual(content, read_content) - dispose_status = shared_mem_map.dispose() - self.assertTrue(dispose_status) - - def test_put_bytes_more_than_capacity(self): - """ - Attempt to put more bytes into the created SharedMemoryMap than the - size with which it was created. Verify that an SharedMemoryException is - raised. - """ - mem_map_name = self.get_new_mem_map_name() - mem_map_size = 1024 + consts.CONTENT_HEADER_TOTAL_BYTES - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - # Attempt to write more bytes than the size of the memory map we created - # earlier (1024). - content_size = 2048 - content = self.get_random_bytes(content_size) - with self.assertRaisesRegex(ValueError, 'out of range'): - shared_mem_map.put_bytes(content) - dispose_status = shared_mem_map.dispose() - self.assertTrue(dispose_status) - - @unittest.skipIf(os.name == 'nt', - 'Windows will create an mmap if one does not exist') - def test_dispose_without_delete_file(self): - """ - Dispose a SharedMemoryMap without making it dispose the backing file - resources (on Unix). Verify that the same memory map can be opened again - as the backing file was still present. 
- """ - mem_map_name = self.get_new_mem_map_name() - mem_map_size = 1024 + consts.CONTENT_HEADER_TOTAL_BYTES - mem_map = self.file_accessor.create_mem_map(mem_map_name, - mem_map_size) - shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name, - mem_map) - # Close the memory map but do not delete the backing file - dispose_status = shared_mem_map.dispose(is_delete_file=False) - self.assertTrue(dispose_status) - # Attempt to open the memory map again, it should still open since the - # backing file is present - mem_map_op = self.file_accessor.open_mem_map(mem_map_name, mem_map_size) - self.assertIsNotNone(mem_map_op) - delete_status = \ - self.file_accessor.delete_mem_map(mem_map_name, mem_map_op) - self.assertTrue(delete_status) diff --git a/tests/unittests/test_third_party_http_functions.py b/tests/unittests/test_third_party_http_functions.py deleted file mode 100644 index 7dd57e88d..000000000 --- a/tests/unittests/test_third_party_http_functions.py +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License -import filecmp -import os -import pathlib -import re -import typing -import base64 -import sys - -from unittest import skipIf -from unittest.mock import patch - -from tests.utils import testutils -from tests.utils.testutils import UNIT_TESTS_ROOT - -HOST_JSON_TEMPLATE = """\ -{ - "version": "2.0", - "logging": { - "logLevel": { - "default": "Trace" - } - }, - "extensions": { - "http": { - "routePrefix": "" - } - }, - "functionTimeout": "00:05:00" -} -""" - - -class ThirdPartyHttpFunctionsTestBase: - class TestThirdPartyHttpFunctions(testutils.WebHostTestCase): - - @classmethod - def setUpClass(cls): - host_json = cls.get_script_dir() / 'host.json' - with open(host_json, 'w+') as f: - f.write(HOST_JSON_TEMPLATE) - os_environ = os.environ.copy() - # Turn on feature flag - os_environ['AzureWebJobsFeatureFlags'] = 'EnableWorkerIndexing' - cls._patch_environ = patch.dict('os.environ', os_environ) - cls._patch_environ.start() - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - cls._patch_environ.stop() - - @classmethod - def get_script_dir(cls): - pass - - def test_debug_logging(self): - r = self.webhost.request('GET', 'debug_logging', no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-debug') - - def check_log_debug_logging(self, host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging error', host_out) - self.assertNotIn('logging debug', host_out) - - def test_debug_with_user_logging(self): - r = self.webhost.request('GET', 'debug_user_logging', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-user-debug') - - def check_log_debug_with_user_logging(self, - host_out: typing.List[str]): - self.assertIn('logging info', host_out) - self.assertIn('logging warning', host_out) - self.assertIn('logging debug', host_out) - self.assertIn('logging error', host_out) - - @testutils.retryable_test(3, 5) - def test_print_logging_no_flush(self): - r = self.webhost.request('GET', 'print_logging?message=Secret42', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - @testutils.retryable_test(3, 5) - def check_log_print_logging_no_flush(self, host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - @testutils.retryable_test(3, 5) - def 
test_print_logging_with_flush(self): - r = self.webhost.request('GET', - 'print_logging?flush=true&message' - '=Secret42', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - @testutils.retryable_test(3, 5) - def check_log_print_logging_with_flush(self, - host_out: typing.List[str]): - self.assertIn('Secret42', host_out) - - def test_print_to_console_stdout(self): - r = self.webhost.request('GET', - 'print_logging?console=true&message' - '=Secret42', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - def check_log_print_to_console_stdout(self, - host_out: typing.List[str]): - # System logs stdout now exist in host_out - self.assertIn('Secret42', host_out) - - @skipIf(sys.version_info < (3, 9, 0), - "Skip the tests for Python 3.8 and below") - def test_print_to_console_stderr(self): - r = self.webhost.request('GET', 'print_logging?console=true' - '&message=Secret42&is_stderr=true', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-print-logging') - - def check_log_print_to_console_stderr(self, - host_out: typing.List[str], ): - # System logs stderr now exist in host_out - self.assertIn('Secret42', host_out) - - def test_return_http_no_body(self): - r = self.webhost.request('GET', 'return_http_no_body', - no_prefix=True) - self.assertEqual(r.text, '') - self.assertEqual(r.status_code, 200) - - def test_unhandled_error(self): - r = self.webhost.request('GET', 'unhandled_error', no_prefix=True) - self.assertEqual(r.status_code, 500) - # https://github.com/Azure/azure-functions-host/issues/2706 - # self.assertIn('ZeroDivisionError', r.text) - - def check_log_unhandled_error(self, - host_out: typing.List[str]): - r = re.compile(".*ZeroDivisionError: division by zero.*") - error_log = list(filter(r.match, host_out)) - self.assertGreaterEqual(len(error_log), 1) - - def test_unhandled_unserializable_error(self): - r = self.webhost.request( - 'GET', 'unhandled_unserializable_error', no_prefix=True) - self.assertEqual(r.status_code, 500) - - def test_unhandled_urllib_error(self): - r = self.webhost.request( - 'GET', 'unhandled_urllib_error', - params={'img': 'http://example.com/nonexistent.jpg'}, - no_prefix=True) - self.assertEqual(r.status_code, 500) - - -class TestAsgiHttpFunctions( - ThirdPartyHttpFunctionsTestBase.TestThirdPartyHttpFunctions): - @classmethod - def get_script_dir(cls): - return UNIT_TESTS_ROOT / 'third_party_http_functions' / 'stein' / \ - 'asgi_function' - - def test_hijack_current_event_loop(self): - r = self.webhost.request('GET', 'hijack_current_event_loop', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, 'OK-hijack-current-event-loop') - - def check_log_hijack_current_event_loop(self, - host_out: typing.List[str]): - # User logs should exist in host_out - self.assertIn('parallelly_print', host_out) - self.assertIn('parallelly_log_info at root logger', host_out) - self.assertIn('parallelly_log_warning at root logger', host_out) - self.assertIn('parallelly_log_error at root logger', host_out) - self.assertIn('parallelly_log_exception at root logger', - host_out) - self.assertIn('parallelly_log_custom at custom_logger', host_out) - self.assertIn('callsoon_log', host_out) - - # System logs now exist in host_out - self.assertIn('parallelly_log_system at disguised_logger', - host_out) - - def test_raw_body_bytes(self): - parent_dir = pathlib.Path(__file__).parent.parent - 
image_file = parent_dir / 'unittests/resources/functions.png' - with open(image_file, 'rb') as image: - img = image.read() - encoded_image = base64.b64encode(img).decode('utf-8') - html_img_tag = \ - f'<img src="data:image/png;base64,{encoded_image}" alt="PNG Image">' # noqa - sanitized_img_len = len(html_img_tag) - r = self.webhost.request('POST', 'raw_body_bytes', data=img, - no_prefix=True) - - received_body_len = int(r.headers['body-len']) - self.assertEqual(received_body_len, sanitized_img_len) - - encoded_image_data = encoded_image.split(",")[0] - body = base64.b64decode(encoded_image_data) - try: - received_img_file = parent_dir / 'received_img.png' - with open(received_img_file, 'wb') as received_img: - received_img.write(body) - self.assertTrue(filecmp.cmp(received_img_file, image_file)) - finally: - if (os.path.exists(received_img_file)): - os.remove(received_img_file) - - -class TestWsgiHttpFunctions( - ThirdPartyHttpFunctionsTestBase.TestThirdPartyHttpFunctions): - @classmethod - def get_script_dir(cls): - return UNIT_TESTS_ROOT / 'third_party_http_functions' / 'stein' / \ - 'wsgi_function' - - def test_return_http_redirect(self): - r = self.webhost.request('GET', 'return_http_redirect', - no_prefix=True) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.text, '<h1>Hello World™</h1>
') - - r = self.webhost.request('GET', 'return_http_redirect', - allow_redirects=False, no_prefix=True) - self.assertEqual(r.status_code, 302) diff --git a/tests/unittests/test_types.py b/tests/unittests/test_types.py deleted file mode 100644 index 963f26914..000000000 --- a/tests/unittests/test_types.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import unittest - -from azure import functions as azf -from azure.functions import http as bind_http -from azure.functions import meta as bind_meta - -from azure_functions_worker import protos -from azure_functions_worker.bindings import datumdef - - -class MockMBD: - def __init__(self, version: str, source: str, - content_type: str, content: str): - self.version = version - self.source = source - self.content_type = content_type - self.content = content - - -class TestFunctions(unittest.TestCase): - - def test_http_request_bytes(self): - r = bind_http.HttpRequest( - 'get', - 'http://example.com/abc?a=1', - headers=dict(aaa='zzz', bAb='xYz'), - params=dict(a='b'), - route_params={'route': 'param'}, - body_type='bytes', - body=b'abc') - - self.assertEqual(r.method, 'GET') - self.assertEqual(r.url, 'http://example.com/abc?a=1') - self.assertEqual(r.params, {'a': 'b'}) - self.assertEqual(r.route_params, {'route': 'param'}) - - with self.assertRaises(TypeError): - r.params['a'] = 'z' - - self.assertEqual(r.get_body(), b'abc') - - with self.assertRaisesRegex(ValueError, 'does not contain valid JSON'): - r.get_json() - - h = r.headers - with self.assertRaises(AttributeError): - r.headers = dict() - - self.assertEqual(h['aaa'], 'zzz') - self.assertEqual(h['aaA'], 'zzz') - self.assertEqual(h['bab'], 'xYz') - self.assertEqual(h['BaB'], 'xYz') - - # test that request headers are read-only - with self.assertRaises(TypeError): - h['zzz'] = '123' - - def test_http_request_json(self): - r = bind_http.HttpRequest( - 'POST', - 'http://example.com/abc?a=1', - headers={}, - params={}, - route_params={}, - body_type='json', - body='{"a":1}') - - self.assertEqual(r.method, 'POST') - self.assertEqual(r.url, 'http://example.com/abc?a=1') - self.assertEqual(r.params, {}) - self.assertEqual(r.route_params, {}) - - self.assertEqual(r.get_body(), b'{"a":1}') - self.assertEqual(r.get_json(), {'a': 1}) - - def test_http_response(self): - r = azf.HttpResponse( - 'body™', - status_code=201, - headers=dict(aaa='zzz', bAb='xYz')) - - self.assertEqual(r.status_code, 201) - self.assertEqual(r.get_body(), b'body\xe2\x84\xa2') - - self.assertEqual(r.mimetype, 'text/plain') - self.assertEqual(r.charset, 'utf-8') - - h = r.headers - with self.assertRaises(AttributeError): - r.headers = dict() - - self.assertEqual(h['aaa'], 'zzz') - self.assertEqual(h['aaA'], 'zzz') - self.assertEqual(h['bab'], 'xYz') - self.assertEqual(h['BaB'], 'xYz') - - # test that response headers are mutable - h['zZz'] = '123' - self.assertEqual(h['zzz'], '123') - - -class Converter(bind_meta.InConverter, binding='foo'): - pass - - -class TestTriggerMetadataDecoder(unittest.TestCase): - - def test_scalar_typed_data_decoder_ok(self): - metadata = { - 'int_as_json': bind_meta.Datum(type='json', value='1'), - 'int_as_string': bind_meta.Datum(type='string', value='1'), - 'int_as_int': bind_meta.Datum(type='int', value=1), - 'string_as_json': bind_meta.Datum(type='json', value='"aaa"'), - 'string_as_string': bind_meta.Datum(type='string', value='aaa'), - 'dict_as_json': bind_meta.Datum(type='json', value='{"foo":"bar"}') - } - - 
cases = [ - ('int_as_json', int, 1), - ('int_as_string', int, 1), - ('int_as_int', int, 1), - ('string_as_json', str, 'aaa'), - ('string_as_string', str, 'aaa'), - ('dict_as_json', dict, {'foo': 'bar'}), - ] - - for field, pytype, expected in cases: - with self.subTest(field=field): - value = Converter._decode_trigger_metadata_field( - metadata, field, python_type=pytype) - - self.assertIsInstance(value, pytype) - self.assertEqual(value, expected) - - def test_scalar_typed_data_decoder_not_ok(self): - metadata = { - 'unsupported_type': - bind_meta.Datum(type='bytes', value=b'aaa'), - 'unexpected_json': - bind_meta.Datum(type='json', value='[1, 2, 3]'), - 'unexpected_data': - bind_meta.Datum(type='json', value='"foo"'), - } - - cases = [ - ( - 'unsupported_type', int, ValueError, - "unsupported type of field 'unsupported_type' in " - "trigger metadata: bytes" - ), - ( - 'unexpected_json', int, ValueError, - "cannot convert value of field 'unexpected_json' in " - "trigger metadata into int" - ), - ( - 'unexpected_data', int, ValueError, - "cannot convert value of field " - "'unexpected_data' in trigger metadata into int: " - "invalid literal for int" - ), - ( - 'unexpected_data', (int, float), ValueError, - "unexpected value type in field " - "'unexpected_data' in trigger metadata: str, " - "expected one of: int, float" - ), - ] - - for field, pytype, exc, msg in cases: - with self.subTest(field=field): - with self.assertRaisesRegex(exc, msg): - Converter._decode_trigger_metadata_field( - metadata, field, python_type=pytype) - - def test_model_binding_data_datum_ok(self): - sample_mbd = MockMBD(version="1.0", - source="AzureStorageBlobs", - content_type="application/json", - content="{\"Connection\":\"python-worker-tests\"," - "\"ContainerName\":\"test-blob\"," - "\"BlobName\":\"test.txt\"}") - - datum: bind_meta.Datum = bind_meta.Datum(value=sample_mbd, - type='model_binding_data') - - self.assertEqual(datum.value, sample_mbd) - self.assertEqual(datum.type, "model_binding_data") - - def test_model_binding_data_td_ok(self): - mock_mbd = protos.TypedData(model_binding_data={'version': '1.0'}) - mbd_datum = datumdef.Datum.from_typed_data(mock_mbd) - - self.assertEqual(mbd_datum.type, 'model_binding_data') diff --git a/tests/unittests/test_typing_inspect.py b/tests/unittests/test_typing_inspect.py deleted file mode 100644 index 4f01e4c73..000000000 --- a/tests/unittests/test_typing_inspect.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-# Imported from https://github.com/ilevkivskyi/typing_inspect/blob/168fa6f7c5c55f720ce6282727211cf4cf6368f6/test_typing_inspect.py -# Author: Ivan Levkivskyi -# License: MIT - -from typing import ( - Any, - Callable, - ClassVar, - Dict, - Generic, - Iterable, - List, - Mapping, - MutableMapping, - NamedTuple, - Optional, - Sequence, - Tuple, - TypeVar, - Union, -) -from unittest import TestCase, main, skipIf - -from azure_functions_worker._thirdparty.typing_inspect import ( - get_args, - get_generic_bases, - get_generic_type, - get_last_args, - get_last_origin, - get_origin, - get_parameters, - is_callable_type, - is_classvar, - is_generic_type, - is_tuple_type, - is_typevar, - is_union_type, -) - - -class IsUtilityTestCase(TestCase): - def sample_test(self, fun, samples, nonsamples): - for s in samples: - self.assertTrue(fun(s), f"{s} type expected in {samples}") - for s in nonsamples: - self.assertFalse(fun(s), f"{s} type expected in {nonsamples}") - - def test_generic(self): - T = TypeVar('T') - samples = [Generic, Generic[T], Iterable[int], Mapping, - MutableMapping[T, List[int]], Sequence[Union[str, bytes]]] - nonsamples = [int, Union[int, str], Union[int, T], ClassVar[List[int]], - Callable[..., T], ClassVar, Optional, bytes, list] - self.sample_test(is_generic_type, samples, nonsamples) - - def test_callable(self): - samples = [Callable, Callable[..., int], - Callable[[int, int], Iterable[str]]] - nonsamples = [int, type, 42, [], List[int], - Union[callable, Callable[..., int]]] - self.sample_test(is_callable_type, samples, nonsamples) - class MyClass(Callable[[int], int]): - pass - self.assertTrue(is_callable_type(MyClass)) - - def test_tuple(self): - samples = [Tuple, Tuple[str, int], Tuple[Iterable, ...]] - nonsamples = [int, tuple, 42, List[int], NamedTuple('N', [('x', int)])] - self.sample_test(is_tuple_type, samples, nonsamples) - class MyClass(Tuple[str, int]): - pass - self.assertTrue(is_tuple_type(MyClass)) - - def test_union(self): - T = TypeVar('T') - S = TypeVar('S') - samples = [Union, Union[T, int], Union[int, Union[T, S]]] - nonsamples = [int, Union[int, int], [], Iterable[Any]] - self.sample_test(is_union_type, samples, nonsamples) - - def test_typevar(self): - T = TypeVar('T') - S_co = TypeVar('S_co', covariant=True) - samples = [T, S_co] - nonsamples = [int, Union[T, int], Union[T, S_co], type, ClassVar[int]] - self.sample_test(is_typevar, samples, nonsamples) - - def test_classvar(self): - T = TypeVar('T') - samples = [ClassVar, ClassVar[int], ClassVar[List[T]]] - nonsamples = [int, 42, Iterable, List[int], type, T] - self.sample_test(is_classvar, samples, nonsamples) - - -class GetUtilityTestCase(TestCase): - - def test_origin(self): - T = TypeVar('T') - self.assertEqual(get_origin(int), None) - self.assertEqual(get_origin(ClassVar[int]), None) - self.assertEqual(get_origin(Generic), Generic) - self.assertEqual(get_origin(Generic[T]), Generic) - self.assertEqual(get_origin(List[Tuple[T, T]][int]), list) - - def test_parameters(self): - T = TypeVar('T') - S_co = TypeVar('S_co', covariant=True) - U = TypeVar('U') - self.assertEqual(get_parameters(int), ()) - self.assertEqual(get_parameters(Generic), ()) - self.assertEqual(get_parameters(Union), ()) - self.assertEqual(get_parameters(List[int]), ()) - self.assertEqual(get_parameters(Generic[T]), (T,)) - self.assertEqual(get_parameters(Tuple[List[T], List[S_co]]), (T, S_co)) - self.assertEqual(get_parameters(Union[S_co, Tuple[T, T]][int, U]), (U,)) - self.assertEqual(get_parameters(Mapping[T, 
Tuple[S_co, T]]), (T, S_co)) - - def test_args_evaluated(self): - T = TypeVar('T') - self.assertEqual(get_args(Union[int, Tuple[T, int]][str], evaluate=True), - (int, Tuple[str, int])) - self.assertEqual(get_args(Dict[int, Tuple[T, T]][Optional[int]], evaluate=True), - (int, Tuple[Optional[int], Optional[int]])) - self.assertEqual(get_args(Callable[[], T][int], evaluate=True), ([], int,)) - - def test_generic_type(self): - T = TypeVar('T') - class Node(Generic[T]): pass - self.assertIs(get_generic_type(Node()), Node) - self.assertIs(get_generic_type(Node[int]()), Node[int]) - self.assertIs(get_generic_type(Node[T]()), Node[T],) - self.assertIs(get_generic_type(1), int) - - def test_generic_bases(self): - class MyClass(List[int], Mapping[str, List[int]]): pass - self.assertEqual(get_generic_bases(MyClass), - (List[int], Mapping[str, List[int]])) - self.assertEqual(get_generic_bases(int), ()) - - -if __name__ == '__main__': - main() diff --git a/tests/unittests/test_utilities.py b/tests/unittests/test_utilities.py deleted file mode 100644 index 99b014e09..000000000 --- a/tests/unittests/test_utilities.py +++ /dev/null @@ -1,390 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import os -import pathlib -import sys -import typing -import unittest -from unittest.mock import patch - -from azure_functions_worker.constants import PYTHON_EXTENSIONS_RELOAD_FUNCTIONS -from azure_functions_worker.utils import common, wrappers - -TEST_APP_SETTING_NAME = "TEST_APP_SETTING_NAME" -TEST_FEATURE_FLAG = "APP_SETTING_FEATURE_FLAG" -FEATURE_DEFAULT = 42 - - -class MockFeature: - @wrappers.enable_feature_by(TEST_FEATURE_FLAG) - def mock_feature_enabled(self, output: typing.List[str]) -> str: - result = 'mock_feature_enabled' - output.append(result) - return result - - @wrappers.enable_feature_by(TEST_FEATURE_FLAG, flag_default=True) - def mock_enabled_default_true(self, output: typing.List[str]) -> str: - result = 'mock_enabled_default_true' - output.append(result) - return result - - @wrappers.disable_feature_by(TEST_FEATURE_FLAG) - def mock_feature_disabled(self, output: typing.List[str]) -> str: - result = 'mock_feature_disabled' - output.append(result) - return result - - @wrappers.disable_feature_by(TEST_FEATURE_FLAG, flag_default=True) - def mock_disabled_default_true(self, output: typing.List[str]) -> str: - result = 'mock_disabled_default_true' - output.append(result) - return result - - @wrappers.enable_feature_by(TEST_FEATURE_FLAG, FEATURE_DEFAULT) - def mock_feature_default(self, output: typing.List[str]) -> str: - result = 'mock_feature_default' - output.append(result) - return result - - -class MockMethod: - @wrappers.attach_message_to_exception(ImportError, 'success') - def mock_load_function_success(self): - return True - - @wrappers.attach_message_to_exception(ImportError, 'module_not_found') - def mock_load_function_module_not_found(self): - raise ModuleNotFoundError('MODULE_NOT_FOUND') - - @wrappers.attach_message_to_exception(ImportError, 'import_error') - def mock_load_function_import_error(self): - # ImportError is a subclass of ModuleNotFoundError - raise ImportError('IMPORT_ERROR') - - @wrappers.attach_message_to_exception(ImportError, 'value_error') - def mock_load_function_value_error(self): - # ValueError is not a subclass of ImportError - raise ValueError('VALUE_ERROR') - - -class TestUtilities(unittest.TestCase): - - def setUp(self): - self._dummy_sdk_sys_path = os.path.join( - os.path.dirname(__file__), - 'resources', - 
'mock_azure_functions' - ) - - self.mock_environ = patch.dict('os.environ', os.environ.copy()) - self.mock_sys_module = patch.dict('sys.modules', sys.modules.copy()) - self.mock_sys_path = patch('sys.path', sys.path.copy()) - self.mock_environ.start() - self.mock_sys_module.start() - self.mock_sys_path.start() - - def tearDown(self): - self.mock_sys_path.stop() - self.mock_sys_module.stop() - self.mock_environ.stop() - - def test_is_true_like_accepted(self): - self.assertTrue(common.is_true_like('1')) - self.assertTrue(common.is_true_like('true')) - self.assertTrue(common.is_true_like('T')) - self.assertTrue(common.is_true_like('YES')) - self.assertTrue(common.is_true_like('y')) - - def test_is_true_like_rejected(self): - self.assertFalse(common.is_true_like(None)) - self.assertFalse(common.is_true_like('')) - self.assertFalse(common.is_true_like('secret')) - - def test_is_false_like_accepted(self): - self.assertTrue(common.is_false_like('0')) - self.assertTrue(common.is_false_like('false')) - self.assertTrue(common.is_false_like('F')) - self.assertTrue(common.is_false_like('NO')) - self.assertTrue(common.is_false_like('n')) - - def test_is_false_like_rejected(self): - self.assertFalse(common.is_false_like(None)) - self.assertFalse(common.is_false_like('')) - self.assertFalse(common.is_false_like('secret')) - - def test_is_envvar_true(self): - os.environ[TEST_FEATURE_FLAG] = 'true' - self.assertTrue(common.is_envvar_true(TEST_FEATURE_FLAG)) - - def test_is_envvar_not_true_on_unset(self): - self._unset_feature_flag() - self.assertFalse(common.is_envvar_true(TEST_FEATURE_FLAG)) - - def test_is_envvar_false(self): - os.environ[TEST_FEATURE_FLAG] = 'false' - self.assertTrue(common.is_envvar_false(TEST_FEATURE_FLAG)) - - def test_is_envvar_not_false_on_unset(self): - self._unset_feature_flag() - self.assertFalse(common.is_envvar_true(TEST_FEATURE_FLAG)) - - def test_disable_feature_with_no_feature_flag(self): - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_enabled(output) - self.assertIsNone(result) - self.assertListEqual(output, []) - - def test_disable_feature_with_default_value(self): - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_disabled_default_true(output) - self.assertIsNone(result) - self.assertListEqual(output, []) - - def test_enable_feature_with_feature_flag(self): - feature_flag = TEST_FEATURE_FLAG - os.environ[feature_flag] = '1' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_enabled(output) - self.assertEqual(result, 'mock_feature_enabled') - self.assertListEqual(output, ['mock_feature_enabled']) - - def test_enable_feature_with_default_value(self): - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_enabled_default_true(output) - self.assertEqual(result, 'mock_enabled_default_true') - self.assertListEqual(output, ['mock_enabled_default_true']) - - def test_enable_feature_with_no_rollback_flag(self): - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_disabled(output) - self.assertEqual(result, 'mock_feature_disabled') - self.assertListEqual(output, ['mock_feature_disabled']) - - def test_ignore_disable_default_value_when_set_explicitly(self): - feature_flag = TEST_FEATURE_FLAG - os.environ[feature_flag] = '0' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_disabled_default_true(output) - self.assertEqual(result, 'mock_disabled_default_true') - self.assertListEqual(output, ['mock_disabled_default_true']) - - 
def test_disable_feature_with_rollback_flag(self): - rollback_flag = TEST_FEATURE_FLAG - os.environ[rollback_flag] = '1' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_disabled(output) - self.assertIsNone(result) - self.assertListEqual(output, []) - - def test_enable_feature_with_rollback_flag_is_false(self): - rollback_flag = TEST_FEATURE_FLAG - os.environ[rollback_flag] = 'false' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_disabled(output) - self.assertEqual(result, 'mock_feature_disabled') - self.assertListEqual(output, ['mock_feature_disabled']) - - def test_ignore_enable_default_value_when_set_explicitly(self): - feature_flag = TEST_FEATURE_FLAG - os.environ[feature_flag] = '0' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_enabled_default_true(output) - self.assertIsNone(result) - self.assertListEqual(output, []) - - def test_fail_to_enable_feature_return_default_value(self): - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_default(output) - self.assertEqual(result, FEATURE_DEFAULT) - self.assertListEqual(output, []) - - def test_disable_feature_with_false_flag_return_default_value(self): - feature_flag = TEST_FEATURE_FLAG - os.environ[feature_flag] = 'false' - mock_feature = MockFeature() - output = [] - result = mock_feature.mock_feature_default(output) - self.assertEqual(result, FEATURE_DEFAULT) - self.assertListEqual(output, []) - - def test_exception_message_should_not_be_extended_on_success(self): - mock_method = MockMethod() - result = mock_method.mock_load_function_success() - self.assertTrue(result) - - def test_exception_message_should_be_extended_on_subexception(self): - mock_method = MockMethod() - with self.assertRaises(Exception) as e: - mock_method.mock_load_function_module_not_found() - self.assertIn('module_not_found', e.msg) - self.assertEqual(type(e), ModuleNotFoundError) - - def test_exception_message_should_be_extended_on_exact_exception(self): - mock_method = MockMethod() - with self.assertRaises(Exception) as e: - mock_method.mock_load_function_module_not_found() - self.assertIn('import_error', e.msg) - self.assertEqual(type(e), ImportError) - - def test_exception_message_should_not_be_extended_on_other_exception(self): - mock_method = MockMethod() - with self.assertRaises(Exception) as e: - mock_method.mock_load_function_value_error() - self.assertNotIn('import_error', e.msg) - self.assertEqual(type(e), ValueError) - - def test_app_settings_not_set_should_return_none(self): - app_setting = common.get_app_setting(TEST_APP_SETTING_NAME) - self.assertIsNone(app_setting) - - def test_app_settings_should_return_value(self): - # Set application setting by os.setenv - os.environ.update({TEST_APP_SETTING_NAME: '42'}) - - # Try using utility to acquire application setting - app_setting = common.get_app_setting(TEST_APP_SETTING_NAME) - self.assertEqual(app_setting, '42') - - def test_app_settings_not_set_should_return_default_value(self): - app_setting = common.get_app_setting(TEST_APP_SETTING_NAME, 'default') - self.assertEqual(app_setting, 'default') - - def test_app_settings_should_ignore_default_value(self): - # Set application setting by os.setenv - os.environ.update({TEST_APP_SETTING_NAME: '42'}) - - # Try using utility to acquire application setting - app_setting = common.get_app_setting(TEST_APP_SETTING_NAME, 'default') - self.assertEqual(app_setting, '42') - - def test_app_settings_should_not_trigger_validator_when_not_set(self): 
- def raise_excpt(value: str): - raise Exception('Should not raise on app setting not found') - - common.get_app_setting(TEST_APP_SETTING_NAME, validator=raise_excpt) - - def test_app_settings_return_default_value_when_validation_fail(self): - def parse_int_no_raise(value: str): - try: - int(value) - return True - except ValueError: - return False - - # Set application setting to an invalid value - os.environ.update({TEST_APP_SETTING_NAME: 'invalid'}) - - app_setting = common.get_app_setting( - TEST_APP_SETTING_NAME, - default_value='1', - validator=parse_int_no_raise - ) - - # Because 'invalid' is not an integer, falls back to default value - self.assertEqual(app_setting, '1') - - def test_app_settings_return_setting_value_when_validation_succeed(self): - def parse_int_no_raise(value: str): - try: - int(value) - return True - except ValueError: - return False - - # Set application setting to a valid value - os.environ.update({TEST_APP_SETTING_NAME: '42'}) - - app_setting = common.get_app_setting( - TEST_APP_SETTING_NAME, - default_value='1', - validator=parse_int_no_raise - ) - - # Because '42' is a valid integer, the setting value is used - self.assertEqual(app_setting, '42') - - def test_is_python_version(self): - # Should pass at least 1 test - is_python_version_37 = common.is_python_version('3.7') - is_python_version_38 = common.is_python_version('3.8') - is_python_version_39 = common.is_python_version('3.9') - is_python_version_310 = common.is_python_version('3.10') - is_python_version_311 = common.is_python_version('3.11') - is_python_version_312 = common.is_python_version('3.12') - - self.assertTrue(any([ - is_python_version_37, - is_python_version_38, - is_python_version_39, - is_python_version_310, - is_python_version_311, - is_python_version_312 - ])) - - def test_get_sdk_from_sys_path(self): - """Test if the extension manager can find azure.functions module - """ - module = common.get_sdk_from_sys_path() - self.assertIsNotNone(module.__file__) - - def test_get_sdk_from_sys_path_after_updating_sys_path(self): - """Test if the get_sdk_from_sys_path can find the newer azure.functions - module after updating the sys.path. This is specifically for a scenario - after the dependency manager is switched to customer's path - """ - sys.path.insert(0, self._dummy_sdk_sys_path) - module = common.get_sdk_from_sys_path() - self.assertNotEqual( - os.path.dirname(module.__file__), - os.path.join(pathlib.Path.home(), 'azure', 'functions') - ) - - def test_get_sdk_version(self): - """Test if sdk version can be retrieved correctly - """ - module = common.get_sdk_from_sys_path() - sdk_version = common.get_sdk_version(module) - # e.g. 
1.6.0, 1.7.0b, 1.8.1dev - self.assertRegex(sdk_version, r'\d+\.\d+\.\w+') - - def test_get_sdk_dummy_version(self): - """Test if sdk version can get dummy sdk version - """ - sys.path.insert(0, self._dummy_sdk_sys_path) - module = common.get_sdk_from_sys_path() - sdk_version = common.get_sdk_version(module) - self.assertNotEqual(sdk_version, 'dummy') - - def test_get_sdk_dummy_version_with_flag_enabled(self): - """Test if sdk version can get dummy sdk version - """ - os.environ[PYTHON_EXTENSIONS_RELOAD_FUNCTIONS] = '1' - sys.path.insert(0, self._dummy_sdk_sys_path) - module = common.get_sdk_from_sys_path() - sdk_version = common.get_sdk_version(module) - self.assertEqual(sdk_version, 'dummy') - - def test_valid_script_file_name(self): - file_name = 'test.py' - common.validate_script_file_name(file_name) - - def test_invalid_script_file_name(self): - file_name = 'test' - with self.assertRaises(common.InvalidFileNameError): - common.validate_script_file_name(file_name) - - def _unset_feature_flag(self): - try: - os.environ.pop(TEST_FEATURE_FLAG) - except KeyError: - pass diff --git a/tests/unittests/test_utilities_dependency.py b/tests/unittests/test_utilities_dependency.py deleted file mode 100644 index 432aee750..000000000 --- a/tests/unittests/test_utilities_dependency.py +++ /dev/null @@ -1,784 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import importlib.util -import os -import sys -import unittest -from unittest.mock import patch - -from tests.utils import testutils - -from azure_functions_worker.utils.dependency import DependencyManager - - -class TestDependencyManager(unittest.TestCase): - - def setUp(self): - self._patch_environ = patch.dict('os.environ', os.environ.copy()) - self._patch_sys_path = patch('sys.path', []) - self._patch_importer_cache = patch.dict('sys.path_importer_cache', {}) - self._patch_modules = patch.dict('sys.modules', {}) - self._customer_func_path = os.path.abspath( - os.path.join( - testutils.UNIT_TESTS_ROOT, 'resources', 'customer_func_path' - ) - ) - self._worker_deps_path = os.path.abspath( - os.path.join( - testutils.UNIT_TESTS_ROOT, 'resources', 'worker_deps_path' - ) - ) - self._customer_deps_path = os.path.abspath( - os.path.join( - testutils.UNIT_TESTS_ROOT, 'resources', 'customer_deps_path' - ) - ) - - self._patch_environ.start() - self._patch_sys_path.start() - self._patch_importer_cache.start() - self._patch_modules.start() - - def tearDown(self): - self._patch_environ.stop() - self._patch_sys_path.stop() - self._patch_importer_cache.stop() - self._patch_modules.stop() - DependencyManager.cx_deps_path = '' - DependencyManager.cx_working_dir = '' - DependencyManager.worker_deps_path = '' - - def test_should_not_have_any_paths_initially(self): - self.assertEqual(DependencyManager.cx_deps_path, '') - self.assertEqual(DependencyManager.cx_working_dir, '') - self.assertEqual(DependencyManager.worker_deps_path, '') - - def test_initialize_in_linux_consumption(self): - os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot' - sys.path.extend([ - '/tmp/functions\\standby\\wwwroot', - '/home/site/wwwroot/.python_packages/lib/site-packages', - '/azure-functions-host/workers/python/3.11/LINUX/X64', - '/home/site/wwwroot' - ]) - DependencyManager.initialize() - self.assertEqual( - DependencyManager.cx_deps_path, - '/home/site/wwwroot/.python_packages/lib/site-packages' - ) - self.assertEqual( - DependencyManager.cx_working_dir, - '/home/site/wwwroot', - ) - self.assertEqual( - 
DependencyManager.worker_deps_path, - '/azure-functions-host/workers/python/3.11/LINUX/X64' - ) - - def test_initialize_in_linux_dedicated(self): - os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot' - sys.path.extend([ - '/home/site/wwwroot', - '/home/site/wwwroot/.python_packages/lib/site-packages', - '/azure-functions-host/workers/python/3.11/LINUX/X64' - ]) - DependencyManager.initialize() - self.assertEqual( - DependencyManager.cx_deps_path, - '/home/site/wwwroot/.python_packages/lib/site-packages' - ) - self.assertEqual( - DependencyManager.cx_working_dir, - '/home/site/wwwroot', - ) - self.assertEqual( - DependencyManager.worker_deps_path, - '/azure-functions-host/workers/python/3.11/LINUX/X64' - ) - - def test_initialize_in_windows_core_tools(self): - os.environ['AzureWebJobsScriptRoot'] = 'C:\\FunctionApp' - sys.path.extend([ - 'C:\\Users\\user\\AppData\\Roaming\\npm\\' - 'node_modules\\azure-functions-core-tools\\bin\\' - 'workers\\python\\3.11\\WINDOWS\\X64', - 'C:\\FunctionApp\\.venv38\\lib\\site-packages', - 'C:\\FunctionApp' - ]) - DependencyManager.initialize() - self.assertEqual( - DependencyManager.cx_deps_path, - 'C:\\FunctionApp\\.venv38\\lib\\site-packages' - ) - self.assertEqual( - DependencyManager.cx_working_dir, - 'C:\\FunctionApp', - ) - self.assertEqual( - DependencyManager.worker_deps_path, - 'C:\\Users\\user\\AppData\\Roaming\\npm\\node_modules\\' - 'azure-functions-core-tools\\bin\\workers\\python\\3.11\\WINDOWS' - '\\X64' - ) - - def test_get_cx_deps_path_in_no_script_root(self): - result = DependencyManager._get_cx_deps_path() - self.assertEqual(result, '') - - def test_get_cx_deps_path_in_script_root_no_sys_path(self): - os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot' - result = DependencyManager._get_cx_deps_path() - self.assertEqual(result, '') - - def test_get_cx_deps_path_in_script_root_with_sys_path_linux(self): - # Test for Python 3.7+ Azure Environment - sys.path.append('/home/site/wwwroot/.python_packages/sites/lib/' - 'site-packages/') - os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot' - result = DependencyManager._get_cx_deps_path() - self.assertEqual(result, '/home/site/wwwroot/.python_packages/sites/' - 'lib/site-packages/') - - def test_get_cx_deps_path_in_script_root_with_sys_path_windows(self): - # Test for Windows Core Tools Environment - sys.path.append('C:\\FunctionApp\\sites\\lib\\site-packages') - os.environ['AzureWebJobsScriptRoot'] = 'C:\\FunctionApp' - result = DependencyManager._get_cx_deps_path() - self.assertEqual(result, - 'C:\\FunctionApp\\sites\\lib\\site-packages') - - def test_get_cx_working_dir_no_script_root(self): - result = DependencyManager._get_cx_working_dir() - self.assertEqual(result, '') - - def test_get_cx_working_dir_with_script_root_linux(self): - # Test for Azure Environment - os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot' - result = DependencyManager._get_cx_working_dir() - self.assertEqual(result, '/home/site/wwwroot') - - def test_get_cx_working_dir_with_script_root_windows(self): - # Test for Windows Core Tools Environment - os.environ['AzureWebJobsScriptRoot'] = 'C:\\FunctionApp' - result = DependencyManager._get_cx_working_dir() - self.assertEqual(result, 'C:\\FunctionApp') - - @unittest.skipIf(os.environ.get('VIRTUAL_ENV'), - 'Test is not capable to run in a virtual environment') - def test_get_worker_deps_path_with_no_worker_sys_path(self): - result = DependencyManager._get_worker_deps_path() - azf_spec = importlib.util.find_spec('azure.functions') - 
worker_parent = os.path.abspath( - os.path.join(os.path.dirname(azf_spec.origin), '..', '..') - ) - self.assertEqual(result.lower(), worker_parent.lower()) - - def test_get_worker_deps_path_from_windows_core_tools(self): - # Test for Windows Core Tools Environment - sys.path.append('C:\\Users\\user\\AppData\\Roaming\\npm\\' - 'node_modules\\azure-functions-core-tools\\bin\\' - 'workers\\python\\3.11\\WINDOWS\\X64') - result = DependencyManager._get_worker_deps_path() - self.assertEqual(result, - 'C:\\Users\\user\\AppData\\Roaming\\npm\\' - 'node_modules\\azure-functions-core-tools\\bin\\' - 'workers\\python\\3.11\\WINDOWS\\X64') - - def test_get_worker_deps_path_from_linux_azure_environment(self): - # Test for Azure Environment - sys.path.append('/azure-functions-host/workers/python/3.11/LINUX/X64') - result = DependencyManager._get_worker_deps_path() - self.assertEqual(result, - '/azure-functions-host/workers/python/3.11/LINUX/X64') - - @patch('azure_functions_worker.utils.dependency.importlib.util') - def test_get_worker_deps_path_without_worker_path(self, mock): - # Test when worker path is not provided - mock.find_spec.return_value = None - sys.path.append('/home/site/wwwroot') - result = DependencyManager._get_worker_deps_path() - worker_parent = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', '..') - ) - self.assertEqual(result.lower(), worker_parent.lower()) - - def test_add_to_sys_path_add_to_first(self): - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - self.assertEqual(sys.path[0], self._customer_deps_path) - - def test_add_to_sys_path_add_to_last(self): - DependencyManager._add_to_sys_path(self._customer_deps_path, False) - self.assertEqual(sys.path[-1], self._customer_deps_path) - - def test_add_to_sys_path_no_duplication(self): - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - path_count = len(list(filter( - lambda x: x == self._customer_deps_path, sys.path - ))) - self.assertEqual(path_count, 1) - - def test_add_to_sys_path_import_module(self): - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - import common_module # NoQA - self.assertEqual( - common_module.package_location, - os.path.join(self._customer_deps_path, 'common_module') - ) - - def test_add_to_sys_path_import_namespace_path(self): - """Check if a common_namespace can be loaded after adding its path - into sys.path - """ - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - import common_namespace # NoQA - self.assertEqual(len(common_namespace.__path__), 1) - self.assertEqual( - common_namespace.__path__[0], - os.path.join(self._customer_deps_path, 'common_namespace') - ) - - def test_add_to_sys_path_import_nested_module_in_namespace(self): - """Check if a nested module in a namespace can be imported correctly - """ - DependencyManager._add_to_sys_path(self._customer_deps_path, True) - import common_namespace.nested_module # NoQA - self.assertEqual(common_namespace.nested_module.__version__, 'customer') - - def test_add_to_sys_path_disallow_module_resolution_from_namespace(self): - """The standard Python import mechanism does not allow deriving a - specific module from a namespace without the import statement, e.g. 
-
-        import azure
-        azure.functions  # Error: module 'azure' has no attribute 'functions'
-        """
-        DependencyManager._add_to_sys_path(self._customer_deps_path, True)
-        import common_namespace  # NoQA
-        with self.assertRaises(AttributeError):
-            common_namespace.nested_module
-
-    def test_add_to_sys_path_allow_resolution_from_import_statement(self):
-        """The standard Python import mechanism allows deriving a specific
-        module in an import statement, e.g.
-
-        from azure import functions  # OK
-        """
-        DependencyManager._add_to_sys_path(self._customer_deps_path, True)
-        from common_namespace import nested_module  # NoQA
-        self.assertEqual(nested_module.__version__, 'customer')
-
-    def test_add_to_sys_path_picks_latest_module_in_same_namespace(self):
-        """If a Linux Consumption function app is switching from placeholder
-        to the specialized customer's app, the nested module should be picked
-        up from the most recently imported namespace.
-        """
-        DependencyManager._add_to_sys_path(self._worker_deps_path, True)
-        from common_namespace import nested_module  # NoQA
-        self.assertEqual(nested_module.__version__, 'worker')
-
-        # Now switch to customer's function app
-        DependencyManager._remove_from_sys_path(self._worker_deps_path)
-        DependencyManager._add_to_sys_path(self._customer_deps_path, True)
-        from common_namespace import nested_module  # NoQA
-        self.assertEqual(nested_module.__version__, 'customer')
-
-    def test_add_to_sys_path_importer_cache(self):
-        DependencyManager._add_to_sys_path(self._customer_deps_path, True)
-        import common_module  # NoQA
-        self.assertIn(self._customer_deps_path, sys.path_importer_cache)
-
-    def test_add_to_sys_path_importer_cache_reloaded(self):
-        # First import the common module from worker_deps_path
-        DependencyManager._add_to_sys_path(self._worker_deps_path, True)
-        import common_module  # NoQA
-        self.assertIn(self._worker_deps_path, sys.path_importer_cache)
-        self.assertEqual(
-            common_module.package_location,
-            os.path.join(self._worker_deps_path, 'common_module')
-        )
-
-        # Mock that the customer's scripts are running in a different module
-        # (e.g. HttpTrigger/__init__.py)
-        del sys.modules['common_module']
-        del common_module
-
-        # Import the common module from customer_deps_path
-        # Customer should only see their own module
-        DependencyManager._add_to_sys_path(self._customer_deps_path, True)
-        import common_module  # NoQA
-        self.assertIn(self._customer_deps_path, sys.path_importer_cache)
-        self.assertEqual(
-            common_module.package_location,
-            os.path.join(self._customer_deps_path, 'common_module')
-        )
-
-    def test_reload_all_modules_from_customer_deps(self):
-        """The test simulates a Linux Consumption environment where the worker
-        transitions from placeholder mode to a specialized worker with the
-        customer's dependencies. First the worker uses its own dependencies
-        for its modules. After the worker init request, it starts adding the
-        customer's library path (e.g. .python_packages/) into sys.path. The
-        final step is the environment reload, where the worker is fully
-        specialized and reloads all libraries from the customer's package.
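The specialization flow described above is, at its core, a sys.path reshuffle plus a purge of import caches. A minimal sketch of the reordering these tests exercise (hypothetical helper, not the worker's actual implementation):

```python
import sys

def prioritize_customer_deps(customer_deps: str, worker_deps: str,
                             app_root: str) -> None:
    # Deduplicate first so repeated specializations stay idempotent.
    for path in (customer_deps, worker_deps, app_root):
        while path in sys.path:
            sys.path.remove(path)
    # Customer packages win, worker packages come second, and the
    # function app root is appended last.
    sys.path.insert(0, worker_deps)
    sys.path.insert(0, customer_deps)
    sys.path.append(app_root)
```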
- """ - self._initialize_scenario() - - # Ensure the common_module is imported from _worker_deps_path - DependencyManager.use_worker_dependencies() - import common_module # NoQA - self.assertEqual( - common_module.package_location, - os.path.join(self._worker_deps_path, 'common_module') - ) - - # At placeholder specialization from function_environment_reload - DependencyManager.prioritize_customer_dependencies( - self._customer_func_path - ) - - # Now the module should be imported from customer dependency - import common_module # NoQA - self.assertIn(self._customer_deps_path, sys.path_importer_cache) - self.assertEqual( - common_module.package_location, - os.path.join(self._customer_deps_path, 'common_module') - ) - - # Check if the order matches expectation - self._assert_path_order(sys.path, [ - self._customer_deps_path, - self._worker_deps_path, - self._customer_func_path, - ]) - - def test_reload_all_namespaces_from_customer_deps(self): - """The test simulates a linux consumption environment where the worker - transits from placeholder mode to specialized mode. In a very typical - scenario, the nested azure.functions library (with common azure) - namespace needs to be switched from worker_deps to customer_Deps. - """ - self._initialize_scenario() - - # Ensure the nested_module is imported from _worker_deps_path - DependencyManager.use_worker_dependencies() - import common_namespace.nested_module # NoQA - self.assertEqual(common_namespace.nested_module.__version__, 'worker') - - # At placeholder specialization from function_environment_reload - DependencyManager.prioritize_customer_dependencies( - self._customer_func_path - ) - - # Now the nested_module should be imported from customer dependency - import common_namespace.nested_module # NoQA - self.assertIn(self._customer_deps_path, sys.path_importer_cache) - self.assertEqual( - common_namespace.__path__[0], - os.path.join(self._customer_deps_path, 'common_namespace') - ) - self.assertEqual(common_namespace.nested_module.__version__, 'customer') - - # Check if the order matches expectation - self._assert_path_order(sys.path, [ - self._customer_deps_path, - self._worker_deps_path, - self._customer_func_path, - ]) - - def test_remove_from_sys_path(self): - sys.path.append(self._customer_deps_path) - DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn(self._customer_deps_path, sys.path) - - def test_remove_from_sys_path_should_remove_all_duplications(self): - sys.path.insert(0, self._customer_deps_path) - sys.path.append(self._customer_deps_path) - DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn(self._customer_deps_path, sys.path) - - def test_remove_from_sys_path_should_remove_path_importer_cache(self): - # Import a common_module from customer deps will create a path finter - # cache in sys.path_importer_cache - sys.path.insert(0, self._customer_deps_path) - import common_module # NoQA - self.assertIn(self._customer_deps_path, sys.path_importer_cache) - - # Remove sys.path_importer_cache - DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn(self._customer_deps_path, sys.path_importer_cache) - - def test_remove_from_sys_path_should_remove_related_module(self): - # Import a common_module from customer deps will create a module import - # cache in sys.module - sys.path.insert(0, self._customer_deps_path) - import common_module # NoQA - self.assertIn('common_module', sys.modules) - - # Remove sys.path_importer_cache - 
DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn('common_module', sys.modules) - - def test_remove_from_sys_path_should_remove_related_namespace(self): - """When a namespace is imported, the sys.modules should cache it. - After calling the remove_from_sys_path, the namespace in sys.modules - cache should be removed. - """ - sys.path.insert(0, self._customer_deps_path) - import common_namespace # NoQA - self.assertIn('common_namespace', sys.modules) - - # Remove from sys.modules via _remove_from_sys_path - DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn('common_namespace', sys.modules) - - def test_remove_from_sys_path_should_remove_nested_module(self): - """When a nested module is imported into a namespace, the sys.modules - should cache it. After calling the remove_from_sys_path, the nested - module should be removed from sys.modules - """ - sys.path.insert(0, self._customer_deps_path) - import common_namespace.nested_module # NoQA - self.assertIn('common_namespace.nested_module', sys.modules) - - # Remove from sys.modules via _remove_from_sys_path - DependencyManager._remove_from_sys_path(self._customer_deps_path) - self.assertNotIn('common_namespace.nested_module', sys.modules) - - def test_clear_path_importer_cache_and_modules(self): - # Ensure sys.path_importer_cache and sys.modules cache is cleared - sys.path.insert(0, self._customer_deps_path) - import common_module # NoQA - self.assertIn('common_module', sys.modules) - - # Clear out cache - DependencyManager._clear_path_importer_cache_and_modules( - self._customer_deps_path - ) - - # Ensure cache is cleared - self.assertNotIn('common_module', sys.modules) - - def test_clear_path_importer_cache_and_modules_reimport(self): - # First import common_module from _customer_deps_path - sys.path.insert(0, self._customer_deps_path) - import common_module # NoQA - self.assertIn('common_module', sys.modules) - self.assertEqual( - common_module.package_location, - os.path.join(self._customer_deps_path, 'common_module') - ) - - # Clean up cache - DependencyManager._clear_path_importer_cache_and_modules( - self._customer_deps_path - ) - self.assertNotIn('common_module', sys.modules) - - # Clean up namespace - del common_module - - # Try import common_module from _worker_deps_path - sys.path.insert(0, self._worker_deps_path) - - # Ensure new import is from _worker_deps_path - import common_module # NoQA - self.assertIn('common_module', sys.modules) - self.assertEqual( - common_module.package_location, - os.path.join(self._worker_deps_path, 'common_module') - ) - - def test_clear_path_importer_cache_and_modules_retain_namespace(self): - # First import common_module from _customer_deps_path as customer_mod - sys.path.insert(0, self._customer_deps_path) - import common_module as customer_mod # NoQA - self.assertIn('common_module', sys.modules) - self.assertEqual( - customer_mod.package_location, - os.path.join(self._customer_deps_path, 'common_module') - ) - - # Clean up cache - DependencyManager._clear_path_importer_cache_and_modules( - self._customer_deps_path - ) - self.assertNotIn('common_module', sys.modules) - - # Try import common_module from _worker_deps_path as worker_mod - sys.path.insert(0, self._worker_deps_path) - - # Ensure new import is from _worker_deps_path - import common_module as worker_mod # NoQA - self.assertIn('common_module', sys.modules) - self.assertEqual( - worker_mod.package_location, - os.path.join(self._worker_deps_path, 'common_module') - ) - - def 
test_use_worker_dependencies(self): - # Setup app settings - os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'true' - - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # Ensure the common_module is imported from _worker_deps_path - DependencyManager.use_worker_dependencies() - import common_module # NoQA - self.assertEqual( - common_module.package_location, - os.path.join(self._worker_deps_path, 'common_module') - ) - - def test_use_worker_dependencies_disable(self): - # Setup app settings - os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'false' - - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # The common_module cannot be imported since feature is disabled - DependencyManager.use_worker_dependencies() - with self.assertRaises(ImportError): - import common_module # NoQA - - def test_use_worker_dependencies_default_python_all_versions(self): - # Feature should be disabled for all python versions - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # The common_module cannot be imported since feature is disabled - DependencyManager.use_worker_dependencies() - with self.assertRaises(ImportError): - import common_module # NoQA - - def test_prioritize_customer_dependencies(self): - # Setup app settings - os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'true' - - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # Ensure the common_module is imported from _customer_deps_path - DependencyManager.prioritize_customer_dependencies() - import common_module # NoQA - self.assertEqual( - common_module.package_location, - os.path.join(self._customer_deps_path, 'common_module') - ) - - # Check if the sys.path order matches the expected order - self._assert_path_order(sys.path, [ - self._customer_deps_path, - self._worker_deps_path, - self._customer_func_path, - ]) - - def test_prioritize_customer_dependencies_disable(self): - # Setup app settings - os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'false' - - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # Ensure the common_module is imported from _customer_deps_path - DependencyManager.prioritize_customer_dependencies() - with self.assertRaises(ImportError): - import common_module # NoQA - - def test_prioritize_customer_dependencies_default_all_versions(self): - # Feature should be disabled in Python for all versions - # Setup paths - DependencyManager.worker_deps_path = self._worker_deps_path - DependencyManager.cx_deps_path = self._customer_deps_path - DependencyManager.cx_working_dir = self._customer_func_path - - # Ensure the common_module is imported from _customer_deps_path - DependencyManager.prioritize_customer_dependencies() - with self.assertRaises(ImportError): - import common_module # NoQA - - def test_prioritize_customer_dependencies_from_working_directory(self): - 
self._initialize_scenario()
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-        # Ensure the func_specific_module is imported from _customer_func_path
-        DependencyManager.prioritize_customer_dependencies()
-        import func_specific_module  # NoQA
-        self.assertEqual(
-            func_specific_module.package_location,
-            os.path.join(self._customer_func_path, 'func_specific_module')
-        )
-
-    def test_remove_module_cache(self):
-        # First import the common_module and create a sys.modules cache
-        sys.path.append(self._customer_deps_path)
-        import common_module  # NoQA
-        self.assertIn('common_module', sys.modules)
-
-        # Ensure the module cache will be removed
-        DependencyManager._remove_module_cache(self._customer_deps_path)
-        self.assertNotIn('common_module', sys.modules)
-
-    def test_remove_module_cache_with_namespace_remain(self):
-        # Create common_module namespace
-        sys.path.append(self._customer_deps_path)
-        import common_module  # NoQA
-
-        # Ensure namespace remains after module cache is removed
-        DependencyManager._remove_module_cache(self._customer_deps_path)
-        self.assertIsNotNone(common_module)
-
-    @unittest.skipIf(sys.version_info.minor > 7,
-                     "The worker brings different protobuf versions "
-                     "between 3.7 and 3.8+.")
-    def test_newrelic_protobuf_import_scenario_worker_deps_37(self):
-        # https://github.com/Azure/azure-functions-python-worker/issues/1339
-        # newrelic checks if protobuf has been imported and, based on the
-        # version it finds, imports a specific pb2 file.
-
-        # PIWD = 0. protobuf is brought through the worker's deps.
-        os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'false'
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-        DependencyManager.prioritize_customer_dependencies()
-
-        # protobuf v3 is found
-        from google.protobuf import __version__
-
-        protobuf_version = tuple(int(v) for v in __version__.split("."))
-        self.assertIsNotNone(protobuf_version)
-        self.assertEqual(protobuf_version[0], 3)
-
-    @unittest.skipIf(sys.version_info.minor <= 7,
-                     "The worker brings different protobuf versions "
-                     "between 3.7 and 3.8+.")
-    def test_newrelic_protobuf_import_scenario_worker_deps(self):
-        # https://github.com/Azure/azure-functions-python-worker/issues/1339
-        # newrelic checks if protobuf has been imported and, based on the
-        # version it finds, imports a specific pb2 file.
-
-        # PIWD = 0. protobuf is brought through the worker's deps.
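The "PIWD" shorthand in these comments refers to the PYTHON_ISOLATE_WORKER_DEPENDENCIES app setting toggled below. A hedged sketch of how such a boolean app setting is typically read (hypothetical helper; the worker's own parsing may accept more spellings):

```python
import os

def piwd_enabled() -> bool:
    # Hypothetical reader for PYTHON_ISOLATE_WORKER_DEPENDENCIES.
    return os.getenv('PYTHON_ISOLATE_WORKER_DEPENDENCIES',
                     'false').strip().lower() in ('1', 'true')
```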
-        os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'false'
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-        DependencyManager.prioritize_customer_dependencies()
-
-        # protobuf v4 is found
-        from google.protobuf import __version__
-
-        protobuf_version = tuple(int(v) for v in __version__.split("."))
-        self.assertIsNotNone(protobuf_version)
-        self.assertEqual(protobuf_version[0], 4)
-
-    @unittest.skipIf(sys.version_info.minor > 7,
-                     "The worker brings different protobuf versions "
-                     "between 3.7 and 3.8+.")
-    def test_newrelic_protobuf_import_scenario_user_deps_37(self):
-        # https://github.com/Azure/azure-functions-python-worker/issues/1339
-        # newrelic checks if protobuf has been imported and, based on the
-        # version it finds, imports a specific pb2 file.
-
-        # PIWD = 1. protobuf is brought through the user's deps.
-        os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'true'
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-        DependencyManager.prioritize_customer_dependencies()
-
-        # protobuf is found from worker deps, but newrelic won't find it
-        from google.protobuf import __version__
-
-        protobuf_version = tuple(int(v) for v in __version__.split("."))
-        self.assertIsNotNone(protobuf_version)
-
-        # newrelic tries to import protobuf v3
-        self.assertEqual(protobuf_version[0], 3)
-
-        # newrelic tries to import protobuf v4
-        self.assertNotEqual(protobuf_version[0], 4)
-
-    @unittest.skipIf(sys.version_info.minor <= 7,
-                     "The worker brings different protobuf versions "
-                     "between 3.7 and 3.8+.")
-    def test_newrelic_protobuf_import_scenario_user_deps(self):
-        # https://github.com/Azure/azure-functions-python-worker/issues/1339
-        # newrelic checks if protobuf has been imported and, based on the
-        # version it finds, imports a specific pb2 file.
-
-        # PIWD = 1. protobuf is brought through the user's deps.
-        os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'true'
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-        DependencyManager.prioritize_customer_dependencies()
-
-        # protobuf is found from worker deps, but newrelic won't find it
-        from google.protobuf import __version__
-
-        protobuf_version = tuple(int(v) for v in __version__.split("."))
-        self.assertIsNotNone(protobuf_version)
-
-        # newrelic tries to import protobuf v4
-        self.assertEqual(protobuf_version[0], 4)
-
-        # newrelic tries to import protobuf v3
-        self.assertNotEqual(protobuf_version[0], 3)
-
-    def _initialize_scenario(self):
-        # Setup app settings
-        os.environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = 'true'
-        os.environ['AzureWebJobsScriptRoot'] = '/home/site/wwwroot'
-
-        # Setup paths
-        DependencyManager.worker_deps_path = self._worker_deps_path
-        DependencyManager.cx_deps_path = self._customer_deps_path
-        DependencyManager.cx_working_dir = self._customer_func_path
-
-    def _assert_path_order(self, sys_paths, expected_order):
-        """Check that the paths in sys_paths appear in the same order as
-        the paths in expected_order.
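_assert_path_order implements an in-order subsequence check, so unrelated entries may be interleaved between the expected paths. For example (illustrative paths):

```python
sys_paths = ['/cx_deps', '/stdlib', '/worker_deps', '/app']
ok = ['/cx_deps', '/worker_deps', '/app']  # passes: in-order subsequence
bad = ['/worker_deps', '/cx_deps']         # fails: relative order reversed
```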
- """ - if not expected_order: - return - - next_check = 0 - for path in sys_paths: - if path == expected_order[next_check]: - next_check += 1 - - if next_check == len(expected_order): - break - - self.assertEqual( - next_check, len(expected_order), - 'The order in sys_paths does not match the expected_order paths' - ) diff --git a/tests/unittests/third_party_http_functions/stein/asgi_function/function_app.py b/tests/unittests/third_party_http_functions/stein/asgi_function/function_app.py deleted file mode 100644 index da76f0714..000000000 --- a/tests/unittests/third_party_http_functions/stein/asgi_function/function_app.py +++ /dev/null @@ -1,173 +0,0 @@ -import asyncio -import logging -import sys -from urllib.request import urlopen -import base64 - -import azure.functions as func -from fastapi import FastAPI, Request, Response - -fast_app = FastAPI() -logger = logging.getLogger("my-function") -# Attempt to log info into system log from customer code -disguised_logger = logging.getLogger('azure_functions_worker') - - -async def parallelly_print(): - await asyncio.sleep(0.1) - print('parallelly_print') - - -async def parallelly_log_info(): - await asyncio.sleep(0.2) - logging.info('parallelly_log_info at root logger') - - -async def parallelly_log_warning(): - await asyncio.sleep(0.3) - logging.warning('parallelly_log_warning at root logger') - - -async def parallelly_log_error(): - await asyncio.sleep(0.4) - logging.error('parallelly_log_error at root logger') - - -async def parallelly_log_exception(): - await asyncio.sleep(0.5) - try: - raise Exception('custom exception') - except Exception: - logging.exception('parallelly_log_exception at root logger', - exc_info=sys.exc_info()) - - -async def parallelly_log_custom(): - await asyncio.sleep(0.6) - logger.info('parallelly_log_custom at custom_logger') - - -async def parallelly_log_system(): - await asyncio.sleep(0.7) - disguised_logger.info('parallelly_log_system at disguised_logger') - - -@fast_app.get("/debug_logging") -async def debug_logging(): - logging.critical('logging critical', exc_info=True) - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - - return Response(content='OK-debug', media_type="text/plain") - - -@fast_app.get("/debug_user_logging") -async def debug_user_logging(): - logger.setLevel(logging.DEBUG) - - logger.critical('logging critical', exc_info=True) - logger.info('logging info', exc_info=True) - logger.warning('logging warning', exc_info=True) - logger.debug('logging debug', exc_info=True) - logger.error('logging error', exc_info=True) - - return Response(content='OK-user-debug', media_type="text/plain") - - -@fast_app.get("/hijack_current_event_loop") -async def hijack_current_event_loop(): - loop = asyncio.get_event_loop() - - # Create multiple tasks and schedule it into one asyncio.wait blocker - task_print: asyncio.Task = loop.create_task(parallelly_print()) - task_info: asyncio.Task = loop.create_task(parallelly_log_info()) - task_warning: asyncio.Task = loop.create_task(parallelly_log_warning()) - task_error: asyncio.Task = loop.create_task(parallelly_log_error()) - task_exception: asyncio.Task = loop.create_task(parallelly_log_exception()) - task_custom: asyncio.Task = loop.create_task(parallelly_log_custom()) - task_disguise: asyncio.Task = loop.create_task(parallelly_log_system()) - - # Create an awaitable future and occupy the current event loop resource - future 
= loop.create_future()
-    loop.call_soon_threadsafe(future.set_result, 'callsoon_log')
-
-    # WaitAll
-    await asyncio.wait([task_print, task_info, task_warning, task_error,
-                        task_exception, task_custom, task_disguise, future])
-
-    # Log asyncio low-level future result
-    logging.info(future.result())
-
-    return Response(content='OK-hijack-current-event-loop',
-                    media_type="text/plain")
-
-
-@fast_app.get("/print_logging")
-async def print_logging(message: str = "", flush: str = 'false',
-                        console: str = 'false', is_stderr: str = 'false'):
-    flush_required = False
-    is_console_log = False
-    use_stderr = False
-
-    if flush == 'true':
-        flush_required = True
-    if console == 'true':
-        is_console_log = True
-    if is_stderr == 'true':
-        use_stderr = True
-
-    # Adding LanguageWorkerConsoleLog makes the function host treat
-    # this as a system log, which will be propagated to kusto
-    prefix = 'LanguageWorkerConsoleLog' if is_console_log else ''
-    print(f'{prefix} {message}'.strip(),
-          file=sys.stderr if use_stderr else sys.stdout,
-          flush=flush_required)
-
-    return Response(content='OK-print-logging', media_type="text/plain")
-
-
-@fast_app.post("/raw_body_bytes")
-async def raw_body_bytes(request: Request):
-    body = await request.body()
-
-    base64_encoded = base64.b64encode(body).decode('utf-8')
-    html_img_tag = \
-        f'<img src="data:image/png;base64,{base64_encoded}" alt="PNG Image">'
-
-    return Response(html_img_tag, headers={'body-len': str(len(html_img_tag))})
-
-
-@fast_app.get("/return_http_no_body")
-async def return_http_no_body():
-    return Response(content='', media_type="text/plain")
-
-
-@fast_app.get("/return_http")
-async def return_http(request: Request):
-    return Response('
<h1>Hello World™</h1>
', media_type='text/html') - - -@fast_app.get("/unhandled_error") -async def unhandled_error(): - 1 / 0 - - -@fast_app.get("/unhandled_urllib_error") -async def unhandled_urllib_error(img: str = ''): - urlopen(img).read() - - -class UnserializableException(Exception): - def __str__(self): - raise RuntimeError('cannot serialize me') - - -@fast_app.get("/unhandled_unserializable_error") -async def unhandled_unserializable_error(): - raise UnserializableException('foo') - - -app = func.AsgiFunctionApp(app=fast_app, - http_auth_level=func.AuthLevel.ANONYMOUS) diff --git a/tests/unittests/third_party_http_functions/stein/wsgi_function/function_app.py b/tests/unittests/third_party_http_functions/stein/wsgi_function/function_app.py deleted file mode 100644 index 3d2f63d91..000000000 --- a/tests/unittests/third_party_http_functions/stein/wsgi_function/function_app.py +++ /dev/null @@ -1,96 +0,0 @@ -import logging -import sys -from urllib.request import urlopen - -import azure.functions as func -from flask import Flask, Response, redirect, request, url_for - -flask_app = Flask(__name__) -logger = logging.getLogger("my-function") - - -@flask_app.get("/debug_logging") -def debug_logging(): - logging.critical('logging critical', exc_info=True) - logging.info('logging info', exc_info=True) - logging.warning('logging warning', exc_info=True) - logging.debug('logging debug', exc_info=True) - logging.error('logging error', exc_info=True) - - return 'OK-debug' - - -@flask_app.get("/debug_user_logging") -def debug_user_logging(): - logger.setLevel(logging.DEBUG) - - logger.critical('logging critical', exc_info=True) - logger.info('logging info', exc_info=True) - logger.warning('logging warning', exc_info=True) - logger.debug('logging debug', exc_info=True) - logger.error('logging error', exc_info=True) - return 'OK-user-debug' - - -@flask_app.get("/print_logging") -def print_logging(): - flush_required = False - is_console_log = False - is_stderr = False - - message = request.args.get("message", '') - - if request.args.get("flush") == 'true': - flush_required = True - if request.args.get("console") == 'true': - is_console_log = True - if request.args.get("is_stderr") == 'true': - is_stderr = True - - # Adding LanguageWorkerConsoleLog will make function host to treat - # this as system log and will be propagated to kusto - prefix = 'LanguageWorkerConsoleLog' if is_console_log else '' - print(f'{prefix} {message}'.strip(), - file=sys.stderr if is_stderr else sys.stdout, - flush=flush_required) - - return 'OK-print-logging' - - -@flask_app.get("/return_http_no_body") -def return_http_no_body(): - return '' - - -@flask_app.get("/return_http") -def return_http(): - return Response('
<h1>Hello World™</h1>
', mimetype='text/html') - - -@flask_app.get("/return_http_redirect") -def return_http_redirect(code: str = ''): - return redirect(url_for('return_http')) - - -@flask_app.get("/unhandled_error") -def unhandled_error(): - 1 / 0 - - -@flask_app.get("/unhandled_urllib_error") -def unhandled_urllib_error(img: str = ''): - urlopen(img).read() - - -class UnserializableException(Exception): - def __str__(self): - raise RuntimeError('cannot serialize me') - - -@flask_app.get("/unhandled_unserializable_error") -def unhandled_unserializable_error(): - raise UnserializableException('foo') - - -app = func.WsgiFunctionApp(app=flask_app.wsgi_app, - http_auth_level=func.AuthLevel.ANONYMOUS) diff --git a/tests/unittests/timer_functions/return_pastdue/function.json b/tests/unittests/timer_functions/return_pastdue/function.json deleted file mode 100644 index 95c9914ff..000000000 --- a/tests/unittests/timer_functions/return_pastdue/function.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "timerTrigger", - "direction": "in", - "name": "timer", - "schedule": "*/5 * * * * *" - }, - { - "direction": "out", - "name": "pastdue", - "type": "http" - } - ] -} diff --git a/tests/unittests/timer_functions/return_pastdue/main.py b/tests/unittests/timer_functions/return_pastdue/main.py deleted file mode 100644 index d272f4982..000000000 --- a/tests/unittests/timer_functions/return_pastdue/main.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import azure.functions as azf - - -def main(timer: azf.TimerRequest, pastdue: azf.Out[str]): - pastdue.set(str(timer.past_due)) diff --git a/tests/unittests/timer_functions/user_event_loop_timer/function.json b/tests/unittests/timer_functions/user_event_loop_timer/function.json deleted file mode 100644 index 27de92aab..000000000 --- a/tests/unittests/timer_functions/user_event_loop_timer/function.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "scriptFile": "main.py", - "bindings": [ - { - "type": "timerTrigger", - "direction": "in", - "name": "timer", - "schedule": "*/5 * * * * *" - } - ] -} diff --git a/tests/unittests/timer_functions/user_event_loop_timer/main.py b/tests/unittests/timer_functions/user_event_loop_timer/main.py deleted file mode 100644 index 0bb039078..000000000 --- a/tests/unittests/timer_functions/user_event_loop_timer/main.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import asyncio -import logging - -import azure.functions as func - -logger = logging.getLogger('my function') - - -async def try_log(): - logger.info("try_log") - - -def main(timer: func.TimerRequest): - loop = asyncio.SelectorEventLoop() - asyncio.set_event_loop(loop) - loop.run_until_complete(try_log()) - loop.close() diff --git a/tests/utils/constants.py b/tests/utils/constants.py index 34c262f20..5b55e1ea0 100644 --- a/tests/utils/constants.py +++ b/tests/utils/constants.py @@ -1,82 +1,6 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -import pathlib - -# Extensions necessary for non-core bindings. 
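EXTENSIONS_CSPROJ_TEMPLATE and NUGET_CONFIG below are string templates that the test setup writes to disk so `dotnet build` can restore the binding extension packages. A hedged sketch of that materialization step (hypothetical helper, not the repo's actual setup code):

```python
import pathlib

def write_extension_project(app_dir: pathlib.Path,
                            csproj_template: str,
                            nuget_config: str) -> None:
    # Hypothetical helper: drop the templates into the function app
    # folder before invoking `dotnet build` there.
    (app_dir / 'extensions.csproj').write_text(csproj_template)
    (app_dir / 'NuGet.Config').write_text(nuget_config)
```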
-EXTENSIONS_CSPROJ_TEMPLATE = """\ - - - - net8.0 - - ** - - - - - - - - - - - - - - - - - - -""" - -NUGET_CONFIG = """\ - - - - - - - - - - -""" - -# PROJECT_ROOT refers to the path to azure-functions-python-worker -# TODO: Find root folder without .parent -PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent -TESTS_ROOT = PROJECT_ROOT / 'tests' -WORKER_CONFIG = PROJECT_ROOT / '.testconfig' - -# E2E Integration Flags and Configurations -PYAZURE_INTEGRATION_TEST = "PYAZURE_INTEGRATION_TEST" -PYAZURE_WORKER_DIR = "PYAZURE_WORKER_DIR" - -# Debug Flags -PYAZURE_WEBHOST_DEBUG = "PYAZURE_WEBHOST_DEBUG" -ARCHIVE_WEBHOST_LOGS = "ARCHIVE_WEBHOST_LOGS" - -# CI test constants -CONSUMPTION_DOCKER_TEST = "CONSUMPTION_DOCKER_TEST" -DEDICATED_DOCKER_TEST = "DEDICATED_DOCKER_TEST" +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import pathlib + +PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent +TESTS_ROOT = PROJECT_ROOT / 'tests' diff --git a/tests/utils/testutils.py b/tests/utils/testutils.py index c04b134c5..de229fc3e 100644 --- a/tests/utils/testutils.py +++ b/tests/utils/testutils.py @@ -6,118 +6,13 @@ and can be changed without a notice. """ -import argparse import asyncio -import concurrent.futures -import configparser import functools import inspect -import json -import logging -import os -import pathlib -import platform -import queue -import random -import re -import shutil -import socket -import string -import subprocess -import sys -import tempfile -import time -import typing import unittest -import uuid - -import grpc -import requests -from tests.utils.constants import ( - ARCHIVE_WEBHOST_LOGS, - CONSUMPTION_DOCKER_TEST, - DEDICATED_DOCKER_TEST, - EXTENSIONS_CSPROJ_TEMPLATE, - PROJECT_ROOT, - PYAZURE_INTEGRATION_TEST, - PYAZURE_WEBHOST_DEBUG, - PYAZURE_WORKER_DIR, - WORKER_CONFIG, -) -from tests.utils.testutils_docker import ( - DockerConfigs, - WebHostConsumption, - WebHostDedicated, -) - -from azure_functions_worker import dispatcher, protos -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - FileAccessorFactory, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.constants import ( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, - UNIX_SHARED_MEMORY_DIRECTORIES, -) -from azure_functions_worker.utils.common import get_app_setting, is_envvar_true - -TESTS_ROOT = PROJECT_ROOT / 'tests' -E2E_TESTS_FOLDER = pathlib.Path('endtoend') -E2E_TESTS_ROOT = TESTS_ROOT / E2E_TESTS_FOLDER -UNIT_TESTS_FOLDER = pathlib.Path('unittests') -UNIT_TESTS_ROOT = TESTS_ROOT / UNIT_TESTS_FOLDER -EMULATOR_TESTS_FOLDER = pathlib.Path('emulator_tests') -EXTENSION_TESTS_FOLDER = pathlib.Path('extension_tests') -WEBHOST_DLL = "Microsoft.Azure.WebJobs.Script.WebHost.dll" -DEFAULT_WEBHOST_DLL_PATH = ( - PROJECT_ROOT / 'build' / 'webhost' / 'bin' / WEBHOST_DLL -) -EXTENSIONS_PATH = PROJECT_ROOT / 'build' / 'extensions' / 'bin' -FUNCS_PATH = TESTS_ROOT / UNIT_TESTS_FOLDER / 'http_functions' -WORKER_PATH = PROJECT_ROOT / 'python' / 'test' -ON_WINDOWS = platform.system() == 'Windows' -LOCALHOST = "127.0.0.1" - -# The template of host.json that will be applied to each test functions -HOST_JSON_TEMPLATE = """\ -{ - "version": "2.0", - "logging": {"logLevel": {"default": "Trace"}} -} -""" - -SECRETS_TEMPLATE = """\ -{ - "masterKey": { - "name": "master", - "value": "testMasterKey", - "encrypted": false - }, - "functionKeys": [ - { - "name": 
"default", - "value": "testFunctionKey", - "encrypted": false - } - ], - "systemKeys": [ - { - "name": "eventgridextensionconfig_extension", - "value": "testSystemKey", - "encrypted": false - } - ], - "hostName": null, - "instanceId": "0000000000000000000000001C69C103", - "source": "runtime" -} -""" class AsyncTestCaseMeta(type(unittest.TestCase)): - def __new__(mcls, name, bases, ns): for attrname, attr in ns.items(): if (attrname.startswith('test_') @@ -137,949 +32,3 @@ def wrapper(*args, **kwargs): class AsyncTestCase(unittest.TestCase, metaclass=AsyncTestCaseMeta): pass - - -class WebHostTestCaseMeta(type(unittest.TestCase)): - - def __new__(mcls, name, bases, dct): - if is_envvar_true(DEDICATED_DOCKER_TEST) \ - or is_envvar_true(CONSUMPTION_DOCKER_TEST): - return super().__new__(mcls, name, bases, dct) - - for attrname, attr in dct.items(): - if attrname.startswith('test_') and callable(attr): - test_case = attr - check_log_name = attrname.replace('test_', 'check_log_', 1) - check_log_case = dct.get(check_log_name) - - @functools.wraps(test_case) - def wrapper(self, *args, __meth__=test_case, - __check_log__=check_log_case, **kwargs): - if (__check_log__ is not None - and callable(__check_log__) - and not is_envvar_true(PYAZURE_WEBHOST_DEBUG)): - - # Check logging output for unit test scenarios - result = self._run_test(__meth__, *args, **kwargs) - - # Trim off host output timestamps - host_output = getattr(self, 'host_out', '') - output_lines = host_output.splitlines() - ts_re = r"^\[\d+(\/|-)\d+(\/|-)\d+T*\d+\:\d+\:\d+.*(" \ - r"A|P)*M*\]" - output = list(map(lambda s: - re.sub(ts_re, '', s).strip(), - output_lines)) - - # Execute check_log_ test cases - self._run_test(__check_log__, host_out=output) - return result - else: - # Check normal unit test - return self._run_test(__meth__, *args, **kwargs) - - dct[attrname] = wrapper - - return super().__new__(mcls, name, bases, dct) - - -class WebHostTestCase(unittest.TestCase, metaclass=WebHostTestCaseMeta): - """Base class for integration tests that need a WebHost. - - In addition to automatically starting up a WebHost instance, - this test case class logs WebHost stdout/stderr in case - a unit test fails. - - You can write two sets of test - test_* and check_log_* tests. - - test_ABC - Unittest - check_log_ABC - Check logs generated during the execution of test_ABC. 
- """ - host_stdout_logger = logging.getLogger('webhosttests') - env_variables = {} - - @classmethod - def get_script_dir(cls): - raise NotImplementedError - - @classmethod - def get_libraries_to_install(cls) -> typing.List: - pass - - @classmethod - def get_environment_variables(cls): - pass - - @classmethod - def docker_tests_enabled(self) -> (bool, str): - """ - Returns True if the environment variables - CONSUMPTION_DOCKER_TEST or DEDICATED_DOCKER_TEST - is enabled else returns False - """ - if is_envvar_true(CONSUMPTION_DOCKER_TEST): - return True, CONSUMPTION_DOCKER_TEST - elif is_envvar_true(DEDICATED_DOCKER_TEST): - return True, DEDICATED_DOCKER_TEST - else: - return False, None - - @classmethod - def setUpClass(cls): - script_dir = pathlib.Path(cls.get_script_dir()) - is_unit_test = True if 'unittests' in script_dir.parts else False - - docker_tests_enabled, sku = cls.docker_tests_enabled() - - cls.host_stdout = None if is_envvar_true(PYAZURE_WEBHOST_DEBUG) \ - else tempfile.NamedTemporaryFile('w+t') - - try: - if docker_tests_enabled: - docker_configs = DockerConfigs( - script_path=script_dir, - libraries=cls.get_libraries_to_install(), - env=cls.get_environment_variables() or {}) - if sku == CONSUMPTION_DOCKER_TEST: - cls.webhost = \ - WebHostConsumption(docker_configs).spawn_container() - elif sku == DEDICATED_DOCKER_TEST: - cls.webhost = \ - WebHostDedicated(docker_configs).spawn_container() - else: - _setup_func_app(TESTS_ROOT / script_dir, is_unit_test) - try: - cls.webhost = start_webhost(script_dir=script_dir, - stdout=cls.host_stdout) - except Exception: - raise - - if not cls.webhost.is_healthy() and cls.host_stdout is not None: - cls.host_out = cls.host_stdout.read() - if cls.host_out is not None and len(cls.host_out) > 0: - error_message = 'WebHost is not started correctly.' - f'{cls.host_stdout.name}: {cls.host_out}' - cls.host_stdout_logger.error(error_message) - raise RuntimeError(error_message) - except Exception as ex: - cls.host_stdout_logger.error(f"WebHost is not started correctly. {ex}") - cls.tearDownClass() - raise - - @classmethod - def tearDownClass(cls): - cls.webhost.close() - cls.webhost = None - - if cls.host_stdout is not None: - if is_envvar_true(ARCHIVE_WEBHOST_LOGS): - cls.host_stdout.seek(0) - content = cls.host_stdout.read() - if content is not None and len(content) > 0: - version_info = sys.version_info - log_file = ( - "logs/" - f"{cls.__module__}_{cls.__name__}" - f"{version_info.minor}_webhost.log" - ) - with open(log_file, 'w+') as file: - file.write(content) - cls.host_stdout_logger.info("WebHost log is archived to" - f"{log_file} in the artifact") - - cls.host_stdout.close() - cls.host_stdout = None - - script_dir = pathlib.Path(cls.get_script_dir()) - _teardown_func_app(TESTS_ROOT / script_dir) - - def _run_test(self, test, *args, **kwargs): - if self.host_stdout is None: - test(self, *args, **kwargs) - else: - # Discard any host stdout left from the previous test or - # from the setup. 
- self.host_stdout.read() - last_pos = self.host_stdout.tell() - - test_exception = None - try: - test(self, *args, **kwargs) - except Exception as e: - test_exception = e - finally: - try: - self.host_stdout.seek(last_pos) - self.host_out = self.host_stdout.read() - if self.host_out is not None and len(self.host_out) > 0: - self.host_stdout_logger.error( - 'Captured WebHost log generated during test ' - '%s from %s :\n%s', test.__name__, - self.host_stdout.name, self.host_out) - finally: - if test_exception is not None: - raise test_exception - - -class SharedMemoryTestCase(unittest.TestCase): - """ - For tests involving shared memory data transfer usage. - """ - - def setUp(self): - self.was_shmem_env_true = is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - - os_name = platform.system() - if os_name == 'Darwin': - # If an existing AppSetting is specified, save it so it can be - # restored later - self.was_shmem_dirs = get_app_setting( - UNIX_SHARED_MEMORY_DIRECTORIES - ) - self._setUpDarwin() - elif os_name == 'Linux': - self._setUpLinux() - self.file_accessor = FileAccessorFactory.create_file_accessor() - - def tearDown(self): - os_name = platform.system() - if os_name == 'Darwin': - self._tearDownDarwin() - if self.was_shmem_dirs is not None: - # If an AppSetting was set before the tests ran, restore it back - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) - elif os_name == 'Linux': - self._tearDownLinux() - - if not self.was_shmem_env_true: - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - - def get_new_mem_map_name(self): - return str(uuid.uuid4()) - - def get_random_bytes(self, num_bytes): - return bytearray(random.getrandbits(8) for _ in range(num_bytes)) - - def get_random_string(self, num_chars): - return ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - - def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: - """ - Check if uuid_to_test is a valid UUID. - Reference: https://stackoverflow.com/a/33245493/3132415 - """ - try: - uuid_obj = uuid.UUID(uuid_to_test, version=version) - except ValueError: - return False - return str(uuid_obj) == uuid_to_test - - def _createSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - if not os.path.exists(temp_dir_path): - os.makedirs(temp_dir_path) - - def _deleteSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - shutil.rmtree(temp_dir_path) - - def _setUpLinux(self): - self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - - def _tearDownLinux(self): - self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - - def _setUpDarwin(self): - """ - Create a RAM disk on macOS. 
- Ref: https://stackoverflow.com/a/2033417/3132415 - """ - size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) - size = 2048 * size_in_mb - # The following command returns the name of the created disk - cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create ram disk with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - disk_name = result.stdout.strip().decode() - # We create a volume on the disk created above and mount it - volume_name = 'shm' - cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - directory = f'/Volumes/{volume_name}' - self.created_directories = [directory] - # Create directories in the volume for shared memory maps - self._createSharedMemoryDirectories(self.created_directories) - # Override the AppSetting for the duration of this test so the - # FileAccessorUnix can use these directories for creating memory maps - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} - ) - - def _tearDownDarwin(self): - # Delete the directories containing shared memory maps - self._deleteSharedMemoryDirectories(self.created_directories) - # Unmount the volume used for shared memory maps - volume_name = 'shm' - cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ - "| xargs -0 umount -f" - result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) - if result.returncode != 0: - raise IOError(f'Cannot delete volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - - -class _MockWebHostServicer(protos.FunctionRpcServicer): - _STOP = object() - - def __init__(self, host): - self._host = host - - def EventStream(self, client_response_iterator, context): - client_response = next(client_response_iterator) - rtype = client_response.WhichOneof('content') - try: - if rtype != 'start_stream': - raise AssertionError( - f'unexpected {rtype!r} initial message from the worker') - - if client_response.start_stream.worker_id != self._host.worker_id: - raise AssertionError('worker_id mismatch') - - except Exception as ex: - self._host._loop.call_soon_threadsafe( - self._host._connected_fut.set_exception, ex) - return - else: - self._host._loop.call_soon_threadsafe( - self._host._connected_fut.set_result, True) - - while True: - message, wait_for = self._host._in_queue.get() - if message is self._STOP: - return - - yield message - - if wait_for is None: - continue - - response = None - logs = [] - - for client_response in client_response_iterator: - rtype = client_response.WhichOneof('content') - unpacked = getattr(client_response, rtype) - - if rtype == wait_for: - response = unpacked - break - elif rtype == 'rpc_log': - logs.append(unpacked) - else: - raise RuntimeError( - f'unexpected response from worker: ' - f'expected to receive {wait_for!r}, got {rtype!r}') - - self._host._loop.call_soon_threadsafe( - self._host._out_aqueue.put_nowait, - _WorkerResponseMessages(response, logs)) - - -class _WebHostFunction(typing.NamedTuple): - id: str - name: str - desc: dict - script: pathlib.Path - - -class _WorkerResponseMessages(typing.NamedTuple): - response: object - logs: list - - -class _MockWebHost: - - def __init__(self, loop, scripts_dir): - self._loop = loop - self._scripts_dir = scripts_dir 
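The fields initialized here bridge two worlds: the gRPC servicer runs on a thread pool while the test drives an asyncio loop. A condensed sketch of that bridge (illustrative only):

```python
import asyncio
import queue

loop = asyncio.new_event_loop()
in_queue: queue.Queue = queue.Queue()        # test/host -> servicer thread
out_aqueue: asyncio.Queue = asyncio.Queue()  # servicer thread -> test

def deliver_from_grpc_thread(response) -> None:
    # Thread-safe hop back onto the event loop, mirroring the
    # call_soon_threadsafe usage in _MockWebHostServicer.
    loop.call_soon_threadsafe(out_aqueue.put_nowait, response)
```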
- - self._available_functions = {} - self._read_available_functions() - - self._connected_fut = loop.create_future() - self._in_queue = queue.Queue() - self._out_aqueue = asyncio.Queue() - self._threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=1) - self._server = grpc.server(self._threadpool) - self._servicer = _MockWebHostServicer(self) - - protos.add_FunctionRpcServicer_to_server(self._servicer, self._server) - self._port = self._server.add_insecure_port(f'{LOCALHOST}:0') - self._worker_id = self.make_id() - self._request_id = self.make_id() - - def make_id(self): - return str(uuid.uuid4()) - - @property - def worker_id(self): - return self._worker_id - - @property - def request_id(self): - return self._request_id - - async def init_worker(self, host_version: str = '4.28.0'): - r = await self.communicate( - protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version=host_version - ) - ), - wait_for='worker_init_response' - ) - - return r - - async def get_functions_metadata(self): - r = await self.communicate( - protos.StreamingMessage( - functions_metadata_request=protos.FunctionsMetadataRequest( - function_app_directory=str(self._scripts_dir) - ) - ), - wait_for='function_metadata_response' - ) - - return r - - async def load_function(self, name): - if name not in self._available_functions: - raise RuntimeError(f'cannot load function {name}') - - func = self._available_functions[name] - - bindings = {} - for b in func.desc['bindings']: - direction = getattr(protos.BindingInfo, b['direction']) - - data_type_v = b.get('dataType') - if not data_type_v: - data_type = protos.BindingInfo.undefined - elif data_type_v == 'binary': - data_type = protos.BindingInfo.binary - elif data_type_v == 'string': - data_type = protos.BindingInfo.string - elif data_type_v == 'stream': - data_type = protos.BindingInfo.stream - else: - raise RuntimeError(f'invalid dataType: {data_type_v!r}') - - bindings[b['name']] = protos.BindingInfo( - type=b['type'], - data_type=data_type, - direction=direction) - - r = await self.communicate( - protos.StreamingMessage( - function_load_request=protos.FunctionLoadRequest( - function_id=func.id, - metadata=protos.RpcFunctionMetadata( - name=func.name, - directory=os.path.dirname(func.script), - script_file=func.script, - bindings=bindings))), - wait_for='function_load_response') - - return func.id, r - - async def invoke_function( - self, - name, - input_data: typing.List[protos.ParameterBinding], - metadata: typing.Optional[ - typing.Mapping[str, protos.TypedData]] = None): - - if metadata is None: - metadata = {} - - if name not in self._available_functions: - raise RuntimeError(f'cannot load function {name}') - - func = self._available_functions[name] - invocation_id = self.make_id() - - r = await self.communicate( - protos.StreamingMessage( - invocation_request=protos.InvocationRequest( - invocation_id=invocation_id, - function_id=func.id, - input_data=input_data, - trigger_metadata=metadata, - ) - ), - wait_for='invocation_response') - - return invocation_id, r - - async def close_shared_memory_resources( - self, - map_names: typing.List[str]): - - request = protos.CloseSharedMemoryResourcesRequest( - map_names=map_names) - - r = await self.communicate( - protos.StreamingMessage( - close_shared_memory_resources_request=request - ), - wait_for='close_shared_memory_resources_response') - - return r - - async def reload_environment( - self, - environment: typing.Dict[str, str], - function_project_path: str = '/home/site/wwwroot' 
- ) -> protos.FunctionEnvironmentReloadResponse: - - request_content = protos.FunctionEnvironmentReloadRequest( - function_app_directory=function_project_path, - environment_variables={ - k.encode(): v.encode() for k, v in environment.items() - } - ) - - r = await self.communicate( - protos.StreamingMessage( - function_environment_reload_request=request_content - ), - wait_for='function_environment_reload_response' - ) - - return r - - async def get_worker_status(self): - r = await self.communicate( - protos.StreamingMessage( - worker_status_request=protos.WorkerStatusRequest() - ), - wait_for='worker_status_response' - ) - - return r - - async def send(self, message): - self._in_queue.put_nowait((message, None)) - - async def communicate(self, message, *, wait_for): - self._in_queue.put_nowait((message, wait_for)) - return await self._out_aqueue.get() - - async def start(self): - self._server.start() - - async def close(self): - self._in_queue.put_nowait((_MockWebHostServicer._STOP, None)) - self._server.stop(1) - - def _read_available_functions(self): - for fd in self._scripts_dir.iterdir(): - if not fd.is_dir(): - continue - - fjson_fn = fd / 'function.json' - if not fjson_fn.exists(): - continue - - try: - with open(fjson_fn, 'rt') as f: - fjson = json.loads(f.read()) - - fscript = fjson['scriptFile'] - fscript_fn = fd / fscript - if not fscript_fn.exists(): - raise RuntimeError(f'{fscript_fn} path does not exist') - - except Exception as ex: - raise RuntimeError( - f'could not load function {fd.name}') from ex - - fn = _WebHostFunction( - name=fd.name, desc=fjson, script=str(fscript_fn), - id=self.make_id()) - - self._available_functions[fn.name] = fn - - -class _MockWebHostController: - - def __init__(self, scripts_dir: pathlib.PurePath): - self._host: typing.Optional[_MockWebHost] = None - self._scripts_dir: pathlib.PurePath = scripts_dir - self._worker: typing.Optional[dispatcher.Dispatcher] = None - - async def __aenter__(self) -> _MockWebHost: - loop = asyncio.get_running_loop() - self._host = _MockWebHost(loop, self._scripts_dir) - - await self._host.start() - - self._worker = await dispatcher. \ - Dispatcher.connect(LOCALHOST, self._host._port, - self._host.worker_id, self._host.request_id, - connect_timeout=5.0) - - self._worker_task = loop.create_task(self._worker.dispatch_forever()) - - done, pending = await asyncio. 
\ - wait([self._host._connected_fut, self._worker_task], - return_when=asyncio.FIRST_COMPLETED) - - # noinspection PyBroadException - try: - if self._worker_task in done: - self._worker_task.result() - - if self._host._connected_fut not in done: - raise RuntimeError('could not start a worker thread') - except Exception: - try: - await self._host.close() - self._worker.stop() - finally: - raise - - return self._host - - async def __aexit__(self, *exc): - if not self._worker_task.done(): - self._worker_task.cancel() - try: - await self._worker_task - except asyncio.CancelledError: - pass - - self._worker_task = None - self._worker = None - - await self._host.close() - self._host = None - - -def start_mockhost(*, script_root=FUNCS_PATH): - scripts_dir = TESTS_ROOT / script_root - if not (scripts_dir.exists() and scripts_dir.is_dir()): - raise RuntimeError( - f'invalid script_root argument: ' - f'{scripts_dir} directory does not exist') - - sys.path.append(str(scripts_dir)) - - return _MockWebHostController(scripts_dir) - - -class _WebHostProxy: - - def __init__(self, proc, addr): - self._proc = proc - self._addr = addr - - def is_healthy(self): - r = self.request('GET', '', no_prefix=True) - return 200 <= r.status_code < 300 - - def request(self, meth, funcname, *args, **kwargs): - request_method = getattr(requests, meth.lower()) - params = dict(kwargs.pop('params', {})) - no_prefix = kwargs.pop('no_prefix', False) - if 'code' not in params: - params['code'] = 'testFunctionKey' - - return request_method( - self._addr + ('/' if no_prefix else '/api/') + funcname, - *args, params=params, **kwargs) - - def close(self): - if self._proc.stdout: - self._proc.stdout.close() - if self._proc.stderr: - self._proc.stderr.close() - - self._proc.terminate() - try: - self._proc.wait(20) - except subprocess.TimeoutExpired: - self._proc.kill() - - -def _find_open_port(): - with socket.socket() as s: - s.bind((LOCALHOST, 0)) - s.listen(1) - return s.getsockname()[1] - - -def popen_webhost(*, stdout, stderr, script_root=FUNCS_PATH, port=None): - testconfig = None - if WORKER_CONFIG.exists(): - testconfig = configparser.ConfigParser() - testconfig.read(WORKER_CONFIG) - - hostexe_args = [] - - # If we want to use core-tools - coretools_exe = os.environ.get('CORE_TOOLS_EXE_PATH') - if coretools_exe: - coretools_exe = coretools_exe.strip() - if pathlib.Path(coretools_exe).exists(): - hostexe_args = [str(coretools_exe), 'host', 'start', '--verbose'] - if port is not None: - hostexe_args.extend(['--port', str(port)]) - - # If we need to use Functions host directly - if not hostexe_args: - dll = os.environ.get('PYAZURE_WEBHOST_DLL') - if not dll and testconfig and testconfig.has_section('webhost'): - dll = testconfig['webhost'].get('dll') - - if dll: - # Paths from environment might contain trailing - # or leading whitespace. 
-            dll = dll.strip()
-
-        if not dll:
-            dll = DEFAULT_WEBHOST_DLL_PATH
-
-            os.makedirs(dll.parent / 'Secrets', exist_ok=True)
-            with open(dll.parent / 'Secrets' / 'host.json', 'w') as f:
-                secrets = SECRETS_TEMPLATE
-                f.write(secrets)
-
-        if dll and pathlib.Path(dll).exists():
-            hostexe_args = ['dotnet', str(dll)]
-
-    if not hostexe_args:
-        raise RuntimeError('\n'.join([
-            'Unable to locate Azure Functions Host binary.',
-            'Please do one of the following:',
-            '  * run the following command from the root folder of',
-            '    the project:',
-            '',
-            f'      cd tests && {sys.executable} -m invoke -c test_setup webhost',
-            '',
-            '  * or download or build the Azure Functions Host and',
-            '    then write the full path to WebHost.dll',
-            '    into the `PYAZURE_WEBHOST_DLL` environment variable.',
-            '    Alternatively, you can create the',
-            f'    {WORKER_CONFIG.name} file in the root folder',
-            '    of the project with the following structure:',
-            '',
-            '      [webhost]',
-            '      dll = /path/Microsoft.Azure.WebJobs.Script.WebHost.dll',
-            '',
-            '  * or download Azure Functions Core Tools binaries and',
-            '    then write the full path to func.exe into the',
-            '    `CORE_TOOLS_EXE_PATH` environment variable.',
-            '',
-            'Set "export PYAZURE_WEBHOST_DEBUG=true" to get the full',
-            'stdout and stderr from the function host.'
-        ]))
-
-    worker_path = os.environ.get(PYAZURE_WORKER_DIR)
-    worker_path = WORKER_PATH if not worker_path else pathlib.Path(worker_path)
-    if not worker_path.exists():
-        raise RuntimeError(f'Worker path {worker_path} does not exist')
-
-    # Casting to strings is necessary because Popen doesn't like
-    # path objects there on Windows.
-    extra_env = {
-        'AzureWebJobsScriptRoot': str(script_root),
-        'languageWorkers:python:workerDirectory': str(worker_path),
-        'host:logger:consoleLoggingMode': 'always',
-        'AZURE_FUNCTIONS_ENVIRONMENT': 'development',
-        'AzureWebJobsSecretStorageType': 'files',
-        'FUNCTIONS_WORKER_RUNTIME': 'python'
-    }
-
-    # In E2E integration mode, we should use the core tools worker
-    # from the latest artifact instead of the azure_functions_worker module.
-    if is_envvar_true(PYAZURE_INTEGRATION_TEST):
-        extra_env.pop('languageWorkers:python:workerDirectory')
-
-    if testconfig and 'azure' in testconfig:
-        st = testconfig['azure'].get('storage_key')
-        if st:
-            extra_env['AzureWebJobsStorage'] = st
-
-        cosmos = testconfig['azure'].get('cosmosdb_key')
-        if cosmos:
-            extra_env['AzureWebJobsCosmosDBConnectionString'] = cosmos
-
-        eventhub = testconfig['azure'].get('eventhub_key')
-        if eventhub:
-            extra_env['AzureWebJobsEventHubConnectionString'] = eventhub
-
-        servicebus = testconfig['azure'].get('servicebus_key')
-        if servicebus:
-            extra_env['AzureWebJobsServiceBusConnectionString'] = servicebus
-
-        sql = testconfig['azure'].get('sql_key')
-        if sql:
-            extra_env['AzureWebJobsSqlConnectionString'] = sql
-
-        eventgrid_topic_uri = testconfig['azure'].get('eventgrid_topic_uri')
-        if eventgrid_topic_uri:
-            extra_env['AzureWebJobsEventGridTopicUri'] = eventgrid_topic_uri
-
-        eventgrid_topic_key = testconfig['azure'].get('eventgrid_topic_key')
-        if eventgrid_topic_key:
-            extra_env['AzureWebJobsEventGridConnectionKey'] = \
-                eventgrid_topic_key
-
-    if port is not None:
-        extra_env['ASPNETCORE_URLS'] = f'http://*:{port}'
-
-    return subprocess.Popen(
-        hostexe_args,
-        cwd=script_root,
-        env={
-            **os.environ,
-            **extra_env,
-        },
-        stdout=stdout,
-        stderr=stderr)
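
For orientation, here is a minimal sketch of how these helpers compose in a test run. This is a hypothetical snippet: 'return_str' stands in for any HTTP-triggered function folder under FUNCS_PATH, and the other names come from this module.

    port = _find_open_port()
    proc = popen_webhost(stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT,
                         script_root=FUNCS_PATH, port=port)
    proxy = _WebHostProxy(proc, f'http://{LOCALHOST}:{port}')
    try:
        # 'return_str' is a hypothetical function app folder under FUNCS_PATH;
        # request() prepends '/api/' and appends the test function key.
        resp = proxy.request('GET', 'return_str')
        assert resp.status_code == 200
    finally:
        proxy.close()
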
-
-
-def start_webhost(*, script_dir=None, stdout=None):
-    script_root = TESTS_ROOT / script_dir if script_dir else FUNCS_PATH
-    if stdout is None:
-        if is_envvar_true(PYAZURE_WEBHOST_DEBUG):
-            stdout = sys.stdout
-        else:
-            stdout = subprocess.DEVNULL
-
-    port = _find_open_port()
-
-    proc = popen_webhost(stdout=stdout, stderr=subprocess.STDOUT,
-                         script_root=script_root, port=port)
-    time.sleep(10)  # Give the host some time to start fully.
-
-    addr = f'http://{LOCALHOST}:{port}'
-    return _WebHostProxy(proc, addr)
-
-
-def create_dummy_dispatcher():
-    dummy_event_loop = asyncio.new_event_loop()
-    disp = dispatcher.Dispatcher(
-        dummy_event_loop, LOCALHOST, 0,
-        'test_worker_id', 'test_request_id',
-        1.0, 1000)
-    dummy_event_loop.close()
-    return disp
-
-
-def retryable_test(
-    number_of_retries: int,
-    interval_sec: int,
-    expected_exception: type = Exception
-):
-    def decorate(func):
-        def call(*args, **kwargs):
-            retries = number_of_retries
-            while True:
-                try:
-                    return func(*args, **kwargs)
-                except expected_exception as e:
-                    retries -= 1
-                    if retries <= 0:
-                        raise e
-
-                    time.sleep(interval_sec)
-
-        return call
-
-    return decorate
-
-
-def remove_path(path):
-    if path.is_symlink():
-        path.unlink()
-    elif path.is_dir():
-        shutil.rmtree(str(path))
-    elif path.exists():
-        path.unlink()
-
-
-def _symlink_dir(src, dst):
-    remove_path(dst)
-
-    if ON_WINDOWS:
-        shutil.copytree(str(src), str(dst))
-    else:
-        dst.symlink_to(src, target_is_directory=True)
-
-
-def _setup_func_app(app_root, is_unit_test=False):
-    extensions = app_root / 'bin'
-    host_json = app_root / 'host.json'
-    extensions_csproj_file = app_root / 'extensions.csproj'
-
-    if not os.path.isfile(host_json):
-        with open(host_json, 'w') as f:
-            f.write(HOST_JSON_TEMPLATE)
-
-    if not os.path.isfile(extensions_csproj_file) and not is_unit_test:
-        with open(extensions_csproj_file, 'w') as f:
-            f.write(EXTENSIONS_CSPROJ_TEMPLATE)
-
-    _symlink_dir(EXTENSIONS_PATH, extensions)
-
-
-def _teardown_func_app(app_root):
-    extensions = app_root / 'bin'
-    host_json = app_root / 'host.json'
-    extensions_csproj_file = app_root / 'extensions.csproj'
-    extensions_obj_file = app_root / 'obj'
-    libraries_path = app_root / '.python_packages'
-
-    for path in (extensions, host_json, extensions_csproj_file,
-                 extensions_obj_file, libraries_path):
-        remove_path(path)
-
-
-def _main():
-    parser = argparse.ArgumentParser(description='Run a Python worker.')
-    parser.add_argument('scriptroot',
-                        help='directory with functions to load')
-
-    args = parser.parse_args()
-
-    app_root = pathlib.Path(args.scriptroot)
-    _setup_func_app(app_root)
-
-    host = popen_webhost(
-        stdout=sys.stdout, stderr=sys.stderr,
-        script_root=os.path.abspath(args.scriptroot))
-    try:
-        host.wait()
-    finally:
-        host.terminate()
-        _teardown_func_app(app_root)
-
-
-if __name__ == '__main__':
-    _main()
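
A brief, hypothetical usage of the retryable_test decorator defined above; the test name and the webhost attribute are illustrative only.

    @retryable_test(number_of_retries=3, interval_sec=2,
                    expected_exception=AssertionError)
    def test_queue_output(self):
        # Re-runs up to 3 times, sleeping 2 seconds between attempts,
        # before the AssertionError is finally raised.
        r = self.webhost.request('GET', 'get_queue_blob')
        assert r.status_code == 200
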
diff --git a/tests/utils/testutils_docker.py b/tests/utils/testutils_docker.py
deleted file mode 100644
index 964d8a83e..000000000
--- a/tests/utils/testutils_docker.py
+++ /dev/null
@@ -1,214 +0,0 @@
-import os
-import re
-import subprocess
-import sys
-import typing
-import unittest
-import uuid
-from dataclasses import dataclass
-from pathlib import Path
-from time import sleep
-
-import requests
-from tests.utils.constants import PROJECT_ROOT, TESTS_ROOT
-
-_DOCKER_PATH = "DOCKER_PATH"
-_DOCKER_DEFAULT_PATH = "docker"
-_HOST_VERSION = "4"
-_docker_cmd = os.getenv(_DOCKER_PATH, _DOCKER_DEFAULT_PATH)
-_addr = ""
-_python_version = f'{sys.version_info.major}.{sys.version_info.minor}'
-_libraries_path = '.python_packages/lib/site-packages'
-_uuid = str(uuid.uuid4())
-_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list"
-_MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh"
-_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/python/tags/list"
-_IMAGE_REPO = "mcr.microsoft.com/azure-functions/python"
-_CUSTOM_IMAGE = os.getenv("IMAGE_NAME")
-
-
-@dataclass
-class DockerConfigs:
-    script_path: Path
-    libraries: typing.List = None
-    env: typing.Dict = None
-
-
-class WebHostProxy:
-
-    def __init__(self, proc, addr):
-        self._proc = proc
-        self._addr = addr
-
-    def request(self, meth, funcname, *args, **kwargs):
-        request_method = getattr(requests, meth.lower())
-        params = dict(kwargs.pop('params', {}))
-        no_prefix = kwargs.pop('no_prefix', False)
-
-        return request_method(
-            self._addr + ('/' if no_prefix else '/api/') + funcname,
-            *args, params=params, **kwargs)
-
-    def close(self) -> bool:
-        """Kill the container by its name. Returns True on success.
-        """
-        kill_cmd = [_docker_cmd, "rm", "-f", _uuid]
-        kill_process = subprocess.run(args=kill_cmd, stdout=subprocess.DEVNULL)
-        exit_code = kill_process.returncode
-
-        return exit_code == 0
-
-    def is_healthy(self) -> bool:
-        r = self.request('GET', '', no_prefix=True)
-        return 200 <= r.status_code < 300
-
-
-class WebHostDockerContainerBase(unittest.TestCase):
-
-    @staticmethod
-    def find_latest_image(image_repo: str,
-                          image_url: str) -> str:
-
-        regex = re.compile(
-            _HOST_VERSION + r'\.\d+\.\d+-python' + _python_version)
-
-        response = requests.get(image_url, allow_redirects=True)
-        if not response.ok:
-            raise RuntimeError(f'Failed to query latest image for'
-                               f' v{_HOST_VERSION} Python {_python_version}.'
-                               f' Status {response.status_code}')
-
-        tag_list = response.json().get('tags', [])
-        # Remove the -upgrade and -slim suffixes. Upgrade images were
-        # temporary images used to onboard customers from a previous version.
-        # These images are no longer used. (str.replace is used rather than
-        # str.strip, which removes characters, not a suffix.)
-        tag_list = [x.replace("-upgrade", "") for x in tag_list]
-        tag_list = [x.replace("-slim", "") for x in tag_list]
-
-        # List all the versions from the tags with suffix -python<version>
-        python_versions = list(filter(regex.match, tag_list))
-
-        # Sort the tags on the numeric (minor, patch) runtime version and
-        # pick the latest released runtime version for this Python version.
-        latest_version = sorted(python_versions, key=lambda x: tuple(
-            int(y) for y in x.split("-python")[0].split(".")[1:]))[-1]
-
-        image_tag = f'{image_repo}:{latest_version}'
-        return image_tag
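
To make the tag selection concrete, here is a quick illustration with made-up tags. A numeric tuple key is used because comparing versions as floats would rank 4.9.10 below 4.9.2.

    tags = ['4.9.2-python3.11', '4.9.10-python3.11', '4.10.0-python3.11']
    latest = sorted(tags, key=lambda x: tuple(
        int(y) for y in x.split('-python')[0].split('.')[1:]))[-1]
    # (10, 0) > (9, 10) > (9, 2), so latest == '4.10.0-python3.11'
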
-    def create_container(self, image_repo: str, image_url: str,
-                         configs: DockerConfigs):
-        """Create a docker container according to the image name, record its
-        port, and return a WebHostProxy bound to the container's address.
-        """
-
-        worker_path = os.path.join(PROJECT_ROOT, 'azure_functions_worker')
-        script_path = os.path.join(TESTS_ROOT, configs.script_path)
-        env = {"AzureWebJobsFeatureFlags": "EnableWorkerIndexing",
-               "AzureWebJobsStorage": f"{os.getenv('AzureWebJobsStorage')}",
-               "AzureWebJobsEventHubConnectionString":
-                   f"{os.getenv('AzureWebJobsEventHubConnectionString')}",
-               "AzureWebJobsCosmosDBConnectionString":
-                   f"{os.getenv('AzureWebJobsCosmosDBConnectionString')}",
-               "AzureWebJobsServiceBusConnectionString":
-                   f"{os.getenv('AzureWebJobsServiceBusConnectionString')}",
-               "AzureWebJobsSqlConnectionString":
-                   f"{os.getenv('AzureWebJobsSqlConnectionString')}",
-               "AzureWebJobsEventGridTopicUri":
-                   f"{os.getenv('AzureWebJobsEventGridTopicUri')}",
-               "AzureWebJobsEventGridConnectionKey":
-                   f"{os.getenv('AzureWebJobsEventGridConnectionKey')}"
-               }
-
-        # configs.env defaults to None, so guard before merging.
-        configs.env = {**(configs.env or {}), **env}
-
-        if _CUSTOM_IMAGE:
-            image = _CUSTOM_IMAGE
-        else:
-            image = self.find_latest_image(image_repo, image_url)
-
-        container_worker_path = (
-            f"/azure-functions-host/workers/python/{_python_version}/"
-            "LINUX/X64/azure_functions_worker"
-        )
-
-        function_path = "/home/site/wwwroot"
-        configs.libraries = ((configs.libraries or [])
-                             + ['azurefunctions-extensions-base'])
-        install_libraries_cmd = []
-        install_libraries_cmd.extend(['pip', 'install'])
-        install_libraries_cmd.extend(['--platform=manylinux2014_x86_64'])
-        install_libraries_cmd.extend(configs.libraries)
-        install_libraries_cmd.extend(['-t',
-                                      f'{script_path}/{_libraries_path}'])
-        install_libraries_cmd.extend(['--only-binary=:all:'])
-
-        install_libraries_process = \
-            subprocess.run(args=install_libraries_cmd,
                           stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE)
-
-        if install_libraries_process.returncode != 0:
-            raise RuntimeError('Failed to install libraries')
-
-        run_cmd = []
-        run_cmd.extend([_docker_cmd, "run", "-p", "0:80", "-d"])
-        run_cmd.extend(["--name", _uuid, "--privileged"])
-        run_cmd.extend(["--cap-add", "SYS_ADMIN"])
-        run_cmd.extend(["--device", "/dev/fuse"])
-        run_cmd.extend(["-e", f"CONTAINER_NAME={_uuid}"])
-        run_cmd.extend(["-e", f"AzureFunctionsWebHost__hostid={_uuid}"])
-        run_cmd.extend(["-v", f"{worker_path}:{container_worker_path}"])
-        run_cmd.extend(["-v", f"{script_path}:{function_path}"])
-
-        if configs.env:
-            for key, value in configs.env.items():
-                run_cmd.extend(["-e", f"{key}={value}"])
-
-        run_cmd.append(image)
-        run_process = subprocess.run(args=run_cmd,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE)
-
-        if run_process.returncode != 0:
-            raise RuntimeError('Failed to create docker container for'
-                               f' {image} with uuid {_uuid}.'
-                               f' stderr: {run_process.stderr}')
-
-        # Wait six seconds for the port to be exposed.
-        sleep(6)
-
-        # Acquire the port number of the container.
-        port_cmd = [_docker_cmd, "port", _uuid]
-        port_process = subprocess.run(args=port_cmd,
-                                      stdout=subprocess.PIPE,
-                                      stderr=subprocess.PIPE)
-        if port_process.returncode != 0:
-            raise RuntimeError(f'Failed to acquire port for {_uuid}.'
-                               f' stderr: {port_process.stderr}')
-        port_number = port_process.stdout.decode().strip('\n').split(':')[-1]
-
-        # Wait six more seconds for the container to reach a ready state.
-        sleep(6)
-        self._addr = f'http://localhost:{port_number}'
-
-        return WebHostProxy(run_process, self._addr)
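
The port parsing above assumes the usual `docker port` output shape; for example, with hypothetical values:

    # $ docker port <container>
    # 80/tcp -> 0.0.0.0:49153
    stdout = '80/tcp -> 0.0.0.0:49153\n'
    port_number = stdout.strip('\n').split(':')[-1]  # '49153'
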
-
-
-class WebHostConsumption(WebHostDockerContainerBase):
-
-    def __init__(self, configs: DockerConfigs):
-        self.configs = configs
-
-    def spawn_container(self):
-        return self.create_container(_MESH_IMAGE_REPO,
-                                     _MESH_IMAGE_URL,
-                                     self.configs)
-
-
-class WebHostDedicated(WebHostDockerContainerBase):
-
-    def __init__(self, configs: DockerConfigs):
-        self.configs = configs
-
-    def spawn_container(self):
-        return self.create_container(_IMAGE_REPO, _IMAGE_URL,
-                                     self.configs)
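
Taken together, a hypothetical end-to-end use of these Docker helpers; the app folder, library list, and app setting are made up for illustration:

    configs = DockerConfigs(
        script_path=Path('emulator_tests'),           # hypothetical app folder
        libraries=['azure-eventhub'],                 # extra site-packages to install
        env={'PYTHON_ENABLE_DEBUG_LOGGING': '1'},
    )
    proxy = WebHostDedicated(configs).spawn_container()
    try:
        resp = proxy.request('GET', 'ping')           # hypothetical HTTP function
        assert resp.status_code == 200
    finally:
        proxy.close()
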
diff --git a/tests/utils/testutils_lc.py b/tests/utils/testutils_lc.py
deleted file mode 100644
index 43665a65a..000000000
--- a/tests/utils/testutils_lc.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-import base64
-import json
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import uuid
-from io import BytesIO
-from typing import Dict
-from urllib.request import urlopen
-from zipfile import ZipFile
-
-import requests
-from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives import hashes
-from cryptography.hazmat.primitives import padding
-
-from tests.utils.constants import PROJECT_ROOT
-
-# Linux Consumption Testing Constants
-_DOCKER_PATH = "DOCKER_PATH"
-_DOCKER_DEFAULT_PATH = "docker"
-_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list"
-_MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh"
-_FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \
-                   "/archive/refs/heads/dev.zip"
-_FUNC_FILE_NAME = "azure-functions-python-library-dev"
-_CUSTOM_IMAGE = "CUSTOM_IMAGE"
-_EXTENSION_BASE_ZIP = 'https://github.com/Azure/azure-functions-python-' \
-                      'extensions/archive/refs/heads/dev.zip'
-
-
-class LinuxConsumptionWebHostController:
-    """A controller that spawns a mesh Docker container and applies multiple
-    test cases to it.
-    """
-
-    _docker_cmd = os.getenv(_DOCKER_PATH, _DOCKER_DEFAULT_PATH)
-    _ports: Dict[str, str] = {}  # { uuid: port }
-    _mesh_images: Dict[str, str] = {}  # { host version: image tag }
-
-    def __init__(self, host_version: str, python_version: str):
-        """Initialize a new controller for the given host major version and
-        Python version.
-        """
-        self._uuid = str(uuid.uuid4())
-        self._host_version = host_version  # e.g. "4"
-        self._py_version = python_version  # e.g. "3.9"
-
-    @property
-    def url(self) -> str:
-        if self._uuid not in self._ports:
-            raise RuntimeError(f'Cannot resolve the URL of container'
-                               f' {self._uuid} since it is not spawned')
-
-        return f'http://localhost:{self._ports[self._uuid]}'
-
-    def assign_container(self, env: Dict[str, str] = None):
-        """Make a POST request to /admin/instance/assign to specialize the
-        container
-        """
-        env = dict(env or {})
-        url = f'http://localhost:{self._ports[self._uuid]}'
-
-        # Add compulsory fields in specialization context
-        env["FUNCTIONS_EXTENSION_VERSION"] = f"~{self._host_version}"
-        env["FUNCTIONS_WORKER_RUNTIME"] = "python"
-        env["FUNCTIONS_WORKER_RUNTIME_VERSION"] = self._py_version
-        env["WEBSITE_SITE_NAME"] = self._uuid
-        env["WEBSITE_HOSTNAME"] = f"{self._uuid}.azurewebsites.com"
-
-        # Send the specialization context via a POST request
-        req = requests.Request(
-            method="POST",
-            url=f"{url}/admin/instance/assign",
-            data=json.dumps({
-                "encryptedContext": self._get_site_encrypted_context(
-                    self._uuid, env
-                )
-            })
-        )
-        response = self.send_request(req)
-        if not response.ok:
-            stdout = self.get_container_logs()
-            raise RuntimeError(f'Failed to specialize container {self._uuid}'
-                               f' at {url} (status {response.status_code}).'
-                               f' stdout: {stdout}')
-
-    def send_request(
-        self,
-        req: requests.Request,
-        ses: requests.Session = None
-    ) -> requests.Response:
-        """Send a request with authorization token. Return a Response object"""
-        session = ses
-        if session is None:
-            session = requests.Session()
-
-        prepped = session.prepare_request(req)
-        prepped.headers['Content-Type'] = 'application/json'
-        prepped.headers['x-ms-site-restricted-token'] = (
-            self._get_site_restricted_token()
-        )
-        prepped.headers['x-site-deployment-id'] = self._uuid
-
-        resp = session.send(prepped)
-        return resp
-
-    @classmethod
-    def _find_latest_mesh_image(cls,
-                                host_major: str,
-                                python_version: str) -> str:
-        """Find the latest image in https://mcr.microsoft.com/v2/
-        azure-functions/mesh/tags/list, matching tags like
-        3.1.3-python<python_version>.
-        """
-        if host_major in cls._mesh_images:
-            return cls._mesh_images[host_major]
-
-        # Match e.g. 3.1.3-python3.9
-        regex = re.compile(host_major + r'\.\d+\.\d+-python' + python_version)
-
-        response = requests.get(_MESH_IMAGE_URL, allow_redirects=True)
-        if not response.ok:
-            raise RuntimeError(f'Failed to query latest image for'
-                               f' v{host_major} Python {python_version}.'
-                               f' Status {response.status_code}')
-
-        tag_list = response.json().get('tags', [])
-        # Remove the -upgrade suffix. Upgrade images were temporary
-        # images used to onboard customers from a previous version. These
-        # images are no longer used. (str.replace is used rather than
-        # str.strip, which removes characters, not a suffix.)
-        tag_list = [x.replace("-upgrade", "") for x in tag_list]
-
-        # List all the versions from the tags with suffix -python<version>
-        python_versions = list(filter(regex.match, tag_list))
-
-        # Sort the tags on the numeric (minor, patch) runtime version and
-        # pick the latest released runtime version for this Python version.
-        latest_version = sorted(python_versions, key=lambda x: tuple(
-            int(y) for y in x.split("-python")[0].split(".")[1:]))[-1]
-
-        image_tag = f'{_MESH_IMAGE_REPO}:{latest_version}'
-        cls._mesh_images[host_major] = image_tag
-        return image_tag
-
-    @staticmethod
-    def _download_azure_functions() -> None:
-        with urlopen(_FUNC_GITHUB_ZIP) as zipresp:
-            with ZipFile(BytesIO(zipresp.read())) as zfile:
-                zfile.extractall(tempfile.gettempdir())
-
-    @staticmethod
-    def _download_extensions() -> str:
-        folder = tempfile.gettempdir()
-        with urlopen(_EXTENSION_BASE_ZIP) as zipresp:
-            with ZipFile(BytesIO(zipresp.read())) as zfile:
-                zfile.extractall(tempfile.gettempdir())
-
-        return folder
-
-    def spawn_container(self,
-                        image: str,
-                        env: Dict[str, str] = None) -> str:
-        """Create a docker container from the given image and record its
-        port. Return the port of the container.
-        """
-        env = env or {}
-
-        # Construct environment variables and start the docker container
-        worker_path = os.path.join(PROJECT_ROOT, 'azure_functions_worker')
-
-        # TODO: Mount library in docker container
-        # self._download_azure_functions()
-
-        # Download python extension base package
-        ext_folder = self._download_extensions()
-
-        container_worker_path = (
-            f"/azure-functions-host/workers/python/{self._py_version}/"
-            "LINUX/X64/azure_functions_worker"
-        )
-
-        base_ext_container_path = (
-            f"/azure-functions-host/workers/python/{self._py_version}/"
-            "LINUX/X64/azurefunctions/extensions/base"
-        )
-
-        base_ext_local_path = (
-            f'{ext_folder}/azure-functions-python'
-            '-extensions-dev/azurefunctions-extensions-base'
-            '/azurefunctions/extensions/base'
-        )
-        run_cmd = []
-        run_cmd.extend([self._docker_cmd, "run", "-p", "0:80", "-d"])
-        run_cmd.extend(["--name", self._uuid, "--privileged"])
-        run_cmd.extend(["--cap-add", "SYS_ADMIN"])
-        run_cmd.extend(["--device", "/dev/fuse"])
-        run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"])
-        run_cmd.extend(["-e",
-                        f"CONTAINER_ENCRYPTION_KEY="
-                        f"{os.getenv('_DUMMY_CONT_KEY')}"])
-        run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"])
-        run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}'])
-        run_cmd.extend(["-v",
-                        f'{base_ext_local_path}:{base_ext_container_path}'])
-
-        for key, value in env.items():
-            run_cmd.extend(["-e", f"{key}={value}"])
-        run_cmd.append(image)
-
-        run_process = subprocess.run(args=run_cmd,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE)
-
-        if run_process.returncode != 0:
-            raise RuntimeError('Failed to spawn docker container for'
-                               f' {image} with uuid {self._uuid}.'
-                               f' stderr: {run_process.stderr}')
-
-        # Wait three seconds for the port to be exposed.
-        time.sleep(3)
-
-        # Acquire the port number of the container.
-        port_cmd = [self._docker_cmd, "port", self._uuid]
-        port_process = subprocess.run(args=port_cmd,
-                                      stdout=subprocess.PIPE,
-                                      stderr=subprocess.PIPE)
-        if port_process.returncode != 0:
-            raise RuntimeError(f'Failed to acquire port for {self._uuid}.'
-                               f' stderr: {port_process.stderr}')
-        port_number = port_process.stdout.decode().strip('\n').split(':')[-1]
-
-        # Register the port number into the table
-        self._ports[self._uuid] = port_number
-
-        # Wait six seconds for the container to reach a ready state.
-        time.sleep(6)
-        return port_number
-
-    def get_container_logs(self) -> str:
-        """Get the container logs as a single string. The `docker logs`
-        command merges stdout and stderr into one stream.
-        """
-        get_logs_cmd = [self._docker_cmd, "logs", self._uuid]
-        get_logs_process = subprocess.run(args=get_logs_cmd,
-                                          stdout=subprocess.PIPE,
-                                          stderr=subprocess.PIPE)
-
-        return get_logs_process.stdout.decode('utf-8')
-
-    def safe_kill_container(self) -> bool:
-        """Kill the container by its name. Returns True on success.
-        """
-        kill_cmd = [self._docker_cmd, "rm", "-f", self._uuid]
-        kill_process = subprocess.run(args=kill_cmd, stdout=subprocess.DEVNULL)
-        exit_code = kill_process.returncode
-
-        if self._uuid in self._ports:
-            del self._ports[self._uuid]
-        return exit_code == 0
-
-    @classmethod
-    def _get_site_restricted_token(cls) -> str:
-        """Get the header value to be used as x-ms-site-restricted-token;
-        the token expires in one day.
-        """
-        exp_ns = int(time.time() + 24 * 60 * 60) * 1000000000
-        return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'),
-                                    f'exp={exp_ns}')
-
-    @classmethod
-    def _get_site_encrypted_context(cls,
-                                    site_name: str,
-                                    env: Dict[str, str]) -> str:
-        """Get the encrypted context for placeholder mode specialization"""
-        ctx = {
-            "SiteId": 1,
-            "SiteName": site_name,
-            "Environment": env
-        }
-
-        # Ensure WEBSITE_SITE_NAME is set to simulate production mode
-        ctx["Environment"]["WEBSITE_SITE_NAME"] = site_name
-        return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'),
-                                    json.dumps(ctx))
-
-    @classmethod
-    def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str:
-        """Encrypt the plain text context into an encrypted message that can
-        be accepted by the host
-        """
-        # Decode the encryption key
-        encryption_key_bytes = base64.b64decode(encryption_key.encode())
-
-        # Pad the plaintext to be a multiple of the AES block size
-        padder = padding.PKCS7(algorithms.AES.block_size).padder()
-        plain_text_bytes = (padder.update(plain_text.encode())
-                            + padder.finalize())
-
-        # Initialization vector (IV) (fixed value for simplicity)
-        iv_bytes = '0123456789abcedf'.encode()
-
-        # Create AES cipher with CBC mode
-        cipher = Cipher(algorithms.AES(encryption_key_bytes),
-                        modes.CBC(iv_bytes), backend=default_backend())
-
-        # Perform encryption
-        encryptor = cipher.encryptor()
-        encrypted_bytes = (encryptor.update(plain_text_bytes)
-                           + encryptor.finalize())
-
-        # Compute SHA256 hash of the encryption key
-        digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
-        digest.update(encryption_key_bytes)
-        key_sha256 = digest.finalize()
-
-        # Encode IV, encrypted message, and SHA256 hash in base64
-        iv_base64 = base64.b64encode(iv_bytes).decode()
-        encrypted_base64 = base64.b64encode(encrypted_bytes).decode()
-        key_sha256_base64 = base64.b64encode(key_sha256).decode()
-
-        # Return the final result
-        return f'{iv_base64}.{encrypted_base64}.{key_sha256_base64}'
-
-    def __enter__(self):
-        mesh_image = (os.environ.get(_CUSTOM_IMAGE)
-                      or self._find_latest_mesh_image(self._host_version,
-                                                      self._py_version))
-        self.spawn_container(image=mesh_image)
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        logs = self.get_container_logs()
-        self.safe_kill_container()
-        shutil.rmtree(os.path.join(tempfile.gettempdir(), _FUNC_FILE_NAME),
-                      ignore_errors=True)
-
-        if traceback:
-            print(f'Test failed with container logs: {logs}',
-                  file=sys.stderr,
-                  flush=True)
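
As a sanity check on the token format produced by _encrypt_context above (iv.ciphertext.keyhash, each part base64-encoded), here is a hypothetical round-trip sketch. The key is a made-up test value standing in for _DUMMY_CONT_KEY, and decrypt_context is not part of the original module. The __enter__/__exit__ pair suggests the controller is used as a context manager, e.g. `with LinuxConsumptionWebHostController('4', '3.9') as ctrl: ctrl.assign_container(...)`.

    import base64
    import os

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    def decrypt_context(encryption_key: str, token: str) -> str:
        """Invert _encrypt_context: split 'iv.ciphertext.keyhash', decrypt,
        and strip the PKCS7 padding."""
        iv_b64, encrypted_b64, _key_sha256_b64 = token.split('.')
        key_bytes = base64.b64decode(encryption_key.encode())
        cipher = Cipher(algorithms.AES(key_bytes),
                        modes.CBC(base64.b64decode(iv_b64)),
                        backend=default_backend())
        decryptor = cipher.decryptor()
        padded = (decryptor.update(base64.b64decode(encrypted_b64))
                  + decryptor.finalize())
        unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
        return (unpadder.update(padded) + unpadder.finalize()).decode()

    # Hypothetical 32-byte AES key, base64-encoded like _DUMMY_CONT_KEY:
    key = base64.b64encode(b'0' * 32).decode()
    token = LinuxConsumptionWebHostController._encrypt_context(
        key, '{"SiteId": 1}')
    assert decrypt_context(key, token) == '{"SiteId": 1}'
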