Skip to content

Instantly share code, notes, and snippets.

@lmazuel
Last active February 9, 2021 23:02
Show Gist options
  • Save lmazuel/199218d0b1b20bc5c580f0cfa635139d to your computer and use it in GitHub Desktop.
LowLevel brainstorm
import os
import json
#
# Low-level client: lowest possible call
#
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient

# Endpoint and key are taken from the environment; the client owns the
# pipeline (policies, transport) but we drive the HTTP exchange by hand.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)

# Raw review texts; the service wants {"id": ..., "text": ...} records.
documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
documents = [
    {"id": str(position), "text": text}
    for position, text in enumerate(documents)
]

# Build the HttpRequest manually and push it through the client pipeline.
request = HttpRequest(
    "POST",
    "/languages",
    headers={'Content-Type': 'application/json'},
    data=json.dumps({"documents": documents}),
)
response: HttpResponse = text_analytics_client.request(request)
response.raise_for_status()

# Manual JSON decode; pair each document with its error slot and keep the clean ones.
result = json.loads(response.text())
reviewed_docs = [
    doc
    for doc, error in zip(result['documents'], result['errors'])
    if not error['error']
]
#
# Low-level client: lowest possible call low-level option
#
from azure.core.pipeline.transport import HttpResponse
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient

# Same environment-driven configuration as the previous sample.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)
documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
documents = [{"id": str(idx), "text": doc} for idx, doc in enumerate(documents)]

# FIX: the original path literal was unterminated ("/languages/, — missing
# closing quote, a SyntaxError). Closed as "/languages" to match the
# HttpRequest-based sample above.
response: HttpResponse = text_analytics_client.lowlevel().post(
    "/languages",
    headers={'Content-Type': 'application/json'},
    data=json.dumps({"documents": documents}),
)
response.raise_for_status()
result = json.loads(response.text())
# Keep only the documents whose paired error slot is empty.
reviewed_docs = [doc for doc, error in zip(result['documents'], result['errors']) if not error['error']]
#
# Low-level client: with preparers
#
from azure.core.pipeline.transport import HttpResponse
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient
from azure.ai.textanalytics.protocol import TextAnalyticsPreparers

# Environment-driven configuration, identical to the previous samples.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)
documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
documents = [{"id": str(idx), "text": doc} for idx, doc in enumerate(documents)]

# The preparer only builds an HttpRequest; it performs no network call.
request = TextAnalyticsPreparers.prepare_languages(
    path_1="foo",  # positional
    path_2="a/b/c",
    # **kwargs
    body=documents,  # Follow Swagger requiredness
    # FIX: keyword was misspelled "stroage" in the original sketch.
    header_path_x_ms_storage_whatever='bar',  # Follow Swagger requiredness
    api_version="3.1-preview",  # Required
    model_version="v1",
    show_stats=True,
)
response: HttpResponse = text_analytics_client.lowlevel().request(request)
response.raise_for_status()
result = json.loads(response.text())
# Keep only the documents whose paired error slot is empty.
reviewed_docs = [doc for doc, error in zip(result['documents'], result['errors']) if not error['error']]
#
# High-level autorest client: sending JSON
#
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient

# Client configuration comes from the environment, as in the other samples.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)

documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
# The generated operation accepts plain dicts shaped like the service schema.
documents = [{"id": str(index), "text": body} for index, body in enumerate(documents)]

result = text_analytics_client.languages(documents)  # Might raise
# Keep each document whose paired error slot is empty.
reviewed_docs = []
for doc, error in zip(result.documents, result.errors):
    if not error.error:
        reviewed_docs.append(doc)
#
# High-level autorest client: sending models
#
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient
from azure.ai.textanalytics.models import LanguageInput

# Environment-driven configuration, identical to the other samples.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)

documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
# Same call as the JSON variant, but the inputs are typed Swagger models.
documents = [
    LanguageInput(id=str(position), text=chunk)
    for position, chunk in enumerate(documents)
]

result = text_analytics_client.languages(documents)  # Might raise
reviewed_docs = [
    doc
    for doc, err in zip(result.documents, result.errors)
    if not err.error
]
#
# High-level manual client
#
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient

# Environment-driven configuration, identical to the other samples.
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
text_analytics_client = TextAnalyticsClient(
    endpoint=endpoint,
    credential=AzureKeyCredential(key),
    logging_enable=True,
)

documents = [
    """
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
    """
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
""",
]
# The handcrafted client takes the raw strings directly — no id/text wrapping.
result = text_analytics_client.detect_language(documents)  # Might raise
# Keep only per-document results that did not come back as errors.
reviewed_docs = [item for item in result if not item.is_error]
from azure.core.credentials import AzureKeyCredential
from azure.ai.textanalytics import TextAnalyticsClient
from azure.ai.textanalytics.protocol.v3_1_preview import TextAnalyticsPreparers
endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]
# I create a regular client as usual.
# This sets up my policies, transports, etc. specific to Text Analytics.
text_analytics_client = TextAnalyticsClient(
endpoint=endpoint,
credential=AzureKeyCredential(key),
logging_enable=True
)
documents = [
"""
The concierge Paulette was extremely helpful. Sadly when we arrived the elevator was broken, but with Paulette's help we barely noticed this inconvenience.
She arranged for our baggage to be brought up to our room with no extra charge and gave us a free meal to refurbish all of the calories we lost from
walking up the stairs :). Can't say enough good things about my experience!
""",
"""
最近由于工作压力太大,我们决定去富酒店度假。那儿的温泉实在太舒服了,我跟我丈夫都完全恢复了工作前的青春精神!加油!
"""
]
documents = [{"id": str(idx), "text": doc} for idx, doc in enumerate(documents)]
# This is Swagger-generated but only produces a request; it is NOT doing a call.
# It never fails, there is no client-side validation, and it is completely optional if you
# feel like creating the request yourself.
# The body here could be a JSON dict (like this example) or a model from the Swagger generation; Python will adapt.
# Low-level generation will disable flattening, making certain that there is only ONE body parameter.
# All parameters will be optional.
# For the sake of clarity, body parameters are always called "body" whatever the Swagger names them.
# This follows the kwargs-only syntax we use for models for now.
# This method is, at its core, a query-parameter / path-parameter / header-parameter builder.
# Preparers are static, meaning they won't include client-level parameters (endpoint). They do include API version info though,
# since they are imported from an API-version module name.
request = TextAnalyticsPreparers.prepare_languages(
documents,
model_version="v1",
show_stats=True,
)
# You have full flexibility to change whatever you want in the request afterwards.
request.headers["x-ms-sdk-value"] = "lowlevelprototype/1.0.0b1"
# All calls are done through "invoke". Swagger-generated methods are just preparers and optional.
response = text_analytics_client.invoke(request)
# This is a convenience layer to raise if the status is >= 400.
# This is a Python convention: https://2.python-requests.org/en/master/user/quickstart/#response-status-codes
# This raises an HttpResponseError from azure-core.
response.raise_for_status()
# Since it's Python, you can always deserialize if you really want to, but it's a manual step.
from azure.ai.textanalytics.protocol.v3_1_preview.models import DocumentsOutput
result: DocumentsOutput = DocumentsOutput.deserialize(response)
# A high-level autorest-generated method is then syntactic sugar over the
# preparer + invoke + deserialize sequence, with some "requiredness" validation.
def high_level_languages_example(documents, model_version=None, show_stats=None):  # Some params can be required
    """Sketch of what a generated high-level `languages` operation would do.

    FIX: the original sketch referenced undefined names (`self`, `client`)
    and had lost its body indentation; it now uses the module-level
    `TextAnalyticsPreparers` and `text_analytics_client` defined above.

    :param documents: list of {"id", "text"} dicts (or equivalent models).
    :param model_version: optional service model version, e.g. "v1".
    :param show_stats: optional flag to include statistics in the response.
    :returns: the deserialized DocumentsOutput.
    :raises: HttpResponseError via raise_for_status() on HTTP >= 400.
    """
    request = TextAnalyticsPreparers.prepare_languages(documents, model_version, show_stats)
    response = text_analytics_client.invoke(request)
    response.raise_for_status()
    return DocumentsOutput.deserialize(response)
@lmazuel
Copy link
Author

lmazuel commented Jan 21, 2021

  • The two-layer architecture we recommend:
    o Pure generation: low-level generation + high level autorest generation
    o Handcrafted: low-level generation + handcrafted layer

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment