Wednesday, March 26, 2025

Hybrid Connections WebSockets in Azure Relay

Source: https://learn.microsoft.com/en-us/azure/azure-relay/relay-hybrid-connections-dotnet-get-started

1. Create a namespace

Get management credentials (the SAS key name and key, or the namespace connection string).

Create a hybrid connection and note its name.
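
If you copied the namespace connection string instead of the individual values, it can be split into the pieces the listener and sender below expect. A small sketch (not part of the original post), assuming the Microsoft.Azure.Relay package; the connection string value is a placeholder:

using System;
using Microsoft.Azure.Relay;

class ParseRelayConnectionString
{
    static void Main()
    {
        // Placeholder: paste the connection string from "Shared access policies" in the portal.
        string connectionString =
            "Endpoint=sb://<namespace>.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<key>";

        // RelayConnectionStringBuilder splits the string into the values used in the samples below.
        var builder = new RelayConnectionStringBuilder(connectionString);
        Console.WriteLine($"RelayNamespace: {builder.Endpoint.Host}");
        Console.WriteLine($"KeyName: {builder.SharedAccessKeyName}");
        Console.WriteLine($"Key: {builder.SharedAccessKey}");
    }
}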


2. Create a server application (listener)

using Microsoft.Azure.Relay;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace myserver
{
    public class Program
    {
        private const string RelayNamespace = "sreerelay.servicebus.windows.net";
        private const string ConnectionName = "sreehybridconn";
        private const string KeyName = "RootManageSharedAccessKey";
        private const string Key = "Wtcg6qDIGI4aFts+qYH+zmHwCL1Q=";

        public static void Main(string[] args)
        {
            RunAsync().GetAwaiter().GetResult();
        }

        private static async Task RunAsync()
        {
            var cts = new CancellationTokenSource();
            var tokenProvider = TokenProvider.CreateSharedAccessSignatureTokenProvider(KeyName, Key);
            var listener = new HybridConnectionListener(
                new Uri(string.Format("sb://{0}/{1}", RelayNamespace, ConnectionName)), tokenProvider);
            listener.Connecting += (o, e) => { Console.WriteLine("Connecting"); };
            listener.Offline += (o, e) => { Console.WriteLine("Offline"); };
            listener.Online += (o, e) => { Console.WriteLine("Online"); };
            await listener.OpenAsync(cts.Token);
            Console.WriteLine("Server listening");
            cts.Token.Register(() => listener.CloseAsync(CancellationToken.None));
            new Task(() => Console.In.ReadLineAsync().ContinueWith((s) => { cts.Cancel(); })).Start();
            while (true)
            {
                var relayConnection = await listener.AcceptConnectionAsync();
                if (relayConnection == null)
                {
                    break;
                }
                ProcessMessagesOnConnection(relayConnection, cts);
            }
            await listener.CloseAsync(cts.Token);
        }

        private static async void ProcessMessagesOnConnection(HybridConnectionStream relayConnection, CancellationTokenSource cts)
        {
            Console.WriteLine("New session");
            var reader = new StreamReader(relayConnection);
            var writer = new StreamWriter(relayConnection) { AutoFlush = true };
            while (!cts.IsCancellationRequested)
            {
                try
                {
                    var line = await reader.ReadLineAsync();
                    if (string.IsNullOrEmpty(line))
                    {
                        await relayConnection.ShutdownAsync(cts.Token);
                        break;
                    }
                    Console.WriteLine(line);
                    await writer.WriteLineAsync($"Echo: {line}");
                }
                catch (IOException)
                {
                    Console.WriteLine("Client closed connection");
                    break;
                }
            }
            Console.WriteLine("End session");
            await relayConnection.CloseAsync(cts.Token);
        }
    }
}


3. Create a client application (sender)

using Microsoft.Azure.Relay;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace myclient
{
    class Program
    {
        private const string RelayNamespace = "sreerelay.servicebus.windows.net";
        private const string ConnectionName = "sreehybridconn";
        private const string KeyName = "RootManageSharedAccessKey";
        private const string Key = "Wtcg6qDIGI4aFts+qYH+zmHwCL1Q=";

        static void Main(string[] args)
        {
            RunAsync().GetAwaiter().GetResult();
        }

        private static async Task RunAsync()
        {
            Console.WriteLine("Enter lines of text to send to the server with ENTER");
            var tokenProvider = TokenProvider.CreateSharedAccessSignatureTokenProvider(KeyName, Key);
            var client = new HybridConnectionClient(
                new Uri(String.Format("sb://{0}/{1}", RelayNamespace, ConnectionName)), tokenProvider);
            var relayConnection = await client.CreateConnectionAsync();

            var reads = Task.Run(async () =>
            {
                var reader = new StreamReader(relayConnection);
                var writer = Console.Out;
                do
                {
                    string line = await reader.ReadLineAsync();
                    if (String.IsNullOrEmpty(line))
                        break;
                    await writer.WriteLineAsync(line);
                }
                while (true);
            });

            var writes = Task.Run(async () =>
            {
                var reader = Console.In;
                var writer = new StreamWriter(relayConnection) { AutoFlush = true };
                do
                {
                    string line = await reader.ReadLineAsync();
                    await writer.WriteLineAsync(line);
                    if (String.IsNullOrEmpty(line))
                        break;
                }
                while (true);
            });
           
            await Task.WhenAll(reads, writes);
            await relayConnection.CloseAsync(CancellationToken.None);
        }
    }
}


Output:


Monday, March 10, 2025

Actionable Email Developer Dashboard


Source:
Get started with actionable messages in Office 365: https://learn.microsoft.com/en-us/outlook/actionable-messages/get-started

Register your service with the actionable email developer dashboard: https://learn.microsoft.com/en-us/outlook/actionable-messages/email-dev-dashboard

1. Create a provider in the Actionable Email Developer Dashboard: https://outlook.office.com/connectors/oam/publish

2. Save "Provider Id (originator)" in notepad.

3. Approve the submitted provider here: https://outlook.office.com/connectors/oam/admin





4. Prepare the HTML message body with the embedded Adaptive Card JSON as below:

<html>
<head>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
  <script type="application/adaptivecard+json">{
    "type": "AdaptiveCard",
    "version": "1.0",
    "hideOriginalBody": true,
    "originator": "Provider Id (originator) : GUID from Previous step",
    "body": [
      {
        "type": "TextBlock",
        "text": "Visit the Outlook Dev Portal",
        "size": "large"
      },
      {
        "type": "TextBlock",
        "text": "Click **Learn More** to learn more about Actionable Messages!"
      },
      {
        "type": "Input.Text",
        "id": "feedbackText",
        "placeholder": "Let us know what you think about Actionable Messages"
      }
    ],
    "actions": [
      {
        "type": "Action.Http",
        "title": "Send Feedback",
        "method": "POST",
        "url": "https://...",
        "body": "{{feedbackText.value}}"
      },
      {
        "type": "Action.OpenUrl",
        "title": "Learn More",
        "url": "https://learn.microsoft.com/outlook/actionable-messages"
      }
    ]
  }
  </script>
</head>
<body>
Visit the <a href="https://learn.microsoft.com/outlook/actionable-messages">Outlook Dev Portal</a> to learn more about Actionable Messages.
</body>
</html>
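
The Send Feedback action in the card above posts the value of feedbackText to the URL configured in Action.Http. As a rough sketch only (not from the original post; the route and messages are placeholders), an ASP.NET Core minimal API endpoint that receives that POST could look like the following. A production endpoint must also validate the bearer token Office 365 sends in the Authorization header before trusting the request.

// Hypothetical receiver for the card's "Send Feedback" Action.Http POST (ASP.NET Core minimal API).
var builder = WebApplication.CreateBuilder(args);
var app = builder.Build();

app.MapPost("/feedback", async (HttpContext context) =>
{
    // TODO: validate the Authorization bearer token per the actionable messages security guidance.

    // The request body is the raw feedbackText value substituted by Outlook.
    using var reader = new StreamReader(context.Request.Body);
    string feedback = await reader.ReadToEndAsync();
    Console.WriteLine($"Feedback received: {feedback}");

    // Outlook shows the CARD-ACTION-STATUS header value to the user after the action completes.
    context.Response.Headers["CARD-ACTION-STATUS"] = "Thanks for the feedback!";
    return Results.Ok();
});

app.Run();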

C# Code:
using Azure.Identity;
using Microsoft.Graph;
using Microsoft.Graph.Models;
using Microsoft.Graph.Users.Item.SendMail;

public class Program
{
    public static async Task Main(string[] args)
    {
        var clientId = "6e4110e7-e5b0d411db60";
        var tenantId = "bb55f134-82bf54373c6d";
        var clientSecret = "imx8Q~Q~AHcGN";
        var userFromEmail = "user1@test.onmicrosoft.com";
        var userToEmails = "user2@test.onmicrosoft.com,user3@test.onmicrosoft.com";

        var credential = new ClientSecretCredential(tenantId, clientId, clientSecret);
        var graphClient = new GraphServiceClient(credential);

        string adaptiveCardJson = @"<html>
        <head>
          <meta http-equiv='Content-Type' content='text/html; charset=utf-8'>
          <script type='application/adaptivecard+json'>{
            'type': 'AdaptiveCard',
            'version': '1.0',
            'hideOriginalBody': true,
            'originator': 'a115aabe-03994fbaf1d',
            'body': [
              {
                'type': 'TextBlock',
                'text': 'Visit the Outlook Dev Portal',
                'size': 'large'
              },
              {
                'type': 'TextBlock',
                'text': 'Click **Learn More** to learn more about Actionable Messages!'
              },
              {
                'type': 'Input.Text',
                'id': 'feedbackText',
                'placeholder': 'Let us know what you think about Actionable Messages'
              }
            ],
            'actions': [
              {
                'type': 'Action.Http',
                'title': 'Send Feedback',
                'method': 'POST',
                'url': 'https://...',
                'body': '{{feedbackText.value}}'
              },
              {
                'type': 'Action.OpenUrl',
                'title': 'Learn More',
                'url': 'https://learn.microsoft.com/outlook/actionable-messages'
              }
            ]
          }
          </script>
        </head>
        <body>
        Visit the <a href='https://learn.microsoft.com/outlook/actionable-messages'>Outlook Dev
        Portal</a> to learn more about Actionable Messages.
        </body>
        </html>";

        List<string> userToEmailList = new List<string>(userToEmails.Split(','));
        var message = new Message
        {
            Subject = "Test Subject",
            Body = new ItemBody
            {
                ContentType = BodyType.Html,
                Content = $"{adaptiveCardJson}"
            },
            ToRecipients = userToEmailList.Select(email =>
                new Recipient { EmailAddress = new EmailAddress { Address = email } }).ToList(),
        };
        var sendMailRequest = new SendMailPostRequestBody { Message = message };
        await graphClient.Users[userFromEmail].SendMail.PostAsync(sendMailRequest);

        Console.ReadKey();
    }
}



Output:






Monday, March 3, 2025

Connect SharePoint using Azure App Application/Delegate Authentication in C#

1. Connect SharePoint using Azure App Application Authentication in C#
Create an Azure app registration, add a client secret, and grant it the required Microsoft Graph application permissions (for example, Sites.Read.All) with admin consent.


using Azure.Identity;
using Microsoft.Graph;

public class Program
{
    public static async Task Main(string[] args)
    {
        string tenantId = "";
        string clientId = "";
        string clientSecret = "";
        string siteId = "283f598a-6b0f-4ba5-af06-c72a0cef8f42";
        string listId = "e9609d64-1f36-45a2-8260-743998ea2cd4";
        var credential = new ClientSecretCredential(tenantId, clientId, clientSecret);
        var graphClient = new GraphServiceClient(credential);
        var items = await graphClient.Sites[siteId].Lists[listId].Items.GetAsync();
        foreach (var item in items.Value)
        {
            Console.WriteLine($"Item ID: {item.Id}, Created By: {item.CreatedBy?.User?.DisplayName}");
        }
        Console.ReadKey();
    }
}



2. Connect SharePoint using Azure App Delegate Authentication in C#
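
For the delegated option, a minimal sketch (not from the original post), assuming a public-client app registration with the delegated Sites.Read.All permission granted, Azure.Identity's InteractiveBrowserCredential, and placeholder IDs:

using Azure.Identity;
using Microsoft.Graph;

public class Program
{
    public static async Task Main(string[] args)
    {
        string tenantId = "";
        string clientId = "";   // public client app registration with delegated Sites.Read.All
        string siteId = "<site-id>";
        string listId = "<list-id>";

        // Opens a browser window and signs in as the user (delegated permissions).
        var credential = new InteractiveBrowserCredential(new InteractiveBrowserCredentialOptions
        {
            TenantId = tenantId,
            ClientId = clientId,
            RedirectUri = new Uri("http://localhost")
        });

        var scopes = new[] { "Sites.Read.All" };
        var graphClient = new GraphServiceClient(credential, scopes);

        var items = await graphClient.Sites[siteId].Lists[listId].Items.GetAsync();
        foreach (var item in items.Value)
        {
            Console.WriteLine($"Item ID: {item.Id}, Created By: {item.CreatedBy?.User?.DisplayName}");
        }
        Console.ReadKey();
    }
}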

Wednesday, February 19, 2025

Azure AI services | Language service | Azure Cognitive Search | Sentiment analysis and opinion mining


1. Create a Language resource in Azure and copy its key and endpoint.

using Azure;
using System;
using Azure.AI.TextAnalytics;
using System.Collections.Generic;

namespace Example
{
    class Program
    {
        // This example requires environment variables named "LANGUAGE_KEY" and "LANGUAGE_ENDPOINT"

        // Environment.GetEnvironmentVariable("LANGUAGE_KEY");
        static string languageKey = "9BuOuIbUCOGGygz";
       
        //Environment.GetEnvironmentVariable("LANGUAGE_ENDPOINT");
        static string languageEndpoint = "https://sreelanguage.cognitiveservices.azure.com/";

        private static readonly AzureKeyCredential credentials = new AzureKeyCredential(languageKey);
        private static readonly Uri endpoint = new Uri(languageEndpoint);

        // Example method for detecting opinions text.
        static void SentimentAnalysisWithOpinionMiningExample(TextAnalyticsClient client)
        {
            var documents = new List<string>
            {
                "The food and service were unacceptable. The concierge was nice, however."
            };

            AnalyzeSentimentResultCollection reviews = client.AnalyzeSentimentBatch(documents,
            options: new AnalyzeSentimentOptions()
            {
                IncludeOpinionMining = true
            });

            foreach (AnalyzeSentimentResult review in reviews)
            {
                Console.WriteLine($"Document sentiment: {review.DocumentSentiment.Sentiment}\n");
                Console.WriteLine($"\tPositive score:
                {review.DocumentSentiment.ConfidenceScores.Positive:0.00}");
                Console.WriteLine($"\tNegative score:
                {review.DocumentSentiment.ConfidenceScores.Negative:0.00}");
                Console.WriteLine($"\tNeutral score:
                {review.DocumentSentiment.ConfidenceScores.Neutral:0.00}\n");

                foreach (SentenceSentiment sentence in review.DocumentSentiment.Sentences)
                {
                    Console.WriteLine($"\tText: \"{sentence.Text}\"");
                    Console.WriteLine($"\tSentence sentiment: {sentence.Sentiment}");
                    Console.WriteLine($"\tSentence positive score:
                    {sentence.ConfidenceScores.Positive:0.00}");
                    Console.WriteLine($"\tSentence negative score:
                    {sentence.ConfidenceScores.Negative:0.00}");
                    Console.WriteLine($"\tSentence neutral score:
                    {sentence.ConfidenceScores.Neutral:0.00}\n");

                    foreach (SentenceOpinion sentenceOpinion in sentence.Opinions)
                    {
                        Console.WriteLine($"\tTarget: {sentenceOpinion.Target.Text}, Value:
                        {sentenceOpinion.Target.Sentiment}");
                        Console.WriteLine($"\tTarget positive score:
                        {sentenceOpinion.Target.ConfidenceScores.Positive:0.00}");
                        Console.WriteLine($"\tTarget negative score:
                        {sentenceOpinion.Target.ConfidenceScores.Negative:0.00}");
                        foreach (AssessmentSentiment assessment in sentenceOpinion.Assessments)
                        {
                            Console.WriteLine($"\t\tRelated Assessment: {assessment.Text}, Value:
                            {assessment.Sentiment}");
                            Console.WriteLine($"\t\tRelated Assessment positive score:
                            {assessment.ConfidenceScores.Positive:0.00}");
                            Console.WriteLine($"\t\tRelated Assessment negative score:
                            {assessment.ConfidenceScores.Negative:0.00}");
                        }
                    }
                }
                Console.WriteLine($"\n");
            }
        }

        static void Main(string[] args)
        {
            var client = new TextAnalyticsClient(endpoint, credentials);
            SentimentAnalysisWithOpinionMiningExample(client);
            Console.Write("Press any key to exit.");
            Console.ReadKey();
        }
    }
}


Output:


Monday, February 17, 2025

Azure AI services | Document intelligence | Use prebuilt Document Intelligence models


Source: https://github.com/MicrosoftLearning/mslearn-ai-document-intelligence

https://documentintelligence.ai.azure.com/studio

C# Code:

using Azure;
using Azure.AI.FormRecognizer.DocumentAnalysis;

// dotnet add package Azure.AI.FormRecognizer --version 4.1.0

// Store connection information
string endpoint = "https://sreedocumentintelligence.cognitiveservices.azure.com/";
string apiKey = "BxcKE20FOGiN8b";

Uri fileUri = new Uri("https://github.com/MicrosoftLearning/mslearn-ai-document-intelligence/blob/main/Labfiles/01-prebuild-models/sample-invoice/sample-invoice.pdf?raw=true");

Console.WriteLine("\nConnecting to Forms Recognizer at: {0}", endpoint);
Console.WriteLine("Analyzing invoice at: {0}\n", fileUri.ToString());

// Create the client
var cred = new AzureKeyCredential(apiKey);
var client = new DocumentAnalysisClient(new Uri(endpoint), cred);

// Analyze the invoice
AnalyzeDocumentOperation operation = await client.AnalyzeDocumentFromUriAsync(WaitUntil.Completed,
"prebuilt-invoice", fileUri);


// Display invoice information to the user
AnalyzeResult result = operation.Value;

foreach (AnalyzedDocument invoice in result.Documents)
{
    if (invoice.Fields.TryGetValue("VendorName", out DocumentField? vendorNameField))
    {
        if (vendorNameField.FieldType == DocumentFieldType.String)
        {
            string vendorName = vendorNameField.Value.AsString();
            Console.WriteLine($"Vendor Name: '{vendorName}', with confidence
            {vendorNameField.Confidence}.");
        }
    }

    if (invoice.Fields.TryGetValue("CustomerName", out DocumentField? customerNameField))
    {
        if (customerNameField.FieldType == DocumentFieldType.String)
        {
            string customerName = customerNameField.Value.AsString();
            Console.WriteLine($"Customer Name: '{customerName}', with confidence
            {customerNameField.Confidence}.");
        }
    }

    if (invoice.Fields.TryGetValue("InvoiceTotal", out DocumentField? invoiceTotalField))
    {
        if (invoiceTotalField.FieldType == DocumentFieldType.Currency)
        {
            CurrencyValue invoiceTotal = invoiceTotalField.Value.AsCurrency();
            Console.WriteLine($"Invoice Total: '{invoiceTotal.Symbol}{invoiceTotal.Amount}',
            with confidence {invoiceTotalField.Confidence}.");
        }
    }
}

Console.WriteLine("\nAnalysis complete.\n");


Output:


Python Code:
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import DocumentAnalysisClient

# pip install azure-ai-formrecognizer==3.3.3

# Store connection information
endpoint = "https://sreedocumentintelligence.cognitiveservices.azure.com/"
key = "BxcKE20FOGiN8b"

fileUri = "https://github.com/MicrosoftLearning/mslearn-ai-document-intelligence/blob/main/Labfiles
/01-prebuild-models/sample-invoice/sample-invoice.pdf?raw=true"
fileLocale = "en-US"
fileModelId = "prebuilt-invoice"

print(f"\nConnecting to Forms Recognizer at: {endpoint}")
print(f"Analyzing invoice at: {fileUri}")

# Create the client
document_analysis_client = DocumentAnalysisClient(
     endpoint=endpoint, credential=AzureKeyCredential(key)
)

# Analyse the invoice
poller = document_analysis_client.begin_analyze_document_from_url(
     fileModelId, fileUri, locale=fileLocale
)

# Display invoice information to the user
receipts = poller.result()
   
for idx, receipt in enumerate(receipts.documents):
    vendor_name = receipt.fields.get("VendorName")
    if vendor_name:
        print(f"\nVendor Name: {vendor_name.value}, with confidence {vendor_name.confidence}.")

    customer_name = receipt.fields.get("CustomerName")
    if customer_name:
        print(f"Customer Name: '{customer_name.value}, with confidence {customer_name.confidence}.")


    invoice_total = receipt.fields.get("InvoiceTotal")
    if invoice_total:
        print(f"Invoice Total: '{invoice_total.value.symbol}{invoice_total.value.amount},
        with confidence {invoice_total.confidence}.")

print("\nAnalysis complete.\n")

Output:




Tuesday, February 11, 2025

Azure AI services - Detect and Analyze Faces


Source: https://github.com/MicrosoftLearning/mslearn-ai-vision

1. Create an Azure AI services multi-service account resource in the Azure portal and copy its key and endpoint.

Detect Faces:
C# Code:
using System;
using System.Drawing;
using Microsoft.Extensions.Configuration;
using Azure;
using System.IO;

// dotnet add package Azure.AI.Vision.ImageAnalysis -v 1.0.0-beta.3

// Import namespaces
using Azure.AI.Vision.ImageAnalysis;

namespace detect_people
{
    class Program
    {
        static void Main(string[] args)
        {
            try
            {
                // Get config settings from AppSettings
                IConfigurationBuilder builder =
                new ConfigurationBuilder().AddJsonFile("appsettings.json");
                IConfigurationRoot configuration = builder.Build();
                string aiSvcEndpoint = "https://sreemultiserviceaccount1.cognitiveservices.azure.com/"; // configuration["AIServicesEndpoint"];
                string aiSvcKey = "2D9XtWQ0Yfuw3AAAEACOGFMV1"; // configuration["AIServiceKey"];

                // Get image
                string imageFile = "images/people.jpg";
                if (args.Length > 0)
                {
                    imageFile = args[0];
                }

                // Authenticate Azure AI Vision client
                ImageAnalysisClient cvClient = new ImageAnalysisClient(
                    new Uri(aiSvcEndpoint),
                    new AzureKeyCredential(aiSvcKey));

                // Analyze image
                AnalyzeImage(imageFile, cvClient);

            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }

        static void AnalyzeImage(string imageFile, ImageAnalysisClient client)
        {
            Console.WriteLine($"\nAnalyzing {imageFile} \n");

            // Use a file stream to pass the image data to the analyze call
            using FileStream stream = new FileStream(imageFile, FileMode.Open);

            // Get result with specified features to be retrieved (PEOPLE)
            ImageAnalysisResult result = client.Analyze(
                BinaryData.FromStream(stream),
                VisualFeatures.People);

            // Close the stream
            stream.Close();

            // Get people in the image
            if (result.People.Values.Count > 0)
            {
                Console.WriteLine($" People:");

                // Prepare image for drawing
                System.Drawing.Image image = System.Drawing.Image.FromFile(imageFile);
                Graphics graphics = Graphics.FromImage(image);
                Pen pen = new Pen(Color.Cyan, 3);

                // Draw bounding box around detected people
                foreach (DetectedPerson person in result.People.Values)
                {
                    if (person.Confidence > 0.5)
                    {
                        // Draw object bounding box
                        var r = person.BoundingBox;
                        Rectangle rect = new Rectangle(r.X, r.Y, r.Width, r.Height);
                        graphics.DrawRectangle(pen, rect);
                    }

                    // Return the confidence of the person detected
                    Console.WriteLine($"   Bounding box {person.BoundingBox.ToString()},
                    Confidence: {person.Confidence:F2}");
                }

                // Save annotated image
                String output_file = "people.jpg";
                image.Save(output_file);
                Console.WriteLine("  Results saved in " + output_file + "\n");
            }
        }
    }
}



Output:



Analyze Faces:
C# Code:
using System;
using System.IO;
using System.Linq;
using System.Drawing;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;

// dotnet add package Azure.AI.Vision.Face -v 1.0.0-beta.2

// Import namespaces
using Azure;
using Azure.AI.Vision.Face;

namespace analyze_faces
{
    class Program
    {
        private static FaceClient faceClient;
        static async Task Main(string[] args)
        {
            try
            {
                // Get config settings from AppSettings
                IConfigurationBuilder builder =
                new ConfigurationBuilder().AddJsonFile("appsettings.json");
                IConfigurationRoot configuration = builder.Build();
                string cogSvcEndpoint = "https://sreemultiserviceaccount1.cognitiveservices.azure.com/"; // configuration["AIServicesEndpoint"];
                string cogSvcKey = "2D9XtWQ0Yfuw3AAAEACOGFMV1"; // configuration["AIServiceKey"];

                // Authenticate Face client
                faceClient = new FaceClient(
                    new Uri(cogSvcEndpoint),
                    new AzureKeyCredential(cogSvcKey));

                // Menu for face functions
                Console.WriteLine("1: Detect faces\nAny other key to quit");
                Console.WriteLine("Enter a number:");
                string command = Console.ReadLine();
                switch (command)
                {
                    case "1":
                        await DetectFaces("images/people.jpg");
                        break;
                    default:
                        break;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }

        static async Task DetectFaces(string imageFile)
        {
            Console.WriteLine($"Detecting faces in {imageFile}");

            // Specify facial features to be retrieved
            FaceAttributeType[] features = new FaceAttributeType[]
            {
                FaceAttributeType.Detection03.HeadPose,
                FaceAttributeType.Detection03.Blur,
                FaceAttributeType.Detection03.Mask
            };

            // Get faces
            using (var imageData = File.OpenRead(imageFile))
            {
                var response = await faceClient.DetectAsync(
                    BinaryData.FromStream(imageData),
                    FaceDetectionModel.Detection03,
                    FaceRecognitionModel.Recognition04,
                    returnFaceId: false,
                    returnFaceAttributes: features);
                IReadOnlyList<FaceDetectionResult> detected_faces = response.Value;

                if (detected_faces.Count() > 0)
                {
                    Console.WriteLine($"{detected_faces.Count()} faces detected.");

                    // Prepare image for drawing
                    Image image = Image.FromFile(imageFile);
                    Graphics graphics = Graphics.FromImage(image);
                    Pen pen = new Pen(Color.LightGreen, 3);
                    Font font = new Font("Arial", 4);
                    SolidBrush brush = new SolidBrush(Color.White);
                    int faceCount = 0;

                    // Draw and annotate each face
                    foreach (var face in detected_faces)
                    {
                        faceCount++;
                        Console.WriteLine($"\nFace number {faceCount}");

                        // Get face properties
                        Console.WriteLine($" - Head Pose (Yaw): {face.FaceAttributes.HeadPose.Yaw}");
                        Console.WriteLine($" - Head Pose (Pitch):
                        {face.FaceAttributes.HeadPose.Pitch}");
                        Console.WriteLine($" - Head Pose (Roll):
                        {face.FaceAttributes.HeadPose.Roll}");
                        Console.WriteLine($" - Blur: {face.FaceAttributes.Blur.BlurLevel}");
                        Console.WriteLine($" - Mask: {face.FaceAttributes.Mask.Type}");

                        // Draw and annotate face
                        var r = face.FaceRectangle;
                        Rectangle rect = new Rectangle(r.Left, r.Top, r.Width, r.Height);
                        graphics.DrawRectangle(pen, rect);
                        string annotation = $"Face number {faceCount}";
                        graphics.DrawString(annotation, font, brush, r.Left, r.Top);
                    }

                    // Save annotated image
                    String output_file = "detected_faces.jpg";
                    image.Save(output_file);
                    Console.WriteLine(" Results saved in " + output_file);
                }
            }
        }
    }
}


Output:



Detect Faces:
Python Code:
from dotenv import load_dotenv
import os
from PIL import Image, ImageDraw
import sys
from matplotlib import pyplot as plt
import numpy as np
#pip install azure-ai-vision-imageanalysis==1.0.0b3
# import namespaces
from azure.ai.vision.imageanalysis import ImageAnalysisClient
from azure.ai.vision.imageanalysis.models import VisualFeatures
from azure.core.credentials import AzureKeyCredential

def main():
    global cv_client

    try:
        # Get Configuration Settings
        load_dotenv()
        ai_endpoint = 'https://sreemultiserviceaccount1.cognitiveservices.azure.com/'  # os.getenv('AI_SERVICE_ENDPOINT')
        ai_key = '2D9XtWQ0Yfuw3AAAEACOGFMV1'  # os.getenv('AI_SERVICE_KEY')

        # Get image
        image_file = 'images/people.jpg'
        if len(sys.argv) > 1:
            image_file = sys.argv[1]

        with open(image_file, "rb") as f:
            image_data = f.read()

         # Authenticate Azure AI Vision client
        cv_client = ImageAnalysisClient(
            endpoint=ai_endpoint,
            credential=AzureKeyCredential(ai_key)
        )
       
        # Analyze image
        AnalyzeImage(image_file, image_data, cv_client)

    except Exception as ex:
        print(ex)

def AnalyzeImage(filename, image_data, cv_client):
    print('\nAnalyzing ', filename)

     # Get result with specified features to be retrieved (PEOPLE)
    result = cv_client.analyze(
         image_data=image_data,
        visual_features=[
             VisualFeatures.PEOPLE],
    )
   
    # Identify people in the image
    if result.people is not None:
        print("\nPeople in image:")

        # Prepare image for drawing
        image = Image.open(filename)
        fig = plt.figure(figsize=(image.width/100, image.height/100))
        plt.axis('off')
        draw = ImageDraw.Draw(image)
        color = 'cyan'

         # Draw bounding box around detected people
        for detected_people in result.people.list:
            if(detected_people.confidence > 0.5):
                 # Draw object bounding box
                r = detected_people.bounding_box
                bounding_box = ((r.x, r.y), (r.x + r.width, r.y + r.height))
                draw.rectangle(bounding_box, outline=color, width=3)

                # Return the confidence of the person detected
                print(" {} (confidence: {:.2f}%)".format(detected_people.bounding_box,
                detected_people.confidence * 100))

        # Save annotated image
        plt.imshow(image)
        plt.tight_layout(pad=0)
        outputfile = 'people.jpg'
        fig.savefig(outputfile)
        print('  Results saved in', outputfile)

if __name__ == "__main__":
    main()

Output:




Analyze Faces:
Python Code:
from dotenv import load_dotenv
import os
from PIL import Image, ImageDraw
from matplotlib import pyplot as plt

#pip install azure-cognitiveservices-vision-face==0.6.0

# Import namespaces
from azure.cognitiveservices.vision.face import FaceClient
from azure.cognitiveservices.vision.face.models import FaceAttributeType
from msrest.authentication import CognitiveServicesCredentials

def main():
    global face_client

    try:
        # Get Configuration Settings
        load_dotenv()
        # cog_endpoint = os.getenv('AI_SERVICE_ENDPOINT')
        # cog_key = os.getenv('AI_SERVICE_KEY')
       
        cog_endpoint = 'https://sreemultiserviceaccount1.cognitiveservices.azure.com/'
        cog_key ='2D9XtWQ0Yfuw3AAAEACOGFMV1'

        # Authenticate Face client
        credentials = CognitiveServicesCredentials(cog_key)
        face_client = FaceClient(cog_endpoint, credentials)

        # Menu for face functions
        print('1: Detect faces\nAny other key to quit')
        command = input('Enter a number:')
        if command == '1':
            DetectFaces(os.path.join('images','people.jpg'))

    except Exception as ex:
        print(ex)

def DetectFaces(image_file):
    print('Detecting faces in', image_file)

    # Specify facial features to be retrieved
    features = [
        FaceAttributeType.occlusion,
        FaceAttributeType.blur,
        FaceAttributeType.glasses
    ]

    # Get faces
    with open(image_file, mode="rb") as image_data:
        detected_faces = face_client.face.detect_with_stream(
            image=image_data,
            return_face_attributes=features,
            return_face_id=False
        )

    if len(detected_faces) > 0:
        print(len(detected_faces), 'faces detected.')

        # Prepare image for drawing
        fig = plt.figure(figsize=(8, 6))
        plt.axis('off')
        image = Image.open(image_file)
        draw = ImageDraw.Draw(image)
        color = 'lightgreen'
        face_count = 0

        # Draw and annotate each face
        for face in detected_faces:
            # Get face properties
            face_count += 1
            print('\nFace number {}'.format(face_count))
            detected_attributes = face.face_attributes.as_dict()

            if 'blur' in detected_attributes:
                print(' - Blur:')
                for blur_name in detected_attributes['blur']:
                    print('   - {}: {}'.format(blur_name, detected_attributes['blur'][blur_name]))

            if 'occlusion' in detected_attributes:
                print(' - Occlusion:')
                for occlusion_name in detected_attributes['occlusion']:
                    print('   - {}: {}'.format(occlusion_name, detected_attributes['occlusion'][occlusion_name]))

            if 'glasses' in detected_attributes:
                print(' - Glasses: {}'.format(detected_attributes['glasses']))

            # Draw and annotate face
            r = face.face_rectangle
            bounding_box = ((r.left, r.top), (r.left + r.width, r.top + r.height))
            draw.rectangle(bounding_box, outline=color, width=5)
            annotation = 'Face number {}'.format(face_count)
            plt.annotate(annotation, (r.left, r.top), backgroundcolor=color)

        # Save annotated image
        plt.imshow(image)
        outputfile = 'detected_faces.jpg'
        fig.savefig(outputfile)
        print('\nResults saved in', outputfile)

if __name__ == "__main__":
    main()

Output:







