@@ -9,7 +9,7 @@ and includes six main functions:
 - Personally Identifiable Information Entity Recognition
 - Linked Entity Recognition
 - Healthcare Recognition <sup>beta</sup>
-- Analyze Operation <sup>beta</sup>
+- Support Multiple Actions Per Document <sup>beta</sup>

 [Source code][source_code] | [Package (Maven)][package] | [API reference documentation][api_reference_doc] | [Product Documentation][product_documentation] | [Samples][samples_readme]

@@ -315,7 +315,7 @@ Text Analytics for health is a containerized service that extracts and labels re
 unstructured texts such as doctor's notes, discharge summaries, clinical documents, and electronic health records.
 Currently, Azure Active Directory (AAD) is not supported in the Healthcare recognition feature. In order to use this
 functionality, you must request access to the public preview. For more information, see [How to: Use Text Analytics for health][healthcare].
-<!-- embedme ./src/samples/java/com/azure/ai/textanalytics/ReadmeSamples.java#L189-L232 -->
+<!-- embedme ./src/samples/java/com/azure/ai/textanalytics/ReadmeSamples.java#L189-L225 -->
 ```java
 List<TextDocumentInput> documents = Arrays.asList(new TextDocumentInput("0",
     "RECORD #333582770390100 | MH | 85986313 | | 054351 | 2/14/2001 12:00:00 AM | "
@@ -353,29 +353,14 @@ syncPoller.getFinalResult().forEach(healthcareTaskResult ->
             healthcareEntityLink.getDataSourceId(), healthcareEntityLink.getDataSource()));
         }
     });
-    healthcareEntities.getEntityRelations().forEach(
-        healthcareEntityRelation ->
-            System.out.printf("Is bidirectional: %s, target: %s, source: %s, relation type: %s.%n",
-                healthcareEntityRelation.isBidirectional(),
-                healthcareEntityRelation.getTargetLink(),
-                healthcareEntityRelation.getSourceLink(),
-                healthcareEntityRelation.getRelationType()));
 }));
 ```
-To cancel a long-running healthcare task,
-<!-- embedme ./src/samples/java/com/azure/ai/textanalytics/ReadmeSamples.java#L239-L243 -->
-```java
-SyncPoller<TextAnalyticsOperationResult, Void> textAnalyticsOperationResultVoidSyncPoller
-    = textAnalyticsClient.beginCancelHealthcareTask("{healthcare_task_id}",
-        new RecognizeHealthcareEntityOptions().setPollInterval(Duration.ofSeconds(10)), Context.NONE);
-PollResponse<TextAnalyticsOperationResult> poll = textAnalyticsOperationResultVoidSyncPoller.poll();
-System.out.printf("Task status: %s.%n", poll.getStatus());
-```
-### Analyze multiple tasks
+
+### Analyze multiple actions
 The `Analyze` functionality allows you to choose which of the supported Text Analytics features to execute in the same
 set of documents. Currently, the supported features are: `entity recognition`, `key phrase extraction`, and
 `Personally Identifiable Information (PII) recognition`.
-<!-- embedme ./src/samples/java/com/azure/ai/textanalytics/ReadmeSamples.java#L250-L290 -->
+<!-- embedme ./src/samples/java/com/azure/ai/textanalytics/ReadmeSamples.java#L232-L280 -->
 ```java
 List<TextDocumentInput> documents = Arrays.asList(
     new TextDocumentInput("0",
@@ -387,34 +372,42 @@ List<TextDocumentInput> documents = Arrays.asList(
387372 + " www.contososteakhouse.com, call 312-555-0176 or send email to order@contososteakhouse.com! The"
388373 + " only complaint I have is the food didn't come fast enough. Overall I highly recommend it!" )
389374);
390- SyncPoller<TextAnalyticsOperationResult , PagedIterable<AnalyzeTasksResult > > syncPoller =
391- textAnalyticsClient. beginAnalyzeTasks(documents,
392- new AnalyzeTasksOptions (). setDisplayName(" {tasks_display_name}" )
393- .setKeyPhrasesExtractionTasks(Arrays . asList(new KeyPhrasesTask ()))
394- .setPiiEntitiesRecognitionTasks(Arrays . asList(new PiiTask ())),
375+
376+ SyncPoller<AnalyzeBatchActionsOperationDetail , PagedIterable<AnalyzeBatchActionsResult > > syncPoller =
377+ textAnalyticsClient. beginAnalyzeBatchActions(documents,
378+ new TextAnalyticsActions (). setDisplayName(" {tasks_display_name}" )
379+ .setExtractKeyPhrasesOptions(new ExtractKeyPhrasesOptions ())
380+ .setRecognizePiiEntitiesOptions(new RecognizePiiEntitiesOptions ()),
381+ new AnalyzeBatchActionsOptions (). setIncludeStatistics(false ),
395382 Context . NONE );
396383syncPoller. waitForCompletion();
397- syncPoller. getFinalResult(). forEach(analyzeJobState - > {
398- analyzeJobState. getKeyPhraseExtractionTasks(). forEach(taskResult - > {
384+ syncPoller. getFinalResult(). forEach(analyzeBatchActionsResult - > {
385+ System . out. println(" Key phrases extraction action results:" );
386+ analyzeBatchActionsResult. getExtractKeyPhrasesActionResults(). forEach(actionResult - > {
399387 AtomicInteger counter = new AtomicInteger ();
400- for (ExtractKeyPhraseResult extractKeyPhraseResult : taskResult) {
401- System . out. printf(" %n%s%n" , documents. get(counter. getAndIncrement()));
402- System . out. println(" Extracted phrases:" );
403- extractKeyPhraseResult. getKeyPhrases()
404- .forEach(keyPhrases - > System . out. printf(" \t %s.%n" , keyPhrases));
388+ if (! actionResult. isError()) {
389+ for (ExtractKeyPhraseResult extractKeyPhraseResult : actionResult. getResult()) {
390+ System . out. printf(" %n%s%n" , documents. get(counter. getAndIncrement()));
391+ System . out. println(" Extracted phrases:" );
392+ extractKeyPhraseResult. getKeyPhrases()
393+ .forEach(keyPhrases - > System . out. printf(" \t %s.%n" , keyPhrases));
394+ }
405395 }
406396 });
407- analyzeJobState. getEntityRecognitionPiiTasks(). forEach(taskResult - > {
397+ System . out. println(" PII entities recognition action results:" );
398+ analyzeBatchActionsResult. getRecognizePiiEntitiesActionResults(). forEach(actionResult - > {
408399 AtomicInteger counter = new AtomicInteger ();
409- for (RecognizePiiEntitiesResult entitiesResult : taskResult) {
410- System . out. printf(" %n%s%n" , documents. get(counter. getAndIncrement()));
411- PiiEntityCollection piiEntityCollection = entitiesResult. getEntities();
412- System . out. printf(" Redacted Text: %s%n" , piiEntityCollection. getRedactedText());
413- piiEntityCollection. forEach(entity - > System . out. printf(
414- " Recognized Personally Identifiable Information entity: %s, entity category: %s, "
415- + " entity subcategory: %s, offset: %s, confidence score: %f.%n" ,
416- entity. getText(), entity. getCategory(), entity. getSubcategory(), entity. getOffset(),
417- entity. getConfidenceScore()));
400+ if (! actionResult. isError()) {
401+ for (RecognizePiiEntitiesResult entitiesResult : actionResult. getResult()) {
402+ System . out. printf(" %n%s%n" , documents. get(counter. getAndIncrement()));
403+ PiiEntityCollection piiEntityCollection = entitiesResult. getEntities();
404+ System . out. printf(" Redacted Text: %s%n" , piiEntityCollection. getRedactedText());
405+ piiEntityCollection. forEach(entity - > System . out. printf(
406+ " Recognized Personally Identifiable Information entity: %s, entity category: %s, "
407+ + " entity subcategory: %s, offset: %s, confidence score: %f.%n" ,
408+ entity. getText(), entity. getCategory(), entity. getSubcategory(), entity. getOffset(),
409+ entity. getConfidenceScore()));
410+ }
418411 }
419412 });
420413});
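
The snippets above assume a `textAnalyticsClient` has already been created. As a minimal sketch (the `{key}` and `{endpoint}` placeholders are illustrative and should be replaced with the values from your Cognitive Services or Text Analytics resource), a synchronous client can be built roughly like this:

```java
import com.azure.ai.textanalytics.TextAnalyticsClient;
import com.azure.ai.textanalytics.TextAnalyticsClientBuilder;
import com.azure.core.credential.AzureKeyCredential;

// Build a synchronous Text Analytics client used by the samples above;
// the key and endpoint come from your Azure resource.
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
    .credential(new AzureKeyCredential("{key}"))
    .endpoint("{endpoint}")
    .buildClient();
```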