Manage Azure Data Lake Analytics using a Java app

Important

Azure Data Lake Analytics was retired on 29 February 2024. For more information, see this announcement.

For data analytics, your organization can use Azure Synapse Analytics or Microsoft Fabric.

This article describes how to manage Azure Data Lake Analytics accounts, data sources, jobs, and users by using an app written with the Azure Java SDK.

Prerequisites

  • Java Development Kit (JDK) 8 (using Java version 1.8).
  • IntelliJ or another suitable Java development environment. The instructions in this document use IntelliJ.
  • Create a Microsoft Entra application and retrieve its client ID, tenant ID, and key. For more information about Microsoft Entra applications and instructions on how to get a client ID, see Create an Active Directory application and service principal using the portal. The reply URI and key are also available from the portal once the application has been created and the key generated.

Authenticate using Microsoft Entra ID

The following snippet provides the code for non-interactive authentication, where the application supplies its own credentials.
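For reference, here is a minimal sketch of building such a credential with the azure-identity library; the placeholder values stand in for your own application registration, and the full sample later in this article uses the same approach.

import com.azure.identity.ClientSecretCredential;
import com.azure.identity.ClientSecretCredentialBuilder;

public class AuthenticationSketch {
    public static void main(String[] args) {
        // Placeholder values; use the client ID, tenant ID, and secret of your own
        // Microsoft Entra application registration.
        ClientSecretCredential credential = new ClientSecretCredentialBuilder()
                .tenantId("<TENANT-ID>")
                .clientId("<CLIENT-ID>")
                .clientSecret("<CLIENT-SECRET>")
                .build();

        // The credential is then passed to the management and storage clients, as shown
        // in the full sample below; no interactive sign-in prompt is involved.
    }
}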

Create a Java application

  1. Open IntelliJ and create a Java project using the Command Line App template.
  2. Right-click the project on the left-hand side of your screen and select Add Framework Support. Choose Maven and select OK.
  3. Open the newly created "pom.xml" file and add the following snippet of text between the </version> tag and the </project> tag:
<dependencies>
    <dependency>
      <groupId>com.azure.resourcemanager</groupId>
      <artifactId>azure-resourcemanager-datalakeanalytics</artifactId>
      <version>1.0.0-beta.1</version>
    </dependency>
    <dependency>
      <groupId>com.azure.resourcemanager</groupId>
      <artifactId>azure-resourcemanager-datalakestore</artifactId>
      <version>1.0.0-beta.1</version>
    </dependency>
    <dependency>
      <groupId>com.azure</groupId>
      <artifactId>azure-storage-file-datalake</artifactId>
      <version>12.7.2</version>
    </dependency>
    <dependency>
      <groupId>com.azure</groupId>
      <artifactId>azure-identity</artifactId>
      <version>1.4.1</version>
    </dependency>
</dependencies>

Go to File > Settings > Build, Execution, Deployment. Select Build Tools > Maven > Importing. Then select Import Maven projects automatically.

Open Main.java and replace the existing code block with the following code:

import com.azure.core.credential.TokenCredential;
import com.azure.core.management.AzureEnvironment;
import com.azure.core.management.profile.AzureProfile;
import com.azure.identity.ClientSecretCredential;
import com.azure.identity.ClientSecretCredentialBuilder;
import com.azure.resourcemanager.datalakeanalytics.DataLakeAnalyticsManager;
import com.azure.resourcemanager.datalakeanalytics.models.DataLakeAnalyticsAccount;
import com.azure.resourcemanager.datalakestore.DataLakeStoreManager;
import com.azure.resourcemanager.datalakestore.models.DataLakeStoreAccount;
import com.azure.storage.file.datalake.DataLakeFileClient;
import com.azure.storage.file.datalake.DataLakeFileSystemClient;
import com.azure.storage.file.datalake.DataLakeServiceClient;
import com.azure.storage.file.datalake.DataLakeServiceClientBuilder;
import com.azure.storage.file.datalake.models.PathAccessControl;
import com.azure.storage.file.datalake.models.PathPermissions;

import java.io.ByteArrayInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.UUID;

public class Main {
    private static String adlsAccountName;
    private static String adlaAccountName;
    private static String resourceGroupName;
    private static String location;

    private static String tenantId;
    private static String subscriptionId;
    private static String clientId;
    private static String clientSecret;
    private static String fileSystemName;
    private static String localFolderPath;

    private static DataLakeAnalyticsManager analyticsManager;
    private static DataLakeStoreManager storeManager;
    private static DataLakeStoreAccount storeAccount;
    private static DataLakeAnalyticsAccount analyticsAccount;
    private static DataLakeServiceClient serviceClient;
    private static DataLakeFileSystemClient fileSystemClient;
    private static DataLakeFileClient fileClient;

    public static void main(String[] args) throws Exception {
        adlsAccountName = "<DATA-LAKE-STORE-NAME>";
        adlaAccountName = "<DATA-LAKE-ANALYTICS-NAME>";
        resourceGroupName = "<RESOURCE-GROUP-NAME>";
        location = "East US 2";

        tenantId = "<TENANT-ID>";
        subscriptionId = "<SUBSCRIPTION-ID>";
        clientId = "<CLIENT-ID>";
        clientSecret = "<CLIENT-SECRET>";
        fileSystemName = "<DATALAKE-FILE-SYSTEM-NAME>";

        localFolderPath = "C:\\local_path\\";

        // ----------------------------------------
        // Authenticate
        // ----------------------------------------
        AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        ClientSecretCredential creds = new ClientSecretCredentialBuilder()
                .clientId(clientId).tenantId(tenantId).clientSecret(clientSecret)
                .authorityHost("https://login.microsoftonline.com/")
                .build();
        setupClients(creds, profile);

        // ----------------------------------------
        // List Data Lake Store and Analytics accounts that this app can access
        // ----------------------------------------
        System.out.println(String.format("All ADL Store accounts that this app can access in subscription %s:", subscriptionId));
        storeManager.accounts().list().forEach(acct -> System.out.println(acct.name()));

        System.out.println(String.format("All ADL Analytics accounts that this app can access in subscription %s:", subscriptionId));
        analyticsManager.accounts().list().forEach(acct -> System.out.println(acct.name()));
        waitForNewline("Accounts displayed.", "Creating files.");

        // ----------------------------------------
        // Create a file in Data Lake Store: input1.csv
        // ----------------------------------------
        createFile("input1.csv", "123,abc", true);
        waitForNewline("File created.", "Submitting a job.");

        // ----------------------------------------
        // Submit a job to Data Lake Analytics
        // ----------------------------------------
        String script = "@input = EXTRACT Row1 string, Row2 string FROM \"/input1.csv\" USING Extractors.Csv(); OUTPUT @input TO @\"/output1.csv\" USING Outputters.Csv();";
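        // submitJobByScript is assumed to be a helper method (not shown in this listing)
        // that submits the U-SQL script above as a Data Lake Analytics job and returns its job ID.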
        UUID jobId = submitJobByScript(script, "testJob", creds);
        waitForNewline("Job submitted.", "Getting job status.");

        // ----------------------------------------
        // Download job output from Data Lake Store
        // ----------------------------------------
        downloadFile("output1.csv", localFolderPath + "output1.csv");
        waitForNewline("Job output downloaded.", "Deleting file.");

        deleteFile("output1.csv");
        waitForNewline("File deleted.", "Done.");
    }

    public static void setupClients(TokenCredential creds, AzureProfile profile) {

        analyticsManager = DataLakeAnalyticsManager.authenticate(creds, profile);

        storeManager = DataLakeStoreManager.authenticate(creds, profile);

        createAccounts();

        serviceClient = new DataLakeServiceClientBuilder().endpoint(storeAccount.endpoint()).credential(creds).buildClient();

        fileSystemClient = serviceClient.createFileSystem(fileSystemName);

    }

    public static void waitForNewline(String reason, String nextAction) {
        if (nextAction == null)
            nextAction = "";

        System.out.println(reason + "\r\nPress ENTER to continue...");
        try {
            System.in.read();
        } catch (Exception e) {
        }

        if (!nextAction.isEmpty()) {
            System.out.println(nextAction);
        }
    }

    // Create accounts
    public static void createAccounts() {
        // Create ADLS account
        storeAccount = storeManager.accounts().define(adlsAccountName)
                .withRegion(location)
                .withExistingResourceGroup(resourceGroupName)
                .create();

        analyticsAccount = analyticsManager.accounts().define(adlaAccountName)
                .withRegion(location).withExistingResourceGroup(resourceGroupName)
                .withDefaultDataLakeStoreAccount(adlsAccountName)
                .withDataLakeStoreAccounts(Collections.EMPTY_LIST)
                .create();
    }

    // Create a file
    public static void createFile(String path, String contents, boolean force) {
        byte[] bytesContents = contents.getBytes();

        DataLakeFileClient fileClient = fileSystemClient.createFile(path, force);
        PathAccessControl accessControl = fileClient.getAccessControl();
        fileClient.setPermissions(PathPermissions.parseOctal("744"), accessControl.getGroup(), accessControl.getOwner());
        fileClient.upload(new ByteArrayInputStream(bytesContents), bytesContents.length);
    }

    // Delete a file
    public static void deleteFile(String filePath) {
        fileSystemClient.getFileClient(filePath).delete();
    }

    // Download a file
    private static void downloadFile(String srcPath, String destPath) throws IOException {

        fileClient = fileSystemClient.getFileClient(srcPath);
        OutputStream outputStream = new FileOutputStream(destPath);
        fileClient.read(outputStream);
        outputStream.close();
    }

}

Provide values for the parameters called out in the code snippet (as an alternative, the sketch after this list shows how you might supply them from environment variables instead of hard-coding them):

  • adlsAccountName
  • adlaAccountName
  • resourceGroupName
  • location
  • tenantId
  • subscriptionId
  • clientId
  • clientSecret
  • fileSystemName
  • localFolderPath
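
As an alternative to hard-coding these values in Main.java, you could read them from environment variables. The following is a minimal sketch under that assumption; the variable names used here are arbitrary choices, not names required by the SDK:

import java.util.Objects;

public class ConfigSketch {
    // Reads a required setting from the environment and fails fast if it is missing.
    private static String require(String name) {
        return Objects.requireNonNull(System.getenv(name), "Missing environment variable: " + name);
    }

    public static void main(String[] args) {
        String tenantId = require("AZURE_TENANT_ID");
        String clientId = require("AZURE_CLIENT_ID");
        String clientSecret = require("AZURE_CLIENT_SECRET");
        String subscriptionId = require("AZURE_SUBSCRIPTION_ID");

        System.out.println("Configuration loaded for tenant " + tenantId);
    }
}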

Next steps