Skip to content

Commit

Permalink
[appservice] Update appservice projects to use snippets (#32502)
Browse files Browse the repository at this point in the history
### Packages impacted by this PR

- @azure-rest/arm-appservice
- @azure/arm-appservice
- @azure/arm-appservice-profile-2020-09-01-hybrid

### Issues associated with this PR

- #32416

### Describe the problem that is addressed by this PR

Updates all projects under `appservice` to use snippets extraction.

### What are the possible designs available to address the problem? If
there is more than one possible design, why was the one in this PR
chosen?


### Are there test cases added in this PR? _(If not, why?)_


### Provide a list of related PRs _(if any)_


### Command used to generate this PR: _(Applicable only to SDK release
request PRs)_

### Checklists
- [ ] Added impacted package name to the issue description
- [ ] Does this PR need any fixes in the SDK Generator? _(If so,
create an Issue in the
[Autorest/typescript](https://github.com/Azure/autorest.typescript)
repository and link it here)_
- [ ] Added a changelog (if necessary)
  • Loading branch information
mpodwysocki authored Jan 10, 2025
1 parent 1a03c44 commit 897aecc
Show file tree
Hide file tree
Showing 21 changed files with 408 additions and 229 deletions.
102 changes: 82 additions & 20 deletions common/tools/dev-tool/src/commands/run/update-snippets.ts
Original file line number Diff line number Diff line change
Expand Up @@ -229,15 +229,12 @@ async function parseSnippetDefinitions(
sourceFile,
);

const imports: [string, string][] = [];
const imports: { name: string; moduleSpecifier: string; isDefault: boolean }[] = [];

// This nested visitor is just for extracting the imports of a symbol.
const symbolImportVisitor: ts.Visitor = (node: ts.Node) => {
let importLocations: string[] | undefined;
if (
ts.isIdentifier(node) &&
(importLocations = extractImportLocations(node)) !== undefined
) {
if (ts.isIdentifier(node)) {
const importLocations = extractImportLocations(node);
if (importLocations.length > 1) {
// We can probably handle this, but it's an obscure case and it's probably better to let it error out and
// then observe whether or not we actually need (or even _want_) snippets with merged imports.
Expand All @@ -247,7 +244,10 @@ async function parseSnippetDefinitions(
} else if (importLocations.length === 1) {
// The symbol was imported, so we need to track the imports to add them to the snippet later.
log.debug(`symbol ${node.text} was imported from ${importLocations[0]}`);
imports.push([node.text, importLocations[0]]);
imports.push({
name: node.text,
...importLocations[0],
});
}
// else the symbol was not imported within this file, so it must be defined in the ambient context of the
// module, so we don't need to generate any code for it.
Expand All @@ -264,23 +264,56 @@ async function parseSnippetDefinitions(
// file using `convert`.
log.debug(`found a snippet named ${name.text}: \n${contents}`);

interface ImportedSymbols {
default?: string;
named?: Set<string>;
}

// We have a loose map of imports in the form { [k:symbol]: module } and we need to anneal it into a map
// { [k: module]: symbol[] } (one import statement per module with the whole list of symbols imported from it)
const importMap = new Map<string, Set<string>>(
imports.map(([, module]) => [module, new Set()]),
);
const importMap = new Map<string, ImportedSymbols>();

for (const [symbol, name] of imports) {
importMap.get(name)!.add(symbol);
for (const { name, moduleSpecifier, isDefault } of imports) {
let moduleImports = importMap.get(moduleSpecifier);
if (!moduleImports) {
moduleImports = {};
importMap.set(moduleSpecifier, moduleImports);
}
if (isDefault) {
if (moduleImports.default) {
throw new Error(
`unrecoverable error: multiple default imports from the same module '${moduleSpecifier}'`,
);
}
moduleImports.default = name;
} else {
if (!moduleImports.named) {
moduleImports.named = new Set();
}
moduleImports.named.add(name);
}
}

// Form import declarations and prepend them to the rest of the contents.
const fullSnippetTypeScriptText = (
[...importMap.entries()]
.map(
([module, symbols]) =>
`import { ${[...symbols.values()].join(", ")} } from "${module}";`,
)
.map(([module, imports]) => {
const importParts = [];
if (imports.default) {
importParts.push(imports.default);
}
if (imports.named) {
importParts.push(`{ ${[...imports.named].join(", ")} }`);
}

if (importParts.length === 0) {
throw new Error(
`unrecoverable error: no imports were generated for the snippet '${name.text}'`,
);
}

return `import ${importParts.join(", ")} from "${module}";`;
})
.join(EOL) +
EOL +
EOL +
Expand Down Expand Up @@ -387,11 +420,14 @@ async function parseSnippetDefinitions(
* @param node - the node to check for imports
 * @returns a list of import descriptors (`{ moduleSpecifier, isDefault }`) that form the definition of the node's symbol; empty if the symbol was not imported
*/
function extractImportLocations(node: ts.Node): string[] | undefined {
function extractImportLocations(node: ts.Node): {
isDefault: boolean;
moduleSpecifier: string;
}[] {
const sym = checker.getSymbolAtLocation(node);

// Get all the decls that are in source files and where the decl comes from an import clause.
return sym?.declarations
const nonDefaultExports = sym?.declarations
?.filter(
(decl) =>
decl.getSourceFile() === sourceFile &&
Expand All @@ -411,12 +447,38 @@ async function parseSnippetDefinitions(
moduleSpecifierText === path.join(relativeIndexPath, "index.js") ||
moduleSpecifierText === path.join(relativeIndexPath, "index")
) {
return project.name;
return { moduleSpecifier: project.name, isDefault: false };
} else {
return moduleSpecifierText;
return { moduleSpecifier: moduleSpecifierText, isDefault: false };
}
},
);

const defaultExports = sym?.declarations
?.filter(
(decl) =>
decl.getSourceFile() === sourceFile &&
ts.isImportClause(decl) &&
ts.isImportDeclaration(decl.parent) &&
decl.name,
)
.map((decl) => {
const moduleSpecifierText = (
(decl.parent as ts.ImportDeclaration).moduleSpecifier as ts.StringLiteral
).text;

if (
moduleSpecifierText === relativeIndexPath ||
moduleSpecifierText === path.join(relativeIndexPath, "index.js") ||
moduleSpecifierText === path.join(relativeIndexPath, "index")
) {
return { moduleSpecifier: project.name, isDefault: true };
} else {
return { moduleSpecifier: moduleSpecifierText, isDefault: true };
}
});

return [...(nonDefaultExports ?? []), ...(defaultExports ?? [])];
}
}

Expand Down
6 changes: 3 additions & 3 deletions sdk/agrifood/agrifood-farming-rest/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET
Use the returned token credential to authenticate the client:

```ts snippet:CreateFarmBeatsClient
import { FarmBeats } from "@azure-rest/agrifood-farming";
import FarmBeats from "@azure-rest/agrifood-farming";
import { DefaultAzureCredential } from "@azure/identity";

const client = FarmBeats(
Expand Down Expand Up @@ -96,7 +96,7 @@ Once you have authenticated and created the client object as shown in the [Authe
section, you can create a party within the Data Manager for Agriculture resource like this:

```ts snippet:CreateParty
import { FarmBeats, isUnexpected } from "@azure-rest/agrifood-farming";
import FarmBeats, { isUnexpected } from "@azure-rest/agrifood-farming";
import { DefaultAzureCredential } from "@azure/identity";

const client = FarmBeats(
Expand Down Expand Up @@ -127,7 +127,7 @@ console.log(`Created Party: ${party.name}`);
### List Parties

```ts snippet:ListParties
import { FarmBeats, isUnexpected, paginate } from "@azure-rest/agrifood-farming";
import FarmBeats, { isUnexpected, paginate } from "@azure-rest/agrifood-farming";
import { DefaultAzureCredential } from "@azure/identity";

const client = FarmBeats(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@ import type {
import { getLongRunningPoller, isUnexpected } from "../../src/index.js";
import { createClient, createRecorder } from "./utils/recordedClient.js";
import type { Recorder } from "@azure-tools/test-recorder";
import { isNode } from "@azure/core-util";
import { isNodeLike } from "@azure/core-util";
import { describe, it, assert, beforeEach, afterEach } from "vitest";

const startDateTime = new Date("2020-02-01T08:00:00.000Z");
const endDateTime = new Date("2020-03-02T08:00:00.000Z");
const suffix = isNode ? "node" : "browser";
const suffix = isNodeLike ? "node" : "browser";
const partyId = `${suffix}-contoso-party`;
const boundaryId = `${suffix}-contoso-boundary`;
const testparty = {
Expand All @@ -28,13 +28,13 @@ describe("party Operations", () => {
let recorder: Recorder;
let client: FarmBeatsClient;

beforeEach(async function (ctx) {
beforeEach(async (ctx) => {
recorder = await createRecorder(ctx);
client = createClient(recorder.configureClientOptions({}));
jobId = recorder.variable("jobId", `${suffix}-job-${Math.ceil(Math.random() * 1000)}`);
});

afterEach(async function () {
afterEach(async () => {
await recorder.stop();
});

Expand Down
2 changes: 1 addition & 1 deletion sdk/agrifood/agrifood-farming-rest/test/snippets.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
// Licensed under the MIT License.

import { describe, it } from "vitest";
import { FarmBeats, isUnexpected, paginate } from "@azure-rest/agrifood-farming";
import FarmBeats, { isUnexpected, paginate } from "../src/index.js";
import { DefaultAzureCredential } from "@azure/identity";
import { setLogLevel } from "@azure/logger";

Expand Down
18 changes: 9 additions & 9 deletions sdk/anomalydetector/ai-anomaly-detector-rest/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -90,12 +90,12 @@ The following section provides several code snippets covering some of the most c
### Batch detection

```ts snippet:batch_detection
import {
import AnomalyDetector, {
TimeSeriesPoint,
AnomalyDetector,
DetectUnivariateEntireSeriesParameters,
isUnexpected,
} from "@azure-rest/ai-anomaly-detector";
import { readFileSync } from "node:fs";
import { parse } from "csv-parse/sync";
import { AzureKeyCredential } from "@azure/core-auth";

Expand All @@ -105,7 +105,7 @@ const timeSeriesDataPath = "./samples-dev/example-data/request-data.csv";

function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down Expand Up @@ -149,12 +149,12 @@ if (result.body.isAnomaly) {
### Streaming Detection

```ts snippet:streaming_detection
import {
import AnomalyDetector, {
TimeSeriesPoint,
AnomalyDetector,
DetectUnivariateLastPointParameters,
isUnexpected,
} from "@azure-rest/ai-anomaly-detector";
import { readFileSync } from "node:fs";
import { parse } from "csv-parse/sync";
import { AzureKeyCredential } from "@azure/core-auth";

Expand All @@ -164,7 +164,7 @@ const timeSeriesDataPath = "./samples-dev/example-data/request-data.csv";

function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down Expand Up @@ -205,12 +205,12 @@ if (result.body.isAnomaly) {
### Detect change points

```ts snippet:detect_change_points
import {
import AnomalyDetector, {
TimeSeriesPoint,
AnomalyDetector,
DetectUnivariateChangePointParameters,
isUnexpected,
} from "@azure-rest/ai-anomaly-detector";
import { readFileSync } from "node:fs";
import { parse } from "csv-parse/sync";
import { AzureKeyCredential } from "@azure/core-auth";

Expand All @@ -220,7 +220,7 @@ const timeSeriesDataPath = "./samples-dev/example-data/request-data.csv";

function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,16 @@
// Licensed under the MIT License.

import { describe, it } from "vitest";
import fs from "node:fs";
import { readFileSync } from "node:fs";
import { parse } from "csv-parse/sync";
import { AzureKeyCredential } from "@azure/core-auth";
import type {
DetectUnivariateChangePointParameters,
DetectUnivariateEntireSeriesParameters,
DetectUnivariateLastPointParameters,
TimeSeriesPoint,
} from "@azure-rest/ai-anomaly-detector";
import { AnomalyDetector, isUnexpected } from "@azure-rest/ai-anomaly-detector";
} from "../src/index.js";
import AnomalyDetector, { isUnexpected } from "../src/index.js";
import { setLogLevel } from "@azure/logger";

describe("snippets", () => {
Expand All @@ -22,7 +22,7 @@ describe("snippets", () => {
// @ts-preserve-whitespace
function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down Expand Up @@ -70,7 +70,7 @@ describe("snippets", () => {
// @ts-preserve-whitespace
function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down Expand Up @@ -115,7 +115,7 @@ describe("snippets", () => {
// @ts-preserve-whitespace
function read_series_from_file(path: string): Array<TimeSeriesPoint> {
const result = Array<TimeSeriesPoint>();
const input = fs.readFileSync(path).toString();
const input = readFileSync(path).toString();
const parsed = parse(input, { skip_empty_lines: true });
parsed.forEach(function (e: Array<string>) {
result.push({ timestamp: new Date(e[0]), value: Number(e[1]) });
Expand Down

This file was deleted.

Loading

0 comments on commit 897aecc

Please sign in to comment.