Skip to content

Commit

Permalink
Merge branch 'canary' into 08-16-include_proper_instrumentation_entry
Browse files Browse the repository at this point in the history
  • Loading branch information
huozhi committed Aug 19, 2024
2 parents c972b5a + b8165af commit dda3606
Show file tree
Hide file tree
Showing 216 changed files with 11,503 additions and 6,817 deletions.
2 changes: 1 addition & 1 deletion .github/.react-version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
19.0.0-rc-49496d49-20240814
19.0.0-rc-1eaccd82-20240816

Large diffs are not rendered by default.

14 changes: 0 additions & 14 deletions .github/actions/next-repo-actions/lib/triage_guidelines.txt

This file was deleted.

60 changes: 40 additions & 20 deletions .github/actions/next-repo-actions/src/triage-issues-with-ai.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
import { WebClient } from '@slack/web-api'
import * as path from 'node:path'
import { readFileSync } from 'node:fs'
import { info, setFailed } from '@actions/core'
import { context } from '@actions/github'
import { generateText, tool } from 'ai'
Expand All @@ -13,47 +11,67 @@ import { issueSchema } from '../lib/types'
async function main() {
if (!process.env.OPENAI_API_KEY) throw new TypeError('OPENAI_API_KEY not set')
if (!process.env.SLACK_TOKEN) throw new TypeError('SLACK_TOKEN not set')
if (!process.env.VERCEL_PROTECTION_BYPASS)
throw new TypeError('VERCEL_PROTECTION_BYPASS not set')

const slackClient = new WebClient(process.env.SLACK_TOKEN)
const model = 'gpt-4o'
const channel = '#next-info'

const issue = context.payload.issue
const filePath = path.join(
process.cwd(),
'.github/actions/next-repo-actions/lib/triage_guidelines.txt'
)
const fileText = readFileSync(filePath, 'utf8')

let latestVersion
let latestCanaryVersion
let latestVersion: string
let latestCanaryVersion: string
let html_url: string
let number: number
let title: string

try {
latestVersion = await getLatestVersion()
latestCanaryVersion = await getLatestCanaryVersion()

const res = await fetch(
'https://next-triage.vercel.sh/api/triage-guidelines',
{
method: 'GET',
headers: {
'x-vercel-protection-bypass': `${process.env.VERCEL_PROTECTION_BYPASS}`,
},
}
)

const guidelines = await res.text()

const result = await generateText({
model: openai(model),
maxAutomaticRoundtrips: 1,
tools: {
report_to_slack: tool({
description: 'Report to Slack if a GitHub issue is severe enough.',
description: 'Report to Slack.',
parameters: issueSchema,
execute: async ({ issue }) => ({
html_url: issue.html_url,
number: issue.number,
title: issue.title,
}),
execute: async ({ issue }) => {
html_url = issue.html_url
number = issue.number
title = issue.title

return { html_url, number, title }
},
}),
},
prompt: `${JSON.stringify(issue)}\n${fileText}\nlatestVersion: ${latestVersion}\nlatestCanaryVersion: ${latestCanaryVersion}\nWith the above GitHub issue (JSON), the triage guidelines for determining whether an issue is severe, and the latest versions of Next.js, can you determine whether the given issue is severe enough to report to Slack? If severe enough, report to Slack with an approximately 300 character summary (don't repeat the triage guidelines while doing so) of why you think it is severe enough to report to Slack. If not severe enough, do not report to Slack.`,
system:
`Your job is to determine the severity of a GitHub issue using the triage guidelines and the latest versions of Next.js.` +
`Succinctly explain why you chose the severity, without paraphrasing the triage guidelines.` +
`Here are the triage guidelines: ${guidelines}` +
`Here is the latest version of Next.js: ${latestVersion}` +
`Here is the latest canary version of Next.js: ${latestCanaryVersion}`,
prompt: `${JSON.stringify(issue)}\nDetermine the severity of the above GitHub issue. If the severity is severe, report it to Slack.`,
})

// the ai determined that the issue was severe enough to report on slack
if (result.roundtrips.length > 1) {
const blocks = BlockCollection([
Section({
text: `:github2: <${result.roundtrips[0].toolResults[0].result.html_url}|#${result.roundtrips[0].toolResults[0].result.number}>: ${result.roundtrips[0].toolResults[0].result.title}\n_Note: This issue was summarized and reported on Slack with the *${model}* model._`,
text: `:github2: <${html_url}|#${number}>: ${title}\n_Note: This issue was evaluated and reported on Slack with *${model}*._`,
}),
Divider(),
Section({
Expand All @@ -67,10 +85,12 @@ async function main() {
icon_emoji: ':github:',
username: 'GitHub Notifier',
})
} else {
// the ai will also provide a reason why the issue was not severe enough to report on slack
info(`${result.text}`)
}

// the ai will also provide a reason why the issue was not severe enough to report on slack
info(
`result.text: ${result.text}\nhtml_url: ${html_url}\nnumber: ${number}\ntitle: ${title}`
)
} catch (error) {
setFailed(error)
}
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/triage_with_ai.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,4 @@ jobs:
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }}
VERCEL_PROTECTION_BYPASS: ${{ secrets.VERCEL_PROTECTION_BYPASS }}
5 changes: 1 addition & 4 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,11 @@ trigger:
- docs
- errors
- examples
# Do not run Azure on `canary`, `main`, or release tags. This unnecessarily
# increases the backlog, and the change was already tested on the PR.
# Do not run Azure on release tags. This unnecessarily increases the backlog.
branches:
include:
- '*'
exclude:
- canary
- main
- refs/tags/*

pr:
Expand Down
10 changes: 7 additions & 3 deletions crates/next-core/src/next_client/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ use turbopack_browser::{react_refresh::assert_can_resolve_react_refresh, Browser
use turbopack_core::{
chunk::ChunkingContext,
compile_time_info::{
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, FreeVarReference,
FreeVarReferences,
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment,
FreeVarReference, FreeVarReferences,
},
condition::ContextCondition,
environment::{BrowserEnvironment, Environment, ExecutionEnvironment},
Expand Down Expand Up @@ -68,7 +68,11 @@ fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {

for (k, v) in define_env {
defines
.entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.entry(
k.split('.')
.map(|s| DefineableNameSegment::Name(s.into()))
.collect::<Vec<_>>(),
)
.or_insert_with(|| {
let val = serde_json::from_str(v);
match val {
Expand Down
10 changes: 7 additions & 3 deletions crates/next-core/src/next_edge/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ use turbopack_browser::BrowserChunkingContext;
use turbopack_core::{
chunk::ChunkingContext,
compile_time_info::{
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, FreeVarReference,
FreeVarReferences,
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment,
FreeVarReference, FreeVarReferences,
},
environment::{EdgeWorkerEnvironment, Environment, ExecutionEnvironment},
free_var_references,
Expand All @@ -34,7 +34,11 @@ fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {

for (k, v) in define_env {
defines
.entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.entry(
k.split('.')
.map(|s| DefineableNameSegment::Name(s.into()))
.collect::<Vec<_>>(),
)
.or_insert_with(|| {
let val = serde_json::from_str(v);
match val {
Expand Down
9 changes: 7 additions & 2 deletions crates/next-core/src/next_server/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ use turbopack::{
};
use turbopack_core::{
compile_time_info::{
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, FreeVarReferences,
CompileTimeDefineValue, CompileTimeDefines, CompileTimeInfo, DefineableNameSegment,
FreeVarReferences,
},
condition::ContextCondition,
environment::{Environment, ExecutionEnvironment, NodeJsEnvironment, RuntimeVersions},
Expand Down Expand Up @@ -316,7 +317,11 @@ fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {

for (k, v) in define_env {
defines
.entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.entry(
k.split('.')
.map(|s| DefineableNameSegment::Name(s.into()))
.collect::<Vec<_>>(),
)
.or_insert_with(|| {
let val = serde_json::from_str(v);
match val {
Expand Down
7 changes: 7 additions & 0 deletions crates/next-core/src/next_server/transforms.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ use crate::{
get_server_actions_transform_rule, next_amp_attributes::get_next_amp_attr_rule,
next_cjs_optimizer::get_next_cjs_optimizer_rule,
next_disallow_re_export_all_in_page::get_next_disallow_export_all_in_page_rule,
next_edge_node_api_assert::next_edge_node_api_assert,
next_middleware_dynamic_assert::get_middleware_dynamic_assert_rule,
next_page_static_info::get_next_page_static_info_assert_rule,
next_pure::get_next_pure_rule, server_actions::ActionsTransform,
Expand Down Expand Up @@ -133,6 +134,12 @@ pub async fn get_next_server_transforms_rules(

if let NextRuntime::Edge = next_runtime {
rules.push(get_middleware_dynamic_assert_rule(mdx_rs));
if matches!(context_ty, ServerContextType::Middleware { .. }) {
rules.push(next_edge_node_api_assert(
mdx_rs,
matches!(*mode.await?, NextMode::Build),
));
}
}
}

Expand Down
1 change: 1 addition & 0 deletions crates/next-core/src/next_shared/transforms/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ pub(crate) mod next_amp_attributes;
pub(crate) mod next_cjs_optimizer;
pub(crate) mod next_disallow_re_export_all_in_page;
pub(crate) mod next_dynamic;
pub(crate) mod next_edge_node_api_assert;
pub(crate) mod next_font;
pub(crate) mod next_middleware_dynamic_assert;
pub(crate) mod next_optimize_server_react;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
use anyhow::Result;
use async_trait::async_trait;
use next_custom_transforms::transforms::warn_for_edge_runtime::warn_for_edge_runtime;
use swc_core::{
common::SyntaxContext,
ecma::{ast::*, utils::ExprCtx, visit::VisitWith},
};
use turbo_tasks::Vc;
use turbopack::module_options::{ModuleRule, ModuleRuleEffect};
use turbopack_ecmascript::{CustomTransformer, EcmascriptInputTransform, TransformContext};

use super::module_rule_match_js_no_url;

/// Builds a [`ModuleRule`] that appends the edge-runtime Node.js API
/// assertion transform to every matching JS/TS module.
///
/// * `enable_mdx_rs` — forwarded to `module_rule_match_js_no_url`, which
///   decides whether mdx-rs sources are matched by the rule.
/// * `should_error` — stored on the plugin and forwarded to the visitor;
///   at the call site it is set from `NextMode::Build`, presumably
///   escalating diagnostics to hard errors in production builds — confirm
///   against `warn_for_edge_runtime`.
pub fn next_edge_node_api_assert(enable_mdx_rs: bool, should_error: bool) -> ModuleRule {
    // Wrap the custom transformer as an Ecmascript plugin transform.
    let plugin = EcmascriptInputTransform::Plugin(Vc::cell(Box::new(NextEdgeNodeApiAssert {
        should_error,
    }) as _));
    // Run it after any existing transforms (append, nothing prepended).
    let effect = ModuleRuleEffect::ExtendEcmascriptTransforms {
        prepend: Vc::cell(Vec::new()),
        append: Vc::cell(vec![plugin]),
    };
    ModuleRule::new(module_rule_match_js_no_url(enable_mdx_rs), vec![effect])
}

/// Ecmascript transform plugin that runs the `warn_for_edge_runtime`
/// visitor over each module to flag Node.js API usage in edge-runtime code.
#[derive(Debug)]
struct NextEdgeNodeApiAssert {
    // Forwarded to `warn_for_edge_runtime`; set from the build mode at the
    // call site (true for `NextMode::Build`) — presumably escalates the
    // warnings to errors, TODO confirm in warn_for_edge_runtime.
    should_error: bool,
}

#[async_trait]
impl CustomTransformer for NextEdgeNodeApiAssert {
#[tracing::instrument(level = tracing::Level::TRACE, name = "next_edge_node_api_assert", skip_all)]
async fn transform(&self, program: &mut Program, ctx: &TransformContext<'_>) -> Result<()> {
let mut visitor = warn_for_edge_runtime(
ctx.source_map.clone(),
ExprCtx {
is_unresolved_ref_safe: false,
unresolved_ctxt: SyntaxContext::empty().apply_mark(ctx.unresolved_mark),
},
self.should_error,
);
program.visit_with(&mut visitor);
Ok(())
}
}
3 changes: 2 additions & 1 deletion crates/next-custom-transforms/src/transforms/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ pub mod fonts;
pub mod import_analyzer;
pub mod middleware_dynamic;
pub mod next_ssg;
pub mod optimize_barrel;
pub mod optimize_server_react;
pub mod page_config;
pub mod page_static_info;
Expand All @@ -16,7 +17,7 @@ pub mod react_server_components;
pub mod server_actions;
pub mod shake_exports;
pub mod strip_page_exports;
pub mod warn_for_edge_runtime;

//[TODO] PACK-1564: need to decide reuse vs. turbopack specific
pub mod named_import_transform;
pub mod optimize_barrel;
Loading

0 comments on commit dda3606

Please sign in to comment.