Add dotEnv to turbo.json (#4870)
Enable configuring `.env` files inside of `turbo.json`. These are
addition-only to file hashes, and they enable specifying a load order for
cache distinctions.

---------

Co-authored-by: Nathan Hammond <Nathan Hammond>
nathanhammond committed May 19, 2023
1 parent 14efcf2 commit f1fae9a
Showing 64 changed files with 778 additions and 281 deletions.
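
The commit message's two claims — that `.env` entries are addition-only to file hashes and that declaration order matters — can be pictured with a small standalone sketch. The `hashDotEnvFiles` helper below is purely illustrative and is not turbo's actual hashing code: missing files are skipped rather than treated as errors, and entries feed the hash in declared order, so reordering the same files produces a different cache key.

```go
package main

import (
	"crypto/sha256"
	"fmt"
	"os"
)

// hashDotEnvFiles is an illustrative stand-in, not turbo's hashing code.
// Entries are hashed in the order they are declared, so reordering the same
// files yields a different digest, and files that do not exist are skipped
// instead of failing the hash ("addition-only").
func hashDotEnvFiles(paths []string) string {
	h := sha256.New()
	for _, p := range paths {
		contents, err := os.ReadFile(p)
		if err != nil {
			continue // missing file contributes nothing
		}
		h.Write([]byte(p)) // include the path so order and identity both matter
		h.Write(contents)
	}
	return fmt.Sprintf("%x", h.Sum(nil))
}

func main() {
	fmt.Println(hashDotEnvFiles([]string{".env", ".env.local"}))
	fmt.Println(hashDotEnvFiles([]string{".env.local", ".env"}))
}
```

Run in a directory that contains both `.env` and `.env.local`, the two calls print different digests even though the same files are listed.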
8 changes: 8 additions & 0 deletions cli/internal/fs/testdata/dotenv-empty/turbo.json
@@ -0,0 +1,8 @@
{
"globalDotEnv": [],
"pipeline": {
"build": {
"dotEnv": []
}
}
}
8 changes: 8 additions & 0 deletions cli/internal/fs/testdata/dotenv-null/turbo.json
@@ -0,0 +1,8 @@
{
"globalDotEnv": null,
"pipeline": {
"build": {
"dotEnv": null
}
}
}
8 changes: 8 additions & 0 deletions cli/internal/fs/testdata/dotenv-populated/turbo.json
@@ -0,0 +1,8 @@
{
"globalDotEnv": ["z", "y", "x"],
"pipeline": {
"build": {
"dotEnv": ["3", "2", "1"]
}
}
}
5 changes: 5 additions & 0 deletions cli/internal/fs/testdata/dotenv-undefined/turbo.json
@@ -0,0 +1,5 @@
{
"pipeline": {
"build": {}
}
}
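
The four fixtures above exercise the distinction between an omitted key, an explicit `null`, an empty array, and a populated array. With Go's `encoding/json`, a missing key and `null` both leave a slice field `nil`, while `[]` yields a non-nil empty slice — which is what lets the parser in `turbo_json.go` below treat only explicitly supplied `dotEnv` arrays as defined (`task.DotEnv != nil`). A minimal standalone demonstration (not turbo code):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// rawTask mirrors only the field relevant to this demonstration.
type rawTask struct {
	DotEnv []string `json:"dotEnv,omitempty"`
}

func main() {
	for _, doc := range []string{
		`{}`,                   // key omitted
		`{"dotEnv": null}`,     // explicit null
		`{"dotEnv": []}`,       // empty array
		`{"dotEnv": [".env"]}`, // populated array
	} {
		var task rawTask
		if err := json.Unmarshal([]byte(doc), &task); err != nil {
			panic(err)
		}
		// Only the empty and populated arrays produce a non-nil slice,
		// so `task.DotEnv != nil` distinguishes "configured" from "absent".
		fmt.Printf("%-24s defined=%v len=%d\n", doc, task.DotEnv != nil, len(task.DotEnv))
	}
}
```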
85 changes: 69 additions & 16 deletions cli/internal/fs/turbo_json.go
@@ -37,6 +37,9 @@ type rawTurboJSON struct {
// Global passthrough env
GlobalPassthroughEnv []string `json:"globalPassThroughEnv,omitempty"`

// .env files to consider, in order.
GlobalDotEnv []string `json:"globalDotEnv,omitempty"`

// Pipeline is a map of Turbo pipeline entries which define the task graph
// and cache behavior on a per task or per package-task basis.
Pipeline Pipeline `json:"pipeline"`
@@ -54,20 +57,22 @@ type rawTurboJSON struct {
// Notably, it includes a PristinePipeline instead of the regular Pipeline. (i.e. TaskDefinition
// instead of BookkeepingTaskDefinition.)
type pristineTurboJSON struct {
GlobalDependencies []string `json:"globalDependencies,omitempty"`
GlobalEnv []string `json:"globalEnv,omitempty"`
GlobalPassthroughEnv []string `json:"globalPassThroughEnv"`
Pipeline PristinePipeline `json:"pipeline"`
RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"`
Extends []string `json:"extends,omitempty"`
Space *SpaceConfig `json:"experimentalSpaces,omitempty"`
GlobalDependencies []string `json:"globalDependencies,omitempty"`
GlobalEnv []string `json:"globalEnv,omitempty"`
GlobalPassthroughEnv []string `json:"globalPassThroughEnv"`
GlobalDotEnv turbopath.AnchoredUnixPathArray `json:"globalDotEnv"`
Pipeline PristinePipeline `json:"pipeline"`
RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"`
Extends []string `json:"extends,omitempty"`
Space *SpaceConfig `json:"experimentalSpaces,omitempty"`
}

// TurboJSON represents a turbo.json configuration file
type TurboJSON struct {
GlobalDeps []string
GlobalEnv []string
GlobalPassthroughEnv []string
GlobalDotEnv turbopath.AnchoredUnixPathArray
Pipeline Pipeline
RemoteCacheOptions RemoteCacheOptions
Extends []string // A list of Workspace names
Expand All @@ -84,14 +89,15 @@ type RemoteCacheOptions struct {
// We use this for printing ResolvedTaskConfiguration, because we _want_ to show
// the user the default values for key they have not configured.
type rawTaskWithDefaults struct {
Outputs []string `json:"outputs"`
Cache *bool `json:"cache"`
DependsOn []string `json:"dependsOn"`
Inputs []string `json:"inputs"`
OutputMode util.TaskOutputMode `json:"outputMode"`
PassthroughEnv []string `json:"passThroughEnv"`
Env []string `json:"env"`
Persistent bool `json:"persistent"`
Outputs []string `json:"outputs"`
Cache *bool `json:"cache"`
DependsOn []string `json:"dependsOn"`
Inputs []string `json:"inputs"`
OutputMode util.TaskOutputMode `json:"outputMode"`
PassthroughEnv []string `json:"passThroughEnv"`
DotEnv turbopath.AnchoredUnixPathArray `json:"dotEnv"`
Env []string `json:"env"`
Persistent bool `json:"persistent"`
}

// rawTask exists to Unmarshal from json. When fields are omitted, we _want_
@@ -104,6 +110,7 @@ type rawTask struct {
OutputMode *util.TaskOutputMode `json:"outputMode,omitempty"`
Env []string `json:"env,omitempty"`
PassthroughEnv []string `json:"passThroughEnv,omitempty"`
DotEnv []string `json:"dotEnv,omitempty"`
Persistent *bool `json:"persistent,omitempty"`
}

@@ -114,6 +121,7 @@ type taskDefinitionHashable struct {
Outputs TaskOutputs
ShouldCache bool
EnvVarDependencies []string
DotEnv turbopath.AnchoredUnixPathArray
TopologicalDependencies []string
TaskDependencies []string
Inputs []string
@@ -156,6 +164,9 @@ type TaskDefinition struct {
// rawTask.PassthroughEnv
PassthroughEnv []string

// rawTask.DotEnv
DotEnv turbopath.AnchoredUnixPathArray

// TopologicalDependencies are tasks from package dependencies.
// E.g. "build" is a topological dependency in:
// dependsOn: ['^build'].
@@ -389,6 +400,7 @@ func (btd BookkeepingTaskDefinition) GetTaskDefinition() TaskDefinition {
Outputs: btd.TaskDefinition.Outputs,
ShouldCache: btd.TaskDefinition.ShouldCache,
EnvVarDependencies: btd.TaskDefinition.EnvVarDependencies,
DotEnv: btd.TaskDefinition.DotEnv,
TopologicalDependencies: btd.TaskDefinition.TopologicalDependencies,
TaskDependencies: btd.TaskDefinition.TaskDependencies,
Inputs: btd.TaskDefinition.Inputs,
@@ -441,6 +453,10 @@ func MergeTaskDefinitions(taskDefinitions []BookkeepingTaskDefinition) (*TaskDef
mergedTaskDefinition.Inputs = taskDef.Inputs
}

if bookkeepingTaskDef.hasField("DotEnv") {
mergedTaskDefinition.DotEnv = taskDef.DotEnv
}

if bookkeepingTaskDef.hasField("OutputMode") {
mergedTaskDefinition.OutputMode = taskDef.OutputMode
}
@@ -550,6 +566,24 @@ func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error {
sort.Strings(btd.TaskDefinition.PassthroughEnv)
}

if task.DotEnv != nil {
btd.definedFields.Add("DotEnv")

// Going to _at least_ be an empty array.
btd.TaskDefinition.DotEnv = make(turbopath.AnchoredUnixPathArray, 0, len(task.DotEnv))

// Port the raw dotEnv values in.
for _, dotEnvPath := range task.DotEnv {
typeCheckedPath, err := turbopath.CheckedToAnchoredUnixPath(dotEnvPath)
if err != nil {
return err
}

// These are _explicitly_ not sorted.
btd.TaskDefinition.DotEnv = append(btd.TaskDefinition.DotEnv, typeCheckedPath)
}
}

if task.Inputs != nil {
// Note that we don't require Inputs to be sorted, we're going to
// hash the resulting files and sort that instead
@@ -587,6 +621,7 @@ func (c taskDefinitionHashable) MarshalJSON() ([]byte, error) {
c.Outputs,
c.EnvVarDependencies,
c.PassthroughEnv,
c.DotEnv,
c.TaskDependencies,
c.TopologicalDependencies,
)
@@ -603,6 +638,7 @@ func (c TaskDefinition) MarshalJSON() ([]byte, error) {
c.Outputs,
c.EnvVarDependencies,
c.PassthroughEnv,
c.DotEnv,
c.TaskDependencies,
c.TopologicalDependencies,
)
@@ -652,6 +688,19 @@ func (tj *TurboJSON) UnmarshalJSON(data []byte) error {
tj.GlobalDeps = globalFileDependencies.UnsafeListOfStrings()
sort.Strings(tj.GlobalDeps)

// Port the raw globalDotEnv values in.
if raw.GlobalDotEnv != nil {
tj.GlobalDotEnv = make(turbopath.AnchoredUnixPathArray, 0, len(raw.GlobalDotEnv))

for _, dotEnvPath := range raw.GlobalDotEnv {
typeCheckedPath, err := turbopath.CheckedToAnchoredUnixPath(dotEnvPath)
if err != nil {
return err
}
tj.GlobalDotEnv = append(tj.GlobalDotEnv, typeCheckedPath)
}
}

// copy these over, we don't need any changes here.
tj.Pipeline = raw.Pipeline
tj.RemoteCacheOptions = raw.RemoteCacheOptions
@@ -673,6 +722,7 @@ func (tj *TurboJSON) MarshalJSON() ([]byte, error) {
raw := pristineTurboJSON{}
raw.GlobalDependencies = tj.GlobalDeps
raw.GlobalEnv = tj.GlobalEnv
raw.GlobalDotEnv = tj.GlobalDotEnv
raw.GlobalPassthroughEnv = tj.GlobalPassthroughEnv
raw.Pipeline = tj.Pipeline.Pristine()
raw.RemoteCacheOptions = tj.RemoteCacheOptions
@@ -684,7 +734,7 @@ func (tj *TurboJSON) MarshalJSON() ([]byte, error) {
return json.Marshal(&raw)
}

func makeRawTask(persistent bool, shouldCache bool, outputMode util.TaskOutputMode, inputs []string, outputs TaskOutputs, envVarDependencies []string, passthroughEnv []string, taskDependencies []string, topologicalDependencies []string) *rawTaskWithDefaults {
func makeRawTask(persistent bool, shouldCache bool, outputMode util.TaskOutputMode, inputs []string, outputs TaskOutputs, envVarDependencies []string, passthroughEnv []string, dotEnv turbopath.AnchoredUnixPathArray, taskDependencies []string, topologicalDependencies []string) *rawTaskWithDefaults {
// Initialize with empty arrays, so we get empty arrays serialized into JSON
task := &rawTaskWithDefaults{
Outputs: []string{},
@@ -697,6 +747,9 @@ func makeRawTask(persistent bool, shouldCache bool, outputMode util.TaskOutputMo
task.Cache = &shouldCache
task.OutputMode = outputMode

// This should _not_ be sorted.
task.DotEnv = dotEnv

if len(inputs) > 0 {
task.Inputs = inputs
}
