diff --git a/.vscode/launch.json b/.vscode/launch.json
index 0a74c16062289..d149059f031b8 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -5,7 +5,7 @@
   "version": "0.2.0",
   "configurations": [
     {
-      "name": "Basic Turbo Build",
+      "name": "Build Basic",
       "type": "go",
       "request": "launch",
       "mode": "debug",
@@ -30,6 +30,24 @@
       "program": "${workspaceRoot}/cli/cmd/turbo",
       "cwd": "${workspaceRoot}/examples/basic",
       "args": ["--version"]
+    },
+    {
+      "name": "Build All",
+      "type": "go",
+      "request": "launch",
+      "mode": "debug",
+      "program": "${workspaceRoot}/cli/cmd/turbo",
+      "cwd": "${workspaceRoot}",
+      "args": ["run", "build"]
+    },
+    {
+      "name": "Build All (Force)",
+      "type": "go",
+      "request": "launch",
+      "mode": "debug",
+      "program": "${workspaceRoot}/cli/cmd/turbo",
+      "cwd": "${workspaceRoot}",
+      "args": ["run", "build", "--force"]
     }
   ]
 }
diff --git a/cli/internal/cache/cache_http.go b/cli/internal/cache/cache_http.go
index e8f68deaaa5cc..8f852db3f805c 100644
--- a/cli/internal/cache/cache_http.go
+++ b/cli/internal/cache/cache_http.go
@@ -2,7 +2,9 @@ package cache
 
 import (
 	"archive/tar"
+	"bytes"
 	"compress/gzip"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -24,6 +26,7 @@ type httpCache struct {
 	config         *config.Config
 	requestLimiter limiter
 	recorder       analytics.Recorder
+	signerVerifier *ArtifactSignatureAuthentication
 }
 
 type limiter chan struct{}
@@ -49,7 +52,22 @@ func (cache *httpCache) Put(target, hash string, duration int, files []string) error {
 
 	r, w := io.Pipe()
 	go cache.write(w, hash, files)
-	return cache.config.ApiClient.PutArtifact(hash, duration, r)
+
+	// Read the entire artifact tar into memory so we can easily compute the signature.
+	// Note: retryablehttp.NewRequest reads the files into memory anyway, so there's no
+	// additional overhead from doing the ioutil.ReadAll here instead.
+	artifactBody, err := ioutil.ReadAll(r)
+	if err != nil {
+		return fmt.Errorf("failed to store files in HTTP cache: %w", err)
+	}
+	tag := ""
+	if cache.signerVerifier.isEnabled() {
+		tag, err = cache.signerVerifier.generateTag(hash, artifactBody)
+		if err != nil {
+			return fmt.Errorf("failed to store files in HTTP cache: %w", err)
+		}
+	}
+	return cache.config.ApiClient.PutArtifact(hash, artifactBody, duration, tag)
 }
 
 // write writes a series of files into the given Writer.
@@ -134,8 +152,8 @@ func (cache *httpCache) logFetch(hit bool, hash string, duration int) {
 	cache.recorder.LogEvent(payload)
 }
 
-func (cache *httpCache) retrieve(key string) (bool, []string, int, error) {
-	resp, err := cache.config.ApiClient.FetchArtifact(key, nil)
+func (cache *httpCache) retrieve(hash string) (bool, []string, int, error) {
+	resp, err := cache.config.ApiClient.FetchArtifact(hash, nil)
 	if err != nil {
 		return false, nil, 0, err
 	}
@@ -157,7 +175,29 @@ func (cache *httpCache) retrieve(key string) (bool, []string, int, error) {
 		b, _ := ioutil.ReadAll(resp.Body)
 		return false, files, duration, fmt.Errorf("%s", string(b))
 	}
-	gzr, err := gzip.NewReader(resp.Body)
+	artifactReader := resp.Body
+	if cache.signerVerifier.isEnabled() {
+		expectedTag := resp.Header.Get("x-artifact-tag")
+		if expectedTag == "" {
+			// If the verifier is enabled, all incoming artifact downloads must have a signature
+			return false, nil, 0, errors.New("artifact verification failed: Downloaded artifact is missing required x-artifact-tag header")
+		}
+		b, err := ioutil.ReadAll(artifactReader)
+		if err != nil {
+			return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err)
+		}
+		isValid, err := cache.signerVerifier.validate(hash, b, expectedTag)
+		if err != nil {
+			return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err)
+		}
+		if !isValid {
+			err = fmt.Errorf("artifact verification failed: artifact tag does not match expected tag %s", expectedTag)
+			return false, nil, 0, err
+		}
+		// The artifact has been verified and the body can be read and untarred
+		artifactReader = ioutil.NopCloser(bytes.NewReader(b))
+	}
+	gzr, err := gzip.NewReader(artifactReader)
 	if err != nil {
 		return false, files, duration, err
 	}
@@ -236,5 +276,11 @@ func newHTTPCache(config *config.Config, recorder analytics.Recorder) *httpCache {
 		config:         config,
 		requestLimiter: make(limiter, 20),
 		recorder:       recorder,
+		signerVerifier: &ArtifactSignatureAuthentication{
+			// TODO(Gaspar): this should use RemoteCacheOptions.TeamId once we start
+			// enforcing team restrictions for repositories.
+			teamId:  config.TeamId,
+			options: &config.TurboConfigJSON.RemoteCacheOptions.SignatureOptions,
+		},
 	}
 }
diff --git a/cli/internal/cache/cache_signature_authentication.go b/cli/internal/cache/cache_signature_authentication.go
new file mode 100644
index 0000000000000..87301530d1b30
--- /dev/null
+++ b/cli/internal/cache/cache_signature_authentication.go
@@ -0,0 +1,110 @@
+package cache
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/base64"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"hash"
+	"io"
+	"os"
+
+	"github.com/vercel/turborepo/cli/internal/fs"
+)
+
+type ArtifactSignatureAuthentication struct {
+	teamId  string
+	options *fs.SignatureOptions
+}
+
+func (asa *ArtifactSignatureAuthentication) isEnabled() bool {
+	return asa.options.Enabled
+}
+
+// If the secret key is not found or the secret key length is 0, an error is returned.
+// Preference is given to the environment-specified secret key.
+func (asa *ArtifactSignatureAuthentication) secretKey() ([]byte, error) {
+	secret := ""
+	switch {
+	case len(asa.options.KeyEnv) > 0:
+		secret = os.Getenv(asa.options.KeyEnv)
+	case len(asa.options.Key) > 0:
+		secret = asa.options.Key
+	}
+	if len(secret) == 0 {
+		return nil, errors.New("signature secret key not found. You must specify a secret key or keyEnv name in your turbo.json config")
+	}
+	return []byte(secret), nil
+}
+
+func (asa *ArtifactSignatureAuthentication) generateTag(hash string, artifactBody []byte) (string, error) {
+	tag, err := asa.getTagGenerator(hash)
+	if err != nil {
+		return "", err
+	}
+	tag.Write(artifactBody)
+	return base64.StdEncoding.EncodeToString(tag.Sum(nil)), nil
+}
+
+func (asa *ArtifactSignatureAuthentication) getTagGenerator(hash string) (hash.Hash, error) {
+	teamId := asa.teamId
+	secret, err := asa.secretKey()
+	if err != nil {
+		return nil, err
+	}
+	artifactMetadata := &struct {
+		Hash   string `json:"hash"`
+		TeamId string `json:"teamId"`
+	}{
+		Hash:   hash,
+		TeamId: teamId,
+	}
+	metadata, err := json.Marshal(artifactMetadata)
+	if err != nil {
+		return nil, err
+	}
+
+	// TODO(Gaspar) Support additional signing algorithms here
+	h := hmac.New(sha256.New, secret)
+	h.Write(metadata)
+	return h, nil
+}
+
+func (asa *ArtifactSignatureAuthentication) validate(hash string, artifactBody []byte, expectedTag string) (bool, error) {
+	computedTag, err := asa.generateTag(hash, artifactBody)
+	if err != nil {
+		return false, fmt.Errorf("failed to verify artifact tag: %w", err)
+	}
+	return hmac.Equal([]byte(computedTag), []byte(expectedTag)), nil
+}
+
+func (asa *ArtifactSignatureAuthentication) streamValidator(hash string, incomingReader io.ReadCloser) (io.ReadCloser, *StreamValidator, error) {
+	tag, err := asa.getTagGenerator(hash)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	tee := io.TeeReader(incomingReader, tag)
+	artifactReader := readCloser{tee, incomingReader}
+	return artifactReader, &StreamValidator{tag}, nil
+}
+
+type StreamValidator struct {
+	currentHash hash.Hash
+}
+
+func (sv *StreamValidator) Validate(expectedTag string) bool {
+	computedTag := base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil))
+	return hmac.Equal([]byte(computedTag), []byte(expectedTag))
+}
+
+func (sv *StreamValidator) CurrentValue() string {
+	return base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil))
+}
+
+type readCloser struct {
+	io.Reader
+	io.Closer
+}
diff --git a/cli/internal/cache/cache_signature_authentication_test.go b/cli/internal/cache/cache_signature_authentication_test.go
new file mode 100644
index 0000000000000..67fce5a63c632
--- /dev/null
+++ b/cli/internal/cache/cache_signature_authentication_test.go
@@ -0,0 +1,211 @@
+package cache
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/base64"
+	"encoding/json"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/vercel/turborepo/cli/internal/fs"
+)
+
+func Test_SecretKey(t *testing.T) {
+	teamId := "team_someid"
+	secret := "my-secret"
+	secretKeyEnvName := "TURBO_TEST_SIGNING_KEY"
+	secretKeyEnvValue := "my-secret-key-env"
+	t.Setenv(secretKeyEnvName, secretKeyEnvValue)
+
+	cases := []struct {
+		name                   string
+		asa                    *ArtifactSignatureAuthentication
+		expectedSecretKey      string
+		expectedSecretKeyError bool
+	}{
+		{
+			name: "Accepts secret key",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+				},
+			},
+			expectedSecretKey:      secret,
+			expectedSecretKeyError: false,
+		},
+		{
+			name: "Accepts secret keyEnv",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					KeyEnv:  secretKeyEnvName,
+				},
+			},
+			expectedSecretKey:      secretKeyEnvValue,
+			expectedSecretKeyError: false,
+		},
+		{
+			name: "Prefers secret keyEnv",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+					KeyEnv:  secretKeyEnvName,
+				},
+			},
+			expectedSecretKey:      secretKeyEnvValue,
+			expectedSecretKeyError: false,
+		},
+		{
+			name: "Secret key not defined errors",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+				},
+			},
+			expectedSecretKey:      "",
+			expectedSecretKeyError: true,
+		},
+		{
+			name: "Secret key is empty errors",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     "",
+				},
+			},
+			expectedSecretKey:      "",
+			expectedSecretKeyError: true,
+		},
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.name, func(t *testing.T) {
+			secretKey, err := tc.asa.secretKey()
+			if tc.expectedSecretKeyError {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+				assert.Equal(t, tc.expectedSecretKey, string(secretKey))
+			}
+		})
+	}
+}
+
+func Test_GenerateTagAndValidate(t *testing.T) {
+	teamId := "team_someid"
+	hash := "the-artifact-hash"
+	artifactBody := []byte("the artifact body as bytes")
+	secret := "my-secret"
+
+	cases := []struct {
+		name                    string
+		asa                     *ArtifactSignatureAuthentication
+		expectedTagMatches      string
+		expectedTagDoesNotMatch string
+	}{
+		{
+			name: "Uses hash to generate tag",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+				},
+			},
+			expectedTagMatches:      testUtilGetHMACTag(hash, teamId, artifactBody, secret),
+			expectedTagDoesNotMatch: testUtilGetHMACTag("wrong-hash", teamId, artifactBody, secret),
+		},
+		{
+			name: "Uses teamId to generate tag",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+				},
+			},
+			expectedTagMatches:      testUtilGetHMACTag(hash, teamId, artifactBody, secret),
+			expectedTagDoesNotMatch: testUtilGetHMACTag(hash, "wrong-teamId", artifactBody, secret),
+		},
+		{
+			name: "Uses artifactBody to generate tag",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+				},
+			},
+			expectedTagMatches:      testUtilGetHMACTag(hash, teamId, artifactBody, secret),
+			expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamId, []byte("wrong-artifact-body"), secret),
+		},
+		{
+			name: "Uses secret to generate tag",
+			asa: &ArtifactSignatureAuthentication{
+				teamId: teamId,
+				options: &fs.SignatureOptions{
+					Enabled: true,
+					Key:     secret,
+				},
+			},
+			expectedTagMatches:      testUtilGetHMACTag(hash, teamId, artifactBody, secret),
+			expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamId, artifactBody, "wrong-secret"),
+		},
+	}
+
+	for _, tc := range cases {
+		t.Run(tc.name, func(t *testing.T) {
+			tag, err := tc.asa.generateTag(hash, artifactBody)
+			assert.NoError(t, err)
+
+			// validates the tag
+			assert.Equal(t, tc.expectedTagMatches, tag)
+			isValid, err := tc.asa.validate(hash, artifactBody, tc.expectedTagMatches)
+			assert.NoError(t, err)
+			assert.True(t, isValid)
+
+			// does not validate the tag
+			assert.NotEqual(t, tc.expectedTagDoesNotMatch, tag)
+			isValid, err = tc.asa.validate(hash, artifactBody, tc.expectedTagDoesNotMatch)
+			assert.NoError(t, err)
+			assert.False(t, isValid)
+
+		})
+	}
+}
+
+// Test utils
+
+// Return the Base64 encoded HMAC given the artifact metadata and artifact body
+func testUtilGetHMACTag(hash string, teamId string, artifactBody []byte, secret string) string {
+	artifactMetadata := &struct {
+		Hash   string `json:"hash"`
+		TeamId string `json:"teamId"`
+	}{
+		Hash:   hash,
+		TeamId: teamId,
+	}
+	metadata, _ := json.Marshal(artifactMetadata)
+	h := hmac.New(sha256.New, []byte(secret))
+	h.Write(metadata)
+	h.Write(artifactBody)
+	return base64.StdEncoding.EncodeToString(h.Sum(nil))
+}
+
+func Test_Utils(t *testing.T) {
+	teamId := "team_someid"
+	secret := "my-secret"
+	hash := "the-artifact-hash"
+	artifactBody := []byte("the artifact body as bytes")
+	testTag := testUtilGetHMACTag(hash, teamId, artifactBody, secret)
+	expectedTag := "9Fu8YniPZ2dEBolTPQoNlFWG0LNMW8EXrBsRmf/fEHk="
+	assert.True(t, hmac.Equal([]byte(testTag), []byte(expectedTag)))
+}
diff --git a/cli/internal/client/client.go b/cli/internal/client/client.go
index e7f4aca6710c9..44a4540f92a1a 100644
--- a/cli/internal/client/client.go
+++ b/cli/internal/client/client.go
@@ -132,7 +132,7 @@ func (c *ApiClient) UserAgent() string {
 	return fmt.Sprintf("turbo %v %v %v (%v)", c.turboVersion, runtime.Version(), runtime.GOOS, runtime.GOARCH)
 }
 
-func (c *ApiClient) PutArtifact(hash string, duration int, rawBody interface{}) error {
+func (c *ApiClient) PutArtifact(hash string, artifactBody []byte, duration int, tag string) error {
 	if err := c.okToRequest(); err != nil {
 		return err
 	}
@@ -143,11 +143,16 @@ func (c *ApiClient) PutArtifact(hash string, duration int, rawBody interface{}) error {
 	if encoded != "" {
 		encoded = "?" + encoded
 	}
-	req, err := retryablehttp.NewRequest(http.MethodPut, c.makeUrl("/v8/artifacts/"+hash+encoded), rawBody)
+
+	req, err := retryablehttp.NewRequest(http.MethodPut, c.makeUrl("/v8/artifacts/"+hash+encoded), artifactBody)
 	req.Header.Set("Content-Type", "application/octet-stream")
 	req.Header.Set("x-artifact-duration", fmt.Sprintf("%v", duration))
 	req.Header.Set("Authorization", "Bearer "+c.Token)
 	req.Header.Set("User-Agent", c.UserAgent())
+	if tag != "" {
+		req.Header.Set("x-artifact-tag", tag)
+	}
+
 	if err != nil {
 		return fmt.Errorf("[WARNING] Invalid cache URL: %w", err)
 	}
diff --git a/cli/internal/client/client_test.go b/cli/internal/client/client_test.go
index e534f43f436f0..3ef50a824a856 100644
--- a/cli/internal/client/client_test.go
+++ b/cli/internal/client/client_test.go
@@ -1,9 +1,11 @@
 package client
 
 import (
+	"bytes"
 	"encoding/json"
 	"io/ioutil"
 	"net/http"
+	"net/http/httptest"
 	"reflect"
 	"testing"
 
@@ -12,22 +14,21 @@ import (
 )
 
 func Test_sendToServer(t *testing.T) {
-	handler := http.NewServeMux()
 	ch := make(chan []byte, 1)
-	handler.HandleFunc("/v8/artifacts/events", func(w http.ResponseWriter, req *http.Request) {
-		defer req.Body.Close()
-		b, err := ioutil.ReadAll(req.Body)
-		if err != nil {
-			t.Errorf("failed to read request %v", err)
-		}
-		ch <- b
-		w.WriteHeader(200)
-		w.Write([]byte{})
-	})
-	server := &http.Server{Addr: "localhost:8888", Handler: handler}
-	go server.ListenAndServe()
+	ts := httptest.NewServer(
+		http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+			defer req.Body.Close()
+			b, err := ioutil.ReadAll(req.Body)
+			if err != nil {
+				t.Errorf("failed to read request %v", err)
+			}
+			ch <- b
+			w.WriteHeader(200)
+			w.Write([]byte{})
+		}))
+	defer ts.Close()
 
-	apiClient := NewClient("http://localhost:8888", hclog.Default(), "v1", "", "my-team-slug", 1)
+	apiClient := NewClient(ts.URL, hclog.Default(), "v1", "", "my-team-slug", 1)
 	apiClient.SetToken("my-token")
 
 	myUUID, err := uuid.NewUUID()
@@ -61,6 +62,32 @@ func Test_sendToServer(t *testing.T) {
 	if !reflect.DeepEqual(events, result) {
 		t.Errorf("roundtrip got %v, want %v", result, events)
 	}
+}
+
+func Test_PutArtifact(t *testing.T) {
+	ch := make(chan []byte, 1)
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+		defer req.Body.Close()
+		b, err := ioutil.ReadAll(req.Body)
+		if err != nil {
+			t.Errorf("failed to read request %v", err)
+		}
+		ch <- b
+		w.WriteHeader(200)
+		w.Write([]byte{})
+	}))
+	defer ts.Close()
+
+	// Set up test expected values
+	apiClient := NewClient(ts.URL+"/hash", hclog.Default(), "v1", "", "my-team-slug", 1)
+	apiClient.SetToken("my-token")
+	expectedArtifactBody := []byte("My string artifact")
+
+	// Test Put Artifact
+	apiClient.PutArtifact("hash", expectedArtifactBody, 500, "")
+	testBody := <-ch
+	if !bytes.Equal(expectedArtifactBody, testBody) {
+		t.Errorf("Handler read '%v', wants '%v'", testBody, expectedArtifactBody)
+	}
 
-	server.Close()
 }
diff --git a/cli/internal/fs/package_json.go b/cli/internal/fs/package_json.go
index 2fd23af25c866..f1a9e3888b955 100644
--- a/cli/internal/fs/package_json.go
+++ b/cli/internal/fs/package_json.go
@@ -18,6 +18,8 @@ type TurboConfigJSON struct {
 	// Pipeline is a map of Turbo pipeline entries which define the task graph
 	// and cache behavior on a per task or per package-task basis.
 	Pipeline map[string]Pipeline
+	// Configuration options when interfacing with the remote cache
+	RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"`
 }
 
 func ReadTurboConfigJSON(path string) (*TurboConfigJSON, error) {
@@ -36,6 +38,16 @@ func ReadTurboConfigJSON(path string) (*TurboConfigJSON, error) {
 	return turboConfig, nil
 }
 
+type SignatureOptions struct {
+	Enabled bool   `json:"enabled,omitempty"`
+	Key     string `json:"key,omitempty"`
+	KeyEnv  string `json:"keyEnv,omitempty"`
+}
+type RemoteCacheOptions struct {
+	TeamId           string           `json:"teamId,omitempty"`
+	SignatureOptions SignatureOptions `json:"signature,omitempty"`
+}
+
 type PPipeline struct {
 	Outputs *[]string `json:"outputs"`
 	Cache   *bool     `json:"cache,omitempty"`
diff --git a/cli/internal/fs/package_json_test.go b/cli/internal/fs/package_json_test.go
new file mode 100644
index 0000000000000..423347c370e47
--- /dev/null
+++ b/cli/internal/fs/package_json_test.go
@@ -0,0 +1,31 @@
+package fs
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func Test_ParseTurboConfigJson(t *testing.T) {
+	defaultCwd, err := os.Getwd()
+	if err != nil {
+		t.Errorf("failed to get cwd: %v", err)
+	}
+	turboJSONPath := filepath.Join(defaultCwd, "testdata", "turbo.json")
+	turboConfig, err := ReadTurboConfigJSON(turboJSONPath)
+	if err != nil {
+		t.Fatalf("invalid parse: %#v", err)
+	}
+	BoolFalse := false
+
+	build := Pipeline{[]string{"dist/**", ".next/**"}, nil, []string{"^build"}, PPipeline{&[]string{"dist/**", ".next/**"}, nil, []string{"^build"}}}
+	lint := Pipeline{[]string{}, nil, nil, PPipeline{&[]string{}, nil, nil}}
+	dev := Pipeline{nil, &BoolFalse, nil, PPipeline{nil, &BoolFalse, nil}}
+	pipelineExpected := map[string]Pipeline{"build": build, "lint": lint, "dev": dev}
+
+	remoteCacheOptionsExpected := RemoteCacheOptions{"team_id", SignatureOptions{true, "key", ""}}
+	assert.EqualValues(t, pipelineExpected, turboConfig.Pipeline)
+	assert.EqualValues(t, remoteCacheOptionsExpected, turboConfig.RemoteCacheOptions)
+}
diff --git a/cli/internal/fs/testdata/turbo.json b/cli/internal/fs/testdata/turbo.json
new file mode 100644
index 0000000000000..7e4f07dcdd44e
--- /dev/null
+++ b/cli/internal/fs/testdata/turbo.json
@@ -0,0 +1,21 @@
+{
+  "pipeline": {
+    "build": {
+      "dependsOn": ["^build"],
+      "outputs": ["dist/**", ".next/**"]
+    },
+    "lint": {
+      "outputs": []
+    },
+    "dev": {
+      "cache": false
+    }
+  },
+  "remoteCache": {
+    "teamId": "team_id",
+    "signature": {
+      "enabled": true,
+      "key": "key"
+    }
+  }
+}
diff --git a/docs/schema.d.ts b/docs/schema.d.ts
index 6616283b79a70..bed4825bc456a 100644
--- a/docs/schema.d.ts
+++ b/docs/schema.d.ts
@@ -25,7 +25,7 @@ export interface Schema {
    * in the traditional dependency graph
    *
    * (e.g. a root tsconfig.json, jest.config.js, .eslintrc, etc.)).
-   * 
+   *
    * @default []
    */
   globalDependencies?: string[];
@@ -34,7 +34,7 @@
    * An object representing the task dependency graph of your project. turbo interprets
    * these conventions to properly schedule, execute, and cache the outputs of tasks in
    * your project.
-   * 
+   *
    * @default {}
    */
   pipeline: {
@@ -46,6 +46,11 @@
      */
     [script: string]: Pipeline;
   };
+  /**
+   * Configuration options that control how turbo interfaces with the Remote Cache.
+   * @default {}
+   */
+  remoteCache?: RemoteCache;
 }
 
 export interface Pipeline {
@@ -63,7 +68,7 @@
    *
    * Prefixing an item in dependsOn with a $ tells turbo that this pipeline task depends
    * the value of that environment variable.
-   * 
+   *
    * @default []
    */
   dependsOn?: string[];
@@ -90,3 +95,47 @@
    */
   cache?: boolean;
 }
+
+export interface RemoteCache {
+  /**
+   * The teamId used in requests to the Remote Cache.
+   */
+  teamId?: string;
+  /**
+   * Configuration options that control the integrity and authentication checks for
+   * artifacts uploaded to and downloaded from the remote cache.
+   *
+   * @default {}
+   */
+  signature?: Signature;
+}
+
+export interface Signature {
+  /**
+   * Indicates if signature verification is enabled for requests to the remote cache. When
+   * `enabled` is `true`, Turborepo will sign every uploaded artifact using the `key`.
+   * Turborepo will reject any downloaded artifacts that have an invalid signature or are
+   * missing a signature.
+   *
+   * @default false
+   */
+  enabled?: boolean;
+  /**
+   * The secret key to use for signing and verifying signatures on artifacts uploaded to
+   * the remote cache.
+   *
+   * If both `key` and `keyEnv` are present, then `key` will be used.
+   *
+   * @default ""
+   */
+  key?: string;
+  /**
+   * The environment variable that contains the value of the secret key used for signing
+   * and verifying signatures on artifacts uploaded to the remote cache.
+   *
+   * If both `key` and `keyEnv` are present, then `key` will be used.
+   *
+   * @default ""
+   */
+  keyEnv?: string;
+}
diff --git a/examples/basic/turbo.json b/examples/basic/turbo.json
index df69a7e0c2b48..33d0621ee2f4d 100644
--- a/examples/basic/turbo.json
+++ b/examples/basic/turbo.json
@@ -1,13 +1,8 @@
 {
   "pipeline": {
     "build": {
-      "dependsOn": [
-        "^build"
-      ],
-      "outputs": [
-        "dist/**",
-        ".next/**"
-      ]
+      "dependsOn": ["^build"],
+      "outputs": ["dist/**", ".next/**"]
     },
     "lint": {
       "outputs": []