Merged
30 changes: 25 additions & 5 deletions crates/turbopack-dev-server/src/source/mod.rs
@@ -52,6 +52,26 @@ impl BodyError {
}
}

impl From<&str> for BodyError {
fn from(err: &str) -> Self {
BodyError {
err: err.to_string(),
}
}
}

impl From<String> for BodyError {
fn from(err: String) -> Self {
BodyError { err }
}
}

impl From<anyhow::Error> for BodyError {
fn from(value: anyhow::Error) -> Self {
value.to_string().into()
}
}
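
For orientation, a minimal sketch of a hypothetical call site (not part of this diff) showing what these conversions enable: string messages convert with .into(), and the ? operator now works on anyhow results inside functions that return a BodyError.

fn read_chunk(raw: &[u8]) -> Result<Bytes, BodyError> {
    if raw.is_empty() {
        // &str -> BodyError via the new From<&str> impl
        return Err("empty chunk".into());
    }
    // Utf8Error -> anyhow::Error -> BodyError, so `?` works here
    // thanks to the new From<anyhow::Error> impl
    let text = std::str::from_utf8(raw).map_err(anyhow::Error::from)?;
    Ok(Bytes::copy_from_slice(text.as_bytes()))
}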

/// The return value of a content source when getting a path. A specificity is
/// attached and when combining results this specificity should be used to order
/// results.
@@ -246,29 +266,29 @@ pub struct ContentSourceData {
pub cache_buster: u64,
}

type Chunk = Result<Bytes, BodyError>;
pub type BodyChunk = Result<Bytes, BodyError>;
Member:

Suggested change
pub type BodyChunk = Result<Bytes, BodyError>;
pub type BodyChunk = Result<Bytes, SharedError>;

BodyError converts everything to a string, losing all the structure. We want to avoid that.

Contributor Author:

This requires a slightly larger change, and it's complicated by SharedError not being serializable. That would prevent any body response from being cacheable, which I think is bad for our end goal? Anyway, I'd like to defer this to a follow-up PR for the time being.

Member:

> That would prevent any body response from being cacheable, which I think is bad for our end goal?

It would still be cacheable in memory.

While I'd like static rendering to be cacheable, Next.js doesn't cache rendering at all in dev and always server-renders a fresh page.
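
For context on the structured-error concern, a minimal sketch (purely illustrative, not code from this PR) of what is lost when an anyhow::Error is flattened to a string, as the From<anyhow::Error> impl above does:

// Hypothetical example of the flattening under discussion.
let original = anyhow::anyhow!("connection reset")
    .context("failed to read request body");

// What `From<anyhow::Error> for BodyError` effectively stores:
let flattened = original.to_string();

// `flattened` is only "failed to read request body"; the underlying
// "connection reset" cause can no longer be walked via `.chain()` or
// downcast to a concrete error type.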

/// A request body.
#[turbo_tasks::value(shared)]
#[derive(Default, Clone, Debug)]
pub struct Body {
#[turbo_tasks(trace_ignore)]
chunks: Stream<Chunk>,
chunks: Stream<BodyChunk>,
}

impl Body {
/// Creates a new body from a list of chunks.
pub fn new(chunks: Vec<Chunk>) -> Self {
pub fn new(chunks: Vec<BodyChunk>) -> Self {
Self {
chunks: Stream::new_closed(chunks),
}
}

/// Returns an iterator over the body's chunks.
pub fn read(&self) -> StreamRead<Chunk> {
pub fn read(&self) -> StreamRead<BodyChunk> {
self.chunks.read()
}

pub fn from_stream<T: StreamTrait<Item = Chunk> + Send + Sync + Unpin + 'static>(
pub fn from_stream<T: StreamTrait<Item = BodyChunk> + Send + Unpin + 'static>(
source: T,
) -> Self {
Self {
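
As a usage note, a hypothetical construction of a Body from in-memory chunks with the now-public BodyChunk alias (values invented for illustration; Bytes and BodyError are the types already used in this module):

let chunks: Vec<BodyChunk> = vec![
    Ok(Bytes::from_static(b"hello ")),
    // the new From<&str> impl turns the message into a BodyError
    Err("stream aborted".into()),
];
let body = Body::new(chunks);
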
42 changes: 27 additions & 15 deletions crates/turbopack-node/src/evaluate.rs
@@ -249,17 +249,6 @@ pub fn evaluate(
additional_invalidation: CompletionVc,
debug: bool,
) -> JavaScriptEvaluationVc {
let pool = get_evaluate_pool(
module_asset,
cwd,
env,
context,
chunking_context,
runtime_entries,
additional_invalidation,
debug,
);

// Note the following code uses some hacks to create a child task that produces
// a stream that is returned by this task.

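To make the note above concrete, here is a minimal sketch of the pattern (names and channel types invented; this is not the PR's actual turbo-tasks machinery): the parent creates a channel, parks the sender behind a shared Option, and the child task takes it exactly once when it starts producing the stream.

use std::sync::{Arc, Mutex};

fn sketch() {
    let (tx, rx) = std::sync::mpsc::channel::<String>();
    // The sender can be claimed only once; a later re-execution finds
    // None and returns instead of racing for the stream.
    let initial = Arc::new(Mutex::new(Some(tx)));

    let for_child = Arc::clone(&initial);
    std::thread::spawn(move || {
        if let Some(sender) = for_child.lock().unwrap().take() {
            sender.send("chunk".to_string()).ok();
        }
    });

    // The parent hands out the receiving end as the returned stream.
    for chunk in rx {
        println!("{chunk}");
    }
}
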
@@ -278,11 +267,16 @@

// run the evaluation as side effect
compute_evaluate_stream(
pool,
module_asset,
cwd,
env,
context_ident_for_issue,
context,
chunking_context,
runtime_entries,
args,
additional_invalidation,
debug,
JavaScriptStreamSender {
get: Box::new(move || {
if let Some(sender) = initial.lock().take() {
Expand All @@ -308,11 +302,16 @@ pub fn evaluate(

#[turbo_tasks::function]
async fn compute_evaluate_stream(
pool: NodeJsPoolVc,
module_asset: AssetVc,
cwd: FileSystemPathVc,
env: ProcessEnvVc,
context_ident_for_issue: AssetIdentVc,
context: AssetContextVc,
chunking_context: ChunkingContextVc,
runtime_entries: Option<EcmascriptChunkPlaceablesVc>,
args: Vec<JsonValueVc>,
additional_invalidation: CompletionVc,
debug: bool,
sender: JavaScriptStreamSenderVc,
) {
mark_finished();
@@ -322,7 +321,20 @@
};

let stream = generator! {
let pool = pool.await?;
let pool = get_evaluate_pool(
module_asset,
cwd,
env,
context,
chunking_context,
runtime_entries,
additional_invalidation,
debug,
);

// Read this strongly consistent, since we don't want to run inconsistent
// node.js code.
let pool = pool.strongly_consistent().await?;

let args = args.into_iter().try_join().await?;
// Assume this is a one-off operation, so we can kill the process
@@ -407,7 +419,7 @@ async fn pull_operation(
match operation.recv().await? {
EvalJavaScriptIncomingMessage::Error(error) => {
EvaluationIssue {
error: error.clone(),
error,
context_ident: context_ident_for_issue,
assets_for_source_mapping: pool.assets_for_source_mapping,
assets_root: pool.assets_root,
1 change: 1 addition & 0 deletions crates/turbopack-node/src/lib.rs
@@ -278,6 +278,7 @@ pub async fn get_intermediate_asset(
.into())
}

#[derive(Clone, Debug)]
#[turbo_tasks::value(shared)]
pub struct ResponseHeaders {
pub status: u16,
14 changes: 11 additions & 3 deletions crates/turbopack-node/src/render/mod.rs
@@ -35,15 +35,16 @@ enum RenderProxyOutgoingMessage<'a> {
BodyEnd,
}

#[derive(Deserialize)]
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "camelCase")]
enum RenderProxyIncomingMessage {
Headers { data: ResponseHeaders },
Body { data: Vec<u8> },
BodyChunk { data: Vec<u8> },
BodyEnd,
Error(StructuredError),
}

#[derive(Deserialize)]
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "camelCase")]
enum RenderStaticIncomingMessage {
#[serde(rename_all = "camelCase")]
@@ -52,6 +53,13 @@ enum RenderStaticIncomingMessage {
headers: Vec<(String, String)>,
body: String,
},
Headers {
data: ResponseHeaders,
},
BodyChunk {
data: Vec<u8>,
},
BodyEnd,
Rewrite {
path: String,
},
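
As a reading aid, both tagged enums above (RenderProxyIncomingMessage and RenderStaticIncomingMessage) use serde's internal tagging with camelCase variant names, so the newly added streaming variants correspond to wire messages roughly like the ones below. This is a minimal sketch with invented payload values; it assumes serde_json is available where these enums are defined.

// {"type": "headers", "data": ...}     carries a ResponseHeaders struct
// {"type": "bodyChunk", "data": [...]} carries raw body bytes
// {"type": "bodyEnd"}                  closes the stream
let chunk: RenderStaticIncomingMessage =
    serde_json::from_str(r#"{"type": "bodyChunk", "data": [104, 105]}"#).unwrap();
let end: RenderStaticIncomingMessage =
    serde_json::from_str(r#"{"type": "bodyEnd"}"#).unwrap();
// `chunk` is BodyChunk { data: vec![104, 105] } and `end` is BodyEnd.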