59 changes: 0 additions & 59 deletions agent/algorithm/results.go

This file was deleted.

3 changes: 2 additions & 1 deletion agent/algorithm/results_test.go
@@ -7,6 +7,7 @@ import (
"testing"

"github.com/ultravioletrs/cocos/agent/algorithm"
"github.com/ultravioletrs/cocos/internal"
)

func TestZipDirectory(t *testing.T) {
@@ -73,7 +74,7 @@ func TestZipDirectory(t *testing.T) {
}
}

if _, err := algorithm.ZipDirectory(); err != nil {
if _, err := internal.ZipDirectoryToMemory(algorithm.ResultsDir); err != nil {
t.Errorf("ZipDirectory() error = %v", err)
}
})
17 changes: 17 additions & 0 deletions agent/computations.go
@@ -6,6 +6,8 @@ import (
"context"
"encoding/json"
"fmt"

"google.golang.org/grpc/metadata"
)

var _ fmt.Stringer = (*Datasets)(nil)
@@ -69,3 +71,18 @@ func IndexFromContext(ctx context.Context) (int, bool) {
index, ok := ctx.Value(ManifestIndexKey{}).(int)
return index, ok
}

const DecompressKey = "decompress"

func DecompressFromContext(ctx context.Context) bool {
vals := metadata.ValueFromIncomingContext(ctx, DecompressKey)
if len(vals) == 0 {
return false
}

return vals[0] == "true"
}

func DecompressToContext(ctx context.Context, decompress bool) context.Context {
return metadata.AppendToOutgoingContext(ctx, DecompressKey, fmt.Sprintf("%t", decompress))
}
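
Taken together, `DecompressToContext` and `DecompressFromContext` carry the flag across the gRPC boundary as string metadata under the `decompress` key. The following is a minimal sketch of that round trip; the explicit metadata copy only simulates gRPC transport for illustration and is not part of this change.

```go
package main

import (
	"context"
	"fmt"

	"github.com/ultravioletrs/cocos/agent"
	"google.golang.org/grpc/metadata"
)

func main() {
	// CLI side: mark the outgoing request so the agent decompresses the payload.
	ctx := agent.DecompressToContext(context.Background(), true)

	// Simulate gRPC transport for the sake of the example: whatever was appended
	// to the outgoing metadata arrives as incoming metadata on the agent side.
	md, _ := metadata.FromOutgoingContext(ctx)
	agentCtx := metadata.NewIncomingContext(context.Background(), md)

	// Agent side: Data() checks this before deciding whether to unzip the dataset.
	fmt.Println(agent.DecompressFromContext(agentCtx)) // true
}
```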
30 changes: 18 additions & 12 deletions agent/service.go
@@ -18,6 +18,7 @@ import (
"github.com/ultravioletrs/cocos/agent/algorithm/python"
"github.com/ultravioletrs/cocos/agent/algorithm/wasm"
"github.com/ultravioletrs/cocos/agent/events"
"github.com/ultravioletrs/cocos/internal"
"golang.org/x/crypto/sha3"
)

@@ -191,16 +192,22 @@ func (as *agentService) Data(ctx context.Context, dataset Dataset) error {

as.computation.Datasets = slices.Delete(as.computation.Datasets, i, i+1)

f, err := os.Create(fmt.Sprintf("%s/%s", algorithm.DatasetsDir, dataset.Filename))
if err != nil {
return fmt.Errorf("error creating dataset file: %v", err)
}

if _, err := f.Write(dataset.Dataset); err != nil {
return fmt.Errorf("error writing dataset to file: %v", err)
}
if err := f.Close(); err != nil {
return fmt.Errorf("error closing file: %v", err)
if DecompressFromContext(ctx) {
if err := internal.UnzipFromMemory(dataset.Dataset, algorithm.DatasetsDir); err != nil {
return fmt.Errorf("error decompressing dataset: %v", err)
}
} else {
f, err := os.Create(fmt.Sprintf("%s/%s", algorithm.DatasetsDir, dataset.Filename))
if err != nil {
return fmt.Errorf("error creating dataset file: %v", err)
}

if _, err := f.Write(dataset.Dataset); err != nil {
return fmt.Errorf("error writing dataset to file: %v", err)
}
if err := f.Close(); err != nil {
return fmt.Errorf("error closing file: %v", err)
}
}

matched = true
@@ -212,7 +219,6 @@ func (as *agentService) Data(ctx context.Context, dataset Dataset) error {
return ErrUndeclaredDataset
}

// Check if all datasets have been received
if len(as.computation.Datasets) == 0 {
as.sm.SendEvent(dataReceived)
}
@@ -288,7 +294,7 @@ func (as *agentService) runComputation() {
return
}

results, err := algorithm.ZipDirectory()
results, err := internal.ZipDirectoryToMemory(algorithm.ResultsDir)
if err != nil {
as.runError = err
as.sm.logger.Warn(fmt.Sprintf("failed to zip results: %s", err.Error()))
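
The zip helpers themselves live in the `internal` package, which this diff only calls into (the deleted `agent/algorithm/results.go` at the top of the diff presumably held the old `algorithm.ZipDirectory`). Based purely on how `ZipDirectoryToMemory` and `UnzipFromMemory` are invoked here, a sketch of such helpers could look like the following; the bodies, permissions, and package layout are assumptions, not the actual cocos implementation.

```go
package internal

import (
	"archive/zip"
	"bytes"
	"io"
	"os"
	"path/filepath"
)

// ZipDirectoryToMemory walks dir and returns its regular files as an
// in-memory zip archive, with paths stored relative to dir.
func ZipDirectoryToMemory(dir string) ([]byte, error) {
	buf := new(bytes.Buffer)
	zw := zip.NewWriter(buf)

	walkErr := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() {
			return err
		}

		rel, err := filepath.Rel(dir, path)
		if err != nil {
			return err
		}

		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}

		w, err := zw.Create(rel)
		if err != nil {
			return err
		}

		_, err = w.Write(data)
		return err
	})
	if walkErr != nil {
		return nil, walkErr
	}

	if err := zw.Close(); err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}

// UnzipFromMemory extracts an in-memory zip archive into dstDir.
// Note: a production implementation would also guard against path
// traversal ("zip slip"); this sketch assumes trusted archive contents.
func UnzipFromMemory(archive []byte, dstDir string) error {
	zr, err := zip.NewReader(bytes.NewReader(archive), int64(len(archive)))
	if err != nil {
		return err
	}

	for _, zf := range zr.File {
		if zf.FileInfo().IsDir() {
			continue
		}

		target := filepath.Join(dstDir, zf.Name)
		if err := os.MkdirAll(filepath.Dir(target), 0o755); err != nil {
			return err
		}

		rc, err := zf.Open()
		if err != nil {
			return err
		}

		data, err := io.ReadAll(rc)
		rc.Close()
		if err != nil {
			return err
		}

		if err := os.WriteFile(target, data, 0o644); err != nil {
			return err
		}
	}

	return nil
}
```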
32 changes: 26 additions & 6 deletions cli/README.md
@@ -16,14 +16,14 @@ make cli
Retrieves attestation information from the SEV guest and saves it to a file.
To retrieve attestation from agent, use the following command:
```bash
./build/cocos-cli agent attestation get '<report_data>'
./build/cocos-cli attestation get '<report_data>'
```

#### Validate attestation
Validates the retrieved attestation information against a specified policy and checks its authenticity.
To validate and verify attestation from agent, use the following command:
```bash
./build/cocos-cli agent attestation validate '<attestation>' --report_data '<report_data>'
./build/cocos-cli attestation validate '<attestation>' --report_data '<report_data>'
```
##### Flags
- --config: Path to a JSON file containing the validation configuration. This can be used to override individual flags.
@@ -62,21 +62,41 @@ To validate and verify attestation from agent, use the following command:
To upload an algorithm, use the following command:

```bash
./build/cocos-cli agent algo /path/to/algorithm <private_key_file_path>
./build/cocos-cli algo /path/to/algorithm <private_key_file_path>
```

##### Flags
- -a, --algorithm string Algorithm type to run (default "bin")
- --python-runtime string Python runtime to use (default "python3")
- -r, --requirements string Python requirements file


#### Upload Dataset

To upload a dataset, use the following command:

```bash
./build/cocos-cli agent data /path/to/dataset.csv <private_key_file_path>
./build/cocos-cli data /path/to/dataset.csv <private_key_file_path>
```

Users can also upload directories, which are compressed in transit. Once received by the agent, they are stored as compressed files, or decompressed if the user passed the decompression flag.

##### Flags
- -d, --decompress Decompress the dataset on agent



#### Retrieve result

To retrieve the computation result, use the following command:

```bash
./build/cocos-cli agent result <private_key_file_path>
```
./build/cocos-cli result <private_key_file_path>
```

#### Checksum
When defining the manifest, dataset and algorithm checksums are required. They can be computed as shown below:

```bash
./build/cocos-cli checksum <path_to_dataset_or_algorithm>
```
18 changes: 6 additions & 12 deletions cli/file_hash.go → cli/checksum.go
@@ -3,32 +3,26 @@
package cli

import (
"encoding/hex"
"log"
"os"

"github.com/spf13/cobra"
"golang.org/x/crypto/sha3"
"github.com/ultravioletrs/cocos/internal"
)

func (cli *CLI) NewFileHashCmd() *cobra.Command {
return &cobra.Command{
Use: "file-hash",
Use: "checksum",
Short: "Compute the sha3-256 hash of a file",
Example: "file-hash <file>",
Example: "checksum <file>",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
fileName := args[0]
path := args[0]

file, err := os.ReadFile(fileName)
hash, err := internal.ChecksumHex(path)
if err != nil {
log.Fatalf("Error reading dataset file: %v", err)
log.Fatalf("Error computing hash: %v", err)
}

hashBytes := sha3.Sum256(file)

hash := hex.EncodeToString(hashBytes[:])

log.Println("Hash of file:", hash)
},
}
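
The command now delegates hashing to `internal.ChecksumHex`, which is not shown in this diff. Judging from the inline code removed above, a helper along these lines would behave the same way; this is a sketch under that assumption, not the actual implementation.

```go
package internal

import (
	"encoding/hex"
	"os"

	"golang.org/x/crypto/sha3"
)

// ChecksumHex reads the file at path and returns its sha3-256 digest,
// hex-encoded, mirroring the steps the CLI command previously performed inline.
func ChecksumHex(path string) (string, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return "", err
	}

	sum := sha3.Sum256(data)
	return hex.EncodeToString(sum[:]), nil
}
```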
39 changes: 33 additions & 6 deletions cli/datasets.go
@@ -3,6 +3,7 @@
package cli

import (
"context"
"crypto/x509"
"encoding/pem"
"log"
@@ -11,27 +12,45 @@ import (

"github.com/spf13/cobra"
"github.com/ultravioletrs/cocos/agent"
"github.com/ultravioletrs/cocos/internal"
"google.golang.org/grpc/metadata"
)

var decompressDataset bool
Review comment (Contributor): Isn't this compression?

Reply (Contributor, author): No

Reply (Contributor, author): this states whether to decompress data on arrival or leave it compressed


func (cli *CLI) NewDatasetsCmd() *cobra.Command {
return &cobra.Command{
cmd := &cobra.Command{
Use: "data",
Short: "Upload a dataset",
Example: "data <dataset_path> <private_key_file_path>",
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
datasetFile := args[0]
datasetPath := args[0]

log.Println("Uploading dataset:", datasetFile)
log.Println("Uploading dataset:", datasetPath)

dataset, err := os.ReadFile(datasetFile)
f, err := os.Stat(datasetPath)
if err != nil {
log.Fatalf("Error reading dataset file: %v", err)
}

var dataset []byte

if f.IsDir() {
dataset, err = internal.ZipDirectoryToMemory(datasetPath)
if err != nil {
log.Fatalf("Error zipping dataset directory: %v", err)
}
} else {
dataset, err = os.ReadFile(datasetPath)
if err != nil {
log.Fatalf("Error reading dataset file: %v", err)
}
}

dataReq := agent.Dataset{
Dataset: dataset,
Filename: path.Base(datasetFile),
Filename: path.Base(datasetPath),
}

privKeyFile, err := os.ReadFile(args[1])
@@ -43,13 +62,17 @@ func (cli *CLI) NewDatasetsCmd() *cobra.Command {

privKey := decodeKey(pemBlock)

if err := cli.agentSDK.Data(cmd.Context(), dataReq, privKey); err != nil {
ctx := metadata.NewOutgoingContext(cmd.Context(), metadata.New(make(map[string]string)))
if err := cli.agentSDK.Data(addDatasetMetadata(ctx), dataReq, privKey); err != nil {
log.Fatalf("Error uploading dataset: %v", err)
}

log.Println("Successfully uploaded dataset")
},
}

cmd.Flags().BoolVarP(&decompressDataset, "decompress", "d", false, "Decompress the dataset on agent")
return cmd
}

func decodeKey(b *pem.Block) interface{} {
@@ -74,3 +97,7 @@ func decodeKey(b *pem.Block) interface{} {
return nil
}
}

func addDatasetMetadata(ctx context.Context) context.Context {
return agent.DecompressToContext(ctx, decompressDataset)
}