diff --git a/BUILD.bazel b/BUILD.bazel
index 74499e9..56a60c4 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -9,12 +9,14 @@ load("@gazelle//:def.bzl", "gazelle")
 # gazelle:resolve proto go build/bazel/remote/execution/v2/remote_execution.proto @bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_go_proto
 # gazelle:resolve proto go google/rpc/status.proto @org_golang_google_genproto_googleapis_rpc//status
 # gazelle:resolve proto go pkg/proto/configuration/auth/auth.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/auth
+# gazelle:resolve proto go pkg/proto/configuration/blobstore/blobstore.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/blobstore
 # gazelle:resolve proto go pkg/proto/configuration/global/global.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/global
 # gazelle:resolve proto go pkg/proto/configuration/grpc/grpc.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/grpc
 # gazelle:resolve proto go pkg/proto/configuration/http/http.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/http
 # gazelle:resolve proto build/bazel/remote/execution/v2/remote_execution.proto @bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_proto
 # gazelle:resolve proto google/rpc/status.proto @googleapis//google/rpc:status_proto
 # gazelle:resolve proto pkg/proto/configuration/auth/auth.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/auth:auth_proto
+# gazelle:resolve proto pkg/proto/configuration/blobstore/blobstore.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/blobstore:blobstore_proto
 # gazelle:resolve proto pkg/proto/configuration/global/global.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/global:global_proto
 # gazelle:resolve proto pkg/proto/configuration/grpc/grpc.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/grpc:grpc_proto
 # gazelle:resolve proto pkg/proto/configuration/http/http.proto @com_github_buildbarn_bb_storage//pkg/proto/configuration/http:http_proto
diff --git a/MODULE.bazel b/MODULE.bazel
index 952a140..72d3ccb 100644
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -101,6 +101,7 @@ use_repo(
     "io_entgo_ent",
     "org_golang_google_api",
     "org_golang_google_genproto",
+    "org_golang_google_genproto_googleapis_bytestream",
     "org_golang_google_grpc",
     "org_golang_google_protobuf",
     "org_golang_x_lint",
diff --git a/README.md b/README.md
index 01fde27..e616f20 100644
--- a/README.md
+++ b/README.md
@@ -106,6 +106,17 @@ Once you have BEP files produced by Bazel, you can upload them via the application.
 BB-portal can show the same information as the web interface from BB-scheduler. To do this, you need to configure the `buildQueueStateProxy` in the portal configuration file. The interface can be found under the `Scheduler` tab in the menu.
 
+### BB-browser
+
+BB-portal can show the same information as BB-browser. Everything it can show is available under the `Browser` tab. To make the browser functionality work, you need to configure
+
+- `actionCacheProxy`
+- `contentAddressableStorageProxy`
+- (optional) `initialSizeClassCacheProxy`
+- (optional) `fileSystemAccessCacheProxy`
+
+in the portal configuration file. Despite its name, the browser does not let you browse through the stored content; instead, other parts of Buildbarn generate links to it. For bb-portal to open the content, these links should use the prefix `http://url-to-bb-portal/browser/`. After the `/browser/` prefix, the rest of the URL is compatible with bb-browser URLs.
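+
+For example, a link generated by another Buildbarn component might look as follows, where `fuse` is a placeholder instance name and `${hash}-${size_bytes}` stands for the action digest:
+
+```
+http://url-to-bb-portal/browser/fuse/blobs/sha256/action/${hash}-${size_bytes}/
+```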
+
 ## Using GraphiQL To Explore the GraphQL API
 
 The GraphiQL explorer is available via http://localhost:8081/graphiql.
diff --git a/cmd/bb_portal/BUILD.bazel b/cmd/bb_portal/BUILD.bazel
index 3ed9a20..325d4ad 100644
--- a/cmd/bb_portal/BUILD.bazel
+++ b/cmd/bb_portal/BUILD.bazel
@@ -14,10 +14,16 @@ go_library(
         "//ent/gen/ent/migrate",
         "//internal/api",
         "//internal/api/grpc/bes",
+        "//internal/api/grpcweb/actioncacheproxy",
         "//internal/api/grpcweb/buildqueuestateproxy",
+        "//internal/api/grpcweb/casproxy",
+        "//internal/api/grpcweb/fsacproxy",
+        "//internal/api/grpcweb/isccproxy",
+        "//internal/api/servefiles:servefiles_lib",
         "//internal/graphql",
         "//pkg/processing",
         "//pkg/proto/configuration/bb_portal",
+        "@bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_go_proto",
         "@com_github_99designs_gqlgen//graphql/handler",
         "@com_github_99designs_gqlgen//graphql/playground",
         "@com_github_buildbarn_bb_remote_execution//pkg/proto/buildqueuestate",
@@ -26,6 +32,8 @@ go_library(
         "@com_github_buildbarn_bb_storage//pkg/grpc",
         "@com_github_buildbarn_bb_storage//pkg/http",
         "@com_github_buildbarn_bb_storage//pkg/program",
+        "@com_github_buildbarn_bb_storage//pkg/proto/fsac",
+        "@com_github_buildbarn_bb_storage//pkg/proto/iscc",
         "@com_github_buildbarn_bb_storage//pkg/util",
         "@com_github_fsnotify_fsnotify//:fsnotify",
         "@com_github_gorilla_mux//:mux",
@@ -36,6 +44,7 @@ go_library(
         "@io_entgo_ent//dialect",
         "@io_entgo_ent//dialect/sql",
         "@org_golang_google_genproto//googleapis/devtools/build/v1:build",
+        "@org_golang_google_genproto_googleapis_bytestream//:bytestream",
         "@org_golang_google_grpc//:grpc",
     ],
 )
diff --git a/cmd/bb_portal/grpcweb_proxy_server.go b/cmd/bb_portal/grpcweb_proxy_server.go
index ff02226..89901f7 100644
--- a/cmd/bb_portal/grpcweb_proxy_server.go
+++ b/cmd/bb_portal/grpcweb_proxy_server.go
@@ -4,14 +4,22 @@ import (
 	"log"
 	"slices"
 
+	remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2"
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb/actioncacheproxy"
 	"github.com/buildbarn/bb-portal/internal/api/grpcweb/buildqueuestateproxy"
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb/casproxy"
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb/fsacproxy"
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb/isccproxy"
 	"github.com/buildbarn/bb-portal/pkg/proto/configuration/bb_portal"
 	"github.com/buildbarn/bb-remote-execution/pkg/proto/buildqueuestate"
 	"github.com/buildbarn/bb-storage/pkg/auth"
 	bb_grpc "github.com/buildbarn/bb-storage/pkg/grpc"
 	bb_http "github.com/buildbarn/bb-storage/pkg/http"
 	"github.com/buildbarn/bb-storage/pkg/program"
+	"github.com/buildbarn/bb-storage/pkg/proto/fsac"
+	"github.com/buildbarn/bb-storage/pkg/proto/iscc"
 	"github.com/improbable-eng/grpc-web/go/grpcweb"
+	"google.golang.org/genproto/googleapis/bytestream"
 	go_grpc "google.golang.org/grpc"
 )
@@ -80,4 +88,64 @@ func StartGrpcWebProxyServer(
 	} else {
 		log.Printf("Did not start BuildQueueState proxy because BuildQueueStateProxy is not configured")
 	}
+
+	if configuration.ActionCacheProxy != nil {
+		registerAndStartServer(
+			configuration.ActionCacheProxy,
+			siblingsGroup,
+			grpcClientFactory,
+			"ActionCacheProxy",
+			func(grpcServer *go_grpc.Server, grpcClient go_grpc.ClientConnInterface) {
+				c := remoteexecution.NewActionCacheClient(grpcClient)
+				remoteexecution.RegisterActionCacheServer(grpcServer, actioncacheproxy.NewAcctionCacheServerImpl(c, instanceNameAuthorizer))
+
}, + ) + } else { + log.Printf("Did not start ActionCache proxy because ActionCacheProxy is not configured") + } + + if configuration.ContentAddressableStorageProxy != nil { + registerAndStartServer( + configuration.ContentAddressableStorageProxy, + siblingsGroup, + grpcClientFactory, + "ContentAddressableStorageProxy", + func(grpcServer *go_grpc.Server, grpcClient go_grpc.ClientConnInterface) { + c := bytestream.NewByteStreamClient(grpcClient) + bytestream.RegisterByteStreamServer(grpcServer, casproxy.NewCasServerImpl(c, instanceNameAuthorizer)) + }, + ) + } else { + log.Printf("Did not start ContentAddressableStorage proxy because ContentAddressableStorageProxy is not configured") + } + + if configuration.InitialSizeClassCacheProxy != nil { + registerAndStartServer( + configuration.InitialSizeClassCacheProxy, + siblingsGroup, + grpcClientFactory, + "InitialSizeClassCacheProxy", + func(grpcServer *go_grpc.Server, grpcClient go_grpc.ClientConnInterface) { + c := iscc.NewInitialSizeClassCacheClient(grpcClient) + iscc.RegisterInitialSizeClassCacheServer(grpcServer, isccproxy.NewIsccServerImpl(c, instanceNameAuthorizer)) + }, + ) + } else { + log.Printf("Did not start InitialSizeClassCache proxy because InitialSizeClassCacheProxy is not configured") + } + + if configuration.FileSystemAccessCacheProxy != nil { + registerAndStartServer( + configuration.FileSystemAccessCacheProxy, + siblingsGroup, + grpcClientFactory, + "FileSystemAccessCacheProxy", + func(grpcServer *go_grpc.Server, grpcClient go_grpc.ClientConnInterface) { + c := fsac.NewFileSystemAccessCacheClient(grpcClient) + fsac.RegisterFileSystemAccessCacheServer(grpcServer, fsacproxy.NewFsacServerImpl(c, instanceNameAuthorizer)) + }, + ) + } else { + log.Printf("Did not start FileSystemAccessCache proxy because FileSystemAccessCacheProxy is not configured") + } } diff --git a/cmd/bb_portal/main.go b/cmd/bb_portal/main.go index b1a1ff2..aa4283d 100644 --- a/cmd/bb_portal/main.go +++ b/cmd/bb_portal/main.go @@ -29,6 +29,7 @@ import ( "github.com/buildbarn/bb-portal/ent/gen/ent/migrate" "github.com/buildbarn/bb-portal/internal/api" "github.com/buildbarn/bb-portal/internal/api/grpc/bes" + "github.com/buildbarn/bb-portal/internal/api/servefiles" "github.com/buildbarn/bb-portal/internal/graphql" "github.com/buildbarn/bb-portal/pkg/processing" "github.com/buildbarn/bb-portal/pkg/proto/configuration/bb_portal" @@ -117,8 +118,10 @@ func main() { defer watcher.Close() runWatcher(watcher, dbClient, *bepFolder, blobArchiver) + serveFileService := servefiles.NewFileServerServiceFromConfiguration(dependenciesGroup, &configuration, grpcClientFactory) + router := mux.NewRouter() - newPortalService(blobArchiver, dbClient, router) + newPortalService(blobArchiver, dbClient, serveFileService, router) bb_http.NewServersFromConfigurationAndServe( configuration.HttpServers, bb_http.NewMetricsHandler(router, "PortalUI"), @@ -199,13 +202,18 @@ func fatal(msg string, args ...any) { os.Exit(1) } -func newPortalService(archiver processing.BlobMultiArchiver, dbClient *ent.Client, router *mux.Router) { +func newPortalService(archiver processing.BlobMultiArchiver, dbClient *ent.Client, serveFilesService *servefiles.FileServerService, router *mux.Router) { srv := handler.NewDefaultServer(graphql.NewSchema(dbClient)) srv.Use(entgql.Transactioner{TxOpener: dbClient}) router.PathPrefix("/graphql").Handler(srv) router.Handle("/graphiql", playground.Handler("GraphQL Playground", "/graphql")) router.Handle("/api/v1/bep/upload", api.NewBEPUploadHandler(dbClient, 
archiver)).Methods("POST") + if serveFilesService != nil { + router.HandleFunc("/api/servefile/{instanceName:(?:.*?/)?}blobs/{digestFunction}/file/{hash}-{sizeBytes}/{name}", serveFilesService.HandleFile).Methods("GET") + router.HandleFunc("/api/servefile/{instanceName:(?:.*?/)?}blobs/{digestFunction}/command/{hash}-{sizeBytes}/", serveFilesService.HandleCommand).Methods("GET") + router.HandleFunc("/api/servefile/{instanceName:(?:.*?/)?}blobs/{digestFunction}/directory/{hash}-{sizeBytes}/", serveFilesService.HandleDirectory).Methods("GET") + } router.PathPrefix("/").Handler(frontendServer()) } diff --git a/config/portal.jsonnet b/config/portal.jsonnet index 825cb45..776484e 100644 --- a/config/portal.jsonnet +++ b/config/portal.jsonnet @@ -4,11 +4,28 @@ // [bb-deployments](https://github.com/buildbarn/bb-deployments), i.e. it // assumes that the following services are running: // - A Buildbarn scheduler, accessible at localhost:8984 +// - A Buildbarn frontend, accessible at localhost:8980 { + serveFilesCasConfiguration: { + grpc: { address: 'localhost:8980' }, + }, + maximumMessageSizeBytes: 2 * 1024 * 1024, + httpServers: [{ listenAddresses: [':8081'], - authenticationPolicy: { allow: {} }, + authenticationPolicy: { + allow: { + public: { + user: 'FooBar', + }, + private: { + groups: ['admin'], + instances: ['fuse', 'testingQueue'], + email: 'foo@example.com', + }, + }, + }, }], grpcServers: [{ listenAddresses: [':8082'], @@ -44,4 +61,91 @@ }, }], }, + + actionCacheProxy: { + client: { + address: 'localhost:8980', + }, + allowedOrigins: ['http://localhost:8081'], + httpServers: [{ + listenAddresses: [':9434'], + authenticationPolicy: { + allow: { + public: { + user: 'FooBar', + }, + private: { + groups: ['admin'], + instances: ['fuse', 'testingQueue'], + email: 'foo@example.com', + }, + }, + }, + }], + }, + + contentAddressableStorageProxy: { + client: { + address: 'localhost:8980', + }, + allowedOrigins: ['http://localhost:8081'], + httpServers: [{ + listenAddresses: [':9435'], + authenticationPolicy: { + allow: { + public: { + user: 'FooBar', + }, + private: { + groups: ['admin'], + instances: ['fuse', 'testingQueue'], + email: 'foo@example.com', + }, + }, + }, + }], + }, + + initialSizeClassCacheProxy: { + client: { + address: 'localhost:8980', + }, + allowedOrigins: ['http://localhost:8081'], + httpServers: [{ + listenAddresses: [':9436'], + authenticationPolicy: { + allow: { + public: { + user: 'FooBar', + }, + private: { + groups: ['admin'], + instances: ['fuse', 'testingQueue'], + email: 'foo@example.com', + }, + }, + }, + }], + }, + fileSystemAccessCacheProxy: { + client: { + address: 'localhost:8980', + }, + allowedOrigins: ['http://localhost:8081'], + httpServers: [{ + listenAddresses: [':9437'], + authenticationPolicy: { + allow: { + public: { + user: 'FooBar', + }, + private: { + groups: ['admin'], + instances: ['fuse', 'testingQueue'], + email: 'foo@example.com', + }, + }, + }, + }], + }, } diff --git a/frontend/.env b/frontend/.env index 9b34840..659697e 100644 --- a/frontend/.env +++ b/frontend/.env @@ -1,6 +1,10 @@ NEXT_PUBLIC_BES_BACKEND_URL=http://localhost:8081 NEXT_PUBLIC_BES_GRPC_BACKEND_URL=grpc://localhost:8082 NEXT_PUBLIC_BB_BUILDQUEUESTATE_GRPC_BACKEND_URL=http://localhost:9433 +NEXT_PUBLIC_BB_ACTIONCACHE_GRPC_BACKEND_URL=http://localhost:9434 +NEXT_PUBLIC_BB_CAS_GRPC_BACKEND_URL=http://localhost:9435 +NEXT_PUBLIC_BB_ISCC_GRPC_BACKEND_URL=http://localhost:9436 +NEXT_PUBLIC_BB_FSAC_GRPC_BACKEND_URL=http://localhost:9437 
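+# These backend URLs must point at the grpc-web proxies configured in the
+# portal configuration; the ports match the listenAddresses used in
+# config/portal.jsonnet (9434-9437).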
NEXT_PUBLIC_BROWSER_URL=https://browser.example.com NEXT_PUBLIC_COMPANY_NAME="Example Co" NEXT_PUBLIC_COMPANY_SLACK_CHANNEL_NAME=ExampleCoBuild diff --git a/frontend/scripts/grpc-client-gen.sh b/frontend/scripts/grpc-client-gen.sh index 1262adc..7dade53 100755 --- a/frontend/scripts/grpc-client-gen.sh +++ b/frontend/scripts/grpc-client-gen.sh @@ -19,4 +19,11 @@ generate_grpc_client() { ${proto_file} } +generate_grpc_client "${PROTO_DIR}/buildbarn/auth/auth.proto" generate_grpc_client "${PROTO_DIR}/buildbarn/buildqueuestate/buildqueuestate.proto" +generate_grpc_client "${PROTO_DIR}/buildbarn/cas/cas.proto" +generate_grpc_client "${PROTO_DIR}/buildbarn/resourceusage/resourceusage.proto" +generate_grpc_client "${PROTO_DIR}/google/bytestream/bytestream.proto" +generate_grpc_client "${PROTO_DIR}/buildbarn/iscc/iscc.proto" +generate_grpc_client "${PROTO_DIR}/buildbarn/fsac/fsac.proto" +generate_grpc_client "${PROTO_DIR}/buildbarn/query/query.proto" diff --git a/frontend/src/app/browser/[...slug]/page.tsx b/frontend/src/app/browser/[...slug]/page.tsx new file mode 100644 index 0000000..38f64be --- /dev/null +++ b/frontend/src/app/browser/[...slug]/page.tsx @@ -0,0 +1,103 @@ +"use client"; + +import BrowserActionGrid from "@/components/BrowserActionGrid"; +import BrowserCommandGrid from "@/components/BrowserCommandGrid"; +import BrowserDirectoryPage from "@/components/BrowserDirectoryPage"; +import BrowserPreviousExecutionsPage from "@/components/BrowserPreviousExecutionsPage"; +import Content from "@/components/Content"; +import PortalCard from "@/components/PortalCard"; +import { BrowserPageType } from "@/types/BrowserPageType"; +import { parseBrowserPageSlug } from "@/utils/parseBrowserPageSlug"; +import { + CalculatorOutlined, + CalendarFilled, + CodeOutlined, + FolderOpenFilled, + HistoryOutlined, +} from "@ant-design/icons"; +import { Typography } from "antd"; +import { notFound } from "next/navigation"; +import type React from "react"; + +interface PageParams { + params: { + slug: Array; + }; +} + +const Page: React.FC = ({ params }) => { + const browserPageParams = parseBrowserPageSlug(params.slug); + + if (browserPageParams === undefined) { + notFound(); + } + + const renderChild = () => { + switch (browserPageParams.browserPageType) { + case BrowserPageType.Action: + return ( + } + titleBits={[Action]} + > + + + ); + + case BrowserPageType.Command: + return ( + } + titleBits={[Command]} + > + + + ); + + case BrowserPageType.Directory: + return ( + } + titleBits={[Directory]} + > + Directory contents + + + ); + + case BrowserPageType.HistoricalExecuteResponse: + return ( + } + titleBits={[Historical Execute Response]} + > + + Historical Execute Response + + + + ); + + case BrowserPageType.PreviousExecutionStats: + return ( + } + titleBits={[Previous executions stats]} + > + + + ); + default: + return notFound(); + } + }; + + return ; +}; + +export default Page; diff --git a/frontend/src/app/browser/page.module.css b/frontend/src/app/browser/page.module.css new file mode 100644 index 0000000..103ac88 --- /dev/null +++ b/frontend/src/app/browser/page.module.css @@ -0,0 +1,4 @@ +.welcomeList > li > p > pre { + word-break: break-all; + margin-bottom: 0.5em; +} diff --git a/frontend/src/app/browser/page.tsx b/frontend/src/app/browser/page.tsx new file mode 100644 index 0000000..329b46a --- /dev/null +++ b/frontend/src/app/browser/page.tsx @@ -0,0 +1,128 @@ +"use client"; + +import Content from "@/components/Content"; +import PortalCard from "@/components/PortalCard"; +import { LayoutOutlined } 
from "@ant-design/icons"; +import { Space, Typography } from "antd"; +import type React from "react"; +import styles from "./page.module.css"; + +const Page: React.FC = () => { + return ( + + } + titleBits={[Browser]} + > + Welcome + + This page allows you to display objects stored in the Content + Addressable Storage (CAS) and Action Cache (AC) as defined by the{" "} + + Remote Execution API + + . Objects in these data stores have hard to guess identifiers and + the Remote Execution API provides no functions for iterating over + them. One may therefore only access this service in a meaningful + way by visiting automatically generated URLs pointing to this + page. Tools that are part of Buildbarn will generate these URLs + where applicable. + + + + + This service supports the following URL schemes: + + +
+              <ul className={styles.welcomeList}>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/action/${hash}-${size_bytes}/"
+                      }
+                    </pre>
+                    Displays information about an Action and its associated
+                    Command stored in the CAS. If available, displays
+                    information about the Action's associated ActionResult
+                    stored in the AC.
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/command/${hash}-${size_bytes}/"
+                      }
+                    </pre>
+                    Displays information about a Command stored in the CAS.
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/directory/${hash}-${size_bytes}/"
+                      }
+                    </pre>
+                    Displays information about a Directory stored in the CAS.
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/api/servefile/${instance_name}/blobs/${digest_function}/file/${hash}-${size_bytes}/${filename}"
+                      }
+                    </pre>
+                    Serves a file stored in the CAS.
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/historical_execute_response/${hash}-${size_bytes}/"
+                      }
+                    </pre>
+                    Extension: displays information about an ActionResult that
+                    was not permitted to be stored in the AC, but was stored in
+                    the CAS instead. Buildbarn stores ActionResult messages for
+                    failed build actions in the CAS.
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/previous_execution_stats/${hash}-${size_bytes}/"
+                      }
+                    </pre>
+                    Extension: displays information about outcomes of previous
+                    executions of similar actions. This information is extracted
+                    from Buildbarn's Initial Size Class Cache (ISCC).
+                  </Typography.Paragraph>
+                </li>
+                <li>
+                  <Typography.Paragraph>
+                    <pre>
+                      {
+                        "/browser/${instance_name}/blobs/${digest_function}/tree/${hash}-${size_bytes}/${subdirectory}/"
+                      }
+                    </pre>
+                    Displays information about a Directory contained in a Tree
+                    stored in the CAS.
+                  </Typography.Paragraph>
+                </li>
+              </ul>
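+              {/*
+                The /api/servefile/ scheme above is handled by the bb_portal
+                backend (its routes are registered in cmd/bb_portal/main.go);
+                the /browser/ schemes are rendered client side by
+                frontend/src/app/browser/[...slug]/page.tsx.
+              */}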
+
+
+ + } + /> + ); +}; + +export default Page; diff --git a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx index 62730eb..4188b3d 100644 --- a/frontend/src/app/layout.tsx +++ b/frontend/src/app/layout.tsx @@ -13,6 +13,9 @@ import Dynamic from '@/components/Dynamic'; import { ApolloWrapper } from '@/components/ApolloWrapper'; import parseStringBoolean from '@/utils/storage'; import { PublicEnvScript } from 'next-runtime-env'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import GrpcClientsProvider from '@/context/GrpcClientsProvider'; +import { ReactQueryDevtools } from '@tanstack/react-query-devtools'; const PREFERS_DARK_KEY = 'prefers-dark'; @@ -61,6 +64,8 @@ export default function RootLayout({ children }: { children: React.ReactNode }) const [extraAppBarMenuItems, setExtraAppBarMenuItems] = useState([]); + const queryClient = new QueryClient() + return ( <> Buildbarn Portal @@ -73,18 +78,24 @@ export default function RootLayout({ children }: { children: React.ReactNode }) - - - - - {children} - - - + + + + + + + {children} + + + + + {/* Adds devtools. Is automatically removed for production builds. */} + + diff --git a/frontend/src/components/AppBar/index.tsx b/frontend/src/components/AppBar/index.tsx index 3899d3e..e75ea36 100644 --- a/frontend/src/components/AppBar/index.tsx +++ b/frontend/src/components/AppBar/index.tsx @@ -24,6 +24,7 @@ const APP_BAR_MENU_ITEMS: ItemType[] = [ getItem({ depth: 0, href: '/trends', title: 'Trends' }), getItem({ depth: 0, href: '/tests', title: 'Tests' }), getItem({ depth: 0, href: '/targets', title: 'Targets' }), + getItem({ depth: 0, href: '/browser', title: 'Browser' }), getItem({ depth: 0, href: '/scheduler', diff --git a/frontend/src/components/BrowserActionGrid/CopyBbClientdActionButton.tsx b/frontend/src/components/BrowserActionGrid/CopyBbClientdActionButton.tsx new file mode 100644 index 0000000..d0f01ee --- /dev/null +++ b/frontend/src/components/BrowserActionGrid/CopyBbClientdActionButton.tsx @@ -0,0 +1,64 @@ +"use client"; + +import type { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { getBBClientdPath } from "@/utils/getBbClientdPath"; +import { Button, message } from "antd"; +import type React from "react"; + +interface Params { + browserPageParams: BrowserPageParams; + actionDigest: Digest; + commandDigest: Digest; + inputRootDigest: Digest; +} + +const CopyBbClientdActionButton: React.FC = ({ + browserPageParams, + actionDigest, + commandDigest, + inputRootDigest, +}) => { + const [messageApi, contextHolder] = message.useMessage(); + + const commandBbClientdPath = getBBClientdPath( + browserPageParams.instanceName, + browserPageParams.digestFunction, + commandDigest, + "command", + ); + + const inputRootBbClientdPath = getBBClientdPath( + browserPageParams.instanceName, + browserPageParams.digestFunction, + inputRootDigest, + "directory", + ); + + const script = `rsync \\ + --delete \\ + --link-dest ${inputRootBbClientdPath}/ \\ + --progress \\ + --recursive \\ + ${inputRootBbClientdPath}/ \\ + ~/bb_clientd/scratch/${actionDigest.hash}-${actionDigest.sizeBytes} && +cd ~/bb_clientd/scratch/${actionDigest.hash}-${actionDigest.sizeBytes} && +${commandBbClientdPath}`; + + return ( + <> + {contextHolder} + + + ); +}; + +export default CopyBbClientdActionButton; diff --git a/frontend/src/components/BrowserActionGrid/fetch.ts b/frontend/src/components/BrowserActionGrid/fetch.ts new file mode 
100644 index 0000000..28b43af --- /dev/null +++ b/frontend/src/components/BrowserActionGrid/fetch.ts @@ -0,0 +1,429 @@ +import { + Action, + type ActionCacheClient, + Command, + type Digest, + Directory, + ExecuteResponse, + RequestMetadata, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { AuthenticationMetadata } from "@/lib/grpc-client/buildbarn/auth/auth"; +import { HistoricalExecuteResponse } from "@/lib/grpc-client/buildbarn/cas/cas"; +import type { + FileSystemAccessCacheClient, + FileSystemAccessProfile, +} from "@/lib/grpc-client/buildbarn/fsac/fsac"; +import type { + InitialSizeClassCacheClient, + PreviousExecutionStats, +} from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import { + FilePoolResourceUsage, + InputRootResourceUsage, + MonetaryResourceUsage, + POSIXResourceUsage, +} from "@/lib/grpc-client/buildbarn/resourceusage/resourceusage"; +import type { ByteStreamClient } from "@/lib/grpc-client/google/bytestream/bytestream"; +import { + type BrowserPageParams, + BrowserPageType, +} from "@/types/BrowserPageType"; +import { ProtobufTypeUrls } from "@/types/protobufTypeUrls"; +import { getReducedActionDigest_SHA256 } from "@/utils/digestFunctionUtils"; +import { fetchCasObject, fetchCasObjectAndParse } from "@/utils/fetchCasObject"; +import { protobufToObject } from "@/utils/protobufToObject"; +import type { ActionConsoleOutput } from "./types"; + +export const fetchBrowserActionGrid = async ( + browserPageParams: BrowserPageParams, + actionCacheClient: ActionCacheClient, + casByteStreamClient: ByteStreamClient, + initialSizeClassCacheClient: InitialSizeClassCacheClient, + fileSystemAccessCacheClient: FileSystemAccessCacheClient, +): Promise<{ + executeResponse: ExecuteResponse | undefined; + action: Action; + actionDigest: Digest; + authenticationMetadata: AuthenticationMetadata | undefined; + requestMetadata: RequestMetadata | undefined; + posixResourceUsage: POSIXResourceUsage | undefined; + filePoolResourceUsage: FilePoolResourceUsage | undefined; + inputRootResourceUsage: InputRootResourceUsage | undefined; + monetaryResourceUsage: MonetaryResourceUsage | undefined; + casCommand: Command | undefined; + casDirectory: Directory | undefined; + consoleOutputs: ActionConsoleOutput[]; + previousExecutionStats: PreviousExecutionStats | undefined; + fileSystemAccessProfile: FileSystemAccessProfile | undefined; +}> => { + const { actionDigest, executeResponse } = await fetchExecuteResponse( + browserPageParams, + casByteStreamClient, + actionCacheClient, + ); + + const action = await fetchCasObjectAndParse( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + actionDigest, + Action, + ); + + const { + authenticationMetadata, + requestMetadata, + posixResourceUsage, + filePoolResourceUsage, + inputRootResourceUsage, + monetaryResourceUsage, + } = extractMetadataFromExecuteResponse(executeResponse); + + const [ + casCommand, + casDirectory, + previousExecutionStats, + consoleOutputs, + fileSystemAccessProfile, + ] = await Promise.all([ + // Fetch Command + action.commandDigest + ? fetchCasObjectAndParse( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + action.commandDigest, + Command, + ) + : Promise.resolve(undefined), + + // Fetch Directory + action.inputRootDigest + ? 
fetchCasObjectAndParse( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + action.inputRootDigest, + Directory, + ) + : Promise.resolve(undefined), + + // Fetch Previous Execution Stats + fetchPreviousExecutionStats( + action, + initialSizeClassCacheClient, + browserPageParams, + ), + + // Fetch Console Outputs + getConsoleActionOutputs( + browserPageParams, + casByteStreamClient, + executeResponse, + ), + + // Fetch File System Access Cache Profile + fetchFileSystemAccessProfile( + action, + fileSystemAccessCacheClient, + browserPageParams, + ), + ]); + + return { + executeResponse, + action, + actionDigest, + authenticationMetadata, + requestMetadata, + posixResourceUsage, + filePoolResourceUsage, + inputRootResourceUsage, + monetaryResourceUsage, + casCommand, + casDirectory, + consoleOutputs, + previousExecutionStats, + fileSystemAccessProfile, + }; +}; + +export const getActionConsoleOutput = async ( + browserPageParams: BrowserPageParams, + casByteStreamClient: ByteStreamClient, + digest: Digest | undefined, + rawBytes: Uint8Array | undefined, + name: string, +): Promise => { + if (rawBytes && rawBytes.length > 0) + return { + name, + digest, + tooLarge: false, + notFound: false, + content: new TextDecoder().decode(rawBytes), + }; + + if (digest === undefined) { + return undefined; + } + + const MAX_CONSOLE_OUTPUT_SIZE = 10000; + if (Number.parseInt(digest.sizeBytes) > MAX_CONSOLE_OUTPUT_SIZE) { + return { + name, + digest, + tooLarge: true, + notFound: false, + content: undefined, + }; + } + + try { + const content = await fetchCasObject( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + digest, + ); + return { + name, + digest, + tooLarge: false, + notFound: false, + content: new TextDecoder().decode(content), + }; + } catch (e) { + return { + name, + digest, + tooLarge: false, + notFound: true, + content: undefined, + }; + } +}; + +async function fetchPreviousExecutionStats( + action: Action, + initialSizeClassCacheClient: InitialSizeClassCacheClient, + browserPageParams: BrowserPageParams, +): Promise { + if (!action.commandDigest || !action.platform) { + return undefined; + } + + try { + return await initialSizeClassCacheClient.getPreviousExecutionStats({ + digestFunction: browserPageParams.digestFunction, + instanceName: browserPageParams.instanceName, + reducedActionDigest: getReducedActionDigest_SHA256( + action.commandDigest, + action.platform, + ), + }); + } catch (error) { + console.log("No previous execution stats found"); + } +} + +async function getConsoleActionOutputs( + browserPageParams: BrowserPageParams, + casByteStreamClient: ByteStreamClient, + executeResponse: ExecuteResponse | undefined, +): Promise { + const consoleOutputs: ActionConsoleOutput[] = []; + + const stdoutOutput = await getActionConsoleOutput( + browserPageParams, + casByteStreamClient, + executeResponse?.result?.stdoutDigest, + executeResponse?.result?.stdoutRaw, + "Standard output", + ); + if (stdoutOutput) { + consoleOutputs.push(stdoutOutput); + } + + const stderrOutput = await getActionConsoleOutput( + browserPageParams, + casByteStreamClient, + executeResponse?.result?.stderrDigest, + executeResponse?.result?.stderrRaw, + "Standard error", + ); + if (stderrOutput) { + consoleOutputs.push(stderrOutput); + } + + return consoleOutputs; +} + +function extractMetadataFromExecuteResponse( + executeResponse: ExecuteResponse | undefined, +): { + authenticationMetadata: AuthenticationMetadata | undefined; + 
requestMetadata: RequestMetadata | undefined; + posixResourceUsage: POSIXResourceUsage | undefined; + filePoolResourceUsage: FilePoolResourceUsage | undefined; + inputRootResourceUsage: InputRootResourceUsage | undefined; + monetaryResourceUsage: MonetaryResourceUsage | undefined; +} { + let authenticationMetadata: AuthenticationMetadata | undefined = undefined; + let requestMetadata: RequestMetadata | undefined = undefined; + let posixResourceUsage: POSIXResourceUsage | undefined = undefined; + let filePoolResourceUsage: FilePoolResourceUsage | undefined = undefined; + let inputRootResourceUsage: InputRootResourceUsage | undefined = undefined; + let monetaryResourceUsage: MonetaryResourceUsage | undefined = undefined; + + if (!executeResponse?.result?.executionMetadata?.auxiliaryMetadata) { + return { + authenticationMetadata, + requestMetadata, + posixResourceUsage, + filePoolResourceUsage, + inputRootResourceUsage, + monetaryResourceUsage, + }; + } + + for (const metadata of executeResponse.result.executionMetadata + .auxiliaryMetadata) { + switch (metadata.typeUrl) { + case ProtobufTypeUrls.AUTHENTICATION_METADATA: + authenticationMetadata = protobufToObject( + AuthenticationMetadata, + metadata.value, + false, + ); + break; + case ProtobufTypeUrls.REQUEST_METADATA: + requestMetadata = protobufToObject( + RequestMetadata, + metadata.value, + false, + ); + break; + case ProtobufTypeUrls.POSIX_RESOURCE_USAGE: + posixResourceUsage = protobufToObject( + POSIXResourceUsage, + metadata.value, + true, + ); + break; + case ProtobufTypeUrls.FILE_POOL_RESOURCE_USAGE: + filePoolResourceUsage = protobufToObject( + FilePoolResourceUsage, + metadata.value, + true, + ); + break; + case ProtobufTypeUrls.INPUT_ROOT_RESOURCE_USAGE: + inputRootResourceUsage = protobufToObject( + InputRootResourceUsage, + metadata.value, + false, + ); + break; + case ProtobufTypeUrls.MONETARY_RESOURCE_USAGE: + monetaryResourceUsage = protobufToObject( + MonetaryResourceUsage, + metadata.value, + false, + ); + break; + default: + console.error(`Unknown metadata type: ${metadata.typeUrl}`); + break; + } + } + + return { + authenticationMetadata, + requestMetadata, + posixResourceUsage, + filePoolResourceUsage, + inputRootResourceUsage, + monetaryResourceUsage, + }; +} + +async function fetchExecuteResponse( + browserPageParams: BrowserPageParams, + casByteStreamClient: ByteStreamClient, + actionCacheClient: ActionCacheClient, +): Promise<{ + actionDigest: Digest; + executeResponse: ExecuteResponse | undefined; +}> { + if ( + browserPageParams.browserPageType === + BrowserPageType.HistoricalExecuteResponse + ) { + const historicalExecuteresponse = await fetchCasObjectAndParse( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + browserPageParams.digest, + HistoricalExecuteResponse, + ); + + if (!historicalExecuteresponse.executeResponse?.result) { + throw new Error( + "HistoricalExecuteResponse does not contain ExecuteResponse", + ); + } + if (!historicalExecuteresponse.actionDigest) { + throw new Error( + "HistoricalExecuteResponse does not contain ActionDigest", + ); + } + + return { + actionDigest: historicalExecuteresponse.actionDigest, + executeResponse: historicalExecuteresponse.executeResponse, + }; + } + + try { + const actionResult = await actionCacheClient.getActionResult({ + instanceName: browserPageParams.instanceName, + digestFunction: browserPageParams.digestFunction, + actionDigest: browserPageParams.digest, + inlineStdout: true, + inlineStderr: true, + }); + return { 
+ actionDigest: browserPageParams.digest, + executeResponse: ExecuteResponse.fromPartial({ + result: actionResult, + }), + }; + } catch (error) { + console.log("No execute response was found"); + } + + return { actionDigest: browserPageParams.digest, executeResponse: undefined }; +} + +async function fetchFileSystemAccessProfile( + action: Action, + fileSystemAccessCacheClient: FileSystemAccessCacheClient, + browserPageParams: BrowserPageParams, +): Promise { + if (!action.commandDigest || !action.platform) { + return undefined; + } + try { + return await fileSystemAccessCacheClient.getFileSystemAccessProfile({ + digestFunction: browserPageParams.digestFunction, + instanceName: browserPageParams.instanceName, + reducedActionDigest: getReducedActionDigest_SHA256( + action.commandDigest, + action.platform, + ), + }); + } catch (error) { + console.log("No file system access cache profile was found"); + } +} diff --git a/frontend/src/components/BrowserActionGrid/index.tsx b/frontend/src/components/BrowserActionGrid/index.tsx new file mode 100644 index 0000000..7aa0e07 --- /dev/null +++ b/frontend/src/components/BrowserActionGrid/index.tsx @@ -0,0 +1,479 @@ +"use client"; + +import { useGrpcClients } from "@/context/GrpcClientsContext"; +import { FileSystemAccessProfileReference } from "@/lib/grpc-client/buildbarn/query/query"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { + PATH_HASH_BASE_HASH, + generateFileSystemReferenceQueryParams, +} from "@/utils/bloomFilter"; +import { + digestFunctionValueToString, + getReducedActionDigest_SHA256, +} from "@/utils/digestFunctionUtils"; +import { formatDuration, formatFileSizeFromString } from "@/utils/formatValues"; +import { useQuery } from "@tanstack/react-query"; +import { Descriptions, Space, Spin, Typography } from "antd"; +import Link from "next/link"; +import type React from "react"; +import BrowserCommandDescription from "../BrowserCommandDescription"; +import BrowserDirectory from "../BrowserDirectory"; +import BrowserPreviousExecutionsDisplay from "../BrowserPreviousExecutionsDisplay"; +import BrowserResultDescription from "../BrowserResultDescription"; +import ExecutionMetadataTimeline from "../ExecutionMetadataTimeline"; +import FilesTable from "../FilesTable"; +import { + filesTableEntriesFromActionResultAndCommand, + filesTableEntriesFromServerLogs, +} from "../FilesTable/utils"; +import PortalAlert from "../PortalAlert"; +import PropertyTagList from "../PropertyTagList"; +import type { PropertyTagListEntry } from "../PropertyTagList/types"; +import CopyBbClientdActionButton from "./CopyBbClientdActionButton"; +import { fetchBrowserActionGrid } from "./fetch"; + +interface Params { + browserPageParams: BrowserPageParams; + showTitle?: boolean; +} + +const BrowserActionGrid: React.FC = ({ + browserPageParams, + showTitle, +}) => { + const { + actionCacheClient, + casByteStreamClient, + initialSizeClassCacheClient, + fileSystemAccessCacheClient, + } = useGrpcClients(); + + const { data, isError, isPending, error } = useQuery({ + queryKey: ["browserActionGrid", browserPageParams], + queryFn: fetchBrowserActionGrid.bind( + window, + browserPageParams, + actionCacheClient, + casByteStreamClient, + initialSizeClassCacheClient, + fileSystemAccessCacheClient, + ), + }); + + let fileSystemAccessProfileReference: + | FileSystemAccessProfileReference + | undefined = undefined; + + if (isError) { + return ( + + + There was a problem communicating with the backend server: + +
<pre>{String(error)}</pre>
+ + } + /> + ); + } + + if (isPending) { + return ; + } + + if (data.fileSystemAccessProfile) { + if (data.action.commandDigest && data.action.platform) { + fileSystemAccessProfileReference = + FileSystemAccessProfileReference.create({ + digest: getReducedActionDigest_SHA256( + data.action.commandDigest, + data.action.platform, + ), + pathHashesBaseHash: PATH_HASH_BASE_HASH, + }); + } + } + + const workerPropertyList = (): PropertyTagListEntry[] => { + const workerData = JSON.parse( + data.executeResponse?.result?.executionMetadata?.worker || "{}", + ); + return Object.keys(workerData).map( + (key) => ({ name: key, value: workerData[key] }) as PropertyTagListEntry, + ); + }; + + return ( + + {data.action ? ( + + {showTitle && ( + + + Action + + + )} + + {data.action.timeout && ( + + {formatDuration(data.action.timeout)} + + )} + + {data.action.doNotCache ? "Yes" : "No"} + + {data.action.platform && ( + + + + )} + + {data.action.commandDigest && data.action.inputRootDigest && ( + + )} + + ) : ( + This action could not be found. + )} + + {data.casCommand ? ( + + ) : ( + + The command of this action could not be found. + + )} + + + Result + {data.executeResponse ? ( + + ) : ( + + The action result of this action could not be found. + + )} + + + {data.action.inputRootDigest && ( + + + + Input files + + + + + )} + + + Output files + + + + {data.executeResponse?.serverLogs && + Object.keys(data.executeResponse.serverLogs).length !== 0 && ( + + Server logs + + + )} + + {data.executeResponse?.result?.executionMetadata && ( + + Execution metadata + + + + + + + + + {data.executeResponse.result.executionMetadata + .virtualExecutionDuration && ( + + {formatDuration( + data.executeResponse.result.executionMetadata + .virtualExecutionDuration, + )} + + )} + + + )} + + {data.authenticationMetadata && ( + + Authentication metadata + + + +
+              <pre>
+                {JSON.stringify(data.authenticationMetadata.public, null, 2)}
+              </pre>
+
+
+
+ )} + + {data.requestMetadata && ( + + Request metadata + + + {data.requestMetadata.toolDetails && ( + + {`${data.requestMetadata.toolDetails.toolName} ${data.requestMetadata.toolDetails.toolVersion}`} + + )} + + {data.requestMetadata.toolInvocationId} + + + {data.requestMetadata.correlatedInvocationsId} + + + {data.requestMetadata.targetId} + + + {data.requestMetadata.actionMnemonic} + + + {data.requestMetadata.actionId} + + + {data.requestMetadata.configurationId} + + + + )} + + {data.posixResourceUsage && ( + + POSIX resource usage + + + + {data.posixResourceUsage.userTime && + `${formatDuration(data.posixResourceUsage.userTime)} user`} + {data.posixResourceUsage.userTime && + data.posixResourceUsage.systemTime && + ","}{" "} + {data.posixResourceUsage.systemTime && + `${formatDuration(data.posixResourceUsage.systemTime)} system`} + + + {formatFileSizeFromString( + data.posixResourceUsage.maximumResidentSetSize, + )} + + + {`${data.posixResourceUsage.pageReclaims} reclaims, ${data.posixResourceUsage.pageFaults} faults, ${data.posixResourceUsage.swaps} swaps`} + + + {`${data.posixResourceUsage.blockInputOperations} inputs, ${data.posixResourceUsage.blockOutputOperations} outputs`} + + + {`${data.posixResourceUsage.messagesSent} sent, ${data.posixResourceUsage.messagesReceived} received`} + + + {`${data.posixResourceUsage.signalsReceived} received`} + + + {`${data.posixResourceUsage.voluntaryContextSwitches} voluntary, ${data.posixResourceUsage.involuntaryContextSwitches} involuntary`} + + + + )} + + {data.filePoolResourceUsage && ( + + + File pool resource usage + + + + + {data.filePoolResourceUsage.filesCreated} + + + {`${ + data.filePoolResourceUsage.filesCountPeak + } files, having a total size of ${formatFileSizeFromString( + data.filePoolResourceUsage.filesSizeBytesPeak, + )}`} + + + {`${ + data.filePoolResourceUsage.readsCount + } operations, having a total size of ${formatFileSizeFromString( + data.filePoolResourceUsage.readsSizeBytes, + )}`} + + + {`${ + data.filePoolResourceUsage.writesCount + } operations, having a total size of ${formatFileSizeFromString( + data.filePoolResourceUsage.writesSizeBytes, + )}`} + + + {`${data.filePoolResourceUsage.truncatesCount} operations`} + + + + )} + + {data.inputRootResourceUsage && ( + + + Input root resource usage + + + + + {`${data.inputRootResourceUsage.directoriesResolved} resolved, ${data.inputRootResourceUsage.directoriesRead} read`} + + + {`${data.inputRootResourceUsage.filesRead} read`} + + + + )} + + {data.monetaryResourceUsage && ( + + Monetary resource usage + + + {Object.entries(data.monetaryResourceUsage.expenses).map( + ([key, value]) => ( + + {`${value.currency} ${value.cost}`} + + ), + )} + + + )} + {data.previousExecutionStats && + data.action.commandDigest && + data.action.platform && ( + + )} +
+ ); +}; + +export default BrowserActionGrid; diff --git a/frontend/src/components/BrowserActionGrid/types.ts b/frontend/src/components/BrowserActionGrid/types.ts new file mode 100644 index 0000000..b52eec3 --- /dev/null +++ b/frontend/src/components/BrowserActionGrid/types.ts @@ -0,0 +1,9 @@ +import type { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; + +export type ActionConsoleOutput = { + name: string; + digest: Digest | undefined; + tooLarge: boolean; + notFound: boolean; + content: string | undefined; +}; diff --git a/frontend/src/components/BrowserCommandDescription/CopyBbClientdCommandButton.tsx b/frontend/src/components/BrowserCommandDescription/CopyBbClientdCommandButton.tsx new file mode 100644 index 0000000..82ec93d --- /dev/null +++ b/frontend/src/components/BrowserCommandDescription/CopyBbClientdCommandButton.tsx @@ -0,0 +1,41 @@ +import type { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { getBBClientdPath } from "@/utils/getBbClientdPath"; +import { Button, message } from "antd"; +import type React from "react"; + +interface Params { + browserPageParams: BrowserPageParams; + commandDigest: Digest; +} + +const CopyBbClientdCommandButton: React.FC = ({ + browserPageParams, + commandDigest, +}) => { + const [messageApi, contextHolder] = message.useMessage(); + + const commandBbClientdPath = getBBClientdPath( + browserPageParams.instanceName, + browserPageParams.digestFunction, + commandDigest, + "command", + ); + + return ( + <> + {contextHolder} + + + ); +}; + +export default CopyBbClientdCommandButton; diff --git a/frontend/src/components/BrowserCommandDescription/DownloadAsShellScriptButton.tsx b/frontend/src/components/BrowserCommandDescription/DownloadAsShellScriptButton.tsx new file mode 100644 index 0000000..881a1a7 --- /dev/null +++ b/frontend/src/components/BrowserCommandDescription/DownloadAsShellScriptButton.tsx @@ -0,0 +1,30 @@ +import { Digest } from '@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution'; +import { BrowserPageParams } from '@/types/BrowserPageType'; +import { generateCommandShellScriptUrl } from '@/utils/urlGenerator'; +import { Button } from 'antd'; +import React from 'react'; + +interface Params { + browserPageParams: BrowserPageParams; + commandDigest: Digest; +} + +const DownloadAsShellScriptButton: React.FC = ({ + browserPageParams, + commandDigest, +}) => { + return ( + + ); +}; + +export default DownloadAsShellScriptButton; diff --git a/frontend/src/components/BrowserCommandDescription/index.tsx b/frontend/src/components/BrowserCommandDescription/index.tsx new file mode 100644 index 0000000..614c623 --- /dev/null +++ b/frontend/src/components/BrowserCommandDescription/index.tsx @@ -0,0 +1,94 @@ +import type { + Command, + Digest, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { digestFunctionValueToString } from "@/utils/digestFunctionUtils"; +import { Descriptions, Flex, Space, Typography } from "antd"; +import Link from "next/link"; +import type React from "react"; +import CopyBbClientdCommandButton from "./CopyBbClientdCommandButton"; +import DownloadAsShellScriptButton from "./DownloadAsShellScriptButton"; + +interface Params { + browserPageParams: BrowserPageParams; + command: Command; + commandDigest: Digest | undefined; + showTitle: boolean; +} + +const 
BrowserCommandDescription: React.FC = ({ + browserPageParams, + command, + commandDigest, + showTitle, +}) => { + return ( + + {showTitle && ( + + {commandDigest ? ( + + Command + + ) : ( + "Command" + )} + + )} + + + + {command.arguments.map((arg, index) => ( +
+                {index === 0 ? {arg} : arg}
+              
+ ))} +
+
+ + {command.environmentVariables.map((env) => ( +
+              {env.name}
+              {`=${env.value}`}
+            
+ ))} +
+ {command.workingDirectory !== "" && ( + + {command.workingDirectory} + + )} +
+ {commandDigest && ( + + + + + )} +
+ ); +}; + +export default BrowserCommandDescription; diff --git a/frontend/src/components/BrowserCommandGrid/index.tsx b/frontend/src/components/BrowserCommandGrid/index.tsx new file mode 100644 index 0000000..72e1ea8 --- /dev/null +++ b/frontend/src/components/BrowserCommandGrid/index.tsx @@ -0,0 +1,75 @@ +"use client"; + +import { useGrpcClients } from "@/context/GrpcClientsContext"; +import { Command } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { fetchCasObjectAndParse } from "@/utils/fetchCasObject"; +import { useQuery } from "@tanstack/react-query"; +import { Space, Spin, Typography } from "antd"; +import type React from "react"; +import BrowserCommandDescription from "../BrowserCommandDescription"; +import FilesTable from "../FilesTable"; +import { filesTableEntriesFromOutputPath } from "../FilesTable/utils"; +import PortalAlert from "../PortalAlert"; + +interface Params { + browserPageParams: BrowserPageParams; +} + +const BrowserCommandGrid: React.FC = ({ browserPageParams }) => { + const { casByteStreamClient } = useGrpcClients(); + + const { data, isError, isPending, error } = useQuery({ + queryKey: ["browserCommandGrid", browserPageParams], + queryFn: () => + fetchCasObjectAndParse( + casByteStreamClient, + browserPageParams.instanceName, + browserPageParams.digestFunction, + browserPageParams.digest, + Command, + ), + }); + + if (isError) { + return ( + + + There was a problem communicating with the backend server: + +
<pre>{String(error)}</pre>
+ + } + /> + ); + } + + if (isPending) { + return ; + } + + return ( + + Command + + + Output files + + filesTableEntriesFromOutputPath(entry), + )} + isPending={isPending} + /> + + ); +}; + +export default BrowserCommandGrid; diff --git a/frontend/src/components/BrowserDirectory/CopyBbClientdDirectoryButton.tsx b/frontend/src/components/BrowserDirectory/CopyBbClientdDirectoryButton.tsx new file mode 100644 index 0000000..dcece52 --- /dev/null +++ b/frontend/src/components/BrowserDirectory/CopyBbClientdDirectoryButton.tsx @@ -0,0 +1,45 @@ +import type { + Digest, + DigestFunction_Value, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { getBBClientdPath } from "@/utils/getBbClientdPath"; +import { Button, message } from "antd"; +import type React from "react"; + +interface Params { + instanceName: string; + digestFunction: DigestFunction_Value; + inputRootDigest: Digest; +} + +const CopyBbClientdDirectoryButton: React.FC = ({ + instanceName, + digestFunction, + inputRootDigest, +}) => { + const [messageApi, contextHolder] = message.useMessage(); + + const inputRootBbClientdPath = getBBClientdPath( + instanceName, + digestFunction, + inputRootDigest, + "directory", + ); + + return ( + <> + {contextHolder} + + + ); +}; + +export default CopyBbClientdDirectoryButton; diff --git a/frontend/src/components/BrowserDirectory/DownloadAsTarballButton.tsx b/frontend/src/components/BrowserDirectory/DownloadAsTarballButton.tsx new file mode 100644 index 0000000..2ca02f0 --- /dev/null +++ b/frontend/src/components/BrowserDirectory/DownloadAsTarballButton.tsx @@ -0,0 +1,34 @@ +import { + Digest, + DigestFunction_Value, +} from '@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution'; +import { generateDirectoryTarballUrl } from '@/utils/urlGenerator'; +import { Button } from 'antd'; +import React from 'react'; + +interface Params { + instanceName: string; + digestFunction: DigestFunction_Value; + directoryDigest: Digest; +} + +const DownloadAsTarballButton: React.FC = ({ + instanceName, + digestFunction, + directoryDigest, +}) => { + return ( + + ); +}; + +export default DownloadAsTarballButton; diff --git a/frontend/src/components/BrowserDirectory/index.tsx b/frontend/src/components/BrowserDirectory/index.tsx new file mode 100644 index 0000000..91742e7 --- /dev/null +++ b/frontend/src/components/BrowserDirectory/index.tsx @@ -0,0 +1,372 @@ +"use client"; + +import type { UrlObject } from "node:url"; +import { useGrpcClients } from "@/context/GrpcClientsContext"; +import { + type Digest, + type DigestFunction_Value, + Directory, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { FileSystemAccessProfile } from "@/lib/grpc-client/buildbarn/fsac/fsac"; +import type { FileSystemAccessProfileReference } from "@/lib/grpc-client/buildbarn/query/query"; +import type { ByteStreamClient } from "@/lib/grpc-client/google/bytestream/bytestream"; +import themeStyles from "@/theme/theme.module.css"; +import { + type BloomFilterReader, + PathHashes, + containsPathHashes, + generateFileSystemReferenceQueryParams, + readBloomFilter, +} from "@/utils/bloomFilter"; +import { digestFunctionValueToString } from "@/utils/digestFunctionUtils"; +import { fetchCasObjectAndParse } from "@/utils/fetchCasObject"; +import { formatFileSizeFromString } from "@/utils/formatValues"; +import { generateFileUrl } from "@/utils/urlGenerator"; +import { DownOutlined, RightOutlined } from "@ant-design/icons"; +import { useQuery, useQueryClient } from 
"@tanstack/react-query"; +import { Button, Flex, Space, Spin, Typography } from "antd"; +import Link from "next/link"; +import React, { useEffect } from "react"; +import PortalAlert from "../PortalAlert"; +import CopyBbClientdDirectoryButton from "./CopyBbClientdDirectoryButton"; +import DownloadAsTarballButton from "./DownloadAsTarballButton"; + +const FETCH_STALE_TIME = 30000; + +interface Params { + instanceName: string; + digestFunction: DigestFunction_Value; + inputRootDigest: Digest; + fileSystemAccessProfile: FileSystemAccessProfile | undefined; + fileSystemAccessProfileReference: + | FileSystemAccessProfileReference + | undefined; +} + +const BrowserDirectory: React.FC = ({ + instanceName, + digestFunction, + inputRootDigest, + fileSystemAccessProfile, + fileSystemAccessProfileReference, +}) => { + const bloomFilterReader = fileSystemAccessProfile + ? readBloomFilter(fileSystemAccessProfile) + : undefined; + + return ( + + + + {bloomFilterReader && ( + + Note:{" "} + Green and{" "} + + red + {" "} + filenames above indicate which files and directories will be + prefetched the next time a similar action executes. Though it is + representative of what is actually accessed by the action, it may + contain false positives and negatives. + + )} + + + + + + + + ); +}; + +const fetchDirectory = async ( + casByteStreamClient: ByteStreamClient, + instanceName: string, + digestFunction: DigestFunction_Value, + digest: Digest, +) => { + return fetchCasObjectAndParse( + casByteStreamClient, + instanceName, + digestFunction, + digest, + Directory, + ); +}; + +const RecursiveDirectoryNode: React.FC<{ + instanceName: string; + digestFunction: DigestFunction_Value; + directoryDigest: Digest; + directoryName: string; + isTopLevel: boolean; + bloomFilterReader?: BloomFilterReader; + pathHashes?: PathHashes; + willBePrefetched?: boolean; + fileSystemAccessProfileRef: FileSystemAccessProfileReference | undefined; +}> = ({ + instanceName, + digestFunction, + directoryDigest, + directoryName, + isTopLevel, + bloomFilterReader, + pathHashes, + willBePrefetched, + fileSystemAccessProfileRef, +}) => { + const [expanded, setExpanded] = React.useState(isTopLevel); + const queryClient = useQueryClient(); + const { casByteStreamClient } = useGrpcClients(); + + const { data, isError, isPending, error } = useQuery({ + queryKey: [ + "browserDirectory", + instanceName, + digestFunction, + directoryDigest, + ], + queryFn: fetchDirectory.bind( + null, + casByteStreamClient, + instanceName, + digestFunction, + directoryDigest, + ), + staleTime: FETCH_STALE_TIME, + }); + + // Prefetch all child directories. React-query will cache the results for us + // and reuse them for the `useQuery` above. 
+ useEffect(() => { + if (data) { + for (const dirNode of data.directories) { + if (dirNode.digest) { + queryClient.prefetchQuery({ + queryKey: [ + "browserDirectory", + instanceName, + digestFunction, + dirNode.digest, + ], + queryFn: fetchDirectory.bind( + null, + casByteStreamClient, + instanceName, + digestFunction, + dirNode.digest, + ), + staleTime: FETCH_STALE_TIME, + }); + } + } + } + }, [casByteStreamClient, data, digestFunction, instanceName, queryClient]); + + const calcWillBePrefetched = ( + currentPathHashes: PathHashes | undefined = pathHashes, + ) => { + if (willBePrefetched === false) { + return false; + } + if (bloomFilterReader === undefined || currentPathHashes === undefined) { + return undefined; + } + return containsPathHashes(bloomFilterReader, currentPathHashes); + }; + + if (isError) { + return ( + + + There was a problem communicating with the backend server: + +
<pre>{String(error)}</pre>
+ + } + /> + ); + } + + if (isPending) { + return ; + } + + return ( + <> + {!isTopLevel && ( + + )} + + {expanded && ( +
+ {data.directories.map( + (dirNode) => + dirNode.name && + dirNode.digest && ( + + ), + )} + {data.files.map((file) => ( + + ))} + {data.symlinks.map((symlink) => ( + ${symlink.target}`} + permissions="lrwxrwxrwx" + /> + ))} +
+ )} + + ); +}; + +const ROW_HEIGHT = 20; +const BUTTON_WIDTH = 32; +const BUTTON_PADDING = 8; + +const DirectoryNode: React.FC<{ + isDirectory?: boolean; + name: string; + href?: UrlObject; + sizeBytes?: string; + permissions: string; + expanded?: boolean; + setExpanded?: (expanded: boolean) => void; + willBePrefetched?: boolean; +}> = ({ + isDirectory = false, + name, + href, + sizeBytes, + permissions, + expanded, + setExpanded, + willBePrefetched, +}) => { + const indent = isDirectory ? "0px" : `${BUTTON_WIDTH + BUTTON_PADDING}px`; + + const formattedFileName = () => { + switch (willBePrefetched) { + case true: + return {name}; + case false: + return ( + + {name} + + ); + case undefined: + return {name}; + } + }; + + return ( + + + {isDirectory && expanded !== undefined && setExpanded !== undefined && ( + + )} + {href ? ( + {formattedFileName()} + ) : ( + {formattedFileName()} + )} + + + {sizeBytes &&
{formatFileSizeFromString(sizeBytes)}
} +
{permissions}
+
+
+ ); +}; + +export default BrowserDirectory; diff --git a/frontend/src/components/BrowserDirectoryPage/index.tsx b/frontend/src/components/BrowserDirectoryPage/index.tsx new file mode 100644 index 0000000..1d2bf6e --- /dev/null +++ b/frontend/src/components/BrowserDirectoryPage/index.tsx @@ -0,0 +1,81 @@ +import { useGrpcClients } from "@/context/GrpcClientsContext"; +import { FileSystemAccessProfileReference } from "@/lib/grpc-client/buildbarn/query/query"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { useQuery } from "@tanstack/react-query"; +import { Spin, Typography } from "antd"; +import { useSearchParams } from "next/navigation"; +import BrowserDirectory from "../BrowserDirectory"; +import PortalAlert from "../PortalAlert"; + +interface Params { + browserPageParams: BrowserPageParams; +} + +const BrowserDirectoryPage: React.FC = ({ browserPageParams }) => { + const { fileSystemAccessCacheClient } = useGrpcClients(); + const searchParams = useSearchParams(); + const params = searchParams.get("fileSystemAccessProfile"); + let fileSystemAccessProfileReference: + | FileSystemAccessProfileReference + | undefined = undefined; + + if (params) { + try { + fileSystemAccessProfileReference = + FileSystemAccessProfileReference.fromJSON( + JSON.parse(decodeURIComponent(params)), + ); + } catch (error) { + console.error("Could not parse query parameters"); + } + } + + const { data, isError, error, isLoading } = useQuery({ + queryKey: [ + "fileSystemAccessProfile", + browserPageParams, + fileSystemAccessProfileReference, + ], + queryFn: fileSystemAccessCacheClient.getFileSystemAccessProfile.bind( + {}, + { + instanceName: browserPageParams.instanceName, + digestFunction: browserPageParams.digestFunction, + reducedActionDigest: fileSystemAccessProfileReference?.digest, + }, + ), + enabled: fileSystemAccessProfileReference !== undefined, + }); + + if (isLoading) { + return ; + } + + if (isError) { + return ( + + + There was a problem communicating with the backend server: + +
{String(error)}
+ + } + /> + ); + } + + return ( + + ); +}; + +export default BrowserDirectoryPage; diff --git a/frontend/src/components/BrowserPreviousExecutionsDisplay/index.tsx b/frontend/src/components/BrowserPreviousExecutionsDisplay/index.tsx new file mode 100644 index 0000000..5316440 --- /dev/null +++ b/frontend/src/components/BrowserPreviousExecutionsDisplay/index.tsx @@ -0,0 +1,64 @@ +import type { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { PreviousExecutionStats } from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { digestFunctionValueToString } from "@/utils/digestFunctionUtils"; +import { Descriptions, Space, Typography } from "antd"; +import Link from "next/link"; +import PreviousExecutionsPlot from "../PreviousExecuteStatsPlot"; +import SizeClassOutcome from "../SizeClassOutcome"; + +interface Props { + browserParams: BrowserPageParams; + reducedActionDigest: Digest; + previousExecutionStats: PreviousExecutionStats; + showTitle: boolean; +} + +const BrowserPreviousExecutionsDisplay: React.FC = ({ + browserParams, + previousExecutionStats, + showTitle, + reducedActionDigest, +}) => ( + + {showTitle && ( + + + Previous execution stats + + + )} + + + {previousExecutionStats.lastSeenFailure && ( + + {previousExecutionStats.lastSeenFailure.toISOString()} + + )} + {Object.entries(previousExecutionStats.sizeClasses).map((value) => ( + + + + ))} + + + +); + +export default BrowserPreviousExecutionsDisplay; diff --git a/frontend/src/components/BrowserPreviousExecutionsPage/index.tsx b/frontend/src/components/BrowserPreviousExecutionsPage/index.tsx new file mode 100644 index 0000000..ea6fe19 --- /dev/null +++ b/frontend/src/components/BrowserPreviousExecutionsPage/index.tsx @@ -0,0 +1,62 @@ +import { useGrpcClients } from "@/context/GrpcClientsContext"; +import { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { useQuery } from "@tanstack/react-query"; +import { Space, Spin, Typography } from "antd"; +import BrowserPreviousExecutionsDisplay from "../BrowserPreviousExecutionsDisplay"; +import PortalAlert from "../PortalAlert"; + +interface Params { + browserPageParams: BrowserPageParams; +} + +const BrowserPreviousExecutionsPage: React.FC = ({ + browserPageParams, +}) => { + const { initialSizeClassCacheClient } = useGrpcClients(); + + const reducedActionDigest = Digest.create(browserPageParams.digest); + + const { data, isPending, isError, error } = useQuery({ + queryKey: ["browserPreviousExecutionsPage", browserPageParams], + queryFn: initialSizeClassCacheClient.getPreviousExecutionStats.bind(null, { + digestFunction: browserPageParams.digestFunction, + instanceName: browserPageParams.instanceName, + reducedActionDigest: reducedActionDigest, + }), + }); + + if (isError) { + return ( + + + There was a problem communicating with the backend server: + +
{String(error)}
+ + } + /> + ); + } + + if (isPending) { + return ; + } + + return ( + + Previous execution stats + + + ); +}; + +export default BrowserPreviousExecutionsPage; diff --git a/frontend/src/components/BrowserResultDescription/index.tsx b/frontend/src/components/BrowserResultDescription/index.tsx new file mode 100644 index 0000000..a7b5e89 --- /dev/null +++ b/frontend/src/components/BrowserResultDescription/index.tsx @@ -0,0 +1,117 @@ +import type { ExecuteResponse } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { POSIXResourceUsage } from "@/lib/grpc-client/buildbarn/resourceusage/resourceusage"; +import type { BrowserPageParams } from "@/types/BrowserPageType"; +import { formatFileSizeFromString } from "@/utils/formatValues"; +import { generateFileUrl } from "@/utils/urlGenerator"; +import { Descriptions, Space, Tag, Typography } from "antd"; +import Paragraph from "antd/es/typography/Paragraph"; +import Link from "next/link"; +import type React from "react"; +import type { ActionConsoleOutput } from "../BrowserActionGrid/types"; + +interface Params { + browserPageParams: BrowserPageParams; + executeResponse: ExecuteResponse; + posixResourceUsage: POSIXResourceUsage | undefined; + consoleOutputs: ActionConsoleOutput[]; +} + +const BrowserResultDescription: React.FC = ({ + browserPageParams, + executeResponse, + posixResourceUsage, + consoleOutputs, +}) => { + const renderResult = () => { + if (executeResponse.status !== undefined) { + return ( + + {`Code ${executeResponse.status.code}: ${executeResponse.status.message}`} + + ); + } + + if ( + posixResourceUsage?.terminationSignal !== undefined && + posixResourceUsage?.terminationSignal !== "" + ) { + return ( + + {`SIG${posixResourceUsage.terminationSignal}`} + + ); + } + + return ( + + + {executeResponse.result?.exitCode} + + {executeResponse.result?.exitCode === 0 ? "Success" : "Failure"} + + + + ); + }; + + const renderConsoleOutput = (consoleOutput: ActionConsoleOutput) => { + const logLinkHref = consoleOutput.digest + ? generateFileUrl( + browserPageParams.instanceName, + browserPageParams.digestFunction, + consoleOutput.digest, + "log.txt", + ) + : undefined; + + const label = () => { + if (logLinkHref) { + return {consoleOutput.name}; + } + return consoleOutput.name; + }; + + const content = () => { + if (consoleOutput.notFound) { + return "The log file for this action could not be found."; + } + if (consoleOutput.tooLarge) { + if (consoleOutput.digest && logLinkHref) { + return ( + + The log file for this action is too + large to display ( + {formatFileSizeFromString(consoleOutput.digest.sizeBytes)}). + + ); + } + return "The log file for this action is too large to display."; + } + return ( + +
{consoleOutput.content}
+
+ ); + }; + + return ( + + {content()} + + ); + }; + + return ( + + {renderResult()} + {consoleOutputs.map(renderConsoleOutput)} + + ); +}; + +export default BrowserResultDescription; diff --git a/frontend/src/components/ExecutionMetadataTimeline/index.tsx b/frontend/src/components/ExecutionMetadataTimeline/index.tsx new file mode 100644 index 0000000..4237a1f --- /dev/null +++ b/frontend/src/components/ExecutionMetadataTimeline/index.tsx @@ -0,0 +1,103 @@ +"use client"; + +import type { ExecutedActionMetadata } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { formatDurationFromDates } from "@/utils/formatValues"; +import { ClockCircleOutlined } from "@ant-design/icons"; +import { Flex, Space, Typography } from "antd"; +import type React from "react"; + +interface Params { + executionMetadata: ExecutedActionMetadata; +} + +const formatTimelineElement = ( + timestamp: Date, + previous: Date | undefined = undefined, +) => { + if (timestamp.getTime() === previous?.getTime()) { + return null; + } + + return ( + + {timestamp.toISOString()} + {previous && ( + <> + + + {" "} + (+{formatDurationFromDates(previous, timestamp, 3, 0)}) + + + )} + + ); +}; + +const ExecutionMetadataTimeline: React.FC = ({ + executionMetadata: em, +}) => { + return ( + + {em.queuedTimestamp && formatTimelineElement(em.queuedTimestamp)} + Action added to the queue. + {em.workerStartTimestamp && + formatTimelineElement(em.workerStartTimestamp, em.queuedTimestamp)} + Worker received the action. + {em.inputFetchStartTimestamp && + formatTimelineElement( + em.inputFetchStartTimestamp, + em.workerStartTimestamp, + )} + Worker started fetching action inputs. + {em.inputFetchCompletedTimestamp && + formatTimelineElement( + em.inputFetchCompletedTimestamp, + em.inputFetchStartTimestamp, + )} + Worker finished fetching action inputs. + {em.executionStartTimestamp && + formatTimelineElement( + em.executionStartTimestamp, + em.inputFetchCompletedTimestamp, + )} + + Worker started executing the action command. + + {em.executionCompletedTimestamp && + formatTimelineElement( + em.executionCompletedTimestamp, + em.executionStartTimestamp, + )} + + Worker completed executing the action command. + + {em.outputUploadStartTimestamp && + formatTimelineElement( + em.outputUploadStartTimestamp, + em.executionCompletedTimestamp, + )} + + Worker started uploading action outputs. + + {em.outputUploadCompletedTimestamp && + formatTimelineElement( + em.outputUploadCompletedTimestamp, + em.outputUploadStartTimestamp, + )} + + Worker completed uploading action outputs. + + {em.workerCompletedTimestamp && + formatTimelineElement( + em.workerCompletedTimestamp, + em.outputUploadCompletedTimestamp, + )} + + Worker completed the action, including all stages. 
+ + + ); +}; + +export default ExecutionMetadataTimeline; diff --git a/frontend/src/components/ExecutionResponseDisplay/index.tsx b/frontend/src/components/ExecutionResponseDisplay/index.tsx new file mode 100644 index 0000000..6b48c61 --- /dev/null +++ b/frontend/src/components/ExecutionResponseDisplay/index.tsx @@ -0,0 +1,43 @@ +import { ExecuteResponse } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { protobufToObjectWithTypeField } from "@/utils/protobufToObject"; +import { CodeFilled } from "@ant-design/icons"; +import { Space } from "antd"; +import PortalCard from "../PortalCard"; + +interface Props { + executeResponse: ExecuteResponse; +} + +const ExecuteResponseDisplay: React.FC = ({ executeResponse }) => { + const auxiliaryMetadata = + executeResponse?.result?.executionMetadata?.auxiliaryMetadata.map( + (value) => { + return protobufToObjectWithTypeField(value, false); + }, + ); + + return ( + + }> +
+          {
+            // `ts-proto` currently does not support JSON string
+            // encoding of well-known type google.protobuf.Duration
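+            // The replacer below substitutes the `auxiliaryMetadata`
+            // entries with the objects prepared by
+            // protobufToObjectWithTypeField above, so the Any-packed
+            // messages render as readable JSON rather than raw fields.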
+            JSON.stringify(
+              ExecuteResponse.toJSON(executeResponse),
+              (key, val) => {
+                if (key === "auxiliaryMetadata") {
+                  return auxiliaryMetadata;
+                }
+                return val;
+              },
+              1,
+            )
+          }
+        
+
+
+ ); +}; + +export default ExecuteResponseDisplay; diff --git a/frontend/src/components/FilesTable/Columns.tsx b/frontend/src/components/FilesTable/Columns.tsx new file mode 100644 index 0000000..bef13f3 --- /dev/null +++ b/frontend/src/components/FilesTable/Columns.tsx @@ -0,0 +1,39 @@ +import { type TableColumnsType, Typography } from "antd"; +import type { ColumnType } from "antd/lib/table"; +import Link from "next/link"; + +export interface FilesTableEntry { + mode: string | undefined; + size: string | undefined; + filename: string; + href: string | undefined; +} + +const modeColumn: ColumnType = { + key: "mode", + title: "Mode", + dataIndex: "mode", +}; + +const sizeColumn: ColumnType = { + key: "size", + title: "Size", + dataIndex: "size", +}; + +const filenameColumn: ColumnType = { + key: "filename", + title: "Filename", + render: (_, record) => { + if (record.href) { + return {record.filename}; + } + return {record.filename}; + }, +}; + +const getColumns = (): TableColumnsType => { + return [modeColumn, sizeColumn, filenameColumn]; +}; + +export default getColumns; diff --git a/frontend/src/components/FilesTable/index.tsx b/frontend/src/components/FilesTable/index.tsx new file mode 100644 index 0000000..1ecd2d9 --- /dev/null +++ b/frontend/src/components/FilesTable/index.tsx @@ -0,0 +1,50 @@ +import themeStyles from "@/theme/theme.module.css"; +import { BuildOutlined } from "@ant-design/icons"; +import { Space, Table, Typography } from "antd"; +import type React from "react"; +import getColumns, { type FilesTableEntry } from "./Columns"; + +type Props = { + entries: FilesTableEntry[]; + isPending: boolean; +}; + +const FilesTable: React.FC = ({ entries, isPending }) => { + return ( + item.filename} + rowClassName={() => themeStyles.compactTable} + locale={{ + emptyText: isPending ? ( + + + + Loading... + + + ) : ( + + + + No files found + + + ), + }} + /> + ); +}; + +export default FilesTable; diff --git a/frontend/src/components/FilesTable/utils.ts b/frontend/src/components/FilesTable/utils.ts new file mode 100644 index 0000000..4f4b1af --- /dev/null +++ b/frontend/src/components/FilesTable/utils.ts @@ -0,0 +1,153 @@ +import type { + ActionResult, + Command, + DigestFunction_Value, + LogFile, + OutputDirectory, + OutputFile, + OutputSymlink, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { digestFunctionValueToString } from "@/utils/digestFunctionUtils"; +import { generateFileUrl } from "@/utils/urlGenerator"; +import type { FilesTableEntry } from "./Columns"; + +export function filesTableEntryFromOutputDirectory( + outputDirectory: OutputDirectory, + instanceName: string, + digestFunction: DigestFunction_Value, +): FilesTableEntry { + const digest = outputDirectory.rootDirectoryDigest + ? 
outputDirectory.rootDirectoryDigest + : outputDirectory.treeDigest; + + return { + mode: "drwxr-xr-x", + size: digest?.sizeBytes, + filename: outputDirectory.path, + href: `/${instanceName}/blobs/${digestFunctionValueToString( + digestFunction, + )}/directory/${digest?.hash}-${digest?.sizeBytes}`, + }; +} + +export function filesTableEntryFromOutputSymlink( + outputSymlink: OutputSymlink, +): FilesTableEntry { + return { + mode: "lrwxrwxrwx", + size: undefined, + filename: `${outputSymlink.path} -> ${outputSymlink.target}`, + href: undefined, + }; +} + +export function filesTableEntryFromOutputFile( + outputFile: OutputFile, + instanceName: string, + digestFunction: DigestFunction_Value, +): FilesTableEntry { + return { + mode: `-rw${outputFile.isExecutable ? "x" : "-"}r-${ + outputFile.isExecutable ? "x" : "-" + }r-${outputFile.isExecutable ? "x" : "-"}`, + size: outputFile.digest?.sizeBytes, + filename: outputFile.path, + href: outputFile.digest + ? generateFileUrl( + instanceName, + digestFunction, + outputFile.digest, + outputFile.path.split("/").slice(-1)[0], + ) + : undefined, + }; +} + +export function filesTableEntriesFromOutputPath( + outputPath: string, +): FilesTableEntry { + return { + mode: undefined, + size: undefined, + filename: outputPath, + href: undefined, + }; +} + +export function filesTableEntriesFromActionResultAndCommand( + actionResult: ActionResult | undefined, + command: Command | undefined, + instanceName: string, + digestFunction: DigestFunction_Value, +): FilesTableEntry[] { + const entries: FilesTableEntry[] = []; + + if (actionResult) { + if (actionResult.outputDirectories) { + for (const outputDirectory of actionResult.outputDirectories) { + entries.push( + filesTableEntryFromOutputDirectory( + outputDirectory, + instanceName, + digestFunction, + ), + ); + } + } + + if (actionResult.outputSymlinks) { + for (const outputSymlink of actionResult.outputSymlinks) { + entries.push(filesTableEntryFromOutputSymlink(outputSymlink)); + } + } + if (actionResult.outputFiles) { + for (const outputFile of actionResult.outputFiles) { + entries.push( + filesTableEntryFromOutputFile( + outputFile, + instanceName, + digestFunction, + ), + ); + } + } + } + + if (command) { + for (const outputPath of command.outputPaths) { + if ( + !entries.find((filesTableEntry) => { + return filesTableEntry.filename === outputPath; + }) + ) { + entries.push(filesTableEntriesFromOutputPath(outputPath)); + } + } + } + + return entries; +} + +export function filesTableEntriesFromServerLogs( + serverLogs: { + [key: string]: LogFile; + }, + instanceName: string, + digestFunction: DigestFunction_Value, +): FilesTableEntry[] { + const entries: FilesTableEntry[] = []; + + for (const key of Object.keys(serverLogs)) { + const logFile = serverLogs[key]; + entries.push({ + mode: "-rw-r--r--", + size: logFile.digest?.sizeBytes, + filename: key, + href: logFile.digest + ? 
generateFileUrl(instanceName, digestFunction, logFile.digest, key) + : undefined, + }); + } + + return entries; +} diff --git a/frontend/src/components/PreviousExecuteStatsPlot/index.tsx b/frontend/src/components/PreviousExecuteStatsPlot/index.tsx new file mode 100644 index 0000000..426e28e --- /dev/null +++ b/frontend/src/components/PreviousExecuteStatsPlot/index.tsx @@ -0,0 +1,138 @@ +import type { PreviousExecutionStats } from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import { formatDurationFromSeconds } from "@/utils/formatValues"; +import { + Legend, + ReferenceArea, + Scatter, + ScatterChart, + Tooltip, + XAxis, + YAxis, +} from "recharts"; +import { durationToSeconds } from "../Utilities/time"; + +interface Props { + prevStats: PreviousExecutionStats; +} + +interface PlotDataPoint { + x: number; + y: number; + sizeClass: number; +} + +const PADDING_FACTOR = 4; + +const PreviousExecutionsPlot: React.FC = ({ prevStats }) => { + const succeeded: PlotDataPoint[] = []; + const timedOut: PlotDataPoint[] = []; + const sizeClasses: number[] = []; + + for (const sizeClassEntry of Object.entries(prevStats.sizeClasses)) { + const sizeClass = Number.parseInt(sizeClassEntry[0]); + sizeClasses.push(sizeClass); + for (const prevExec of sizeClassEntry[1].previousExecutions) { + // TODO: Make random scatter deterministic for each data point + // TODO: Nicely visualize class gaps in plot (i.e. if the only + // two size classes are labeled 0 and 64, it should not + // look weird) + const xValue = sizeClass + (Math.random() - 0.5) / 3; + if (prevExec.succeeded) { + const time = durationToSeconds(prevExec.succeeded); + succeeded.push({ + x: xValue, + y: time, + sizeClass: sizeClass, + }); + } + if (prevExec.timedOut) { + const time = durationToSeconds(prevExec.timedOut); + timedOut.push({ + x: xValue, + y: time, + sizeClass: sizeClass, + }); + // `prevExec.failed` has no time information, + // so we cannot visualize them in the graph + } + } + } + + return ( + + { + const len = sizeClasses.length; + return [-PADDING_FACTOR / len, len - 1 + PADDING_FACTOR / len]; + }} + /> + + + { + switch (name) { + case "Size class": { + return [props.payload.sizeClass, name]; + } + case "Execution time": { + return [formatDurationFromSeconds(props.payload.y, 10), name]; + } + default: { + return [value, name]; + } + } + }} + /> + {sizeClasses.map((sizeClass) => { + return ( + + ); + })} + {succeeded.length > 0 && ( + + )} + {timedOut.length > 0 && ( + + )} + + + ); +}; + +export default PreviousExecutionsPlot; diff --git a/frontend/src/components/SizeClassOutcome/index.tsx b/frontend/src/components/SizeClassOutcome/index.tsx new file mode 100644 index 0000000..fe9a904 --- /dev/null +++ b/frontend/src/components/SizeClassOutcome/index.tsx @@ -0,0 +1,48 @@ +import type { PerSizeClassStats } from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import { formatDuration } from "@/utils/formatValues"; +import { Space, Typography } from "antd"; +import SizeClassOutcomeTag from "../SizeClassOutcomeTag"; + +interface Props { + sizeClassStats: PerSizeClassStats; +} + +const SizeClassOutcome: React.FC = ({ sizeClassStats }) => { + return ( + + + {sizeClassStats.previousExecutions.map((val, index) => { + if (val.succeeded) { + return ( + // biome-ignore lint/suspicious/noArrayIndexKey: We have nothing better to use + + Succeeded: {formatDuration(val.succeeded)} + + ); + } + if (val.timedOut) { + return ( + // biome-ignore lint/suspicious/noArrayIndexKey: We have nothing better to use + + Timed out: {formatDuration(val.timedOut)} + + ); 
+ } + if (val.failed) { + return ( + // biome-ignore lint/suspicious/noArrayIndexKey: We have nothing better to use + + Failed + + ); + } + })} + + + {`Initial PageRank probability: ${sizeClassStats.initialPageRankProbability}`} + + + ); +}; + +export default SizeClassOutcome; diff --git a/frontend/src/components/SizeClassOutcomeTag/index.tsx b/frontend/src/components/SizeClassOutcomeTag/index.tsx new file mode 100644 index 0000000..981780f --- /dev/null +++ b/frontend/src/components/SizeClassOutcomeTag/index.tsx @@ -0,0 +1,20 @@ +import themeStyles from "@/theme/theme.module.css"; +import { Tag } from "antd"; + +interface Props { + color?: string; + children: React.ReactNode; +} + +const SizeClassOutcomeTag: React.FC = ({ + color = "default", + children, +}) => { + return ( + + {children} + + ); +}; + +export default SizeClassOutcomeTag; diff --git a/frontend/src/context/GrpcClientsContext.tsx b/frontend/src/context/GrpcClientsContext.tsx index 222f22b..a3d81f5 100644 --- a/frontend/src/context/GrpcClientsContext.tsx +++ b/frontend/src/context/GrpcClientsContext.tsx @@ -1,8 +1,30 @@ -import { BuildQueueStateClient } from '@/lib/grpc-client/buildbarn/buildqueuestate/buildqueuestate'; -import { createContext, useContext } from 'react'; +import type { + ActionCacheClient, + DigestFunction_Value, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import type { BuildQueueStateClient } from "@/lib/grpc-client/buildbarn/buildqueuestate/buildqueuestate"; +import type { FileSystemAccessCacheClient } from "@/lib/grpc-client/buildbarn/fsac/fsac"; +import type { InitialSizeClassCacheClient } from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import type { ByteStreamClient } from "@/lib/grpc-client/google/bytestream/bytestream"; +import { createContext, useContext } from "react"; + +export type CasObjectFetchFunction = ( + objectType: { + decode: (input: Uint8Array) => T; + toJSON: (input: T) => unknown; + }, + instanceName: string | undefined, + digestFunction: DigestFunction_Value, + digest: string, + sizeBytes: string, +) => Promise; interface GrpcClientsContextState { buildQueueStateClient: BuildQueueStateClient; + actionCacheClient: ActionCacheClient; + casByteStreamClient: ByteStreamClient; + initialSizeClassCacheClient: InitialSizeClassCacheClient; + fileSystemAccessCacheClient: FileSystemAccessCacheClient; } // biome-ignore lint/style/noNonNullAssertion: We want to throw an error if the context is used without provider, instead of failing silently. 
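For reference, the extended context is consumed through the same useGrpcClients() hook as the existing build queue client. A minimal sketch, assuming only what the hunks above define; the ActionResultProbe component and its props are hypothetical, and getActionResult() is the ts-proto/nice-grpc rendering of REv2's ActionCache.GetActionResult:

import type {
  Digest,
  DigestFunction_Value,
} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
import { useGrpcClients } from "@/context/GrpcClientsContext";
import { useQuery } from "@tanstack/react-query";
import type React from "react";

// Hypothetical component, for illustration only.
const ActionResultProbe: React.FC<{
  instanceName: string;
  digestFunction: DigestFunction_Value;
  actionDigest: Digest;
}> = ({ instanceName, digestFunction, actionDigest }) => {
  // Any component mounted below GrpcClientsProvider can pick the
  // clients it needs from the shared context.
  const { actionCacheClient } = useGrpcClients();
  const { data, isError } = useQuery({
    queryKey: ["actionResult", instanceName, digestFunction, actionDigest],
    queryFn: () =>
      actionCacheClient.getActionResult({
        instanceName,
        digestFunction,
        actionDigest,
      }),
  });
  if (isError) {
    return <span>Not present in the Action Cache.</span>;
  }
  // A real page would run the result through ActionResult.toJSON first,
  // as ExecuteResponseDisplay does for ExecuteResponse.
  return <pre>{data && JSON.stringify(data, null, 1)}</pre>;
};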
diff --git a/frontend/src/context/GrpcClientsProvider.tsx b/frontend/src/context/GrpcClientsProvider.tsx index bc80568..42bc244 100644 --- a/frontend/src/context/GrpcClientsProvider.tsx +++ b/frontend/src/context/GrpcClientsProvider.tsx @@ -1,11 +1,27 @@ import { - BuildQueueStateClient, + type ActionCacheClient, + ActionCacheDefinition, +} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution"; +import { + type BuildQueueStateClient, BuildQueueStateDefinition, -} from '@/lib/grpc-client/buildbarn/buildqueuestate/buildqueuestate'; -import { env } from 'next-runtime-env'; -import { createChannel, createClient } from 'nice-grpc-web'; -import { ReactNode } from 'react'; -import { GrpcClientsContext } from './GrpcClientsContext'; +} from "@/lib/grpc-client/buildbarn/buildqueuestate/buildqueuestate"; +import { + type FileSystemAccessCacheClient, + FileSystemAccessCacheDefinition, +} from "@/lib/grpc-client/buildbarn/fsac/fsac"; +import { + type InitialSizeClassCacheClient, + InitialSizeClassCacheDefinition, +} from "@/lib/grpc-client/buildbarn/iscc/iscc"; +import { + type ByteStreamClient, + ByteStreamDefinition, +} from "@/lib/grpc-client/google/bytestream/bytestream"; +import { env } from "next-runtime-env"; +import { createChannel, createClient } from "nice-grpc-web"; +import type { ReactNode } from "react"; +import { GrpcClientsContext } from "./GrpcClientsContext"; export interface GrpcClientsProviderProps { children: ReactNode; @@ -17,10 +33,34 @@ const GrpcClientsProvider = ({ children }: GrpcClientsProviderProps) => { createChannel(env("NEXT_PUBLIC_BB_BUILDQUEUESTATE_GRPC_BACKEND_URL") || ""), ); + const actionCacheClient: ActionCacheClient = createClient( + ActionCacheDefinition, + createChannel(env("NEXT_PUBLIC_BB_ACTIONCACHE_GRPC_BACKEND_URL") || ""), + ); + + const casByteStreamClient: ByteStreamClient = createClient( + ByteStreamDefinition, + createChannel(env("NEXT_PUBLIC_BB_CAS_GRPC_BACKEND_URL") || ""), + ); + + const initialSizeClassCacheClient: InitialSizeClassCacheClient = createClient( + InitialSizeClassCacheDefinition, + createChannel(env("NEXT_PUBLIC_BB_ISCC_GRPC_BACKEND_URL") || ""), + ); + + const fileSystemAccessCacheClient: FileSystemAccessCacheClient = createClient( + FileSystemAccessCacheDefinition, + createChannel(env("NEXT_PUBLIC_BB_FSAC_GRPC_BACKEND_URL") || ""), + ); + return ( {children} diff --git a/frontend/src/lib/grpc-client/buildbarn/auth/auth.ts b/frontend/src/lib/grpc-client/buildbarn/auth/auth.ts new file mode 100644 index 0000000..c79f5c1 --- /dev/null +++ b/frontend/src/lib/grpc-client/buildbarn/auth/auth.ts @@ -0,0 +1,156 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: buildbarn/auth/auth.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { Value } from "../../google/protobuf/struct"; +import { KeyValue } from "../../opentelemetry/proto/common/v1/common"; + +export const protobufPackage = "buildbarn.auth"; + +/** + * Protobuf equivalent of the AuthenticationMetadata structure that is + * used by the auth framework to store information on an authenticated + * user. + */ +export interface AuthenticationMetadata { + /** + * Part of the authentication metadata that is safe to display + * publicly (e.g., as part of logs or bb_browser). 
+ */ + public: + | any + | undefined; + /** + * OpenTelemetry tracing attributes to add to spans in which the + * authentication took place (e.g., gRPC server call spans). All + * attributes will have "auth." prepended to their names + * automatically. + */ + tracingAttributes: KeyValue[]; + /** + * Part of the authentication metadata that should not be displayed + * publicly. This field is useful for propagating information from the + * authentication layer to the authorization layer, as this data can + * be accessed by JMESPathExpressionAuthorizer. + */ + private: any | undefined; +} + +function createBaseAuthenticationMetadata(): AuthenticationMetadata { + return { public: undefined, tracingAttributes: [], private: undefined }; +} + +export const AuthenticationMetadata: MessageFns = { + encode(message: AuthenticationMetadata, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.public !== undefined) { + Value.encode(Value.wrap(message.public), writer.uint32(10).fork()).join(); + } + for (const v of message.tracingAttributes) { + KeyValue.encode(v!, writer.uint32(18).fork()).join(); + } + if (message.private !== undefined) { + Value.encode(Value.wrap(message.private), writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): AuthenticationMetadata { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthenticationMetadata(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.public = Value.unwrap(Value.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.tracingAttributes.push(KeyValue.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.private = Value.unwrap(Value.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): AuthenticationMetadata { + return { + public: isSet(object?.public) ? object.public : undefined, + tracingAttributes: globalThis.Array.isArray(object?.tracingAttributes) + ? object.tracingAttributes.map((e: any) => KeyValue.fromJSON(e)) + : [], + private: isSet(object?.private) ? object.private : undefined, + }; + }, + + toJSON(message: AuthenticationMetadata): unknown { + const obj: any = {}; + if (message.public !== undefined) { + obj.public = message.public; + } + if (message.tracingAttributes?.length) { + obj.tracingAttributes = message.tracingAttributes.map((e) => KeyValue.toJSON(e)); + } + if (message.private !== undefined) { + obj.private = message.private; + } + return obj; + }, + + create(base?: DeepPartial): AuthenticationMetadata { + return AuthenticationMetadata.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): AuthenticationMetadata { + const message = createBaseAuthenticationMetadata(); + message.public = object.public ?? undefined; + message.tracingAttributes = object.tracingAttributes?.map((e) => KeyValue.fromPartial(e)) || []; + message.private = object.private ?? undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? 
globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/frontend/src/lib/grpc-client/buildbarn/cas/cas.ts b/frontend/src/lib/grpc-client/buildbarn/cas/cas.ts new file mode 100644 index 0000000..7f36179 --- /dev/null +++ b/frontend/src/lib/grpc-client/buildbarn/cas/cas.ts @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: buildbarn/cas/cas.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { Digest, ExecuteResponse } from "../../build/bazel/remote/execution/v2/remote_execution"; + +export const protobufPackage = "buildbarn.cas"; + +/** + * HistoricalExecuteResponse is a custom message that is stored into the + * Content Addressable Storage. The Action Cache is only permitted to + * contain ActionResults of successful builds. In our case we also want + * to provide the user insight as to why their build fails by storing + * the ActionResult upon failure. + * + * This message is written into the ContentAddressableStorage by + * bb_worker by the CachingBuildExecutor. The digest is returned to the + * user by providing a URL to bb_browser as a message in the + * ExecuteResponse. + * + * Additionally, this message is attached to CompletedActions that are + * streamed through a CompletedActionLogger in order to provide metadata + * for uniquely identifying actions. + */ +export interface HistoricalExecuteResponse { + actionDigest: Digest | undefined; + executeResponse: ExecuteResponse | undefined; +} + +function createBaseHistoricalExecuteResponse(): HistoricalExecuteResponse { + return { actionDigest: undefined, executeResponse: undefined }; +} + +export const HistoricalExecuteResponse: MessageFns = { + encode(message: HistoricalExecuteResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.actionDigest !== undefined) { + Digest.encode(message.actionDigest, writer.uint32(10).fork()).join(); + } + if (message.executeResponse !== undefined) { + ExecuteResponse.encode(message.executeResponse, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HistoricalExecuteResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHistoricalExecuteResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.actionDigest = Digest.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.executeResponse = ExecuteResponse.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HistoricalExecuteResponse { + return { + actionDigest: isSet(object.actionDigest) ? 
Digest.fromJSON(object.actionDigest) : undefined, + executeResponse: isSet(object.executeResponse) ? ExecuteResponse.fromJSON(object.executeResponse) : undefined, + }; + }, + + toJSON(message: HistoricalExecuteResponse): unknown { + const obj: any = {}; + if (message.actionDigest !== undefined) { + obj.actionDigest = Digest.toJSON(message.actionDigest); + } + if (message.executeResponse !== undefined) { + obj.executeResponse = ExecuteResponse.toJSON(message.executeResponse); + } + return obj; + }, + + create(base?: DeepPartial): HistoricalExecuteResponse { + return HistoricalExecuteResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): HistoricalExecuteResponse { + const message = createBaseHistoricalExecuteResponse(); + message.actionDigest = (object.actionDigest !== undefined && object.actionDigest !== null) + ? Digest.fromPartial(object.actionDigest) + : undefined; + message.executeResponse = (object.executeResponse !== undefined && object.executeResponse !== null) + ? ExecuteResponse.fromPartial(object.executeResponse) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/frontend/src/lib/grpc-client/buildbarn/fsac/fsac.ts b/frontend/src/lib/grpc-client/buildbarn/fsac/fsac.ts new file mode 100644 index 0000000..036f2a2 --- /dev/null +++ b/frontend/src/lib/grpc-client/buildbarn/fsac/fsac.ts @@ -0,0 +1,489 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: buildbarn/fsac/fsac.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { type CallContext, type CallOptions } from "nice-grpc-common"; +import { + Digest, + DigestFunction_Value, + digestFunction_ValueFromJSON, + digestFunction_ValueToJSON, +} from "../../build/bazel/remote/execution/v2/remote_execution"; +import { Empty } from "../../google/protobuf/empty"; + +export const protobufPackage = "buildbarn.fsac"; + +/** The file system access profile of a build action. */ +export interface FileSystemAccessProfile { + /** + * A Bloom filter that captures paths of regular files and directories + * in the input root whose contents have been read. In case files with + * the same digest are present in the input root multiple times, there + * is no guarantee that all paths are added to the Bloom filter. + * + * This Bloom filter can be used by workers to perform readahead of + * Content Addressable Storage (CAS) objects, thereby reducing the + * probability of execution of actions getting suspended to load data + * over the network. + * + * Hashes are computed by taking the FNV-1a hash of the path in the + * input root, modulo the size of the Bloom filter. The path uses the + * following pattern: + * + * (/${filename})* /* + * + * This means that the root directory uses the empty path. 
A file + * contained in the root directory may use path "/hello.txt". Between + * [0, k) trailing slashes are added to the path to obtain k + * independent hashes. + * + * The size of the Bloom filter in bits (m) SHOULD be prime. To be + * able to reobtain the exact size in bits, the bits in the final byte + * are terminated with a 1 bit, followed by zero or more 0 bits. + */ + bloomFilter: Uint8Array; + /** + * The number of hash functions (k) that should be considered when + * querying the Bloom filter. + */ + bloomFilterHashFunctions: number; +} + +export interface GetFileSystemAccessProfileRequest { + /** The instance of the execution system to operate against. */ + instanceName: string; + /** The digest function that was used to compute the reduced action digest. */ + digestFunction: DigestFunction_Value; + /** + * The digest of a trimmed down Action message for which a file system + * access profile is requested. This digest is obtained by removing + * all fields from the original Action, except for 'command_digest' + * and 'platform'. + * + * This means that contents of the input root, the Action's timeout + * and the do_not_cache flag are ignored. + */ + reducedActionDigest: Digest | undefined; +} + +export interface UpdateFileSystemAccessProfileRequest { + /** The instance of the execution system to operate against. */ + instanceName: string; + /** The digest function that was used to compute the reduced action digest. */ + digestFunction: DigestFunction_Value; + /** + * The digest of a trimmed down Action message for which a file system + * access profile is being stored. + */ + reducedActionDigest: + | Digest + | undefined; + /** The file system access profile to store. */ + fileSystemAccessProfile: FileSystemAccessProfile | undefined; +} + +function createBaseFileSystemAccessProfile(): FileSystemAccessProfile { + return { bloomFilter: new Uint8Array(0), bloomFilterHashFunctions: 0 }; +} + +export const FileSystemAccessProfile: MessageFns = { + encode(message: FileSystemAccessProfile, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.bloomFilter.length !== 0) { + writer.uint32(10).bytes(message.bloomFilter); + } + if (message.bloomFilterHashFunctions !== 0) { + writer.uint32(16).uint32(message.bloomFilterHashFunctions); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FileSystemAccessProfile { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileSystemAccessProfile(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.bloomFilter = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.bloomFilterHashFunctions = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileSystemAccessProfile { + return { + bloomFilter: isSet(object.bloomFilter) ? bytesFromBase64(object.bloomFilter) : new Uint8Array(0), + bloomFilterHashFunctions: isSet(object.bloomFilterHashFunctions) + ? 
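+ // Illustrative only: a client-side membership check following the field
+ // comments above. The hash width (32-bit FNV-1a here) and LSB-first bit
+ // order within each byte are assumptions made for the sketch; the
+ // frontend's readBloomFilter/containsPathHashes helpers are the
+ // implementation actually used.
+ //
+ //   const fnv1a = (s: string): number => {
+ //     let h = 0x811c9dc5;
+ //     for (let i = 0; i < s.length; i++) {
+ //       h ^= s.charCodeAt(i);
+ //       h = Math.imul(h, 0x01000193) >>> 0;
+ //     }
+ //     return h;
+ //   };
+ //
+ //   const mightBeAccessed = (
+ //     profile: FileSystemAccessProfile,
+ //     path: string, // "" for the root, "/hello.txt" for a file in it
+ //   ): boolean => {
+ //     // Recover m: strip trailing 0 bits, then the terminating 1 bit.
+ //     let m = profile.bloomFilter.length * 8;
+ //     while (
+ //       m > 0 &&
+ //       !(profile.bloomFilter[(m - 1) >> 3] & (1 << ((m - 1) & 7)))
+ //     ) {
+ //       m--;
+ //     }
+ //     m--;
+ //     if (m <= 0) {
+ //       return false;
+ //     }
+ //     for (let i = 0; i < profile.bloomFilterHashFunctions; i++) {
+ //       const bit = fnv1a(path + "/".repeat(i)) % m;
+ //       if (!(profile.bloomFilter[bit >> 3] & (1 << (bit & 7)))) {
+ //         return false;
+ //       }
+ //     }
+ //     return true;
+ //   };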
globalThis.Number(object.bloomFilterHashFunctions) + : 0, + }; + }, + + toJSON(message: FileSystemAccessProfile): unknown { + const obj: any = {}; + if (message.bloomFilter.length !== 0) { + obj.bloomFilter = base64FromBytes(message.bloomFilter); + } + if (message.bloomFilterHashFunctions !== 0) { + obj.bloomFilterHashFunctions = Math.round(message.bloomFilterHashFunctions); + } + return obj; + }, + + create(base?: DeepPartial): FileSystemAccessProfile { + return FileSystemAccessProfile.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): FileSystemAccessProfile { + const message = createBaseFileSystemAccessProfile(); + message.bloomFilter = object.bloomFilter ?? new Uint8Array(0); + message.bloomFilterHashFunctions = object.bloomFilterHashFunctions ?? 0; + return message; + }, +}; + +function createBaseGetFileSystemAccessProfileRequest(): GetFileSystemAccessProfileRequest { + return { instanceName: "", digestFunction: 0, reducedActionDigest: undefined }; +} + +export const GetFileSystemAccessProfileRequest: MessageFns = { + encode(message: GetFileSystemAccessProfileRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.instanceName !== "") { + writer.uint32(10).string(message.instanceName); + } + if (message.digestFunction !== 0) { + writer.uint32(16).int32(message.digestFunction); + } + if (message.reducedActionDigest !== undefined) { + Digest.encode(message.reducedActionDigest, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetFileSystemAccessProfileRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetFileSystemAccessProfileRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.instanceName = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.digestFunction = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.reducedActionDigest = Digest.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetFileSystemAccessProfileRequest { + return { + instanceName: isSet(object.instanceName) ? globalThis.String(object.instanceName) : "", + digestFunction: isSet(object.digestFunction) ? digestFunction_ValueFromJSON(object.digestFunction) : 0, + reducedActionDigest: isSet(object.reducedActionDigest) ? Digest.fromJSON(object.reducedActionDigest) : undefined, + }; + }, + + toJSON(message: GetFileSystemAccessProfileRequest): unknown { + const obj: any = {}; + if (message.instanceName !== "") { + obj.instanceName = message.instanceName; + } + if (message.digestFunction !== 0) { + obj.digestFunction = digestFunction_ValueToJSON(message.digestFunction); + } + if (message.reducedActionDigest !== undefined) { + obj.reducedActionDigest = Digest.toJSON(message.reducedActionDigest); + } + return obj; + }, + + create(base?: DeepPartial): GetFileSystemAccessProfileRequest { + return GetFileSystemAccessProfileRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): GetFileSystemAccessProfileRequest { + const message = createBaseGetFileSystemAccessProfileRequest(); + message.instanceName = object.instanceName ?? 
""; + message.digestFunction = object.digestFunction ?? 0; + message.reducedActionDigest = (object.reducedActionDigest !== undefined && object.reducedActionDigest !== null) + ? Digest.fromPartial(object.reducedActionDigest) + : undefined; + return message; + }, +}; + +function createBaseUpdateFileSystemAccessProfileRequest(): UpdateFileSystemAccessProfileRequest { + return { instanceName: "", digestFunction: 0, reducedActionDigest: undefined, fileSystemAccessProfile: undefined }; +} + +export const UpdateFileSystemAccessProfileRequest: MessageFns = { + encode(message: UpdateFileSystemAccessProfileRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.instanceName !== "") { + writer.uint32(10).string(message.instanceName); + } + if (message.digestFunction !== 0) { + writer.uint32(16).int32(message.digestFunction); + } + if (message.reducedActionDigest !== undefined) { + Digest.encode(message.reducedActionDigest, writer.uint32(26).fork()).join(); + } + if (message.fileSystemAccessProfile !== undefined) { + FileSystemAccessProfile.encode(message.fileSystemAccessProfile, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UpdateFileSystemAccessProfileRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUpdateFileSystemAccessProfileRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.instanceName = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.digestFunction = reader.int32() as any; + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.reducedActionDigest = Digest.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.fileSystemAccessProfile = FileSystemAccessProfile.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UpdateFileSystemAccessProfileRequest { + return { + instanceName: isSet(object.instanceName) ? globalThis.String(object.instanceName) : "", + digestFunction: isSet(object.digestFunction) ? digestFunction_ValueFromJSON(object.digestFunction) : 0, + reducedActionDigest: isSet(object.reducedActionDigest) ? Digest.fromJSON(object.reducedActionDigest) : undefined, + fileSystemAccessProfile: isSet(object.fileSystemAccessProfile) + ? FileSystemAccessProfile.fromJSON(object.fileSystemAccessProfile) + : undefined, + }; + }, + + toJSON(message: UpdateFileSystemAccessProfileRequest): unknown { + const obj: any = {}; + if (message.instanceName !== "") { + obj.instanceName = message.instanceName; + } + if (message.digestFunction !== 0) { + obj.digestFunction = digestFunction_ValueToJSON(message.digestFunction); + } + if (message.reducedActionDigest !== undefined) { + obj.reducedActionDigest = Digest.toJSON(message.reducedActionDigest); + } + if (message.fileSystemAccessProfile !== undefined) { + obj.fileSystemAccessProfile = FileSystemAccessProfile.toJSON(message.fileSystemAccessProfile); + } + return obj; + }, + + create(base?: DeepPartial): UpdateFileSystemAccessProfileRequest { + return UpdateFileSystemAccessProfileRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): UpdateFileSystemAccessProfileRequest { + const message = createBaseUpdateFileSystemAccessProfileRequest(); + message.instanceName = object.instanceName ?? ""; + message.digestFunction = object.digestFunction ?? 0; + message.reducedActionDigest = (object.reducedActionDigest !== undefined && object.reducedActionDigest !== null) + ? Digest.fromPartial(object.reducedActionDigest) + : undefined; + message.fileSystemAccessProfile = + (object.fileSystemAccessProfile !== undefined && object.fileSystemAccessProfile !== null) + ? FileSystemAccessProfile.fromPartial(object.fileSystemAccessProfile) + : undefined; + return message; + }, +}; + +/** + * The File System Access Cache (FSAC) is a Buildbarn specific data + * store that workers can use to store information on how build actions + * access the data stored in the input root. This information can be + * used during subsequent executions of similar actions to speed up file + * system access. + */ +export type FileSystemAccessCacheDefinition = typeof FileSystemAccessCacheDefinition; +export const FileSystemAccessCacheDefinition = { + name: "FileSystemAccessCache", + fullName: "buildbarn.fsac.FileSystemAccessCache", + methods: { + /** + * GetFileSystemAccessProfile() reads a single FileSystemAccessProfile + * from the FSAC. + */ + getFileSystemAccessProfile: { + name: "GetFileSystemAccessProfile", + requestType: GetFileSystemAccessProfileRequest, + requestStream: false, + responseType: FileSystemAccessProfile, + responseStream: false, + options: {}, + }, + /** + * UpdateFileSystemAccessProfile() writes a single + * FileSystemAccessProfile message into the FSAC. + */ + updateFileSystemAccessProfile: { + name: "UpdateFileSystemAccessProfile", + requestType: UpdateFileSystemAccessProfileRequest, + requestStream: false, + responseType: Empty, + responseStream: false, + options: {}, + }, + }, +} as const; + +export interface FileSystemAccessCacheServiceImplementation { + /** + * GetFileSystemAccessProfile() reads a single FileSystemAccessProfile + * from the FSAC. + */ + getFileSystemAccessProfile( + request: GetFileSystemAccessProfileRequest, + context: CallContext & CallContextExt, + ): Promise>; + /** + * UpdateFileSystemAccessProfile() writes a single + * FileSystemAccessProfile message into the FSAC. + */ + updateFileSystemAccessProfile( + request: UpdateFileSystemAccessProfileRequest, + context: CallContext & CallContextExt, + ): Promise>; +} + +export interface FileSystemAccessCacheClient { + /** + * GetFileSystemAccessProfile() reads a single FileSystemAccessProfile + * from the FSAC. + */ + getFileSystemAccessProfile( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + /** + * UpdateFileSystemAccessProfile() writes a single + * FileSystemAccessProfile message into the FSAC. + */ + updateFileSystemAccessProfile( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; +} + +function bytesFromBase64(b64: string): Uint8Array { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; +} + +function base64FromBytes(arr: Uint8Array): string { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? 
T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/frontend/src/lib/grpc-client/buildbarn/iscc/iscc.ts b/frontend/src/lib/grpc-client/buildbarn/iscc/iscc.ts new file mode 100644 index 0000000..f7653cf --- /dev/null +++ b/frontend/src/lib/grpc-client/buildbarn/iscc/iscc.ts @@ -0,0 +1,788 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: buildbarn/iscc/iscc.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { type CallContext, type CallOptions } from "nice-grpc-common"; +import { + Digest, + DigestFunction_Value, + digestFunction_ValueFromJSON, + digestFunction_ValueToJSON, +} from "../../build/bazel/remote/execution/v2/remote_execution"; +import { Duration } from "../../google/protobuf/duration"; +import { Empty } from "../../google/protobuf/empty"; +import { Timestamp } from "../../google/protobuf/timestamp"; + +export const protobufPackage = "buildbarn.iscc"; + +/** The outcome of a single action at some point in the past. */ +export interface PreviousExecution { + /** Execution failed with an error. */ + failed?: + | Empty + | undefined; + /** Execution failed due to a timeout. The timeout value is stored. */ + timedOut?: + | Duration + | undefined; + /** Execution succeeded. The virtual execution duration is stored. */ + succeeded?: Duration | undefined; +} + +/** Outcomes of actions for a given size class. */ +export interface PerSizeClassStats { + /** + * The most recent outcomes for this size class, where the last entry + * corresponds to the most recent one. + */ + previousExecutions: PreviousExecution[]; + /** + * An initial probability value to be used for PageRank computation. + * These values may correspond to outcomes of previous PageRank + * computations. Reloading them may make it possible to recompute + * future PageRank probabilities values more quickly. + */ + initialPageRankProbability: number; +} + +export interface PreviousExecutionStats { + /** Outcomes of previous executions of actions, per size class. */ + sizeClasses: { [key: number]: PerSizeClassStats }; + /** The time at which this action failed on the largest size class. */ + lastSeenFailure: Date | undefined; +} + +export interface PreviousExecutionStats_SizeClassesEntry { + key: number; + value: PerSizeClassStats | undefined; +} + +export interface GetPreviousExecutionStatsRequest { + /** The instance of the execution system to operate against. */ + instanceName: string; + /** + * The digest of a trimmed down Action message for which statistics + * are requested. This digest is obtained by removing all fields from + * the original Action, except for 'command_digest' and 'platform'. + * + * This means that contents of the input root, the Action's timeout + * and the do_not_cache flag are ignored. + */ + reducedActionDigest: + | Digest + | undefined; + /** The digest function that was used to compute the reduced action digest. 
*/ + digestFunction: DigestFunction_Value; +} + +export interface UpdatePreviousExecutionStatsRequest { + /** The instance of the execution system to operate against. */ + instanceName: string; + /** + * The digest of a trimmed down Action message for which statistics + * are being stored. + */ + reducedActionDigest: + | Digest + | undefined; + /** The statistics to store. */ + previousExecutionStats: + | PreviousExecutionStats + | undefined; + /** The digest function that was used to compute the reduced action digest. */ + digestFunction: DigestFunction_Value; +} + +function createBasePreviousExecution(): PreviousExecution { + return { failed: undefined, timedOut: undefined, succeeded: undefined }; +} + +export const PreviousExecution: MessageFns = { + encode(message: PreviousExecution, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.failed !== undefined) { + Empty.encode(message.failed, writer.uint32(10).fork()).join(); + } + if (message.timedOut !== undefined) { + Duration.encode(message.timedOut, writer.uint32(18).fork()).join(); + } + if (message.succeeded !== undefined) { + Duration.encode(message.succeeded, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PreviousExecution { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePreviousExecution(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.failed = Empty.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.timedOut = Duration.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.succeeded = Duration.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PreviousExecution { + return { + failed: isSet(object.failed) ? Empty.fromJSON(object.failed) : undefined, + timedOut: isSet(object.timedOut) ? Duration.fromJSON(object.timedOut) : undefined, + succeeded: isSet(object.succeeded) ? Duration.fromJSON(object.succeeded) : undefined, + }; + }, + + toJSON(message: PreviousExecution): unknown { + const obj: any = {}; + if (message.failed !== undefined) { + obj.failed = Empty.toJSON(message.failed); + } + if (message.timedOut !== undefined) { + obj.timedOut = Duration.toJSON(message.timedOut); + } + if (message.succeeded !== undefined) { + obj.succeeded = Duration.toJSON(message.succeeded); + } + return obj; + }, + + create(base?: DeepPartial): PreviousExecution { + return PreviousExecution.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PreviousExecution { + const message = createBasePreviousExecution(); + message.failed = (object.failed !== undefined && object.failed !== null) + ? Empty.fromPartial(object.failed) + : undefined; + message.timedOut = (object.timedOut !== undefined && object.timedOut !== null) + ? Duration.fromPartial(object.timedOut) + : undefined; + message.succeeded = (object.succeeded !== undefined && object.succeeded !== null) + ? 
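+ // The three optional fields of PreviousExecution appear to mirror a
+ // proto oneof: at most one of `failed`, `timedOut` and `succeeded` is
+ // set per entry, which is why consumers in this diff branch on
+ // presence, e.g.:
+ //
+ //   const outcome = (e: PreviousExecution): string =>
+ //     e.succeeded ? "succeeded" : e.timedOut ? "timed out" : "failed";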
Duration.fromPartial(object.succeeded) + : undefined; + return message; + }, +}; + +function createBasePerSizeClassStats(): PerSizeClassStats { + return { previousExecutions: [], initialPageRankProbability: 0 }; +} + +export const PerSizeClassStats: MessageFns = { + encode(message: PerSizeClassStats, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.previousExecutions) { + PreviousExecution.encode(v!, writer.uint32(10).fork()).join(); + } + if (message.initialPageRankProbability !== 0) { + writer.uint32(25).double(message.initialPageRankProbability); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PerSizeClassStats { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePerSizeClassStats(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.previousExecutions.push(PreviousExecution.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 25) { + break; + } + + message.initialPageRankProbability = reader.double(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PerSizeClassStats { + return { + previousExecutions: globalThis.Array.isArray(object?.previousExecutions) + ? object.previousExecutions.map((e: any) => PreviousExecution.fromJSON(e)) + : [], + initialPageRankProbability: isSet(object.initialPageRankProbability) + ? globalThis.Number(object.initialPageRankProbability) + : 0, + }; + }, + + toJSON(message: PerSizeClassStats): unknown { + const obj: any = {}; + if (message.previousExecutions?.length) { + obj.previousExecutions = message.previousExecutions.map((e) => PreviousExecution.toJSON(e)); + } + if (message.initialPageRankProbability !== 0) { + obj.initialPageRankProbability = message.initialPageRankProbability; + } + return obj; + }, + + create(base?: DeepPartial): PerSizeClassStats { + return PerSizeClassStats.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PerSizeClassStats { + const message = createBasePerSizeClassStats(); + message.previousExecutions = object.previousExecutions?.map((e) => PreviousExecution.fromPartial(e)) || []; + message.initialPageRankProbability = object.initialPageRankProbability ?? 0; + return message; + }, +}; + +function createBasePreviousExecutionStats(): PreviousExecutionStats { + return { sizeClasses: {}, lastSeenFailure: undefined }; +} + +export const PreviousExecutionStats: MessageFns = { + encode(message: PreviousExecutionStats, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + Object.entries(message.sizeClasses).forEach(([key, value]) => { + PreviousExecutionStats_SizeClassesEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); + }); + if (message.lastSeenFailure !== undefined) { + Timestamp.encode(toTimestamp(message.lastSeenFailure), writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PreviousExecutionStats { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePreviousExecutionStats(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + const entry1 = PreviousExecutionStats_SizeClassesEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.sizeClasses[entry1.key] = entry1.value; + } + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.lastSeenFailure = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PreviousExecutionStats { + return { + sizeClasses: isObject(object.sizeClasses) + ? Object.entries(object.sizeClasses).reduce<{ [key: number]: PerSizeClassStats }>((acc, [key, value]) => { + acc[globalThis.Number(key)] = PerSizeClassStats.fromJSON(value); + return acc; + }, {}) + : {}, + lastSeenFailure: isSet(object.lastSeenFailure) ? fromJsonTimestamp(object.lastSeenFailure) : undefined, + }; + }, + + toJSON(message: PreviousExecutionStats): unknown { + const obj: any = {}; + if (message.sizeClasses) { + const entries = Object.entries(message.sizeClasses); + if (entries.length > 0) { + obj.sizeClasses = {}; + entries.forEach(([k, v]) => { + obj.sizeClasses[k] = PerSizeClassStats.toJSON(v); + }); + } + } + if (message.lastSeenFailure !== undefined) { + obj.lastSeenFailure = message.lastSeenFailure.toISOString(); + } + return obj; + }, + + create(base?: DeepPartial): PreviousExecutionStats { + return PreviousExecutionStats.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PreviousExecutionStats { + const message = createBasePreviousExecutionStats(); + message.sizeClasses = Object.entries(object.sizeClasses ?? {}).reduce<{ [key: number]: PerSizeClassStats }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[globalThis.Number(key)] = PerSizeClassStats.fromPartial(value); + } + return acc; + }, + {}, + ); + message.lastSeenFailure = object.lastSeenFailure ?? undefined; + return message; + }, +}; + +function createBasePreviousExecutionStats_SizeClassesEntry(): PreviousExecutionStats_SizeClassesEntry { + return { key: 0, value: undefined }; +} + +export const PreviousExecutionStats_SizeClassesEntry: MessageFns = { + encode(message: PreviousExecutionStats_SizeClassesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== 0) { + writer.uint32(8).uint32(message.key); + } + if (message.value !== undefined) { + PerSizeClassStats.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PreviousExecutionStats_SizeClassesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePreviousExecutionStats_SizeClassesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.key = reader.uint32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = PerSizeClassStats.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PreviousExecutionStats_SizeClassesEntry { + return { + key: isSet(object.key) ? 
globalThis.Number(object.key) : 0, + value: isSet(object.value) ? PerSizeClassStats.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: PreviousExecutionStats_SizeClassesEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== undefined) { + obj.value = PerSizeClassStats.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): PreviousExecutionStats_SizeClassesEntry { + return PreviousExecutionStats_SizeClassesEntry.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PreviousExecutionStats_SizeClassesEntry { + const message = createBasePreviousExecutionStats_SizeClassesEntry(); + message.key = object.key ?? 0; + message.value = (object.value !== undefined && object.value !== null) + ? PerSizeClassStats.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseGetPreviousExecutionStatsRequest(): GetPreviousExecutionStatsRequest { + return { instanceName: "", reducedActionDigest: undefined, digestFunction: 0 }; +} + +export const GetPreviousExecutionStatsRequest: MessageFns = { + encode(message: GetPreviousExecutionStatsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.instanceName !== "") { + writer.uint32(10).string(message.instanceName); + } + if (message.reducedActionDigest !== undefined) { + Digest.encode(message.reducedActionDigest, writer.uint32(18).fork()).join(); + } + if (message.digestFunction !== 0) { + writer.uint32(24).int32(message.digestFunction); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetPreviousExecutionStatsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetPreviousExecutionStatsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.instanceName = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.reducedActionDigest = Digest.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.digestFunction = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetPreviousExecutionStatsRequest { + return { + instanceName: isSet(object.instanceName) ? globalThis.String(object.instanceName) : "", + reducedActionDigest: isSet(object.reducedActionDigest) ? Digest.fromJSON(object.reducedActionDigest) : undefined, + digestFunction: isSet(object.digestFunction) ? digestFunction_ValueFromJSON(object.digestFunction) : 0, + }; + }, + + toJSON(message: GetPreviousExecutionStatsRequest): unknown { + const obj: any = {}; + if (message.instanceName !== "") { + obj.instanceName = message.instanceName; + } + if (message.reducedActionDigest !== undefined) { + obj.reducedActionDigest = Digest.toJSON(message.reducedActionDigest); + } + if (message.digestFunction !== 0) { + obj.digestFunction = digestFunction_ValueToJSON(message.digestFunction); + } + return obj; + }, + + create(base?: DeepPartial): GetPreviousExecutionStatsRequest { + return GetPreviousExecutionStatsRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): GetPreviousExecutionStatsRequest { + const message = createBaseGetPreviousExecutionStatsRequest(); + message.instanceName = object.instanceName ?? ""; + message.reducedActionDigest = (object.reducedActionDigest !== undefined && object.reducedActionDigest !== null) + ? Digest.fromPartial(object.reducedActionDigest) + : undefined; + message.digestFunction = object.digestFunction ?? 0; + return message; + }, +}; + +function createBaseUpdatePreviousExecutionStatsRequest(): UpdatePreviousExecutionStatsRequest { + return { instanceName: "", reducedActionDigest: undefined, previousExecutionStats: undefined, digestFunction: 0 }; +} + +export const UpdatePreviousExecutionStatsRequest: MessageFns = { + encode(message: UpdatePreviousExecutionStatsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.instanceName !== "") { + writer.uint32(10).string(message.instanceName); + } + if (message.reducedActionDigest !== undefined) { + Digest.encode(message.reducedActionDigest, writer.uint32(18).fork()).join(); + } + if (message.previousExecutionStats !== undefined) { + PreviousExecutionStats.encode(message.previousExecutionStats, writer.uint32(26).fork()).join(); + } + if (message.digestFunction !== 0) { + writer.uint32(32).int32(message.digestFunction); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): UpdatePreviousExecutionStatsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUpdatePreviousExecutionStatsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.instanceName = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.reducedActionDigest = Digest.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.previousExecutionStats = PreviousExecutionStats.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.digestFunction = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): UpdatePreviousExecutionStatsRequest { + return { + instanceName: isSet(object.instanceName) ? globalThis.String(object.instanceName) : "", + reducedActionDigest: isSet(object.reducedActionDigest) ? Digest.fromJSON(object.reducedActionDigest) : undefined, + previousExecutionStats: isSet(object.previousExecutionStats) + ? PreviousExecutionStats.fromJSON(object.previousExecutionStats) + : undefined, + digestFunction: isSet(object.digestFunction) ? 
digestFunction_ValueFromJSON(object.digestFunction) : 0,
+    };
+  },
+
+  toJSON(message: UpdatePreviousExecutionStatsRequest): unknown {
+    const obj: any = {};
+    if (message.instanceName !== "") {
+      obj.instanceName = message.instanceName;
+    }
+    if (message.reducedActionDigest !== undefined) {
+      obj.reducedActionDigest = Digest.toJSON(message.reducedActionDigest);
+    }
+    if (message.previousExecutionStats !== undefined) {
+      obj.previousExecutionStats = PreviousExecutionStats.toJSON(message.previousExecutionStats);
+    }
+    if (message.digestFunction !== 0) {
+      obj.digestFunction = digestFunction_ValueToJSON(message.digestFunction);
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<UpdatePreviousExecutionStatsRequest>): UpdatePreviousExecutionStatsRequest {
+    return UpdatePreviousExecutionStatsRequest.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<UpdatePreviousExecutionStatsRequest>): UpdatePreviousExecutionStatsRequest {
+    const message = createBaseUpdatePreviousExecutionStatsRequest();
+    message.instanceName = object.instanceName ?? "";
+    message.reducedActionDigest = (object.reducedActionDigest !== undefined && object.reducedActionDigest !== null)
+      ? Digest.fromPartial(object.reducedActionDigest)
+      : undefined;
+    message.previousExecutionStats =
+      (object.previousExecutionStats !== undefined && object.previousExecutionStats !== null)
+        ? PreviousExecutionStats.fromPartial(object.previousExecutionStats)
+        : undefined;
+    message.digestFunction = object.digestFunction ?? 0;
+    return message;
+  },
+};
+
+/**
+ * The Initial Size Class Cache (ISCC) is a data store that can be used
+ * by bb_scheduler to store outcomes of previous executions of an
+ * action. This data is then used during successive actions of the same
+ * shape to pick the initial size class on which the action needs to be
+ * run.
+ *
+ * It generally only makes sense to use this data store in case
+ * platforms are declared that have multiple size classes.
+ */
+export type InitialSizeClassCacheDefinition = typeof InitialSizeClassCacheDefinition;
+export const InitialSizeClassCacheDefinition = {
+  name: "InitialSizeClassCache",
+  fullName: "buildbarn.iscc.InitialSizeClassCache",
+  methods: {
+    /**
+     * GetPreviousExecutionStats() reads a single PreviousExecutionStats
+     * message from the ISCC.
+     */
+    getPreviousExecutionStats: {
+      name: "GetPreviousExecutionStats",
+      requestType: GetPreviousExecutionStatsRequest,
+      requestStream: false,
+      responseType: PreviousExecutionStats,
+      responseStream: false,
+      options: {},
+    },
+    /**
+     * UpdatePreviousExecutionStats() writes a single
+     * PreviousExecutionStats message to the ISCC.
+     */
+    updatePreviousExecutionStats: {
+      name: "UpdatePreviousExecutionStats",
+      requestType: UpdatePreviousExecutionStatsRequest,
+      requestStream: false,
+      responseType: Empty,
+      responseStream: false,
+      options: {},
+    },
+  },
+} as const;
+
+export interface InitialSizeClassCacheServiceImplementation<CallContextExt = {}> {
+  /**
+   * GetPreviousExecutionStats() reads a single PreviousExecutionStats
+   * message from the ISCC.
+   */
+  getPreviousExecutionStats(
+    request: GetPreviousExecutionStatsRequest,
+    context: CallContext & CallContextExt,
+  ): Promise<DeepPartial<PreviousExecutionStats>>;
+  /**
+   * UpdatePreviousExecutionStats() writes a single
+   * PreviousExecutionStats message to the ISCC.
+   */
+  updatePreviousExecutionStats(
+    request: UpdatePreviousExecutionStatsRequest,
+    context: CallContext & CallContextExt,
+  ): Promise<DeepPartial<Empty>>;
+}
+
+export interface InitialSizeClassCacheClient<CallOptionsExt = {}> {
+  /**
+   * GetPreviousExecutionStats() reads a single PreviousExecutionStats
+   * message from the ISCC.
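+   *
+   * A minimal usage sketch (assumes a nice-grpc channel is available;
+   * not part of the generated output):
+   *
+   *   const client = createClient(InitialSizeClassCacheDefinition, channel);
+   *   const stats = await client.getPreviousExecutionStats({
+   *     instanceName: "default",
+   *     reducedActionDigest: { hash: "0123abcd", sizeBytes: "142" },
+   *     digestFunction: DigestFunction_Value.SHA256,
+   *   });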
+ */
+  getPreviousExecutionStats(
+    request: DeepPartial<GetPreviousExecutionStatsRequest>,
+    options?: CallOptions & CallOptionsExt,
+  ): Promise<PreviousExecutionStats>;
+  /**
+   * UpdatePreviousExecutionStats() writes a single
+   * PreviousExecutionStats message to the ISCC.
+   */
+  updatePreviousExecutionStats(
+    request: DeepPartial<UpdatePreviousExecutionStatsRequest>,
+    options?: CallOptions & CallOptionsExt,
+  ): Promise<Empty>;
+}
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function toTimestamp(date: Date): Timestamp {
+  const seconds = Math.trunc(date.getTime() / 1_000).toString();
+  const nanos = (date.getTime() % 1_000) * 1_000_000;
+  return { seconds, nanos };
+}
+
+function fromTimestamp(t: Timestamp): Date {
+  let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+  millis += (t.nanos || 0) / 1_000_000;
+  return new globalThis.Date(millis);
+}
+
+function fromJsonTimestamp(o: any): Date {
+  if (o instanceof globalThis.Date) {
+    return o;
+  } else if (typeof o === "string") {
+    return new globalThis.Date(o);
+  } else {
+    return fromTimestamp(Timestamp.fromJSON(o));
+  }
+}
+
+function isObject(value: any): boolean {
+  return typeof value === "object" && value !== null;
+}
+
+function isSet(value: any): boolean {
+  return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/frontend/src/lib/grpc-client/buildbarn/query/query.ts b/frontend/src/lib/grpc-client/buildbarn/query/query.ts
new file mode 100644
index 0000000..588d3b6
--- /dev/null
+++ b/frontend/src/lib/grpc-client/buildbarn/query/query.ts
@@ -0,0 +1,130 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.6.1
+// protoc v3.19.1
+// source: buildbarn/query/query.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+import { Digest } from "../../build/bazel/remote/execution/v2/remote_execution";
+
+export const protobufPackage = "buildbarn.query";
+
+/**
+ * Reference to a certain directory contained in a
+ * FileSystemAccessProfile, which is provided as part of bb_browser URL
+ * query parameters when listing directories.
+ */
+export interface FileSystemAccessProfileReference {
+  /**
+   * The digest of the profile stored in the File System Access Cache
+   * (FSAC).
+   */
+  digest:
+    | Digest
+    | undefined;
+  /**
+   * The FNV-1a base hash value of the path hashes corresponding to the
+   * current directory.
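+   *
+   * Note (illustrative, not part of the upstream comment): ts_proto
+   * maps this uint64 to a decimal string, so a value such as the
+   * 64-bit FNV-1a offset basis would appear as "14695981039346656037".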
+ */
+  pathHashesBaseHash: string;
+}
+
+function createBaseFileSystemAccessProfileReference(): FileSystemAccessProfileReference {
+  return { digest: undefined, pathHashesBaseHash: "0" };
+}
+
+export const FileSystemAccessProfileReference: MessageFns<FileSystemAccessProfileReference> = {
+  encode(message: FileSystemAccessProfileReference, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.digest !== undefined) {
+      Digest.encode(message.digest, writer.uint32(10).fork()).join();
+    }
+    if (message.pathHashesBaseHash !== "0") {
+      writer.uint32(16).uint64(message.pathHashesBaseHash);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): FileSystemAccessProfileReference {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseFileSystemAccessProfileReference();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 10) {
+            break;
+          }
+
+          message.digest = Digest.decode(reader, reader.uint32());
+          continue;
+        }
+        case 2: {
+          if (tag !== 16) {
+            break;
+          }
+
+          message.pathHashesBaseHash = reader.uint64().toString();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): FileSystemAccessProfileReference {
+    return {
+      digest: isSet(object.digest) ? Digest.fromJSON(object.digest) : undefined,
+      pathHashesBaseHash: isSet(object.pathHashesBaseHash) ? globalThis.String(object.pathHashesBaseHash) : "0",
+    };
+  },
+
+  toJSON(message: FileSystemAccessProfileReference): unknown {
+    const obj: any = {};
+    if (message.digest !== undefined) {
+      obj.digest = Digest.toJSON(message.digest);
+    }
+    if (message.pathHashesBaseHash !== "0") {
+      obj.pathHashesBaseHash = message.pathHashesBaseHash;
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<FileSystemAccessProfileReference>): FileSystemAccessProfileReference {
+    return FileSystemAccessProfileReference.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<FileSystemAccessProfileReference>): FileSystemAccessProfileReference {
+    const message = createBaseFileSystemAccessProfileReference();
+    message.digest = (object.digest !== undefined && object.digest !== null)
+      ? Digest.fromPartial(object.digest)
+      : undefined;
+    message.pathHashesBaseHash = object.pathHashesBaseHash ?? "0";
+    return message;
+  },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isSet(value: any): boolean {
+  return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/frontend/src/lib/grpc-client/buildbarn/resourceusage/resourceusage.ts b/frontend/src/lib/grpc-client/buildbarn/resourceusage/resourceusage.ts
new file mode 100644
index 0000000..1b86d87
--- /dev/null
+++ b/frontend/src/lib/grpc-client/buildbarn/resourceusage/resourceusage.ts
@@ -0,0 +1,963 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: buildbarn/resourceusage/resourceusage.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { Duration } from "../../google/protobuf/duration"; + +export const protobufPackage = "buildbarn.resourceusage"; + +/** + * File pool resource usage statistics. File pools are used by bb_worker + * to allocate temporary files that are created by build actions. + * Temporary files include output files created through the FUSE file + * system. + */ +export interface FilePoolResourceUsage { + /** Total number of files created. */ + filesCreated: string; + /** Maximum number of files that existed at some point in time. */ + filesCountPeak: string; + /** Maximum total size of all files at some point in time. */ + filesSizeBytesPeak: string; + /** Total number of ReadAt() calls performed. */ + readsCount: string; + /** Total amount of data returned by all ReadAt() calls. */ + readsSizeBytes: string; + /** Total number of WriteAt() calls performed. */ + writesCount: string; + /** Total amount of data processed by all WriteAt() calls. */ + writesSizeBytes: string; + /** Total number of Truncate() calls performed. */ + truncatesCount: string; +} + +/** + * The equivalent of 'struct rusage' in POSIX, generally returned by + * getrusage(2) or wait4(2). + */ +export interface POSIXResourceUsage { + /** ru_utime: Amount of CPU time in seconds spent in userspace. */ + userTime: + | Duration + | undefined; + /** ru_stime: Amount of CPU time in seconds spent in kernelspace. */ + systemTime: + | Duration + | undefined; + /** ru_maxrss: Maximum amount of resident memory in bytes. */ + maximumResidentSetSize: string; + /** ru_minflt: Page reclaims. */ + pageReclaims: string; + /** ru_majflt: Page faults. */ + pageFaults: string; + /** ru_nswap: Number of swaps. */ + swaps: string; + /** ru_inblock: Block input operations. */ + blockInputOperations: string; + /** ru_oublock: Block output operations. */ + blockOutputOperations: string; + /** ru_msgsnd: Messages sent. */ + messagesSent: string; + /** ru_msgrcv: Messages received. */ + messagesReceived: string; + /** ru_nsignals: Signals received. */ + signalsReceived: string; + /** ru_nvcsw: Voluntary context switches. */ + voluntaryContextSwitches: string; + /** ru_nivcsw: Involuntary context switches. */ + involuntaryContextSwitches: string; + /** + * If abnormal process termination occurred, the name of the signal + * that was delivered, without the "SIG" prefix (e.g., "BUS", "KILL", + * "SEGV"). + * + * Abnormal process termination can occur by calling abort(), or by + * receiving a signal for which no signal handler is installed. + */ + terminationSignal: string; +} + +/** + * A representation of unique factors that may be aggregated to + * compute a given build action's total price. + */ +export interface MonetaryResourceUsage { + /** A mapping of expense categories to their respective costs. */ + expenses: { [key: string]: MonetaryResourceUsage_Expense }; +} + +export interface MonetaryResourceUsage_Expense { + /** + * The type of currency the cost is measured in. Required to be in + * ISO 4217 format: https://en.wikipedia.org/wiki/ISO_4217#Active_codes + */ + currency: string; + /** The value of a specific expense for a build action. */ + cost: number; +} + +export interface MonetaryResourceUsage_ExpensesEntry { + key: string; + value: MonetaryResourceUsage_Expense | undefined; +} + +/** + * Input root resource usage statistics. 
These statistics indicate how + * many directories and files inside the virtual file system were + * accessed. These statistics are only reported if prefetching is + * enabled, as they are computed together with the Bloom filter. + */ +export interface InputRootResourceUsage { + /** + * The number of directories in the input root that have been + * resolved. This equates to the total number of directories that are + * present in all directories that have been read. + */ + directoriesResolved: string; + /** + * The number of directories whose contents have been read from the + * Content Addressable Storage (CAS). + */ + directoriesRead: string; + /** + * The number of files whose contents have been read from the Content + * Addressable Storage (CAS). + */ + filesRead: string; +} + +function createBaseFilePoolResourceUsage(): FilePoolResourceUsage { + return { + filesCreated: "0", + filesCountPeak: "0", + filesSizeBytesPeak: "0", + readsCount: "0", + readsSizeBytes: "0", + writesCount: "0", + writesSizeBytes: "0", + truncatesCount: "0", + }; +} + +export const FilePoolResourceUsage: MessageFns = { + encode(message: FilePoolResourceUsage, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.filesCreated !== "0") { + writer.uint32(8).uint64(message.filesCreated); + } + if (message.filesCountPeak !== "0") { + writer.uint32(16).uint64(message.filesCountPeak); + } + if (message.filesSizeBytesPeak !== "0") { + writer.uint32(24).uint64(message.filesSizeBytesPeak); + } + if (message.readsCount !== "0") { + writer.uint32(32).uint64(message.readsCount); + } + if (message.readsSizeBytes !== "0") { + writer.uint32(40).uint64(message.readsSizeBytes); + } + if (message.writesCount !== "0") { + writer.uint32(48).uint64(message.writesCount); + } + if (message.writesSizeBytes !== "0") { + writer.uint32(56).uint64(message.writesSizeBytes); + } + if (message.truncatesCount !== "0") { + writer.uint32(64).uint64(message.truncatesCount); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): FilePoolResourceUsage { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFilePoolResourceUsage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.filesCreated = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.filesCountPeak = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.filesSizeBytesPeak = reader.uint64().toString(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.readsCount = reader.uint64().toString(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.readsSizeBytes = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.writesCount = reader.uint64().toString(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.writesSizeBytes = reader.uint64().toString(); + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.truncatesCount = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): FilePoolResourceUsage { + return { + filesCreated: isSet(object.filesCreated) ? 
globalThis.String(object.filesCreated) : "0", + filesCountPeak: isSet(object.filesCountPeak) ? globalThis.String(object.filesCountPeak) : "0", + filesSizeBytesPeak: isSet(object.filesSizeBytesPeak) ? globalThis.String(object.filesSizeBytesPeak) : "0", + readsCount: isSet(object.readsCount) ? globalThis.String(object.readsCount) : "0", + readsSizeBytes: isSet(object.readsSizeBytes) ? globalThis.String(object.readsSizeBytes) : "0", + writesCount: isSet(object.writesCount) ? globalThis.String(object.writesCount) : "0", + writesSizeBytes: isSet(object.writesSizeBytes) ? globalThis.String(object.writesSizeBytes) : "0", + truncatesCount: isSet(object.truncatesCount) ? globalThis.String(object.truncatesCount) : "0", + }; + }, + + toJSON(message: FilePoolResourceUsage): unknown { + const obj: any = {}; + if (message.filesCreated !== "0") { + obj.filesCreated = message.filesCreated; + } + if (message.filesCountPeak !== "0") { + obj.filesCountPeak = message.filesCountPeak; + } + if (message.filesSizeBytesPeak !== "0") { + obj.filesSizeBytesPeak = message.filesSizeBytesPeak; + } + if (message.readsCount !== "0") { + obj.readsCount = message.readsCount; + } + if (message.readsSizeBytes !== "0") { + obj.readsSizeBytes = message.readsSizeBytes; + } + if (message.writesCount !== "0") { + obj.writesCount = message.writesCount; + } + if (message.writesSizeBytes !== "0") { + obj.writesSizeBytes = message.writesSizeBytes; + } + if (message.truncatesCount !== "0") { + obj.truncatesCount = message.truncatesCount; + } + return obj; + }, + + create(base?: DeepPartial): FilePoolResourceUsage { + return FilePoolResourceUsage.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): FilePoolResourceUsage { + const message = createBaseFilePoolResourceUsage(); + message.filesCreated = object.filesCreated ?? "0"; + message.filesCountPeak = object.filesCountPeak ?? "0"; + message.filesSizeBytesPeak = object.filesSizeBytesPeak ?? "0"; + message.readsCount = object.readsCount ?? "0"; + message.readsSizeBytes = object.readsSizeBytes ?? "0"; + message.writesCount = object.writesCount ?? "0"; + message.writesSizeBytes = object.writesSizeBytes ?? "0"; + message.truncatesCount = object.truncatesCount ?? 
"0"; + return message; + }, +}; + +function createBasePOSIXResourceUsage(): POSIXResourceUsage { + return { + userTime: undefined, + systemTime: undefined, + maximumResidentSetSize: "0", + pageReclaims: "0", + pageFaults: "0", + swaps: "0", + blockInputOperations: "0", + blockOutputOperations: "0", + messagesSent: "0", + messagesReceived: "0", + signalsReceived: "0", + voluntaryContextSwitches: "0", + involuntaryContextSwitches: "0", + terminationSignal: "", + }; +} + +export const POSIXResourceUsage: MessageFns = { + encode(message: POSIXResourceUsage, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.userTime !== undefined) { + Duration.encode(message.userTime, writer.uint32(10).fork()).join(); + } + if (message.systemTime !== undefined) { + Duration.encode(message.systemTime, writer.uint32(18).fork()).join(); + } + if (message.maximumResidentSetSize !== "0") { + writer.uint32(24).int64(message.maximumResidentSetSize); + } + if (message.pageReclaims !== "0") { + writer.uint32(56).int64(message.pageReclaims); + } + if (message.pageFaults !== "0") { + writer.uint32(64).int64(message.pageFaults); + } + if (message.swaps !== "0") { + writer.uint32(72).int64(message.swaps); + } + if (message.blockInputOperations !== "0") { + writer.uint32(80).int64(message.blockInputOperations); + } + if (message.blockOutputOperations !== "0") { + writer.uint32(88).int64(message.blockOutputOperations); + } + if (message.messagesSent !== "0") { + writer.uint32(96).int64(message.messagesSent); + } + if (message.messagesReceived !== "0") { + writer.uint32(104).int64(message.messagesReceived); + } + if (message.signalsReceived !== "0") { + writer.uint32(112).int64(message.signalsReceived); + } + if (message.voluntaryContextSwitches !== "0") { + writer.uint32(120).int64(message.voluntaryContextSwitches); + } + if (message.involuntaryContextSwitches !== "0") { + writer.uint32(128).int64(message.involuntaryContextSwitches); + } + if (message.terminationSignal !== "") { + writer.uint32(138).string(message.terminationSignal); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): POSIXResourceUsage { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePOSIXResourceUsage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.userTime = Duration.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.systemTime = Duration.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.maximumResidentSetSize = reader.int64().toString(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.pageReclaims = reader.int64().toString(); + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.pageFaults = reader.int64().toString(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.swaps = reader.int64().toString(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.blockInputOperations = reader.int64().toString(); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.blockOutputOperations = reader.int64().toString(); + continue; + } + case 12: { + if (tag !== 96) { + break; + } + + message.messagesSent = reader.int64().toString(); + continue; + } + case 13: { + if (tag !== 104) { + break; + } + + message.messagesReceived = reader.int64().toString(); + continue; + } + case 14: { + if (tag !== 112) { + break; + } + + message.signalsReceived = reader.int64().toString(); + continue; + } + case 15: { + if (tag !== 120) { + break; + } + + message.voluntaryContextSwitches = reader.int64().toString(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + + message.involuntaryContextSwitches = reader.int64().toString(); + continue; + } + case 17: { + if (tag !== 138) { + break; + } + + message.terminationSignal = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): POSIXResourceUsage { + return { + userTime: isSet(object.userTime) ? Duration.fromJSON(object.userTime) : undefined, + systemTime: isSet(object.systemTime) ? Duration.fromJSON(object.systemTime) : undefined, + maximumResidentSetSize: isSet(object.maximumResidentSetSize) + ? globalThis.String(object.maximumResidentSetSize) + : "0", + pageReclaims: isSet(object.pageReclaims) ? globalThis.String(object.pageReclaims) : "0", + pageFaults: isSet(object.pageFaults) ? globalThis.String(object.pageFaults) : "0", + swaps: isSet(object.swaps) ? globalThis.String(object.swaps) : "0", + blockInputOperations: isSet(object.blockInputOperations) ? globalThis.String(object.blockInputOperations) : "0", + blockOutputOperations: isSet(object.blockOutputOperations) + ? globalThis.String(object.blockOutputOperations) + : "0", + messagesSent: isSet(object.messagesSent) ? globalThis.String(object.messagesSent) : "0", + messagesReceived: isSet(object.messagesReceived) ? globalThis.String(object.messagesReceived) : "0", + signalsReceived: isSet(object.signalsReceived) ? globalThis.String(object.signalsReceived) : "0", + voluntaryContextSwitches: isSet(object.voluntaryContextSwitches) + ? globalThis.String(object.voluntaryContextSwitches) + : "0", + involuntaryContextSwitches: isSet(object.involuntaryContextSwitches) + ? globalThis.String(object.involuntaryContextSwitches) + : "0", + terminationSignal: isSet(object.terminationSignal) ? 
globalThis.String(object.terminationSignal) : "", + }; + }, + + toJSON(message: POSIXResourceUsage): unknown { + const obj: any = {}; + if (message.userTime !== undefined) { + obj.userTime = Duration.toJSON(message.userTime); + } + if (message.systemTime !== undefined) { + obj.systemTime = Duration.toJSON(message.systemTime); + } + if (message.maximumResidentSetSize !== "0") { + obj.maximumResidentSetSize = message.maximumResidentSetSize; + } + if (message.pageReclaims !== "0") { + obj.pageReclaims = message.pageReclaims; + } + if (message.pageFaults !== "0") { + obj.pageFaults = message.pageFaults; + } + if (message.swaps !== "0") { + obj.swaps = message.swaps; + } + if (message.blockInputOperations !== "0") { + obj.blockInputOperations = message.blockInputOperations; + } + if (message.blockOutputOperations !== "0") { + obj.blockOutputOperations = message.blockOutputOperations; + } + if (message.messagesSent !== "0") { + obj.messagesSent = message.messagesSent; + } + if (message.messagesReceived !== "0") { + obj.messagesReceived = message.messagesReceived; + } + if (message.signalsReceived !== "0") { + obj.signalsReceived = message.signalsReceived; + } + if (message.voluntaryContextSwitches !== "0") { + obj.voluntaryContextSwitches = message.voluntaryContextSwitches; + } + if (message.involuntaryContextSwitches !== "0") { + obj.involuntaryContextSwitches = message.involuntaryContextSwitches; + } + if (message.terminationSignal !== "") { + obj.terminationSignal = message.terminationSignal; + } + return obj; + }, + + create(base?: DeepPartial): POSIXResourceUsage { + return POSIXResourceUsage.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): POSIXResourceUsage { + const message = createBasePOSIXResourceUsage(); + message.userTime = (object.userTime !== undefined && object.userTime !== null) + ? Duration.fromPartial(object.userTime) + : undefined; + message.systemTime = (object.systemTime !== undefined && object.systemTime !== null) + ? Duration.fromPartial(object.systemTime) + : undefined; + message.maximumResidentSetSize = object.maximumResidentSetSize ?? "0"; + message.pageReclaims = object.pageReclaims ?? "0"; + message.pageFaults = object.pageFaults ?? "0"; + message.swaps = object.swaps ?? "0"; + message.blockInputOperations = object.blockInputOperations ?? "0"; + message.blockOutputOperations = object.blockOutputOperations ?? "0"; + message.messagesSent = object.messagesSent ?? "0"; + message.messagesReceived = object.messagesReceived ?? "0"; + message.signalsReceived = object.signalsReceived ?? "0"; + message.voluntaryContextSwitches = object.voluntaryContextSwitches ?? "0"; + message.involuntaryContextSwitches = object.involuntaryContextSwitches ?? "0"; + message.terminationSignal = object.terminationSignal ?? ""; + return message; + }, +}; + +function createBaseMonetaryResourceUsage(): MonetaryResourceUsage { + return { expenses: {} }; +} + +export const MonetaryResourceUsage: MessageFns = { + encode(message: MonetaryResourceUsage, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + Object.entries(message.expenses).forEach(([key, value]) => { + MonetaryResourceUsage_ExpensesEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MonetaryResourceUsage { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMonetaryResourceUsage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + const entry1 = MonetaryResourceUsage_ExpensesEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.expenses[entry1.key] = entry1.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MonetaryResourceUsage { + return { + expenses: isObject(object.expenses) + ? Object.entries(object.expenses).reduce<{ [key: string]: MonetaryResourceUsage_Expense }>( + (acc, [key, value]) => { + acc[key] = MonetaryResourceUsage_Expense.fromJSON(value); + return acc; + }, + {}, + ) + : {}, + }; + }, + + toJSON(message: MonetaryResourceUsage): unknown { + const obj: any = {}; + if (message.expenses) { + const entries = Object.entries(message.expenses); + if (entries.length > 0) { + obj.expenses = {}; + entries.forEach(([k, v]) => { + obj.expenses[k] = MonetaryResourceUsage_Expense.toJSON(v); + }); + } + } + return obj; + }, + + create(base?: DeepPartial): MonetaryResourceUsage { + return MonetaryResourceUsage.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): MonetaryResourceUsage { + const message = createBaseMonetaryResourceUsage(); + message.expenses = Object.entries(object.expenses ?? {}).reduce<{ [key: string]: MonetaryResourceUsage_Expense }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = MonetaryResourceUsage_Expense.fromPartial(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseMonetaryResourceUsage_Expense(): MonetaryResourceUsage_Expense { + return { currency: "", cost: 0 }; +} + +export const MonetaryResourceUsage_Expense: MessageFns = { + encode(message: MonetaryResourceUsage_Expense, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.currency !== "") { + writer.uint32(10).string(message.currency); + } + if (message.cost !== 0) { + writer.uint32(17).double(message.cost); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MonetaryResourceUsage_Expense { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMonetaryResourceUsage_Expense(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.currency = reader.string(); + continue; + } + case 2: { + if (tag !== 17) { + break; + } + + message.cost = reader.double(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MonetaryResourceUsage_Expense { + return { + currency: isSet(object.currency) ? globalThis.String(object.currency) : "", + cost: isSet(object.cost) ? globalThis.Number(object.cost) : 0, + }; + }, + + toJSON(message: MonetaryResourceUsage_Expense): unknown { + const obj: any = {}; + if (message.currency !== "") { + obj.currency = message.currency; + } + if (message.cost !== 0) { + obj.cost = message.cost; + } + return obj; + }, + + create(base?: DeepPartial): MonetaryResourceUsage_Expense { + return MonetaryResourceUsage_Expense.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): MonetaryResourceUsage_Expense { + const message = createBaseMonetaryResourceUsage_Expense(); + message.currency = object.currency ?? ""; + message.cost = object.cost ?? 0; + return message; + }, +}; + +function createBaseMonetaryResourceUsage_ExpensesEntry(): MonetaryResourceUsage_ExpensesEntry { + return { key: "", value: undefined }; +} + +export const MonetaryResourceUsage_ExpensesEntry: MessageFns = { + encode(message: MonetaryResourceUsage_ExpensesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + MonetaryResourceUsage_Expense.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): MonetaryResourceUsage_ExpensesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMonetaryResourceUsage_ExpensesEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = MonetaryResourceUsage_Expense.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): MonetaryResourceUsage_ExpensesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? MonetaryResourceUsage_Expense.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: MonetaryResourceUsage_ExpensesEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = MonetaryResourceUsage_Expense.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): MonetaryResourceUsage_ExpensesEntry { + return MonetaryResourceUsage_ExpensesEntry.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): MonetaryResourceUsage_ExpensesEntry { + const message = createBaseMonetaryResourceUsage_ExpensesEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? MonetaryResourceUsage_Expense.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseInputRootResourceUsage(): InputRootResourceUsage { + return { directoriesResolved: "0", directoriesRead: "0", filesRead: "0" }; +} + +export const InputRootResourceUsage: MessageFns = { + encode(message: InputRootResourceUsage, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.directoriesResolved !== "0") { + writer.uint32(8).uint64(message.directoriesResolved); + } + if (message.directoriesRead !== "0") { + writer.uint32(16).uint64(message.directoriesRead); + } + if (message.filesRead !== "0") { + writer.uint32(24).uint64(message.filesRead); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): InputRootResourceUsage { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInputRootResourceUsage(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.directoriesResolved = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.directoriesRead = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.filesRead = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): InputRootResourceUsage { + return { + directoriesResolved: isSet(object.directoriesResolved) ? globalThis.String(object.directoriesResolved) : "0", + directoriesRead: isSet(object.directoriesRead) ? globalThis.String(object.directoriesRead) : "0", + filesRead: isSet(object.filesRead) ? globalThis.String(object.filesRead) : "0", + }; + }, + + toJSON(message: InputRootResourceUsage): unknown { + const obj: any = {}; + if (message.directoriesResolved !== "0") { + obj.directoriesResolved = message.directoriesResolved; + } + if (message.directoriesRead !== "0") { + obj.directoriesRead = message.directoriesRead; + } + if (message.filesRead !== "0") { + obj.filesRead = message.filesRead; + } + return obj; + }, + + create(base?: DeepPartial): InputRootResourceUsage { + return InputRootResourceUsage.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): InputRootResourceUsage { + const message = createBaseInputRootResourceUsage(); + message.directoriesResolved = object.directoriesResolved ?? "0"; + message.directoriesRead = object.directoriesRead ?? "0"; + message.filesRead = object.filesRead ?? "0"; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/frontend/src/lib/grpc-client/google/bytestream/bytestream.ts b/frontend/src/lib/grpc-client/google/bytestream/bytestream.ts new file mode 100644 index 0000000..0e2efea --- /dev/null +++ b/frontend/src/lib/grpc-client/google/bytestream/bytestream.ts @@ -0,0 +1,816 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.6.1 +// protoc v3.19.1 +// source: google/bytestream/bytestream.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { type CallContext, type CallOptions } from "nice-grpc-common"; + +export const protobufPackage = "google.bytestream"; + +/** Request object for ByteStream.Read. */ +export interface ReadRequest { + /** The name of the resource to read. */ + resourceName: string; + /** + * The offset for the first byte to return in the read, relative to the start + * of the resource. 
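+   *
+   * For example (assumed values, not part of the upstream comment):
+   * resuming a download of a 1024-byte blob after 512 received bytes
+   * would set { readOffset: "512", readLimit: "0" }, "0" meaning
+   * unlimited.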
+ * + * A `read_offset` that is negative or greater than the size of the resource + * will cause an `OUT_OF_RANGE` error. + */ + readOffset: string; + /** + * The maximum number of `data` bytes the server is allowed to return in the + * sum of all `ReadResponse` messages. A `read_limit` of zero indicates that + * there is no limit, and a negative `read_limit` will cause an error. + * + * If the stream returns fewer bytes than allowed by the `read_limit` and no + * error occurred, the stream includes all data from the `read_offset` to the + * end of the resource. + */ + readLimit: string; +} + +/** Response object for ByteStream.Read. */ +export interface ReadResponse { + /** + * A portion of the data for the resource. The service **may** leave `data` + * empty for any given `ReadResponse`. This enables the service to inform the + * client that the request is still live while it is running an operation to + * generate more data. + */ + data: Uint8Array; +} + +/** Request object for ByteStream.Write. */ +export interface WriteRequest { + /** + * The name of the resource to write. This **must** be set on the first + * `WriteRequest` of each `Write()` action. If it is set on subsequent calls, + * it **must** match the value of the first request. + */ + resourceName: string; + /** + * The offset from the beginning of the resource at which the data should be + * written. It is required on all `WriteRequest`s. + * + * In the first `WriteRequest` of a `Write()` action, it indicates + * the initial offset for the `Write()` call. The value **must** be equal to + * the `committed_size` that a call to `QueryWriteStatus()` would return. + * + * On subsequent calls, this value **must** be set and **must** be equal to + * the sum of the first `write_offset` and the sizes of all `data` bundles + * sent previously on this stream. + * + * An incorrect value will cause an error. + */ + writeOffset: string; + /** + * If `true`, this indicates that the write is complete. Sending any + * `WriteRequest`s subsequent to one in which `finish_write` is `true` will + * cause an error. + */ + finishWrite: boolean; + /** + * A portion of the data for the resource. The client **may** leave `data` + * empty for any given `WriteRequest`. This enables the client to inform the + * service that the request is still live while it is running an operation to + * generate more data. + */ + data: Uint8Array; +} + +/** Response object for ByteStream.Write. */ +export interface WriteResponse { + /** The number of bytes that have been processed for the given resource. */ + committedSize: string; +} + +/** Request object for ByteStream.QueryWriteStatus. */ +export interface QueryWriteStatusRequest { + /** The name of the resource whose write status is being requested. */ + resourceName: string; +} + +/** Response object for ByteStream.QueryWriteStatus. */ +export interface QueryWriteStatusResponse { + /** The number of bytes that have been processed for the given resource. */ + committedSize: string; + /** + * `complete` is `true` only if the client has sent a `WriteRequest` with + * `finish_write` set to true, and the server has processed that request. 
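+   *
+   * Sketch of the resume handshake this enables (assumed flow, not
+   * part of the upstream comment): call QueryWriteStatus(), then send
+   * the next WriteRequest with writeOffset equal to the returned
+   * committedSize, setting finishWrite only on the final chunk.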
+   */
+  complete: boolean;
+}
+
+function createBaseReadRequest(): ReadRequest {
+  return { resourceName: "", readOffset: "0", readLimit: "0" };
+}
+
+export const ReadRequest: MessageFns<ReadRequest> = {
+  encode(message: ReadRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.resourceName !== "") {
+      writer.uint32(10).string(message.resourceName);
+    }
+    if (message.readOffset !== "0") {
+      writer.uint32(16).int64(message.readOffset);
+    }
+    if (message.readLimit !== "0") {
+      writer.uint32(24).int64(message.readLimit);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): ReadRequest {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseReadRequest();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 10) {
+            break;
+          }
+
+          message.resourceName = reader.string();
+          continue;
+        }
+        case 2: {
+          if (tag !== 16) {
+            break;
+          }
+
+          message.readOffset = reader.int64().toString();
+          continue;
+        }
+        case 3: {
+          if (tag !== 24) {
+            break;
+          }
+
+          message.readLimit = reader.int64().toString();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): ReadRequest {
+    return {
+      resourceName: isSet(object.resourceName) ? globalThis.String(object.resourceName) : "",
+      readOffset: isSet(object.readOffset) ? globalThis.String(object.readOffset) : "0",
+      readLimit: isSet(object.readLimit) ? globalThis.String(object.readLimit) : "0",
+    };
+  },
+
+  toJSON(message: ReadRequest): unknown {
+    const obj: any = {};
+    if (message.resourceName !== "") {
+      obj.resourceName = message.resourceName;
+    }
+    if (message.readOffset !== "0") {
+      obj.readOffset = message.readOffset;
+    }
+    if (message.readLimit !== "0") {
+      obj.readLimit = message.readLimit;
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<ReadRequest>): ReadRequest {
+    return ReadRequest.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<ReadRequest>): ReadRequest {
+    const message = createBaseReadRequest();
+    message.resourceName = object.resourceName ?? "";
+    message.readOffset = object.readOffset ?? "0";
+    message.readLimit = object.readLimit ?? "0";
+    return message;
+  },
+};
+
+function createBaseReadResponse(): ReadResponse {
+  return { data: new Uint8Array(0) };
+}
+
+export const ReadResponse: MessageFns<ReadResponse> = {
+  encode(message: ReadResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.data.length !== 0) {
+      writer.uint32(82).bytes(message.data);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): ReadResponse {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseReadResponse();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 10: {
+          if (tag !== 82) {
+            break;
+          }
+
+          message.data = reader.bytes();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): ReadResponse {
+    return { data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0) };
+  },
+
+  toJSON(message: ReadResponse): unknown {
+    const obj: any = {};
+    if (message.data.length !== 0) {
+      obj.data = base64FromBytes(message.data);
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<ReadResponse>): ReadResponse {
+    return ReadResponse.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<ReadResponse>): ReadResponse {
+    const message = createBaseReadResponse();
+    message.data = object.data ?? new Uint8Array(0);
+    return message;
+  },
+};
+
+function createBaseWriteRequest(): WriteRequest {
+  return { resourceName: "", writeOffset: "0", finishWrite: false, data: new Uint8Array(0) };
+}
+
+export const WriteRequest: MessageFns<WriteRequest> = {
+  encode(message: WriteRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.resourceName !== "") {
+      writer.uint32(10).string(message.resourceName);
+    }
+    if (message.writeOffset !== "0") {
+      writer.uint32(16).int64(message.writeOffset);
+    }
+    if (message.finishWrite !== false) {
+      writer.uint32(24).bool(message.finishWrite);
+    }
+    if (message.data.length !== 0) {
+      writer.uint32(82).bytes(message.data);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): WriteRequest {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseWriteRequest();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 10) {
+            break;
+          }
+
+          message.resourceName = reader.string();
+          continue;
+        }
+        case 2: {
+          if (tag !== 16) {
+            break;
+          }
+
+          message.writeOffset = reader.int64().toString();
+          continue;
+        }
+        case 3: {
+          if (tag !== 24) {
+            break;
+          }
+
+          message.finishWrite = reader.bool();
+          continue;
+        }
+        case 10: {
+          if (tag !== 82) {
+            break;
+          }
+
+          message.data = reader.bytes();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): WriteRequest {
+    return {
+      resourceName: isSet(object.resourceName) ? globalThis.String(object.resourceName) : "",
+      writeOffset: isSet(object.writeOffset) ? globalThis.String(object.writeOffset) : "0",
+      finishWrite: isSet(object.finishWrite) ? globalThis.Boolean(object.finishWrite) : false,
+      data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0),
+    };
+  },
+
+  toJSON(message: WriteRequest): unknown {
+    const obj: any = {};
+    if (message.resourceName !== "") {
+      obj.resourceName = message.resourceName;
+    }
+    if (message.writeOffset !== "0") {
+      obj.writeOffset = message.writeOffset;
+    }
+    if (message.finishWrite !== false) {
+      obj.finishWrite = message.finishWrite;
+    }
+    if (message.data.length !== 0) {
+      obj.data = base64FromBytes(message.data);
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<WriteRequest>): WriteRequest {
+    return WriteRequest.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<WriteRequest>): WriteRequest {
+    const message = createBaseWriteRequest();
+    message.resourceName = object.resourceName ?? "";
+    message.writeOffset = object.writeOffset ?? "0";
+    message.finishWrite = object.finishWrite ?? false;
+    message.data = object.data ?? new Uint8Array(0);
+    return message;
+  },
+};
+
+function createBaseWriteResponse(): WriteResponse {
+  return { committedSize: "0" };
+}
+
+export const WriteResponse: MessageFns<WriteResponse> = {
+  encode(message: WriteResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.committedSize !== "0") {
+      writer.uint32(8).int64(message.committedSize);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): WriteResponse {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseWriteResponse();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 8) {
+            break;
+          }
+
+          message.committedSize = reader.int64().toString();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): WriteResponse {
+    return { committedSize: isSet(object.committedSize) ? globalThis.String(object.committedSize) : "0" };
+  },
+
+  toJSON(message: WriteResponse): unknown {
+    const obj: any = {};
+    if (message.committedSize !== "0") {
+      obj.committedSize = message.committedSize;
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<WriteResponse>): WriteResponse {
+    return WriteResponse.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<WriteResponse>): WriteResponse {
+    const message = createBaseWriteResponse();
+    message.committedSize = object.committedSize ?? "0";
+    return message;
+  },
+};
+
+function createBaseQueryWriteStatusRequest(): QueryWriteStatusRequest {
+  return { resourceName: "" };
+}
+
+export const QueryWriteStatusRequest: MessageFns<QueryWriteStatusRequest> = {
+  encode(message: QueryWriteStatusRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.resourceName !== "") {
+      writer.uint32(10).string(message.resourceName);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): QueryWriteStatusRequest {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseQueryWriteStatusRequest();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 10) {
+            break;
+          }
+
+          message.resourceName = reader.string();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): QueryWriteStatusRequest {
+    return { resourceName: isSet(object.resourceName) ? globalThis.String(object.resourceName) : "" };
+  },
+
+  toJSON(message: QueryWriteStatusRequest): unknown {
+    const obj: any = {};
+    if (message.resourceName !== "") {
+      obj.resourceName = message.resourceName;
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<QueryWriteStatusRequest>): QueryWriteStatusRequest {
+    return QueryWriteStatusRequest.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<QueryWriteStatusRequest>): QueryWriteStatusRequest {
+    const message = createBaseQueryWriteStatusRequest();
+    message.resourceName = object.resourceName ?? "";
+    return message;
+  },
+};
+
+function createBaseQueryWriteStatusResponse(): QueryWriteStatusResponse {
+  return { committedSize: "0", complete: false };
+}
+
+export const QueryWriteStatusResponse: MessageFns<QueryWriteStatusResponse> = {
+  encode(message: QueryWriteStatusResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+    if (message.committedSize !== "0") {
+      writer.uint32(8).int64(message.committedSize);
+    }
+    if (message.complete !== false) {
+      writer.uint32(16).bool(message.complete);
+    }
+    return writer;
+  },
+
+  decode(input: BinaryReader | Uint8Array, length?: number): QueryWriteStatusResponse {
+    const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseQueryWriteStatusResponse();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1: {
+          if (tag !== 8) {
+            break;
+          }
+
+          message.committedSize = reader.int64().toString();
+          continue;
+        }
+        case 2: {
+          if (tag !== 16) {
+            break;
+          }
+
+          message.complete = reader.bool();
+          continue;
+        }
+      }
+      if ((tag & 7) === 4 || tag === 0) {
+        break;
+      }
+      reader.skip(tag & 7);
+    }
+    return message;
+  },
+
+  fromJSON(object: any): QueryWriteStatusResponse {
+    return {
+      committedSize: isSet(object.committedSize) ? globalThis.String(object.committedSize) : "0",
+      complete: isSet(object.complete) ? globalThis.Boolean(object.complete) : false,
+    };
+  },
+
+  toJSON(message: QueryWriteStatusResponse): unknown {
+    const obj: any = {};
+    if (message.committedSize !== "0") {
+      obj.committedSize = message.committedSize;
+    }
+    if (message.complete !== false) {
+      obj.complete = message.complete;
+    }
+    return obj;
+  },
+
+  create(base?: DeepPartial<QueryWriteStatusResponse>): QueryWriteStatusResponse {
+    return QueryWriteStatusResponse.fromPartial(base ?? {});
+  },
+  fromPartial(object: DeepPartial<QueryWriteStatusResponse>): QueryWriteStatusResponse {
+    const message = createBaseQueryWriteStatusResponse();
+    message.committedSize = object.committedSize ?? "0";
+    message.complete = object.complete ?? false;
+    return message;
+  },
+};
+
+/**
+ * #### Introduction
+ *
+ * The Byte Stream API enables a client to read and write a stream of bytes to
+ * and from a resource. Resources have names, and these names are supplied in
+ * the API calls below to identify the resource that is being read from or
+ * written to.
+ *
+ * All implementations of the Byte Stream API export the interface defined here:
+ *
+ * * `Read()`: Reads the contents of a resource.
+ *
+ * * `Write()`: Writes the contents of a resource. The client can call `Write()`
+ *   multiple times with the same resource and can check the status of the write
+ *   by calling `QueryWriteStatus()`.
+ *
+ * #### Service parameters and metadata
+ *
+ * The ByteStream API provides no direct way to access/modify any metadata
+ * associated with the resource.
+ *
+ * #### Errors
+ *
+ * The errors returned by the service are in the Google canonical error space.
+ */
+export type ByteStreamDefinition = typeof ByteStreamDefinition;
+export const ByteStreamDefinition = {
+  name: "ByteStream",
+  fullName: "google.bytestream.ByteStream",
+  methods: {
+    /**
+     * `Read()` is used to retrieve the contents of a resource as a sequence
+     * of bytes. The bytes are returned in a sequence of responses, and the
+     * responses are delivered as the results of a server-side streaming RPC.
+     */
+    read: {
+      name: "Read",
+      requestType: ReadRequest,
+      requestStream: false,
+      responseType: ReadResponse,
+      responseStream: true,
+      options: {},
+    },
+    /**
+     * `Write()` is used to send the contents of a resource as a sequence of
+     * bytes. The bytes are sent in a sequence of request protos of a client-side
+     * streaming RPC.
+     *
+     * A `Write()` action is resumable. If there is an error or the connection is
+     * broken during the `Write()`, the client should check the status of the
+     * `Write()` by calling `QueryWriteStatus()` and continue writing from the
+     * returned `committed_size`. This may be less than the amount of data the
+     * client previously sent.
+     *
+     * Calling `Write()` on a resource name that was previously written and
+     * finalized could cause an error, depending on whether the underlying service
+     * allows over-writing of previously written resources.
+     *
+     * When the client closes the request channel, the service will respond with
+     * a `WriteResponse`. The service will not view the resource as `complete`
+     * until the client has sent a `WriteRequest` with `finish_write` set to
+     * `true`. Sending any requests on a stream after sending a request with
+     * `finish_write` set to `true` will cause an error. The client **should**
+     * check the `WriteResponse` it receives to determine how much data the
+     * service was able to commit and whether the service views the resource as
+     * `complete` or not.
+     */
+    write: {
+      name: "Write",
+      requestType: WriteRequest,
+      requestStream: true,
+      responseType: WriteResponse,
+      responseStream: false,
+      options: {},
+    },
+    /**
+     * `QueryWriteStatus()` is used to find the `committed_size` for a resource
+     * that is being written, which can then be used as the `write_offset` for
+     * the next `Write()` call.
+     *
+     * If the resource does not exist (i.e., the resource has been deleted, or the
+     * first `Write()` has not yet reached the service), this method returns the
+     * error `NOT_FOUND`.
+     *
+     * The client **may** call `QueryWriteStatus()` at any time to determine how
+     * much data has been processed for this resource. This is useful if the
+     * client is buffering data and needs to know which data can be safely
+     * evicted. For any sequence of `QueryWriteStatus()` calls for a given
+     * resource name, the sequence of returned `committed_size` values will be
+     * non-decreasing.
+     */
+    queryWriteStatus: {
+      name: "QueryWriteStatus",
+      requestType: QueryWriteStatusRequest,
+      requestStream: false,
+      responseType: QueryWriteStatusResponse,
+      responseStream: false,
+      options: {},
+    },
+  },
+} as const;
+
+export interface ByteStreamServiceImplementation {
+  /**
+   * `Read()` is used to retrieve the contents of a resource as a sequence
+   * of bytes. The bytes are returned in a sequence of responses, and the
+   * responses are delivered as the results of a server-side streaming RPC.
+   */
+  read(
+    request: ReadRequest,
+    context: CallContext & CallContextExt,
+  ): ServerStreamingMethodResult<DeepPartial<ReadResponse>>;
+  /**
+   * `Write()` is used to send the contents of a resource as a sequence of
+   * bytes. The bytes are sent in a sequence of request protos of a client-side
+   * streaming RPC.
+   *
+   * A `Write()` action is resumable. If there is an error or the connection is
+   * broken during the `Write()`, the client should check the status of the
+   * `Write()` by calling `QueryWriteStatus()` and continue writing from the
+   * returned `committed_size`. This may be less than the amount of data the
+   * client previously sent.
+   *
+   * Calling `Write()` on a resource name that was previously written and
+   * finalized could cause an error, depending on whether the underlying service
+   * allows over-writing of previously written resources.
+   *
+   * When the client closes the request channel, the service will respond with
+   * a `WriteResponse`. The service will not view the resource as `complete`
+   * until the client has sent a `WriteRequest` with `finish_write` set to
+   * `true`. Sending any requests on a stream after sending a request with
+   * `finish_write` set to `true` will cause an error. The client **should**
+   * check the `WriteResponse` it receives to determine how much data the
+   * service was able to commit and whether the service views the resource as
+   * `complete` or not.
+   */
+  write(
+    request: AsyncIterable<WriteRequest>,
+    context: CallContext & CallContextExt,
+  ): Promise<DeepPartial<WriteResponse>>;
+  /**
+   * `QueryWriteStatus()` is used to find the `committed_size` for a resource
+   * that is being written, which can then be used as the `write_offset` for
+   * the next `Write()` call.
+   *
+   * If the resource does not exist (i.e., the resource has been deleted, or the
+   * first `Write()` has not yet reached the service), this method returns the
+   * error `NOT_FOUND`.
+   *
+   * The client **may** call `QueryWriteStatus()` at any time to determine how
+   * much data has been processed for this resource. This is useful if the
+   * client is buffering data and needs to know which data can be safely
+   * evicted. For any sequence of `QueryWriteStatus()` calls for a given
+   * resource name, the sequence of returned `committed_size` values will be
+   * non-decreasing.
+   */
+  queryWriteStatus(
+    request: QueryWriteStatusRequest,
+    context: CallContext & CallContextExt,
+  ): Promise<DeepPartial<QueryWriteStatusResponse>>;
+}
+
+export interface ByteStreamClient {
+  /**
+   * `Read()` is used to retrieve the contents of a resource as a sequence
+   * of bytes. The bytes are returned in a sequence of responses, and the
+   * responses are delivered as the results of a server-side streaming RPC.
+   */
+  read(request: DeepPartial<ReadRequest>, options?: CallOptions & CallOptionsExt): AsyncIterable<ReadResponse>;
+  /**
+   * `Write()` is used to send the contents of a resource as a sequence of
+   * bytes. The bytes are sent in a sequence of request protos of a client-side
+   * streaming RPC.
+   *
+   * A `Write()` action is resumable. If there is an error or the connection is
+   * broken during the `Write()`, the client should check the status of the
+   * `Write()` by calling `QueryWriteStatus()` and continue writing from the
+   * returned `committed_size`. This may be less than the amount of data the
+   * client previously sent.
+   *
+   * Calling `Write()` on a resource name that was previously written and
+   * finalized could cause an error, depending on whether the underlying service
+   * allows over-writing of previously written resources.
+   *
+   * When the client closes the request channel, the service will respond with
+   * a `WriteResponse`. The service will not view the resource as `complete`
+   * until the client has sent a `WriteRequest` with `finish_write` set to
+   * `true`. Sending any requests on a stream after sending a request with
+   * `finish_write` set to `true` will cause an error. The client **should**
+   * check the `WriteResponse` it receives to determine how much data the
+   * service was able to commit and whether the service views the resource as
+   * `complete` or not.
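+   *
+   * As an illustrative sketch only — it assumes a client built with
+   * nice-grpc-web's createChannel()/createClient() against this
+   * definition, plus a hypothetical `blob: Uint8Array`; the backend URL
+   * and resource name are schematic:
+   *
+   *   const client: ByteStreamClient =
+   *     createClient(ByteStreamDefinition, createChannel("http://<backend>"));
+   *   async function* upload(): AsyncIterable<DeepPartial<WriteRequest>> {
+   *     yield {
+   *       resourceName: "<instance>/uploads/<uuid>/blobs/<hash>/<size>",
+   *       writeOffset: "0",
+   *       finishWrite: true,
+   *       data: blob,
+   *     };
+   *   }
+   *   const { committedSize } = await client.write(upload());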
+   */
+  write(
+    request: AsyncIterable<DeepPartial<WriteRequest>>,
+    options?: CallOptions & CallOptionsExt,
+  ): Promise<WriteResponse>;
+  /**
+   * `QueryWriteStatus()` is used to find the `committed_size` for a resource
+   * that is being written, which can then be used as the `write_offset` for
+   * the next `Write()` call.
+   *
+   * If the resource does not exist (i.e., the resource has been deleted, or the
+   * first `Write()` has not yet reached the service), this method returns the
+   * error `NOT_FOUND`.
+   *
+   * The client **may** call `QueryWriteStatus()` at any time to determine how
+   * much data has been processed for this resource. This is useful if the
+   * client is buffering data and needs to know which data can be safely
+   * evicted. For any sequence of `QueryWriteStatus()` calls for a given
+   * resource name, the sequence of returned `committed_size` values will be
+   * non-decreasing.
+   */
+  queryWriteStatus(
+    request: DeepPartial<QueryWriteStatusRequest>,
+    options?: CallOptions & CallOptionsExt,
+  ): Promise<QueryWriteStatusResponse>;
+}
+
+function bytesFromBase64(b64: string): Uint8Array {
+  const bin = globalThis.atob(b64);
+  const arr = new Uint8Array(bin.length);
+  for (let i = 0; i < bin.length; ++i) {
+    arr[i] = bin.charCodeAt(i);
+  }
+  return arr;
+}
+
+function base64FromBytes(arr: Uint8Array): string {
+  const bin: string[] = [];
+  arr.forEach((byte) => {
+    bin.push(globalThis.String.fromCharCode(byte));
+  });
+  return globalThis.btoa(bin.join(""));
+}
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isSet(value: any): boolean {
+  return value !== null && value !== undefined;
+}
+
+export type ServerStreamingMethodResult<Response> = { [Symbol.asyncIterator](): AsyncIterator<Response, void> };
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/frontend/src/proto/buildbarn/auth/auth.proto b/frontend/src/proto/buildbarn/auth/auth.proto
new file mode 100644
index 0000000..af01ef6
--- /dev/null
+++ b/frontend/src/proto/buildbarn/auth/auth.proto
@@ -0,0 +1,29 @@
+syntax = "proto3";
+
+package buildbarn.auth;
+
+import "google/protobuf/struct.proto";
+import "opentelemetry/proto/common/v1/common.proto";
+
+option go_package = "github.com/buildbarn/bb-storage/pkg/proto/auth";
+
+// Protobuf equivalent of the AuthenticationMetadata structure that is
+// used by the auth framework to store information on an authenticated
+// user.
+message AuthenticationMetadata {
+  // Part of the authentication metadata that is safe to display
+  // publicly (e.g., as part of logs or bb_browser).
+  google.protobuf.Value public = 1;
+
+  // OpenTelemetry tracing attributes to add to spans in which the
+  // authentication took place (e.g., gRPC server call spans). All
+  // attributes will have "auth." prepended to their names
+  // automatically.
+  repeated opentelemetry.proto.common.v1.KeyValue tracing_attributes = 2;
+
+  // Part of the authentication metadata that should not be displayed
+  // publicly. This field is useful for propagating information from the
+  // authentication layer to the authorization layer, as this data can
+  // be accessed by JMESPathExpressionAuthorizer.
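+  //
+  // As a purely illustrative example, a deployment could store group
+  // memberships here (e.g., a Value holding {"groups": ["admins"]}) and
+  // let the authorizer's JMESPath expression match against them.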
+  google.protobuf.Value private = 3;
+}
\ No newline at end of file
diff --git a/frontend/src/proto/buildbarn/cas/cas.proto b/frontend/src/proto/buildbarn/cas/cas.proto
new file mode 100644
index 0000000..9df1d37
--- /dev/null
+++ b/frontend/src/proto/buildbarn/cas/cas.proto
@@ -0,0 +1,28 @@
+syntax = "proto3";
+
+package buildbarn.cas;
+
+import "build/bazel/remote/execution/v2/remote_execution.proto";
+
+option go_package = "github.com/buildbarn/bb-remote-execution/pkg/proto/cas";
+
+// HistoricalExecuteResponse is a custom message that is stored into the
+// Content Addressable Storage. The Action Cache is only permitted to
+// contain ActionResults of successful builds. In our case we also want
+// to provide the user insight as to why their build fails by storing
+// the ActionResult upon failure.
+//
+// This message is written into the ContentAddressableStorage by
+// bb_worker by the CachingBuildExecutor. The digest is returned to the
+// user by providing a URL to bb_browser as a message in the
+// ExecuteResponse.
+//
+// Additionally, this message is attached to CompletedActions that are
+// streamed through a CompletedActionLogger in order to provide metadata
+// for uniquely identifying actions.
+message HistoricalExecuteResponse {
+  reserved 2;
+
+  build.bazel.remote.execution.v2.Digest action_digest = 1;
+  build.bazel.remote.execution.v2.ExecuteResponse execute_response = 3;
+}
diff --git a/frontend/src/proto/buildbarn/fsac/fsac.proto b/frontend/src/proto/buildbarn/fsac/fsac.proto
new file mode 100644
index 0000000..444df3a
--- /dev/null
+++ b/frontend/src/proto/buildbarn/fsac/fsac.proto
@@ -0,0 +1,90 @@
+syntax = "proto3";
+
+package buildbarn.fsac;
+
+import "build/bazel/remote/execution/v2/remote_execution.proto";
+import "google/protobuf/empty.proto";
+
+option go_package = "github.com/buildbarn/bb-storage/pkg/proto/fsac";
+
+// The File System Access Cache (FSAC) is a Buildbarn specific data
+// store that workers can use to store information on how build actions
+// access the data stored in the input root. This information can be
+// used during subsequent executions of similar actions to speed up file
+// system access.
+service FileSystemAccessCache {
+  // GetFileSystemAccessProfile() reads a single FileSystemAccessProfile
+  // from the FSAC.
+  rpc GetFileSystemAccessProfile(GetFileSystemAccessProfileRequest)
+      returns (FileSystemAccessProfile);
+
+  // UpdateFileSystemAccessProfile() writes a single
+  // FileSystemAccessProfile message into the FSAC.
+  rpc UpdateFileSystemAccessProfile(UpdateFileSystemAccessProfileRequest)
+      returns (google.protobuf.Empty);
+}
+
+// The file system access profile of a build action.
+message FileSystemAccessProfile {
+  // A Bloom filter that captures paths of regular files and directories
+  // in the input root whose contents have been read. In case files with
+  // the same digest are present in the input root multiple times, there
+  // is no guarantee that all paths are added to the Bloom filter.
+  //
+  // This Bloom filter can be used by workers to perform readahead of
+  // Content Addressable Storage (CAS) objects, thereby reducing the
+  // probability of execution of actions getting suspended to load data
+  // over the network.
+  //
+  // Hashes are computed by taking the FNV-1a hash of the path in the
+  // input root, modulo the size of the Bloom filter. The path uses the
+  // following pattern:
+  //
+  //     (/${filename})*/*
+  //
+  // This means that the root directory uses the empty path. A file
+  // contained in the root directory may use path "/hello.txt". Between
+  // [0, k) trailing slashes are added to the path to obtain k
+  // independent hashes.
+  //
+  // The size of the Bloom filter in bits (m) SHOULD be prime. To be
+  // able to reobtain the exact size in bits, the bits in the final byte
+  // are terminated with a 1 bit, followed by zero or more 0 bits.
+  bytes bloom_filter = 1;
+
+  // The number of hash functions (k) that should be considered when
+  // querying the Bloom filter.
+  uint32 bloom_filter_hash_functions = 2;
+}
+
+message GetFileSystemAccessProfileRequest {
+  // The instance of the execution system to operate against.
+  string instance_name = 1;
+
+  // The digest function that was used to compute the reduced action digest.
+  build.bazel.remote.execution.v2.DigestFunction.Value digest_function = 2;
+
+  // The digest of a trimmed down Action message for which a file system
+  // access profile is requested. This digest is obtained by removing
+  // all fields from the original Action, except for 'command_digest'
+  // and 'platform'.
+  //
+  // This means that contents of the input root, the Action's timeout
+  // and the do_not_cache flag are ignored.
+  build.bazel.remote.execution.v2.Digest reduced_action_digest = 3;
+}
+
+message UpdateFileSystemAccessProfileRequest {
+  // The instance of the execution system to operate against.
+  string instance_name = 1;
+
+  // The digest function that was used to compute the reduced action digest.
+  build.bazel.remote.execution.v2.DigestFunction.Value digest_function = 2;
+
+  // The digest of a trimmed down Action message for which a file system
+  // access profile is being stored.
+  build.bazel.remote.execution.v2.Digest reduced_action_digest = 3;
+
+  // The file system access profile to store.
+  FileSystemAccessProfile file_system_access_profile = 4;
+}
\ No newline at end of file
diff --git a/frontend/src/proto/buildbarn/iscc/iscc.proto b/frontend/src/proto/buildbarn/iscc/iscc.proto
new file mode 100644
index 0000000..18b6414
--- /dev/null
+++ b/frontend/src/proto/buildbarn/iscc/iscc.proto
@@ -0,0 +1,98 @@
+syntax = "proto3";
+
+package buildbarn.iscc;
+
+import "build/bazel/remote/execution/v2/remote_execution.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/timestamp.proto";
+
+option go_package = "github.com/buildbarn/bb-storage/pkg/proto/iscc";
+
+// The Initial Size Class Cache (ISCC) is a data store that can be used
+// by bb_scheduler to store outcomes of previous executions of an
+// action. This data is then used during successive actions of the same
+// shape to pick the initial size class on which the action needs to be
+// run.
+//
+// It generally only makes sense to use this data store in case
+// platforms are declared that have multiple size classes.
+service InitialSizeClassCache {
+  // GetPreviousExecutionStats() reads a single PreviousExecutionStats
+  // message from the ISCC.
+  rpc GetPreviousExecutionStats(GetPreviousExecutionStatsRequest)
+      returns (PreviousExecutionStats);
+
+  // UpdatePreviousExecutionStats() writes a single
+  // PreviousExecutionStats message to the ISCC.
+  rpc UpdatePreviousExecutionStats(UpdatePreviousExecutionStatsRequest)
+      returns (google.protobuf.Empty);
+}
+
+// The outcome of a single action at some point in the past.
+message PreviousExecution {
+  oneof outcome {
+    // Execution failed with an error.
+    google.protobuf.Empty failed = 1;
+
+    // Execution failed due to a timeout. The timeout value is stored.
+    google.protobuf.Duration timed_out = 2;
+
+    // Execution succeeded. The virtual execution duration is stored.
+    google.protobuf.Duration succeeded = 3;
+  }
+}
+
+// Outcomes of actions for a given size class.
+message PerSizeClassStats {
+  // The most recent outcomes for this size class, where the last entry
+  // corresponds to the most recent one.
+  repeated PreviousExecution previous_executions = 1;
+
+  reserved 2;
+
+  // An initial probability value to be used for PageRank computation.
+  // These values may correspond to outcomes of previous PageRank
+  // computations. Reloading them may make it possible to recompute
+  // future PageRank probability values more quickly.
+  double initial_page_rank_probability = 3;
+}
+
+message PreviousExecutionStats {
+  // Outcomes of previous executions of actions, per size class.
+  map<uint32, PerSizeClassStats> size_classes = 1;
+
+  // The time at which this action failed on the largest size class.
+  google.protobuf.Timestamp last_seen_failure = 2;
+}
+
+message GetPreviousExecutionStatsRequest {
+  // The instance of the execution system to operate against.
+  string instance_name = 1;
+
+  // The digest of a trimmed down Action message for which statistics
+  // are requested. This digest is obtained by removing all fields from
+  // the original Action, except for 'command_digest' and 'platform'.
+  //
+  // This means that contents of the input root, the Action's timeout
+  // and the do_not_cache flag are ignored.
+  build.bazel.remote.execution.v2.Digest reduced_action_digest = 2;
+
+  // The digest function that was used to compute the reduced action digest.
+  build.bazel.remote.execution.v2.DigestFunction.Value digest_function = 3;
+}
+
+message UpdatePreviousExecutionStatsRequest {
+  // The instance of the execution system to operate against.
+  string instance_name = 1;
+
+  // The digest of a trimmed down Action message for which statistics
+  // are being stored.
+  build.bazel.remote.execution.v2.Digest reduced_action_digest = 2;
+
+  // The statistics to store.
+  PreviousExecutionStats previous_execution_stats = 3;
+
+  // The digest function that was used to compute the reduced action digest.
+  build.bazel.remote.execution.v2.DigestFunction.Value digest_function = 4;
+}
\ No newline at end of file
diff --git a/frontend/src/proto/buildbarn/query/query.proto b/frontend/src/proto/buildbarn/query/query.proto
new file mode 100644
index 0000000..36c23fe
--- /dev/null
+++ b/frontend/src/proto/buildbarn/query/query.proto
@@ -0,0 +1,20 @@
+syntax = "proto3";
+
+package buildbarn.query;
+
+import "build/bazel/remote/execution/v2/remote_execution.proto";
+
+option go_package = "github.com/buildbarn/bb-browser/pkg/proto/query";
+
+// Reference to a certain directory contained in a
+// FileSystemAccessProfile, which is provided as part of bb_browser URL
+// query parameters when listing directories.
+message FileSystemAccessProfileReference {
+  // The digest of the profile stored in the File System Access Cache
+  // (FSAC).
+  build.bazel.remote.execution.v2.Digest digest = 1;
+
+  // The FNV-1a base hash value of the path hashes corresponding to the
+  // current directory.
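+  //
+  // In this change the frontend serializes this message to JSON and
+  // carries it in the "fileSystemAccessProfile" URL query parameter;
+  // see generateFileSystemReferenceQueryParams() in
+  // frontend/src/utils/bloomFilter.ts.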
+  uint64 path_hashes_base_hash = 2;
+}
diff --git a/frontend/src/proto/buildbarn/resourceusage/resourceusage.proto b/frontend/src/proto/buildbarn/resourceusage/resourceusage.proto
new file mode 100644
index 0000000..f3c45d0
--- /dev/null
+++ b/frontend/src/proto/buildbarn/resourceusage/resourceusage.proto
@@ -0,0 +1,128 @@
+syntax = "proto3";
+
+package buildbarn.resourceusage;
+
+import "google/protobuf/duration.proto";
+
+option go_package = "github.com/buildbarn/bb-portal/pkg/proto/resourceusage";
+
+// File pool resource usage statistics. File pools are used by bb_worker
+// to allocate temporary files that are created by build actions.
+// Temporary files include output files created through the FUSE file
+// system.
+message FilePoolResourceUsage {
+  // Total number of files created.
+  uint64 files_created = 1;
+
+  // Maximum number of files that existed at some point in time.
+  uint64 files_count_peak = 2;
+
+  // Maximum total size of all files at some point in time.
+  uint64 files_size_bytes_peak = 3;
+
+  // Total number of ReadAt() calls performed.
+  uint64 reads_count = 4;
+
+  // Total amount of data returned by all ReadAt() calls.
+  uint64 reads_size_bytes = 5;
+
+  // Total number of WriteAt() calls performed.
+  uint64 writes_count = 6;
+
+  // Total amount of data processed by all WriteAt() calls.
+  uint64 writes_size_bytes = 7;
+
+  // Total number of Truncate() calls performed.
+  uint64 truncates_count = 8;
+}
+
+// The equivalent of 'struct rusage' in POSIX, generally returned by
+// getrusage(2) or wait4(2).
+message POSIXResourceUsage {
+  // ru_utime: Amount of CPU time in seconds spent in userspace.
+  google.protobuf.Duration user_time = 1;
+
+  // ru_stime: Amount of CPU time in seconds spent in kernelspace.
+  google.protobuf.Duration system_time = 2;
+
+  // ru_maxrss: Maximum amount of resident memory in bytes.
+  int64 maximum_resident_set_size = 3;
+
+  // ru_ixrss, ru_idrss and ru_isrss are omitted, as there is no
+  // portable way to obtain the number of ticks used to compute these
+  // integrals.
+  reserved 4, 5, 6;
+
+  // ru_minflt: Page reclaims.
+  int64 page_reclaims = 7;
+
+  // ru_majflt: Page faults.
+  int64 page_faults = 8;
+
+  // ru_nswap: Number of swaps.
+  int64 swaps = 9;
+
+  // ru_inblock: Block input operations.
+  int64 block_input_operations = 10;
+
+  // ru_oublock: Block output operations.
+  int64 block_output_operations = 11;
+
+  // ru_msgsnd: Messages sent.
+  int64 messages_sent = 12;
+
+  // ru_msgrcv: Messages received.
+  int64 messages_received = 13;
+
+  // ru_nsignals: Signals received.
+  int64 signals_received = 14;
+
+  // ru_nvcsw: Voluntary context switches.
+  int64 voluntary_context_switches = 15;
+
+  // ru_nivcsw: Involuntary context switches.
+  int64 involuntary_context_switches = 16;
+
+  // If abnormal process termination occurred, the name of the signal
+  // that was delivered, without the "SIG" prefix (e.g., "BUS", "KILL",
+  // "SEGV").
+  //
+  // Abnormal process termination can occur by calling abort(), or by
+  // receiving a signal for which no signal handler is installed.
+  string termination_signal = 17;
+}
+
+// A representation of unique factors that may be aggregated to
+// compute a given build action's total price.
+message MonetaryResourceUsage {
+  message Expense {
+    // The type of currency the cost is measured in. Required to be in
+    // ISO 4217 format: https://en.wikipedia.org/wiki/ISO_4217#Active_codes
+    string currency = 1;
+
+    // The value of a specific expense for a build action.
+    double cost = 2;
+  }
+
+  // A mapping of expense categories to their respective costs.
+  map<string, Expense> expenses = 1;
+}
+
+// Input root resource usage statistics. These statistics indicate how
+// many directories and files inside the virtual file system were
+// accessed. These statistics are only reported if prefetching is
+// enabled, as they are computed together with the Bloom filter.
+message InputRootResourceUsage {
+  // The number of directories in the input root that have been
+  // resolved. This equates to the total number of directories that are
+  // present in all directories that have been read.
+  uint64 directories_resolved = 1;
+
+  // The number of directories whose contents have been read from the
+  // Content Addressable Storage (CAS).
+  uint64 directories_read = 2;
+
+  // The number of files whose contents have been read from the Content
+  // Addressable Storage (CAS).
+  uint64 files_read = 3;
+}
diff --git a/frontend/src/proto/google/bytestream/bytestream.proto b/frontend/src/proto/google/bytestream/bytestream.proto
new file mode 100644
index 0000000..a7ad4eb
--- /dev/null
+++ b/frontend/src/proto/google/bytestream/bytestream.proto
@@ -0,0 +1,178 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.bytestream;
+
+option go_package = "google.golang.org/genproto/googleapis/bytestream;bytestream";
+option java_outer_classname = "ByteStreamProto";
+option java_package = "com.google.bytestream";
+
+// #### Introduction
+//
+// The Byte Stream API enables a client to read and write a stream of bytes to
+// and from a resource. Resources have names, and these names are supplied in
+// the API calls below to identify the resource that is being read from or
+// written to.
+//
+// All implementations of the Byte Stream API export the interface defined here:
+//
+// * `Read()`: Reads the contents of a resource.
+//
+// * `Write()`: Writes the contents of a resource. The client can call `Write()`
+//   multiple times with the same resource and can check the status of the write
+//   by calling `QueryWriteStatus()`.
+//
+// #### Service parameters and metadata
+//
+// The ByteStream API provides no direct way to access/modify any metadata
+// associated with the resource.
+//
+// #### Errors
+//
+// The errors returned by the service are in the Google canonical error space.
service ByteStream {
+  // `Read()` is used to retrieve the contents of a resource as a sequence
+  // of bytes. The bytes are returned in a sequence of responses, and the
+  // responses are delivered as the results of a server-side streaming RPC.
+  rpc Read(ReadRequest) returns (stream ReadResponse);
+
+  // `Write()` is used to send the contents of a resource as a sequence of
+  // bytes. The bytes are sent in a sequence of request protos of a client-side
+  // streaming RPC.
+  //
+  // A `Write()` action is resumable. If there is an error or the connection is
+  // broken during the `Write()`, the client should check the status of the
+  // `Write()` by calling `QueryWriteStatus()` and continue writing from the
+  // returned `committed_size`. This may be less than the amount of data the
+  // client previously sent.
+  //
+  // Calling `Write()` on a resource name that was previously written and
+  // finalized could cause an error, depending on whether the underlying service
+  // allows over-writing of previously written resources.
+  //
+  // When the client closes the request channel, the service will respond with
+  // a `WriteResponse`. The service will not view the resource as `complete`
+  // until the client has sent a `WriteRequest` with `finish_write` set to
+  // `true`. Sending any requests on a stream after sending a request with
+  // `finish_write` set to `true` will cause an error. The client **should**
+  // check the `WriteResponse` it receives to determine how much data the
+  // service was able to commit and whether the service views the resource as
+  // `complete` or not.
+  rpc Write(stream WriteRequest) returns (WriteResponse);
+
+  // `QueryWriteStatus()` is used to find the `committed_size` for a resource
+  // that is being written, which can then be used as the `write_offset` for
+  // the next `Write()` call.
+  //
+  // If the resource does not exist (i.e., the resource has been deleted, or the
+  // first `Write()` has not yet reached the service), this method returns the
+  // error `NOT_FOUND`.
+  //
+  // The client **may** call `QueryWriteStatus()` at any time to determine how
+  // much data has been processed for this resource. This is useful if the
+  // client is buffering data and needs to know which data can be safely
+  // evicted. For any sequence of `QueryWriteStatus()` calls for a given
+  // resource name, the sequence of returned `committed_size` values will be
+  // non-decreasing.
+  rpc QueryWriteStatus(QueryWriteStatusRequest)
+      returns (QueryWriteStatusResponse);
+}
+
+// Request object for ByteStream.Read.
+message ReadRequest {
+  // The name of the resource to read.
+  string resource_name = 1;
+
+  // The offset for the first byte to return in the read, relative to the start
+  // of the resource.
+  //
+  // A `read_offset` that is negative or greater than the size of the resource
+  // will cause an `OUT_OF_RANGE` error.
+  int64 read_offset = 2;
+
+  // The maximum number of `data` bytes the server is allowed to return in the
+  // sum of all `ReadResponse` messages. A `read_limit` of zero indicates that
+  // there is no limit, and a negative `read_limit` will cause an error.
+  //
+  // If the stream returns fewer bytes than allowed by the `read_limit` and no
+  // error occurred, the stream includes all data from the `read_offset` to the
+  // end of the resource.
+  int64 read_limit = 3;
+}
+
+// Response object for ByteStream.Read.
+message ReadResponse {
+  // A portion of the data for the resource. The service **may** leave `data`
+  // empty for any given `ReadResponse`. This enables the service to inform the
+  // client that the request is still live while it is running an operation to
+  // generate more data.
+  bytes data = 10;
+}
+
+// Request object for ByteStream.Write.
+message WriteRequest {
+  // The name of the resource to write. This **must** be set on the first
+  // `WriteRequest` of each `Write()` action. If it is set on subsequent calls,
+  // it **must** match the value of the first request.
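+  //
+  // When talking to a Buildbarn CAS, as done elsewhere in this change,
+  // read resource names take the schematic shape
+  // "{instanceName}/blobs[/{digestFunction}]/{hash}/{sizeBytes}"
+  // (see frontend/src/utils/fetchCasObject.ts), while writes include an
+  // additional "uploads/{uuid}" segment per the Remote Execution API.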
+  string resource_name = 1;
+
+  // The offset from the beginning of the resource at which the data should be
+  // written. It is required on all `WriteRequest`s.
+  //
+  // In the first `WriteRequest` of a `Write()` action, it indicates
+  // the initial offset for the `Write()` call. The value **must** be equal to
+  // the `committed_size` that a call to `QueryWriteStatus()` would return.
+  //
+  // On subsequent calls, this value **must** be set and **must** be equal to
+  // the sum of the first `write_offset` and the sizes of all `data` bundles
+  // sent previously on this stream.
+  //
+  // An incorrect value will cause an error.
+  int64 write_offset = 2;
+
+  // If `true`, this indicates that the write is complete. Sending any
+  // `WriteRequest`s subsequent to one in which `finish_write` is `true` will
+  // cause an error.
+  bool finish_write = 3;
+
+  // A portion of the data for the resource. The client **may** leave `data`
+  // empty for any given `WriteRequest`. This enables the client to inform the
+  // service that the request is still live while it is running an operation to
+  // generate more data.
+  bytes data = 10;
+}
+
+// Response object for ByteStream.Write.
+message WriteResponse {
+  // The number of bytes that have been processed for the given resource.
+  int64 committed_size = 1;
+}
+
+// Request object for ByteStream.QueryWriteStatus.
+message QueryWriteStatusRequest {
+  // The name of the resource whose write status is being requested.
+  string resource_name = 1;
+}
+
+// Response object for ByteStream.QueryWriteStatus.
+message QueryWriteStatusResponse {
+  // The number of bytes that have been processed for the given resource.
+  int64 committed_size = 1;
+
+  // `complete` is `true` only if the client has sent a `WriteRequest` with
+  // `finish_write` set to true, and the server has processed that request.
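+  //
+  // Together with `committed_size`, this is what makes `Write()` calls
+  // resumable: after a broken connection, a client can call
+  // `QueryWriteStatus()` and restart the stream with `write_offset` set
+  // to the returned `committed_size`.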
+  bool complete = 2;
+}
\ No newline at end of file
diff --git a/frontend/src/types/BrowserPageType.ts b/frontend/src/types/BrowserPageType.ts
new file mode 100644
index 0000000..6e8f677
--- /dev/null
+++ b/frontend/src/types/BrowserPageType.ts
@@ -0,0 +1,31 @@
+import type {
+  Digest,
+  DigestFunction_Value,
+} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
+
+export enum BrowserPageType {
+  Action = "action",
+  Command = "command",
+  Directory = "directory",
+  File = "file",
+  HistoricalExecuteResponse = "historical_execute_response",
+  PreviousExecutionStats = "previous_execution_stats",
+  Tree = "tree",
+}
+
+export const getBrowserPageTypeFromString = (
+  value: string,
+): BrowserPageType | undefined => {
+  if (Object.values(BrowserPageType).includes(value as BrowserPageType)) {
+    return value as BrowserPageType;
+  }
+  return undefined;
+};
+
+export interface BrowserPageParams {
+  instanceName: string;
+  digestFunction: DigestFunction_Value;
+  browserPageType: BrowserPageType;
+  digest: Digest;
+  otherParams: Array<string>;
+}
diff --git a/frontend/src/types/protobufTypeUrls.ts b/frontend/src/types/protobufTypeUrls.ts
new file mode 100644
index 0000000..46e7947
--- /dev/null
+++ b/frontend/src/types/protobufTypeUrls.ts
@@ -0,0 +1,8 @@
+export enum ProtobufTypeUrls {
+  AUTHENTICATION_METADATA = 'type.googleapis.com/buildbarn.auth.AuthenticationMetadata',
+  REQUEST_METADATA = 'type.googleapis.com/build.bazel.remote.execution.v2.RequestMetadata',
+  POSIX_RESOURCE_USAGE = 'type.googleapis.com/buildbarn.resourceusage.POSIXResourceUsage',
+  FILE_POOL_RESOURCE_USAGE = 'type.googleapis.com/buildbarn.resourceusage.FilePoolResourceUsage',
+  INPUT_ROOT_RESOURCE_USAGE = 'type.googleapis.com/buildbarn.resourceusage.InputRootResourceUsage',
+  MONETARY_RESOURCE_USAGE = 'type.googleapis.com/buildbarn.resourceusage.MonetaryResourceUsage',
+}
diff --git a/frontend/src/utils/bloomFilter.test.ts b/frontend/src/utils/bloomFilter.test.ts
new file mode 100644
index 0000000..a8db97c
--- /dev/null
+++ b/frontend/src/utils/bloomFilter.test.ts
@@ -0,0 +1,92 @@
+import { Digest } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
+import { FileSystemAccessProfile } from "@/lib/grpc-client/buildbarn/fsac/fsac";
+import { expect, test } from "vitest";
+import {
+  PATH_HASH_BASE_HASH,
+  PathHashes,
+  containsPathHashes,
+  generateFileSystemReferenceQueryParams,
+  readBloomFilter,
+} from "./bloomFilter";
+
+test("bloomFilterReader", () => {
+  expect(() => readBloomFilter(FileSystemAccessProfile.create())).toThrowError(
+    "Bloom filter is empty",
+  );
+
+  expect(() =>
+    readBloomFilter({
+      bloomFilter: Uint8Array.from([0x01]),
+      bloomFilterHashFunctions: 123,
+    }),
+  ).toThrowError("Bloom filter has zero bits");
+
+  expect(() =>
+    readBloomFilter({
+      bloomFilter: Uint8Array.from([0x12, 0x00]),
+      bloomFilterHashFunctions: 123,
+    }),
+  ).toThrowError("Bloom filter's trailing byte is not properly padded");
+});
+
+test("containsPathHashes", () => {
+  const bloomFilterReader = readBloomFilter({
+    bloomFilter: Uint8Array.from([
+      0x1d, 0xb2, 0x43, 0xf1, 0x61, 0xfa, 0x18, 0x3f,
+    ]),
+    bloomFilterHashFunctions: 11,
+  });
+
+  expect(
+    containsPathHashes(
+      bloomFilterReader,
+      new PathHashes().appendComponent("dir"),
+    ),
+  ).toBe(true);
+
+  expect(
+    containsPathHashes(
+      bloomFilterReader,
+      new PathHashes().appendComponent("file"),
+    ),
+  ).toBe(true);
+
+  expect(
+    containsPathHashes(
+      bloomFilterReader,
+      new PathHashes().appendComponent("dir").appendComponent("file"),
+    ),
+  ).toBe(true);
+
+  expect(
+    containsPathHashes(
+      bloomFilterReader,
+      new PathHashes().appendComponent("nonexistent"),
+    ),
+  ).toBe(false);
+});
+
+test("generateFileSystemReferenceQueryParams", () => {
+  expect(generateFileSystemReferenceQueryParams(undefined)).toBeUndefined();
+
+  expect(
+    generateFileSystemReferenceQueryParams({
+      digest: Digest.create({
+        hash: "01234",
+        sizeBytes: "999",
+      }),
+      pathHashesBaseHash: "56789",
+    }),
+  ).toEqual({
+    fileSystemAccessProfile:
+      "%7B%22digest%22%3A%7B%22hash%22%3A%2201234%22%2C%22sizeBytes%22%3A%22999%22%7D%2C%22pathHashesBaseHash%22%3A%2256789%22%7D",
+  });
+});
+
+test("PathHashes", () => {
+  expect(new PathHashes().baseHash).toEqual(BigInt(PATH_HASH_BASE_HASH));
+
+  expect(new PathHashes(BigInt("123456789")).baseHash).toEqual(
+    BigInt("123456789"),
+  );
+});
diff --git a/frontend/src/utils/bloomFilter.ts b/frontend/src/utils/bloomFilter.ts
new file mode 100644
index 0000000..6ead7de
--- /dev/null
+++ b/frontend/src/utils/bloomFilter.ts
@@ -0,0 +1,121 @@
+import type { ParsedUrlQueryInput } from "node:querystring";
+import type { FileSystemAccessProfile } from "@/lib/grpc-client/buildbarn/fsac/fsac";
+import { FileSystemAccessProfileReference } from "@/lib/grpc-client/buildbarn/query/query";
+
+export interface BloomFilterReader {
+  bloomFilter: Uint8Array;
+  sizeBits: number;
+  hashFunctions: number;
+}
+
+const MAXIMUM_HASH_FUNCTIONS = 1000;
+const FNV1A_PRIME = BigInt("1099511628211");
+const SLASH_UNICODE_VALUE = BigInt("/".charCodeAt(0));
+export const PATH_HASH_BASE_HASH = "14695981039346656037";
+
+const countLeadingZeros = (byte: number) => {
+  if (byte > 0xff) {
+    throw new Error("Input is larger than a byte");
+  }
+
+  // This function is equivalent to the function `LeadingZeros8`:
+  // Return value equal to 8 minus the minimum number of bits
+  // required to represent `byte`, or 8 if `byte` == 0.
+  // https://pkg.go.dev/math/bits#LeadingZeros8
+  return byte === 0 ? 8 : 8 - byte.toString(2).length;
+};
+
+const getNextHash = (hash: bigint) => {
+  return BigInt.asUintN(64, (hash ^ SLASH_UNICODE_VALUE) * FNV1A_PRIME);
+};
+
+export const readBloomFilter = (
+  fsacProfile: FileSystemAccessProfile,
+): BloomFilterReader => {
+  const bloomFilter = fsacProfile.bloomFilter;
+  let hashFunctions = fsacProfile.bloomFilterHashFunctions;
+
+  const lastByte = bloomFilter.at(bloomFilter.length - 1);
+  if (lastByte === undefined) {
+    throw new Error("Bloom filter is empty");
+  }
+
+  const leadingZeros: number = countLeadingZeros(lastByte);
+  if (leadingZeros > 7) {
+    throw new Error("Bloom filter's trailing byte is not properly padded");
+  }
+
+  const sizeBits: number = bloomFilter.length * 8 - leadingZeros - 1;
+  if (sizeBits === 0) {
+    throw new Error("Bloom filter has zero bits");
+  }
+
+  if (hashFunctions > MAXIMUM_HASH_FUNCTIONS) {
+    hashFunctions = MAXIMUM_HASH_FUNCTIONS;
+  }
+
+  return {
+    bloomFilter: bloomFilter,
+    sizeBits: sizeBits,
+    hashFunctions: hashFunctions,
+  };
+};
+
+export const containsPathHashes = (
+  r: BloomFilterReader,
+  pathHashes: PathHashes,
+): boolean => {
+  let iterHash = pathHashes.baseHash;
+  for (let i = 0; i < r.hashFunctions; ++i) {
+    const bit = Number(iterHash % BigInt(r.sizeBits));
+
+    if ((r.bloomFilter[Math.floor(bit >> 3)] & (1 << (bit % 8))) === 0) {
+      return false;
+    }
+    iterHash = getNextHash(iterHash);
+  }
+  return true;
+};
+
+export const generateFileSystemReferenceQueryParams = (
+  fileSystemAccessProfileReference:
+    | FileSystemAccessProfileReference
+    | undefined,
+  pathHashes?: PathHashes,
+): ParsedUrlQueryInput | undefined => {
+  if (fileSystemAccessProfileReference === undefined) {
+    return undefined;
+  }
+
+  let newPathHash = pathHashes;
+
+  if (newPathHash === undefined) {
+    newPathHash = new PathHashes(
+      BigInt(fileSystemAccessProfileReference.pathHashesBaseHash),
+    );
+  }
+
+  return {
+    fileSystemAccessProfile: JSON.stringify(
+      FileSystemAccessProfileReference.toJSON({
+        digest: fileSystemAccessProfileReference.digest,
+        pathHashesBaseHash: newPathHash.baseHash.toString(),
+      }),
+    ),
+  };
+};
+
+export class PathHashes {
+  baseHash: bigint;
+  constructor(baseHash?: bigint) {
+    // Use ?? rather than a truthiness check so that an explicit base
+    // hash of 0n is preserved instead of falling back to the default.
+    this.baseHash = baseHash ?? BigInt(PATH_HASH_BASE_HASH);
+  }
+
+  appendComponent(name: string): PathHashes {
+    let hash = (this.baseHash ^ SLASH_UNICODE_VALUE) * FNV1A_PRIME;
+    for (const c of name) {
+      hash = (hash ^ BigInt(c.charCodeAt(0))) * FNV1A_PRIME;
+    }
+    return new PathHashes(BigInt.asUintN(64, hash));
+  }
+}
diff --git a/frontend/src/utils/digestFunctionUtils.ts b/frontend/src/utils/digestFunctionUtils.ts
index 7d0df90..0855d55 100644
--- a/frontend/src/utils/digestFunctionUtils.ts
+++ b/frontend/src/utils/digestFunctionUtils.ts
@@ -1,6 +1,12 @@
+// biome-ignore lint/style/useNodejsImportProtocol: This feature is only available in Node version 23.8+
+import { createHash } from "crypto";
 import {
-  DigestFunction_Value, digestFunction_ValueFromJSON,
-  digestFunction_ValueToJSON
+  Action,
+  Digest,
+  DigestFunction_Value,
+  type Platform,
+  digestFunction_ValueFromJSON,
+  digestFunction_ValueToJSON,
 } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
 
 export const digestFunctionValueFromString = (
@@ -28,3 +34,23 @@ export const includeDigestFunctionInCasFetch = (
     DigestFunction_Value.VSO,
   ].includes(digestFunction);
 };
+
+// Currently we only support SHA256, as some of the other
+// algorithms are difficult to implement in Node.
+// TODO: Handle different types of algorithms.
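+//
+// The "reduced" Action digest computed below is the lookup key used by
+// the Initial Size Class Cache and the File System Access Cache (see
+// iscc.proto and fsac.proto in this change): only command_digest and
+// platform are retained before hashing, so the input root contents,
+// timeout and do_not_cache flag do not influence the key.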
+export const getReducedActionDigest_SHA256 = (
+  commandDigest: Digest,
+  platform: Platform,
+): Digest => {
+  const encodedReducedAction = Action.encode(
+    Action.fromPartial({
+      commandDigest: commandDigest,
+      platform: platform,
+    }),
+  ).finish();
+
+  return Digest.create({
+    hash: createHash("sha256").update(encodedReducedAction).digest("hex"),
+    sizeBytes: encodedReducedAction.length.toString(),
+  });
+};
diff --git a/frontend/src/utils/fetchCasObject.ts b/frontend/src/utils/fetchCasObject.ts
new file mode 100644
index 0000000..93b9932
--- /dev/null
+++ b/frontend/src/utils/fetchCasObject.ts
@@ -0,0 +1,64 @@
+import type {
+  Digest,
+  DigestFunction_Value,
+} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
+import type { ByteStreamClient } from "@/lib/grpc-client/google/bytestream/bytestream";
+import {
+  digestFunctionValueToString,
+  includeDigestFunctionInCasFetch,
+} from "@/utils/digestFunctionUtils";
+import { protobufToObject } from "@/utils/protobufToObject";
+
+export const fetchCasObject = async (
+  casByteStreamClient: ByteStreamClient,
+  instanceName: string | undefined,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+): Promise<Uint8Array> => {
+  const resourceName = [
+    instanceName ? `/${instanceName}` : "",
+    "/blobs",
+    includeDigestFunctionInCasFetch(digestFunction)
+      ? `/${digestFunctionValueToString(digestFunction)}`
+      : "",
+    `/${digest.hash}/${digest.sizeBytes}`,
+  ].join("");
+
+  const responseStream = casByteStreamClient.read({
+    resourceName,
+    readOffset: "0",
+    readLimit: "0",
+  });
+
+  const chunks: Uint8Array[] = [];
+  for await (const chunk of responseStream) {
+    chunks.push(chunk.data);
+  }
+
+  return new Uint8Array(
+    chunks.reduce(
+      (acc: number[], chunk) => acc.concat(Array.from(chunk)),
+      [] as number[],
+    ),
+  );
+};
+
+export const fetchCasObjectAndParse = async <T>(
+  casByteStreamClient: ByteStreamClient,
+  instanceName: string | undefined,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+  objectType: {
+    decode: (input: Uint8Array) => T;
+    toJSON: (input: T) => unknown;
+  },
+): Promise<T> => {
+  const combinedChunks = await fetchCasObject(
+    casByteStreamClient,
+    instanceName,
+    digestFunction,
+    digest,
+  );
+
+  return protobufToObject(objectType, combinedChunks, true);
+};
diff --git a/frontend/src/utils/getBbClientdPath.ts b/frontend/src/utils/getBbClientdPath.ts
new file mode 100644
index 0000000..7f0ccd3
--- /dev/null
+++ b/frontend/src/utils/getBbClientdPath.ts
@@ -0,0 +1,16 @@
+import type {
+  Digest,
+  DigestFunction_Value,
+} from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
+import { digestFunctionValueToString } from "./digestFunctionUtils";
+
+export function getBBClientdPath(
+  instanceName: string,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+  blobType: string,
+): string {
+  return `~/bb_clientd/cas/${instanceName}/blobs/${digestFunctionValueToString(
+    digestFunction,
+  )}/${blobType}/${digest.hash}-${digest.sizeBytes}`;
+}
diff --git a/frontend/src/utils/parseBrowserPageSlug.test.ts b/frontend/src/utils/parseBrowserPageSlug.test.ts
new file mode 100644
index 0000000..8e580e8
--- /dev/null
+++ b/frontend/src/utils/parseBrowserPageSlug.test.ts
@@ -0,0 +1,101 @@
+import { describe, expect, it } from "vitest";
+import { parseBrowserPageSlug } from "./parseBrowserPageSlug";
+
+describe("parseBrowserPageSlug", () => {
+  it('should return undefined if "blobs" is not in the slug', () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "not-blobs",
+      "sha256",
+      "action",
+      "hash-size",
+      "other",
+      "params",
+    ]);
+    expect(result).toBeUndefined();
+  });
+
+  it('should return undefined if "blobs" is at the end of the slug', () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "sha256",
+      "action",
+      "hash-size",
+      "other",
+      "params",
+      "blobs",
+    ]);
+    expect(result).toBeUndefined();
+  });
+
+  it("should return undefined if instanceName is empty", () => {
+    const result = parseBrowserPageSlug([
+      "blobs",
+      "sha256",
+      "action",
+      "hash-size",
+    ]);
+    expect(result).toBeUndefined();
+  });
+
+  it("should return undefined if browserPageType is undefined", () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "blobs",
+      "sha256",
+      "invalidType",
+      "hash-size",
+    ]);
+    expect(result).toBeUndefined();
+  });
+
+  it("should return undefined if digest or sizeBytes is missing", () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "blobs",
+      "sha256",
+      "action",
+      "digest",
+    ]);
+    expect(result).toBeUndefined();
+  });
+
+  it("should parse valid slug correctly", () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "blobs",
+      "sha256",
+      "action",
+      "hash-size",
+      "other",
+      "params",
+    ]);
+    expect(result).toEqual({
+      instanceName: "instance",
+      digestFunction: 1,
+      browserPageType: "action",
+      digest: { hash: "hash", sizeBytes: "size" },
+      otherParams: ["other", "params"],
+    });
+  });
+
+  it("should parse valid slug with instance name with slashes correctly", () => {
+    const result = parseBrowserPageSlug([
+      "instance",
+      "name",
+      "with",
+      "slashes",
+      "blobs",
+      "sha256",
+      "action",
+      "hash-size",
+    ]);
+    expect(result).toEqual({
+      instanceName: "instance/name/with/slashes",
+      digestFunction: 1,
+      browserPageType: "action",
+      digest: { hash: "hash", sizeBytes: "size" },
+      otherParams: [],
+    });
+  });
+});
diff --git a/frontend/src/utils/parseBrowserPageSlug.ts b/frontend/src/utils/parseBrowserPageSlug.ts
new file mode 100644
index 0000000..d344ae6
--- /dev/null
+++ b/frontend/src/utils/parseBrowserPageSlug.ts
@@ -0,0 +1,42 @@
+import {
+  type BrowserPageParams,
+  getBrowserPageTypeFromString,
+} from "@/types/BrowserPageType";
+import { digestFunctionValueFromString } from "./digestFunctionUtils";
+
+export const parseBrowserPageSlug = (
+  slug: Array<string>,
+): BrowserPageParams | undefined => {
+  const blobIndex = slug.indexOf("blobs");
+  if (blobIndex === -1 || blobIndex + 3 >= slug.length) {
+    return undefined;
+  }
+
+  const instanceName = slug.slice(0, blobIndex).join("/");
+  const digestFunction = digestFunctionValueFromString(slug[blobIndex + 1]);
+  const browserPageType = getBrowserPageTypeFromString(slug[blobIndex + 2]);
+
+  if (
+    instanceName === "" ||
+    digestFunction === undefined ||
+    browserPageType === undefined
+  ) {
+    return undefined;
+  }
+
+  const hashAndSize = slug[blobIndex + 3];
+  const [hash, sizeBytes] = hashAndSize.split("-");
+
+  if (!hash || !sizeBytes) {
+    return undefined;
+  }
+
+  const otherParams = slug.slice(blobIndex + 4);
+  return {
+    instanceName,
+    digestFunction,
+    browserPageType,
+    digest: { hash, sizeBytes },
+    otherParams,
+  };
+};
diff --git a/frontend/src/utils/protobufToObject.ts b/frontend/src/utils/protobufToObject.ts
new file mode 100644
index 0000000..5a292bc
--- /dev/null
+++ b/frontend/src/utils/protobufToObject.ts
@@ -0,0 +1,68 @@
+import { RequestMetadata } from "@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution";
+import { AuthenticationMetadata } from "@/lib/grpc-client/buildbarn/auth/auth";
+import {
+  FilePoolResourceUsage,
+  InputRootResourceUsage,
+  MonetaryResourceUsage,
+  POSIXResourceUsage,
+} from "@/lib/grpc-client/buildbarn/resourceusage/resourceusage";
+import type { Any } from "@/lib/grpc-client/google/protobuf/any";
+import { ProtobufTypeUrls } from "@/types/protobufTypeUrls";
+
+export function protobufToObject<T>(
+  objectType: {
+    decode: (input: Uint8Array) => T;
+    toJSON: (input: T) => unknown;
+  },
+  protobuf: Uint8Array,
+  keepDefaultValues: boolean,
+): T {
+  if (keepDefaultValues) {
+    return objectType.decode(protobuf);
+  }
+  return objectType.toJSON(objectType.decode(protobuf)) as T;
+}
+
+export function protobufToObjectWithTypeField(
+  protobuf: Any,
+  keepDefaultValues: boolean,
+): unknown {
+  const typeUrl = protobuf.typeUrl;
+  const value = protobuf.value;
+
+  switch (typeUrl) {
+    case ProtobufTypeUrls.AUTHENTICATION_METADATA:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(AuthenticationMetadata, value, keepDefaultValues),
+      };
+    case ProtobufTypeUrls.REQUEST_METADATA:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(RequestMetadata, value, keepDefaultValues),
+      };
+    case ProtobufTypeUrls.POSIX_RESOURCE_USAGE:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(POSIXResourceUsage, value, keepDefaultValues),
+      };
+    case ProtobufTypeUrls.FILE_POOL_RESOURCE_USAGE:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(FilePoolResourceUsage, value, keepDefaultValues),
+      };
+    case ProtobufTypeUrls.INPUT_ROOT_RESOURCE_USAGE:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(InputRootResourceUsage, value, keepDefaultValues),
+      };
+    case ProtobufTypeUrls.MONETARY_RESOURCE_USAGE:
+      return {
+        "@type": typeUrl,
+        ...protobufToObject(MonetaryResourceUsage, value, keepDefaultValues),
+      };
+    default:
+      console.error(`Unknown typeUrl: ${typeUrl}`);
+      return {};
+  }
+}
diff --git a/frontend/src/utils/urlGenerator.ts b/frontend/src/utils/urlGenerator.ts
new file mode 100644
index 0000000..23d3fde
--- /dev/null
+++ b/frontend/src/utils/urlGenerator.ts
@@ -0,0 +1,43 @@
+import {
+  Digest,
+  DigestFunction_Value,
+} from '@/lib/grpc-client/build/bazel/remote/execution/v2/remote_execution';
+import { env } from 'next-runtime-env';
+import { digestFunctionValueToString } from './digestFunctionUtils';
+
+export function generateFileUrl(
+  instanceName: string,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+  fileName: string,
+): string {
+  return `${env(
+    'NEXT_PUBLIC_BES_BACKEND_URL',
+  )}/api/servefile/${instanceName}/blobs/${digestFunctionValueToString(
+    digestFunction,
+  )}/file/${digest.hash}-${digest.sizeBytes}/${fileName}`;
+}
+
+export function generateCommandShellScriptUrl(
+  instanceName: string,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+): string {
+  return `${env(
+    'NEXT_PUBLIC_BES_BACKEND_URL',
+  )}/api/servefile/${instanceName}/blobs/${digestFunctionValueToString(
+    digestFunction,
+  )}/command/${digest.hash}-${digest.sizeBytes}/?format=sh`;
+}
+
+export function generateDirectoryTarballUrl(
+  instanceName: string,
+  digestFunction: DigestFunction_Value,
+  digest: Digest,
+): string {
+  return `${env(
+    'NEXT_PUBLIC_BES_BACKEND_URL',
+  )}/api/servefile/${instanceName}/blobs/${digestFunctionValueToString(
+    digestFunction,
+  )}/directory/${digest.hash}-${digest.sizeBytes}/?format=tar`;
+}
diff --git a/go.mod b/go.mod
index 66ae085..5fab088 100644
--- a/go.mod
+++ b/go.mod
@@ -9,6 +9,7 @@ require (
 	entgo.io/ent v0.13.1
 	github.com/99designs/gqlgen v0.17.55
 	github.com/bazelbuild/buildtools v0.0.0-20240918101019-be1c24cc9a44
+
github.com/bazelbuild/remote-apis v0.0.0-20240924123010-a6328f5026d3 github.com/bazelbuild/remote-apis-sdks v0.0.0-20240522145720-89b6d6b399ad github.com/buildbarn/bb-remote-execution v0.0.0-20250201092335-31d23d1a2b0c github.com/buildbarn/bb-storage v0.0.0-20241007042721-0941111f29e3 @@ -29,6 +30,7 @@ require ( golang.org/x/sync v0.8.0 google.golang.org/api v0.198.0 google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 + google.golang.org/genproto/googleapis/bytestream v0.0.0-20240903143218-8af14fe29dc1 google.golang.org/grpc v1.67.0 google.golang.org/protobuf v1.34.2 mvdan.cc/gofumpt v0.7.0 @@ -36,25 +38,53 @@ require ( require ( ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43 // indirect + cloud.google.com/go v0.115.1 // indirect + cloud.google.com/go/auth v0.9.4 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect cloud.google.com/go/compute/metadata v0.5.2 // indirect + cloud.google.com/go/iam v1.2.1 // indirect cloud.google.com/go/longrunning v0.6.1 // indirect + cloud.google.com/go/storage v1.43.0 // indirect github.com/agext/levenshtein v1.2.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/aohorodnyk/mimeheader v0.0.6 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect - github.com/bazelbuild/remote-apis v0.0.0-20240924123010-a6328f5026d3 // indirect + github.com/aws/aws-sdk-go-v2 v1.31.0 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.5 // indirect + github.com/aws/aws-sdk-go-v2/config v1.27.37 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.35 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.14 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.18 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.18 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.18 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.5 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.20 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.20 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.18 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.63.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.23.1 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.31.1 // indirect + github.com/aws/smithy-go v1.21.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cenkalti/backoff/v4 v4.1.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/fxtlabs/primes v0.0.0-20150821004651-dad82d10a449 // indirect github.com/go-jose/go-jose/v3 v3.0.3 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/inflect v0.19.0 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-jsonnet v0.20.0 // indirect + github.com/google/s2a-go v0.1.8 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect + github.com/googleapis/gax-go/v2 v2.13.0 // indirect 
github.com/gorilla/websocket v1.5.0 // indirect github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect @@ -65,7 +95,9 @@ require ( github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect github.com/klauspost/compress v1.17.10 // indirect + github.com/lazybeaver/xorshift v0.0.0-20170702203709-ce511d4823dd // indirect github.com/matryer/is v1.4.1 // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect @@ -81,7 +113,9 @@ require ( github.com/vmihailenco/msgpack/v5 v5.0.0-beta.9 // indirect github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/zclconf/go-cty v1.8.0 // indirect + go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.55.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 // indirect go.opentelemetry.io/contrib/propagators/b3 v1.30.0 // indirect go.opentelemetry.io/otel v1.30.0 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect @@ -97,6 +131,7 @@ require ( golang.org/x/oauth2 v0.23.0 // indirect golang.org/x/sys v0.25.0 // indirect golang.org/x/text v0.18.0 // indirect + golang.org/x/time v0.6.0 // indirect golang.org/x/tools v0.25.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect diff --git a/go.sum b/go.sum index d801ac2..7f36b99 100644 --- a/go.sum +++ b/go.sum @@ -30,6 +30,12 @@ cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.115.1 h1:Jo0SM9cQnSkYfp44+v+NQXHpcHqlnRJk2qxh6yvxxxQ= +cloud.google.com/go v0.115.1/go.mod h1:DuujITeaufu3gL68/lOFIirVNJwQeyf5UXyi+Wbgknc= +cloud.google.com/go/auth v0.9.4 h1:DxF7imbEbiFu9+zdKC6cKBko1e8XeJnipNqIbWZ+kDI= +cloud.google.com/go/auth v0.9.4/go.mod h1:SHia8n6//Ya940F1rLimhJCjjx7KE17t0ctFEci3HkA= +cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY= +cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -41,6 +47,8 @@ cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/yb cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= +cloud.google.com/go/iam v1.2.1 h1:QFct02HRb7H12J/3utj0qf5tobFh9V4vR6h9eX5EBRU= +cloud.google.com/go/iam v1.2.1/go.mod h1:3VUIJDPpwT6p/amXRC5GY8fCCh70lxPygguVtI0Z4/g= cloud.google.com/go/longrunning v0.6.1 
h1:lOLTFxYpr8hcRtcwWir5ITh1PAKUD/sG2lKrTSYjyMc= cloud.google.com/go/longrunning v0.6.1/go.mod h1:nHISoOZpBcmlwbJmiVk5oDRz0qG/ZxPynEGs1iZ79s0= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= @@ -52,6 +60,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs= +cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= entgo.io/contrib v0.5.0 h1:M4IqodImfUm327RDwNAITLNz3PsxVeC3rD4DPeVA8Gs= entgo.io/contrib v0.5.0/go.mod h1:q8dXQCmzqpSlVdT2bWDydjgznGcy3y4zmsYmVFC9V/U= @@ -106,6 +116,42 @@ github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6l github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/aws/aws-sdk-go-v2 v1.31.0 h1:3V05LbxTSItI5kUqNwhJrrrY1BAXxXt0sN0l72QmG5U= +github.com/aws/aws-sdk-go-v2 v1.31.0/go.mod h1:ztolYtaEUtdpf9Wftr31CJfLVjOnD/CVRkKOOYgF8hA= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.5 h1:xDAuZTn4IMm8o1LnBZvmrL8JA1io4o3YWNXgohbf20g= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.5/go.mod h1:wYSv6iDS621sEFLfKvpPE2ugjTuGlAG7iROg0hLOkfc= +github.com/aws/aws-sdk-go-v2/config v1.27.37 h1:xaoIwzHVuRWRHFI0jhgEdEGc8xE1l91KaeRDsWEIncU= +github.com/aws/aws-sdk-go-v2/config v1.27.37/go.mod h1:S2e3ax9/8KnMSyRVNd3sWTKs+1clJ2f1U6nE0lpvQRg= +github.com/aws/aws-sdk-go-v2/credentials v1.17.35 h1:7QknrZhYySEB1lEXJxGAmuD5sWwys5ZXNr4m5oEz0IE= +github.com/aws/aws-sdk-go-v2/credentials v1.17.35/go.mod h1:8Vy4kk7at4aPSmibr7K+nLTzG6qUQAUO4tW49fzUV4E= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.14 h1:C/d03NAmh8C4BZXhuRNboF/DqhBkBCeDiJDcaqIT5pA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.14/go.mod h1:7I0Ju7p9mCIdlrfS+JCgqcYD0VXz/N4yozsox+0o078= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.18 h1:kYQ3H1u0ANr9KEKlGs/jTLrBFPo8P8NaH/w7A01NeeM= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.18/go.mod h1:r506HmK5JDUh9+Mw4CfGJGSSoqIiLCndAuqXuhbv67Y= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.18 h1:Z7IdFUONvTcvS7YuhtVxN99v2cCoHRXOS4mTr0B/pUc= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.18/go.mod h1:DkKMmksZVVyat+Y+r1dEOgJEfUeA7UngIHWeKsi0yNc= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.18 h1:OWYvKL53l1rbsUmW7bQyJVsYU/Ii3bbAAQIIFNbM0Tk= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.18/go.mod h1:CUx0G1v3wG6l01tUB+j7Y8kclA8NSqK4ef0YG79a4cg= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.5 h1:QFASJGfT8wMXtuP3D5CRmMjARHv9ZmzFUMJznHDOY3w= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.5/go.mod h1:QdZ3OmoIjSX+8D1OPAzPxDfjXASbBMDsz9qvtyIhtik= 
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.20 h1:rTWjG6AvWekO2B1LHeM3ktU7MqyX9rzWQ7hgzneZW7E= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.20/go.mod h1:RGW2DDpVc8hu6Y6yG8G5CHVmVOAn1oV8rNKOHRJyswg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.20 h1:Xbwbmk44URTiHNx6PNo0ujDE6ERlsCKJD3u1zfnzAPg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.20/go.mod h1:oAfOFzUB14ltPZj1rWwRc3d/6OgD76R8KlvU3EqM9Fg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.18 h1:eb+tFOIl9ZsUe2259/BKPeniKuz4/02zZFH/i4Nf8Rg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.18/go.mod h1:GVCC2IJNJTmdlyEsSmofEy7EfJncP7DNnXDzRjJ5Keg= +github.com/aws/aws-sdk-go-v2/service/s3 v1.63.1 h1:TR96r56VwELV0qguNFCuz+/bEpRfnR3ZsS9/IG05C7Q= +github.com/aws/aws-sdk-go-v2/service/s3 v1.63.1/go.mod h1:NLTqRLe3pUNu3nTEHI6XlHLKYmc8fbHUdMxAB6+s41Q= +github.com/aws/aws-sdk-go-v2/service/sso v1.23.1 h1:2jrVsMHqdLD1+PA4BA6Nh1eZp0Gsy3mFSB5MxDvcJtU= +github.com/aws/aws-sdk-go-v2/service/sso v1.23.1/go.mod h1:XRlMvmad0ZNL+75C5FYdMvbbLkd6qiqz6foR1nA1PXY= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.1 h1:0L7yGCg3Hb3YQqnSgBTZM5wepougtL1aEccdcdYhHME= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.1/go.mod h1:FnvDM4sfa+isJ3kDXIzAB9GAwVSzFzSy97uZ3IsHo4E= +github.com/aws/aws-sdk-go-v2/service/sts v1.31.1 h1:8K0UNOkZiK9Uh3HIF6Bx0rcNCftqGCeKmOaR7Gp5BSo= +github.com/aws/aws-sdk-go-v2/service/sts v1.31.1/go.mod h1:yMWe0F+XG0DkRZK5ODZhG7BEFYhLXi2dqGsv6tX0cgI= +github.com/aws/smithy-go v1.21.0 h1:H7L8dtDRk0P1Qm6y0ji7MCYMQObJ5R9CRpyPhRUkLYA= +github.com/aws/smithy-go v1.21.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= github.com/bazelbuild/buildtools v0.0.0-20240918101019-be1c24cc9a44 h1:FGzENZi+SX9I7h9xvMtRA3rel8hCEfyzSixteBgn7MU= github.com/bazelbuild/buildtools v0.0.0-20240918101019-be1c24cc9a44/go.mod h1:PLNUetjLa77TCCziPsz0EI8a6CUxgC+1jgmWv0H25tg= github.com/bazelbuild/remote-apis v0.0.0-20240924123010-a6328f5026d3 h1:gLdOIZMdBPwLnJmj8WmqQpldVvoUzLbbldJtYhZ5WbA= @@ -188,12 +234,16 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/fxtlabs/primes v0.0.0-20150821004651-dad82d10a449 h1:HOYnhuVrhAVGKdg3rZapII640so7QfXQmkLkefUN/uM= +github.com/fxtlabs/primes v0.0.0-20150821004651-dad82d10a449/go.mod h1:i+vbdOOivRRh2j+WwBkjZXloGN/+KAqfKDwNfUJeugc= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 
h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= @@ -249,6 +299,7 @@ github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -301,10 +352,13 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/go-jsonnet v0.20.0 h1:WG4TTSARuV7bSm4PMB4ohjxe33IHT5WVTrJSU33uT4g= github.com/google/go-jsonnet v0.20.0/go.mod h1:VbgWF9JX7ztlv770x/TolZNGGFfiHEVx9G6ca2eUmeA= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -320,15 +374,21 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= +github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw= +github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod 
h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.13.0 h1:yitjD5f7jQHhyDsnhKEBU52NdvvdSeGzlAnDPT0hH1s= +github.com/googleapis/gax-go/v2 v2.13.0/go.mod h1:Z/fvTZXF8/uw7Xu5GuslPw+bplx6SS338j1Is2S+B7A= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= @@ -435,6 +495,8 @@ github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/X github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= @@ -458,6 +520,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lazybeaver/xorshift v0.0.0-20170702203709-ce511d4823dd h1:TfmftEfB1zJiDTFi3Qw1xlbEbfJPKUhEDC19clfBMb8= +github.com/lazybeaver/xorshift v0.0.0-20170702203709-ce511d4823dd/go.mod h1:qXyNSomGEqu0M7ewNl3CLgle09PFHk8++5NrBWCz7+Q= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= @@ -642,6 +706,8 @@ github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5J github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -651,6 +717,9 @@ github.com/stretchr/testify v1.5.1/go.mod 
h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= @@ -697,8 +766,12 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.55.0 h1:hCq2hNMwsegUvPzI7sPOvtO9cqyy5GbWt/Ybp2xrx8Q= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.55.0/go.mod h1:LqaApwGx/oUmzsbqxkzuBvyoPpkxk3JQWnqfVrJ3wCA= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 h1:ZIg3ZT/aQ7AfKqdwp7ECpOK6vHqquXXuyTjIO8ZdmPs= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0/go.mod h1:DQAwmETtZV00skUwgD6+0U89g80NKsJE3DCKeLLPQMI= go.opentelemetry.io/contrib/propagators/b3 v1.30.0 h1:vumy4r1KMyaoQRltX7cJ37p3nluzALX9nugCjNNefuY= go.opentelemetry.io/contrib/propagators/b3 v1.30.0/go.mod h1:fRbvRsaeVZ82LIl3u0rIvusIel2UUf+JcaaIpy5taho= go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= @@ -989,6 +1062,8 @@ golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U= +golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1172,6 +1247,8 @@ google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 h1:BulPr26Jqjnd4eY google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:hL97c3SYopEHblzpxRL4lSs523++l8DYxGM1FQiYmb4= google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc= google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod 
h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I=
+google.golang.org/genproto/googleapis/bytestream v0.0.0-20240903143218-8af14fe29dc1 h1:W0PHii1rtgc5UgBtJif8xGePValKeZRomnuC5hatKME=
+google.golang.org/genproto/googleapis/bytestream v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:q0eWNnCW04EJlyrmLT+ZHsjuoUiZ36/eAEdCCezZoco=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
 google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
 google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
diff --git a/internal/api/grpcweb/actioncacheproxy/BUILD.bazel b/internal/api/grpcweb/actioncacheproxy/BUILD.bazel
new file mode 100644
index 0000000..54592ca
--- /dev/null
+++ b/internal/api/grpcweb/actioncacheproxy/BUILD.bazel
@@ -0,0 +1,15 @@
+load("@rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "actioncacheproxy",
+    srcs = ["server.go"],
+    importpath = "github.com/buildbarn/bb-portal/internal/api/grpcweb/actioncacheproxy",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//internal/api/grpcweb",
+        "@bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_go_proto",
+        "@com_github_buildbarn_bb_storage//pkg/auth",
+        "@org_golang_google_grpc//codes",
+        "@org_golang_google_grpc//status",
+    ],
+)
diff --git a/internal/api/grpcweb/actioncacheproxy/server.go b/internal/api/grpcweb/actioncacheproxy/server.go
new file mode 100644
index 0000000..1e4aa9f
--- /dev/null
+++ b/internal/api/grpcweb/actioncacheproxy/server.go
@@ -0,0 +1,45 @@
+package actioncacheproxy
+
+import (
+	"context"
+
+	remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2"
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb"
+	"github.com/buildbarn/bb-storage/pkg/auth"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+)
+
+// ActionCacheServerImpl is a gRPC server that forwards requests to an ActionCacheClient.
+type ActionCacheServerImpl struct {
+	client     remoteexecution.ActionCacheClient
+	authorizer auth.Authorizer
+}
+
+// NewAcctionCacheServerImpl creates a new ActionCacheServerImpl from a given client.
+func NewAcctionCacheServerImpl(client remoteexecution.ActionCacheClient, authorizer auth.Authorizer) *ActionCacheServerImpl {
+	return &ActionCacheServerImpl{client: client, authorizer: authorizer}
+}
+
+// GetActionResult proxies GetActionResult requests to the client.
+func (s *ActionCacheServerImpl) GetActionResult(ctx context.Context, req *remoteexecution.GetActionResultRequest) (*remoteexecution.ActionResult, error) {
+	if req == nil {
+		return nil, status.Errorf(codes.InvalidArgument, "Invalid request")
+	}
+
+	if !grpcweb.IsInstanceNamePrefixAllowed(ctx, s.authorizer, req.InstanceName) {
+		return nil, status.Errorf(codes.NotFound, "Not found")
+	}
+
+	response, err := s.client.GetActionResult(ctx, req)
+	if err != nil {
+		// Mask upstream errors: from the web client's point of view
+		// the action result simply does not exist.
+		return nil, status.Errorf(codes.NotFound, "Not found")
+	}
+
+	return response, nil
+}
+
+// UpdateActionResult rejects UpdateActionResult requests.
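+// The proxy is read-only: the portal's browser pages only ever look up
+// action results, so updates from gRPC-web clients are answered with
+// codes.Unimplemented instead of being forwarded.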
+func (s *ActionCacheServerImpl) UpdateActionResult(ctx context.Context, req *remoteexecution.UpdateActionResultRequest) (*remoteexecution.ActionResult, error) {
+	return nil, status.Errorf(codes.Unimplemented, "Action is not supported")
+}
diff --git a/internal/api/grpcweb/casproxy/BUILD.bazel b/internal/api/grpcweb/casproxy/BUILD.bazel
new file mode 100644
index 0000000..e84e7ab
--- /dev/null
+++ b/internal/api/grpcweb/casproxy/BUILD.bazel
@@ -0,0 +1,21 @@
+load("@rules_go//go:def.bzl", "go_library", "go_test")
+
+go_library(
+    name = "casproxy",
+    srcs = ["server.go"],
+    importpath = "github.com/buildbarn/bb-portal/internal/api/grpcweb/casproxy",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//internal/api/grpcweb",
+        "@com_github_buildbarn_bb_storage//pkg/auth",
+        "@org_golang_google_genproto_googleapis_bytestream//:bytestream",
+        "@org_golang_google_grpc//codes",
+        "@org_golang_google_grpc//status",
+    ],
+)
+
+go_test(
+    name = "casproxy_test",
+    srcs = ["server_test.go"],
+    embed = [":casproxy"],
+)
diff --git a/internal/api/grpcweb/casproxy/server.go b/internal/api/grpcweb/casproxy/server.go
new file mode 100644
index 0000000..7de5f18
--- /dev/null
+++ b/internal/api/grpcweb/casproxy/server.go
@@ -0,0 +1,69 @@
+package casproxy
+
+import (
+	"context"
+	"io"
+	"strings"
+
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb"
+	"github.com/buildbarn/bb-storage/pkg/auth"
+	"google.golang.org/genproto/googleapis/bytestream"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+)
+
+// CasServerImpl is a gRPC server that forwards requests to a ByteStreamClient.
+type CasServerImpl struct {
+	client     bytestream.ByteStreamClient
+	authorizer auth.Authorizer
+}
+
+// NewCasServerImpl creates a new CasServerImpl from a given client.
+func NewCasServerImpl(client bytestream.ByteStreamClient, authorizer auth.Authorizer) *CasServerImpl {
+	return &CasServerImpl{client: client, authorizer: authorizer}
+}
+
+// Read proxies Read requests to the client.
+func (s *CasServerImpl) Read(req *bytestream.ReadRequest, stream bytestream.ByteStream_ReadServer) error {
+	if req == nil {
+		return status.Errorf(codes.InvalidArgument, "Invalid request")
+	}
+
+	instanceName := getInstanceName(req.ResourceName)
+	if !grpcweb.IsInstanceNamePrefixAllowed(stream.Context(), s.authorizer, instanceName) {
+		return status.Errorf(codes.PermissionDenied, "Not authorized")
+	}
+
+	// Use the stream's context so that cancellation by the gRPC-web
+	// client also cancels the upstream read.
+	clientStream, err := s.client.Read(stream.Context(), req)
+	if err != nil {
+		return err
+	}
+	for {
+		resp, err := clientStream.Recv()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			return err
+		}
+		if err := stream.Send(resp); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// Write rejects Write requests; the proxy is read-only.
+func (s *CasServerImpl) Write(stream bytestream.ByteStream_WriteServer) error {
+	return status.Errorf(codes.Unimplemented, "Action is not supported")
+}
+
+// QueryWriteStatus rejects QueryWriteStatus requests.
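+// Like Write, this call is only meaningful for uploads, which the portal
+// never performs, so it is answered with codes.Unimplemented as well.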
+func (s *CasServerImpl) QueryWriteStatus(ctx context.Context, req *bytestream.QueryWriteStatusRequest) (*bytestream.QueryWriteStatusResponse, error) {
+	return nil, status.Errorf(codes.Unimplemented, "Action is not supported")
+}
+
+func getInstanceName(resourceName string) string {
+	splitString := strings.Split(resourceName, "/blobs")[0]
+	return strings.TrimPrefix(splitString, "/")
+}
diff --git a/internal/api/grpcweb/casproxy/server_test.go b/internal/api/grpcweb/casproxy/server_test.go
new file mode 100644
index 0000000..230187f
--- /dev/null
+++ b/internal/api/grpcweb/casproxy/server_test.go
@@ -0,0 +1,33 @@
+package casproxy
+
+import (
+	"testing"
+)
+
+func TestGetInstanceName(t *testing.T) {
+	tests := []struct {
+		resourceName string
+		expected     string
+	}{
+		{"/instance1/blobs/abc123", "instance1"},
+		{"instance2/blobs/abc123", "instance2"},
+		{"/instance3/blobs", "instance3"},
+		{"instance4/blobs", "instance4"},
+		{"/blobs/abc123", ""},
+		{"/blobs", ""},
+		{"", ""},
+		{"/in/stance/5/blobs/abc123", "in/stance/5"},
+		{"in/stance/6/blobs/abc123", "in/stance/6"},
+		{"/in/stance/7/blobs", "in/stance/7"},
+		{"in/stance/8/blobs", "in/stance/8"},
+	}
+
+	for _, test := range tests {
+		t.Run(test.resourceName, func(t *testing.T) {
+			result := getInstanceName(test.resourceName)
+			if result != test.expected {
+				t.Errorf("getInstanceName(%q) = %q; want %q", test.resourceName, result, test.expected)
+			}
+		})
+	}
+}
diff --git a/internal/api/grpcweb/fsacproxy/BUILD.bazel b/internal/api/grpcweb/fsacproxy/BUILD.bazel
new file mode 100644
index 0000000..4a239b9
--- /dev/null
+++ b/internal/api/grpcweb/fsacproxy/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "fsacproxy",
+    srcs = ["server.go"],
+    importpath = "github.com/buildbarn/bb-portal/internal/api/grpcweb/fsacproxy",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//internal/api/grpcweb",
+        "@com_github_buildbarn_bb_storage//pkg/auth",
+        "@com_github_buildbarn_bb_storage//pkg/proto/fsac",
+        "@org_golang_google_grpc//codes",
+        "@org_golang_google_grpc//status",
+        "@org_golang_google_protobuf//types/known/emptypb",
+    ],
+)
diff --git a/internal/api/grpcweb/fsacproxy/server.go b/internal/api/grpcweb/fsacproxy/server.go
new file mode 100644
index 0000000..a5fc455
--- /dev/null
+++ b/internal/api/grpcweb/fsacproxy/server.go
@@ -0,0 +1,40 @@
+package fsacproxy
+
+import (
+	"context"
+
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb"
+	"github.com/buildbarn/bb-storage/pkg/auth"
+	"github.com/buildbarn/bb-storage/pkg/proto/fsac"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+	"google.golang.org/protobuf/types/known/emptypb"
+)
+
+// FsacServerImpl is a gRPC server that forwards requests to a FileSystemAccessCacheClient.
+type FsacServerImpl struct {
+	client     fsac.FileSystemAccessCacheClient
+	authorizer auth.Authorizer
+}
+
+// NewFsacServerImpl creates a new FsacServerImpl from a given client.
+func NewFsacServerImpl(client fsac.FileSystemAccessCacheClient, authorizer auth.Authorizer) *FsacServerImpl {
+	return &FsacServerImpl{client: client, authorizer: authorizer}
+}
+
+// GetFileSystemAccessProfile proxies GetFileSystemAccessProfile requests to the client.
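+// The request's instance name is first checked against the configured
+// instance name authorizer, mirroring the other gRPC-web proxies.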
+func (s *FsacServerImpl) GetFileSystemAccessProfile(ctx context.Context, req *fsac.GetFileSystemAccessProfileRequest) (*fsac.FileSystemAccessProfile, error) {
+	if req == nil {
+		return nil, status.Errorf(codes.InvalidArgument, "Invalid request")
+	}
+
+	if !grpcweb.IsInstanceNamePrefixAllowed(ctx, s.authorizer, req.InstanceName) {
+		return nil, status.Errorf(codes.PermissionDenied, "Not authorized")
+	}
+	return s.client.GetFileSystemAccessProfile(ctx, req)
+}
+
+// UpdateFileSystemAccessProfile rejects UpdateFileSystemAccessProfile requests; the proxy is read-only.
+func (s *FsacServerImpl) UpdateFileSystemAccessProfile(ctx context.Context, req *fsac.UpdateFileSystemAccessProfileRequest) (*emptypb.Empty, error) {
+	return nil, status.Errorf(codes.Unimplemented, "Action is not supported")
+}
diff --git a/internal/api/grpcweb/isccproxy/BUILD.bazel b/internal/api/grpcweb/isccproxy/BUILD.bazel
new file mode 100644
index 0000000..a3f61a2
--- /dev/null
+++ b/internal/api/grpcweb/isccproxy/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "isccproxy",
+    srcs = ["server.go"],
+    importpath = "github.com/buildbarn/bb-portal/internal/api/grpcweb/isccproxy",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//internal/api/grpcweb",
+        "@com_github_buildbarn_bb_storage//pkg/auth",
+        "@com_github_buildbarn_bb_storage//pkg/proto/iscc",
+        "@org_golang_google_grpc//codes",
+        "@org_golang_google_grpc//status",
+        "@org_golang_google_protobuf//types/known/emptypb",
+    ],
+)
diff --git a/internal/api/grpcweb/isccproxy/server.go b/internal/api/grpcweb/isccproxy/server.go
new file mode 100644
index 0000000..8521bca
--- /dev/null
+++ b/internal/api/grpcweb/isccproxy/server.go
@@ -0,0 +1,40 @@
+package isccproxy
+
+import (
+	"context"
+
+	"github.com/buildbarn/bb-portal/internal/api/grpcweb"
+	"github.com/buildbarn/bb-storage/pkg/auth"
+	"github.com/buildbarn/bb-storage/pkg/proto/iscc"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+	"google.golang.org/protobuf/types/known/emptypb"
+)
+
+// IsccServerImpl is a gRPC server that forwards requests to an InitialSizeClassCacheClient.
+type IsccServerImpl struct {
+	client     iscc.InitialSizeClassCacheClient
+	authorizer auth.Authorizer
+}
+
+// NewIsccServerImpl creates a new IsccServerImpl from a given client.
+func NewIsccServerImpl(client iscc.InitialSizeClassCacheClient, authorizer auth.Authorizer) *IsccServerImpl {
+	return &IsccServerImpl{client: client, authorizer: authorizer}
+}
+
+// GetPreviousExecutionStats proxies GetPreviousExecutionStats requests to the client.
+func (s *IsccServerImpl) GetPreviousExecutionStats(ctx context.Context, req *iscc.GetPreviousExecutionStatsRequest) (*iscc.PreviousExecutionStats, error) {
+	if req == nil {
+		return nil, status.Errorf(codes.InvalidArgument, "Invalid request")
+	}
+
+	if !grpcweb.IsInstanceNamePrefixAllowed(ctx, s.authorizer, req.InstanceName) {
+		return nil, status.Errorf(codes.PermissionDenied, "Not authorized")
+	}
+	return s.client.GetPreviousExecutionStats(ctx, req)
+}
+
+// UpdatePreviousExecutionStats rejects UpdatePreviousExecutionStats requests; the proxy is read-only.
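+// Execution statistics are only read for display in the portal; writing
+// them is left to other Buildbarn components.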
+func (s *IsccServerImpl) UpdatePreviousExecutionStats(ctx context.Context, req *iscc.UpdatePreviousExecutionStatsRequest) (*emptypb.Empty, error) { + return nil, status.Errorf(codes.Unimplemented, "Action is not supported") +} diff --git a/internal/api/servefiles/BUILD.bazel b/internal/api/servefiles/BUILD.bazel new file mode 100644 index 0000000..277eb6b --- /dev/null +++ b/internal/api/servefiles/BUILD.bazel @@ -0,0 +1,28 @@ +load("@rules_go//go:def.bzl", "go_library") + +go_library( + name = "servefiles_lib", + srcs = [ + "serve_tarball.go", + "server.go", + ], + importpath = "github.com/buildbarn/bb-portal/internal/api/servefiles", + visibility = ["//:__subpackages__"], + deps = [ + "//pkg/proto/configuration/bb_portal", + "@bazel_remote_apis//build/bazel/remote/execution/v2:remote_execution_go_proto", + "@com_github_buildbarn_bb_remote_execution//pkg/builder", + "@com_github_buildbarn_bb_storage//pkg/auth", + "@com_github_buildbarn_bb_storage//pkg/blobstore", + "@com_github_buildbarn_bb_storage//pkg/blobstore/configuration", + "@com_github_buildbarn_bb_storage//pkg/digest", + "@com_github_buildbarn_bb_storage//pkg/filesystem/path", + "@com_github_buildbarn_bb_storage//pkg/grpc", + "@com_github_buildbarn_bb_storage//pkg/program", + "@com_github_buildbarn_bb_storage//pkg/util", + "@com_github_gorilla_mux//:mux", + "@org_golang_google_grpc//codes", + "@org_golang_google_grpc//metadata", + "@org_golang_google_grpc//status", + ], +) diff --git a/internal/api/servefiles/serve_tarball.go b/internal/api/servefiles/serve_tarball.go new file mode 100644 index 0000000..a26630d --- /dev/null +++ b/internal/api/servefiles/serve_tarball.go @@ -0,0 +1,182 @@ +package servefiles + +import ( + "archive/tar" + "compress/gzip" + "context" + "fmt" + "log" + "net/http" + + remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2" + "github.com/buildbarn/bb-storage/pkg/digest" + "github.com/buildbarn/bb-storage/pkg/filesystem/path" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (s FileServerService) generateTarballDirectory(ctx context.Context, w *tar.Writer, digestFunction digest.Function, directory *remoteexecution.Directory, directoryPath *path.Trace, getDirectory func(context.Context, digest.Digest) (*remoteexecution.Directory, error), filesSeen map[string]string) error { + // Emit child directories. + for _, directoryNode := range directory.Directories { + childName, ok := path.NewComponent(directoryNode.Name) + if !ok { + return status.Errorf(codes.InvalidArgument, "Directory %#v in directory %#v has an invalid name", directoryNode.Name, directoryPath.GetUNIXString()) + } + childPath := directoryPath.Append(childName) + + if err := w.WriteHeader(&tar.Header{ + Typeflag: tar.TypeDir, + Name: childPath.GetUNIXString(), + Mode: 0o777, + }); err != nil { + return err + } + childDigest, err := digestFunction.NewDigestFromProto(directoryNode.Digest) + if err != nil { + return err + } + childDirectory, err := getDirectory(ctx, childDigest) + if err != nil { + return err + } + if err := s.generateTarballDirectory(ctx, w, digestFunction, childDirectory, childPath, getDirectory, filesSeen); err != nil { + return err + } + } + + // Emit symlinks. 
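+	// A symbolic link only occupies a tar header: the target is recorded
+	// verbatim in Linkname and is not resolved against the input root.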
+ for _, symlinkNode := range directory.Symlinks { + childName, ok := path.NewComponent(symlinkNode.Name) + if !ok { + return status.Errorf(codes.InvalidArgument, "Symbolic link %#v in directory %#v has an invalid name", symlinkNode.Name, directoryPath.GetUNIXString()) + } + childPath := directoryPath.Append(childName) + + if err := w.WriteHeader(&tar.Header{ + Typeflag: tar.TypeSymlink, + Name: childPath.GetUNIXString(), + Linkname: symlinkNode.Target, + Mode: 0o777, + }); err != nil { + return err + } + } + + // Emit regular files. + for _, fileNode := range directory.Files { + childName, ok := path.NewComponent(fileNode.Name) + if !ok { + return status.Errorf(codes.InvalidArgument, "File %#v in directory %#v has an invalid name", fileNode.Name, directoryPath.GetUNIXString()) + } + childPath := directoryPath.Append(childName) + childPathString := childPath.GetUNIXString() + + childDigest, err := digestFunction.NewDigestFromProto(fileNode.Digest) + if err != nil { + return err + } + + childKey := childDigest.GetKey(digest.KeyWithoutInstance) + if fileNode.IsExecutable { + childKey += "+x" + } else { + childKey += "-x" + } + + if linkPath, ok := filesSeen[childKey]; ok { + // This file was already returned previously. + // Emit a hardlink pointing to the first + // occurrence. + // + // Not only does this reduce the size of the + // tarball, it also makes the directory more + // representative of what it looks like when + // executed through bb_worker. + if err := w.WriteHeader(&tar.Header{ + Typeflag: tar.TypeLink, + Name: childPathString, + Linkname: linkPath, + }); err != nil { + return err + } + } else { + // This is the first time we're returning this + // file. Actually add it to the archive. + mode := int64(0o666) + if fileNode.IsExecutable { + mode = 0o777 + } + if err := w.WriteHeader(&tar.Header{ + Typeflag: tar.TypeReg, + Name: childPathString, + Size: fileNode.Digest.SizeBytes, + Mode: mode, + }); err != nil { + return err + } + + if err := s.contentAddressableStorage.Get(ctx, childDigest).IntoWriter(w); err != nil { + return err + } + + filesSeen[childKey] = childPathString + } + } + return nil +} + +func (s FileServerService) generateTarball(ctx context.Context, w http.ResponseWriter, digest digest.Digest, directory *remoteexecution.Directory, getDirectory func(context.Context, digest.Digest) (*remoteexecution.Directory, error)) { + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.tar.gz\"", digest.GetHashString())) + w.Header().Set("Content-Type", "application/gzip") + gzipWriter := gzip.NewWriter(w) + tarWriter := tar.NewWriter(gzipWriter) + filesSeen := map[string]string{} + if err := s.generateTarballDirectory(ctx, tarWriter, digest.GetDigestFunction(), directory, nil, getDirectory, filesSeen); err != nil { + // TODO(edsch): Any way to propagate this to the client? + log.Print(err) + panic(http.ErrAbortHandler) + } + if err := tarWriter.Close(); err != nil { + log.Print(err) + panic(http.ErrAbortHandler) + } + if err := gzipWriter.Close(); err != nil { + log.Print(err) + panic(http.ErrAbortHandler) + } +} + +// HandleDirectory serves a directory as a tarball. +func (s FileServerService) HandleDirectory(w http.ResponseWriter, req *http.Request) { + if req.Method != "GET" { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + if req.URL.Query().Get("format") != "tar" { + http.Error(w, "Invalid format. 
Only supports \"tar\"", http.StatusNotFound) + return + } + + directoryDigest, err := getDigestFromRequest(req) + if err != nil { + http.Error(w, "Digest not found", http.StatusNotFound) + return + } + + ctx := extractContextFromRequest(req) + directoryMessage, err := s.contentAddressableStorage.Get(ctx, directoryDigest).ToProto(&remoteexecution.Directory{}, s.maximumMessageSizeBytes) + if err != nil { + http.Error(w, "Digest not found", http.StatusNotFound) + return + } + directory := directoryMessage.(*remoteexecution.Directory) + + s.generateTarball(ctx, w, directoryDigest, directory, func(ctx context.Context, digest digest.Digest) (*remoteexecution.Directory, error) { + directoryMessage, err := s.contentAddressableStorage.Get(ctx, digest).ToProto(&remoteexecution.Directory{}, s.maximumMessageSizeBytes) + if err != nil { + return nil, err + } + return directoryMessage.(*remoteexecution.Directory), nil + }) +} diff --git a/internal/api/servefiles/server.go b/internal/api/servefiles/server.go new file mode 100644 index 0000000..23fd4e0 --- /dev/null +++ b/internal/api/servefiles/server.go @@ -0,0 +1,178 @@ +package servefiles + +import ( + "bufio" + "context" + "io" + "log" + "net/http" + "strconv" + "strings" + "unicode/utf8" + + "github.com/buildbarn/bb-portal/pkg/proto/configuration/bb_portal" + "github.com/buildbarn/bb-remote-execution/pkg/builder" + "github.com/buildbarn/bb-storage/pkg/auth" + "github.com/buildbarn/bb-storage/pkg/blobstore" + blobstore_configuration "github.com/buildbarn/bb-storage/pkg/blobstore/configuration" + "github.com/buildbarn/bb-storage/pkg/digest" + bb_grpc "github.com/buildbarn/bb-storage/pkg/grpc" + "github.com/buildbarn/bb-storage/pkg/program" + "github.com/buildbarn/bb-storage/pkg/util" + "github.com/gorilla/mux" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + + remoteexecution "github.com/bazelbuild/remote-apis/build/bazel/remote/execution/v2" +) + +var digestFunctionStrings = map[string]remoteexecution.DigestFunction_Value{} + +func init() { + for _, digestFunction := range digest.SupportedDigestFunctions { + digestFunctionStrings[strings.ToLower(digestFunction.String())] = digestFunction + } +} + +func getDigestFromRequest(req *http.Request) (digest.Digest, error) { + vars := mux.Vars(req) + instanceNameStr := strings.TrimSuffix(vars["instanceName"], "/") + instanceName, err := digest.NewInstanceName(instanceNameStr) + if err != nil { + return digest.BadDigest, util.StatusWrapf(err, "Invalid instance name %#v", instanceNameStr) + } + digestFunctionStr := vars["digestFunction"] + digestFunctionEnum, ok := digestFunctionStrings[digestFunctionStr] + if !ok { + return digest.BadDigest, status.Errorf(codes.InvalidArgument, "Unknown digest function %#v", digestFunctionStr) + } + digestFunction, err := instanceName.GetDigestFunction(digestFunctionEnum, 0) + if err != nil { + return digest.BadDigest, err + } + sizeBytes, err := strconv.ParseInt(vars["sizeBytes"], 10, 64) + if err != nil { + return digest.BadDigest, util.StatusWrapf(err, "Invalid blob size %#v", vars["sizeBytes"]) + } + return digestFunction.NewDigest(vars["hash"], sizeBytes) +} + +// Generates a Context from an incoming HTTP request, forwarding any +// request headers as gRPC metadata. 
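+// This allows credentials supplied by the browser, such as an
+// Authorization header, to reach the instance name authorizer and the
+// storage backend as if the request had arrived over gRPC.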
+func extractContextFromRequest(req *http.Request) context.Context {
+	var pairs []string
+	for key, values := range req.Header {
+		for _, value := range values {
+			pairs = append(pairs, key, value)
+		}
+	}
+	return metadata.NewIncomingContext(req.Context(), metadata.Pairs(pairs...))
+}
+
+// FileServerService is a service that serves files from the Content
+// Addressable Storage (CAS) over HTTP. It also serves shell scripts generated
+// from Command messages, and directories as tarballs.
+type FileServerService struct {
+	contentAddressableStorage blobstore.BlobAccess
+	maximumMessageSizeBytes   int
+}
+
+// NewFileServerServiceFromConfiguration creates a new FileServerService
+// with an authorizing CAS if ServeFilesCasConfiguration is configured.
+func NewFileServerServiceFromConfiguration(dependenciesGroup program.Group, configuration *bb_portal.ApplicationConfiguration, grpcClientFactory bb_grpc.ClientFactory) *FileServerService {
+	if configuration.ServeFilesCasConfiguration == nil {
+		log.Printf("Did not start serving files from Content Addressable Storage because ServeFilesCasConfiguration is not configured")
+		return nil
+	}
+
+	instanceNameAuthorizer, err := auth.DefaultAuthorizerFactory.NewAuthorizerFromConfiguration(configuration.InstanceNameAuthorizer)
+	if err != nil {
+		log.Fatalf("Failed to create InstanceNameAuthorizer: %v", err)
+	}
+
+	contentAddressableStorage, err := blobstore_configuration.NewBlobAccessFromConfiguration(
+		dependenciesGroup,
+		configuration.ServeFilesCasConfiguration,
+		blobstore_configuration.NewCASBlobAccessCreator(grpcClientFactory, int(configuration.MaximumMessageSizeBytes)),
+	)
+	if err != nil {
+		log.Fatalf("Failed to serve files from Content Addressable Storage: %v", err)
+	}
+
+	return &FileServerService{
+		blobstore.NewAuthorizingBlobAccess(contentAddressableStorage.BlobAccess, instanceNameAuthorizer, nil, nil),
+		int(configuration.MaximumMessageSizeBytes),
+	}
+}
+
+// HandleFile serves a file from the Content Addressable Storage (CAS) over HTTP.
+func (s FileServerService) HandleFile(w http.ResponseWriter, req *http.Request) {
+	digest, err := getDigestFromRequest(req)
+	if err != nil {
+		http.Error(w, "Digest not found", http.StatusNotFound)
+		return
+	}
+
+	ctx := extractContextFromRequest(req)
+	r := s.contentAddressableStorage.Get(ctx, digest).ToReader()
+	defer r.Close()
+
+	// Attempt to read the first chunk of data to see whether we can
+	// trigger an error. Only if no error occurs do we start setting
+	// response headers.
+	var first [4096]byte
+	n, err := r.Read(first[:])
+	if err != nil && err != io.EOF {
+		http.Error(w, "Could not send file", http.StatusInternalServerError)
+		return
+	}
+
+	w.Header().Set("Content-Length", strconv.FormatInt(digest.GetSizeBytes(), 10))
+	// Only inspect the bytes that were actually read when sniffing
+	// whether the blob is text.
+	if utf8.ValidString(string(first[:n])) {
+		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+	} else {
+		w.Header().Set("Content-Type", "application/octet-stream")
+	}
+	w.Write(first[:n])
+	io.Copy(w, r)
+}
+
+// HandleCommand serves a Command message from the Content Addressable Storage
+// (CAS) as a shell script over HTTP.
+func (s FileServerService) HandleCommand(w http.ResponseWriter, req *http.Request) {
+	if req.Method != "GET" {
+		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
+		return
+	}
+
+	if req.URL.Query().Get("format") != "sh" {
+		http.Error(w, "Invalid format. 
Only supports \"sh\"", http.StatusNotFound) + return + } + + digest, err := getDigestFromRequest(req) + if err != nil { + http.Error(w, "Digest not found", http.StatusNotFound) + return + } + ctx := extractContextFromRequest(req) + + commandMessage, err := s.contentAddressableStorage.Get(ctx, digest).ToProto(&remoteexecution.Command{}, s.maximumMessageSizeBytes) + if err != nil { + http.Error(w, "Not found", http.StatusNotFound) + return + } + command := commandMessage.(*remoteexecution.Command) + + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + bw := bufio.NewWriter(w) + if err := builder.ConvertCommandToShellScript(command, bw); err != nil { + log.Print(err) + panic(http.ErrAbortHandler) + } + if err := bw.Flush(); err != nil { + log.Print(err) + panic(http.ErrAbortHandler) + } +} diff --git a/pkg/proto/configuration/bb_portal/BUILD.bazel b/pkg/proto/configuration/bb_portal/BUILD.bazel index 0a5eb8a..13ce8ff 100644 --- a/pkg/proto/configuration/bb_portal/BUILD.bazel +++ b/pkg/proto/configuration/bb_portal/BUILD.bazel @@ -8,6 +8,7 @@ proto_library( visibility = ["//visibility:public"], deps = [ "@com_github_buildbarn_bb_storage//pkg/proto/configuration/auth:auth_proto", + "@com_github_buildbarn_bb_storage//pkg/proto/configuration/blobstore:blobstore_proto", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/global:global_proto", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/grpc:grpc_proto", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/http:http_proto", @@ -21,6 +22,7 @@ go_proto_library( visibility = ["//visibility:public"], deps = [ "@com_github_buildbarn_bb_storage//pkg/proto/configuration/auth", + "@com_github_buildbarn_bb_storage//pkg/proto/configuration/blobstore", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/global", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/grpc", "@com_github_buildbarn_bb_storage//pkg/proto/configuration/http", diff --git a/pkg/proto/configuration/bb_portal/bb_portal.pb.go b/pkg/proto/configuration/bb_portal/bb_portal.pb.go index ea62021..b6a3e25 100644 --- a/pkg/proto/configuration/bb_portal/bb_portal.pb.go +++ b/pkg/proto/configuration/bb_portal/bb_portal.pb.go @@ -8,6 +8,7 @@ package bb_portal import ( auth "github.com/buildbarn/bb-storage/pkg/proto/configuration/auth" + blobstore "github.com/buildbarn/bb-storage/pkg/proto/configuration/blobstore" global "github.com/buildbarn/bb-storage/pkg/proto/configuration/global" grpc "github.com/buildbarn/bb-storage/pkg/proto/configuration/grpc" http "github.com/buildbarn/bb-storage/pkg/proto/configuration/http" @@ -92,11 +93,17 @@ type ApplicationConfiguration struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - HttpServers []*http.ServerConfiguration `protobuf:"bytes,1,rep,name=http_servers,json=httpServers,proto3" json:"http_servers,omitempty"` - GrpcServers []*grpc.ServerConfiguration `protobuf:"bytes,2,rep,name=grpc_servers,json=grpcServers,proto3" json:"grpc_servers,omitempty"` - Global *global.Configuration `protobuf:"bytes,3,opt,name=global,proto3" json:"global,omitempty"` - InstanceNameAuthorizer *auth.AuthorizerConfiguration `protobuf:"bytes,4,opt,name=instance_name_authorizer,json=instanceNameAuthorizer,proto3" json:"instance_name_authorizer,omitempty"` - BuildQueueStateProxy *GrpcWebProxyConfiguration `protobuf:"bytes,5,opt,name=build_queue_state_proxy,json=buildQueueStateProxy,proto3" json:"build_queue_state_proxy,omitempty"` + HttpServers []*http.ServerConfiguration 
`protobuf:"bytes,1,rep,name=http_servers,json=httpServers,proto3" json:"http_servers,omitempty"` + GrpcServers []*grpc.ServerConfiguration `protobuf:"bytes,2,rep,name=grpc_servers,json=grpcServers,proto3" json:"grpc_servers,omitempty"` + Global *global.Configuration `protobuf:"bytes,3,opt,name=global,proto3" json:"global,omitempty"` + InstanceNameAuthorizer *auth.AuthorizerConfiguration `protobuf:"bytes,4,opt,name=instance_name_authorizer,json=instanceNameAuthorizer,proto3" json:"instance_name_authorizer,omitempty"` + BuildQueueStateProxy *GrpcWebProxyConfiguration `protobuf:"bytes,5,opt,name=build_queue_state_proxy,json=buildQueueStateProxy,proto3" json:"build_queue_state_proxy,omitempty"` + ActionCacheProxy *GrpcWebProxyConfiguration `protobuf:"bytes,6,opt,name=action_cache_proxy,json=actionCacheProxy,proto3" json:"action_cache_proxy,omitempty"` + ContentAddressableStorageProxy *GrpcWebProxyConfiguration `protobuf:"bytes,7,opt,name=content_addressable_storage_proxy,json=contentAddressableStorageProxy,proto3" json:"content_addressable_storage_proxy,omitempty"` + InitialSizeClassCacheProxy *GrpcWebProxyConfiguration `protobuf:"bytes,8,opt,name=initial_size_class_cache_proxy,json=initialSizeClassCacheProxy,proto3" json:"initial_size_class_cache_proxy,omitempty"` + FileSystemAccessCacheProxy *GrpcWebProxyConfiguration `protobuf:"bytes,9,opt,name=file_system_access_cache_proxy,json=fileSystemAccessCacheProxy,proto3" json:"file_system_access_cache_proxy,omitempty"` + ServeFilesCasConfiguration *blobstore.BlobAccessConfiguration `protobuf:"bytes,10,opt,name=serve_files_cas_configuration,json=serveFilesCasConfiguration,proto3" json:"serve_files_cas_configuration,omitempty"` + MaximumMessageSizeBytes int64 `protobuf:"varint,11,opt,name=maximum_message_size_bytes,json=maximumMessageSizeBytes,proto3" json:"maximum_message_size_bytes,omitempty"` } func (x *ApplicationConfiguration) Reset() { @@ -166,6 +173,48 @@ func (x *ApplicationConfiguration) GetBuildQueueStateProxy() *GrpcWebProxyConfig return nil } +func (x *ApplicationConfiguration) GetActionCacheProxy() *GrpcWebProxyConfiguration { + if x != nil { + return x.ActionCacheProxy + } + return nil +} + +func (x *ApplicationConfiguration) GetContentAddressableStorageProxy() *GrpcWebProxyConfiguration { + if x != nil { + return x.ContentAddressableStorageProxy + } + return nil +} + +func (x *ApplicationConfiguration) GetInitialSizeClassCacheProxy() *GrpcWebProxyConfiguration { + if x != nil { + return x.InitialSizeClassCacheProxy + } + return nil +} + +func (x *ApplicationConfiguration) GetFileSystemAccessCacheProxy() *GrpcWebProxyConfiguration { + if x != nil { + return x.FileSystemAccessCacheProxy + } + return nil +} + +func (x *ApplicationConfiguration) GetServeFilesCasConfiguration() *blobstore.BlobAccessConfiguration { + if x != nil { + return x.ServeFilesCasConfiguration + } + return nil +} + +func (x *ApplicationConfiguration) GetMaximumMessageSizeBytes() int64 { + if x != nil { + return x.MaximumMessageSizeBytes + } + return 0 +} + var File_pkg_proto_configuration_bb_portal_bb_portal_proto protoreflect.FileDescriptor var file_pkg_proto_configuration_bb_portal_bb_portal_proto_rawDesc = []byte{ @@ -177,65 +226,112 @@ var file_pkg_proto_configuration_bb_portal_bb_portal_proto_rawDesc = []byte{ 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x1a, 0x27, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2e, 
    [... remainder of this hunk elided: 65 old and 112 new lines of machine-generated rawDesc file-descriptor bytes; the regenerated descriptor encodes the new blobstore.proto import and the six new ApplicationConfiguration fields ...]
@@ -252,27 +348,33 @@ func file_pkg_proto_configuration_bb_portal_bb_portal_proto_rawDescGZIP() []byte
 var file_pkg_proto_configuration_bb_portal_bb_portal_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
 var file_pkg_proto_configuration_bb_portal_bb_portal_proto_goTypes = []any{
-    (*GrpcWebProxyConfiguration)(nil),    // 0: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
-    (*ApplicationConfiguration)(nil),     // 1: buildbarn.configuration.bb_portal.ApplicationConfiguration
-    (*grpc.ClientConfiguration)(nil),     // 2: buildbarn.configuration.grpc.ClientConfiguration
-    (*http.ServerConfiguration)(nil),     // 3: buildbarn.configuration.http.ServerConfiguration
-    (*grpc.ServerConfiguration)(nil),     // 4: buildbarn.configuration.grpc.ServerConfiguration
-    (*global.Configuration)(nil),         // 5: buildbarn.configuration.global.Configuration
-    (*auth.AuthorizerConfiguration)(nil), // 6: buildbarn.configuration.auth.AuthorizerConfiguration
+    (*GrpcWebProxyConfiguration)(nil),         // 0: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    (*ApplicationConfiguration)(nil),          // 1: buildbarn.configuration.bb_portal.ApplicationConfiguration
+    (*grpc.ClientConfiguration)(nil),          // 2: buildbarn.configuration.grpc.ClientConfiguration
+    (*http.ServerConfiguration)(nil),          // 3: buildbarn.configuration.http.ServerConfiguration
+    (*grpc.ServerConfiguration)(nil),          // 4: buildbarn.configuration.grpc.ServerConfiguration
+    (*global.Configuration)(nil),              // 5: buildbarn.configuration.global.Configuration
+    (*auth.AuthorizerConfiguration)(nil),      // 6: buildbarn.configuration.auth.AuthorizerConfiguration
+    (*blobstore.BlobAccessConfiguration)(nil), // 7: buildbarn.configuration.blobstore.BlobAccessConfiguration
 }
 var file_pkg_proto_configuration_bb_portal_bb_portal_proto_depIdxs = []int32{
-    2, // 0: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration.client:type_name -> buildbarn.configuration.grpc.ClientConfiguration
-    3, // 1: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration.http_servers:type_name -> buildbarn.configuration.http.ServerConfiguration
-    3, // 2: buildbarn.configuration.bb_portal.ApplicationConfiguration.http_servers:type_name -> buildbarn.configuration.http.ServerConfiguration
-    4, // 3: buildbarn.configuration.bb_portal.ApplicationConfiguration.grpc_servers:type_name -> buildbarn.configuration.grpc.ServerConfiguration
-    5, // 4: buildbarn.configuration.bb_portal.ApplicationConfiguration.global:type_name -> buildbarn.configuration.global.Configuration
-    6, // 5: buildbarn.configuration.bb_portal.ApplicationConfiguration.instance_name_authorizer:type_name -> buildbarn.configuration.auth.AuthorizerConfiguration
-    0, // 6: buildbarn.configuration.bb_portal.ApplicationConfiguration.build_queue_state_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
-    7, // [7:7] is the sub-list for method output_type
-    7, // [7:7] is the sub-list for method input_type
-    7, // [7:7] is the sub-list for extension type_name
-    7, // [7:7] is the sub-list for extension extendee
-    0, // [0:7] is the sub-list for field type_name
+    2,  // 0: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration.client:type_name -> buildbarn.configuration.grpc.ClientConfiguration
+    3,  // 1: buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration.http_servers:type_name -> buildbarn.configuration.http.ServerConfiguration
+    3,  // 2: buildbarn.configuration.bb_portal.ApplicationConfiguration.http_servers:type_name -> buildbarn.configuration.http.ServerConfiguration
+    4,  // 3: buildbarn.configuration.bb_portal.ApplicationConfiguration.grpc_servers:type_name -> buildbarn.configuration.grpc.ServerConfiguration
+    5,  // 4: buildbarn.configuration.bb_portal.ApplicationConfiguration.global:type_name -> buildbarn.configuration.global.Configuration
+    6,  // 5: buildbarn.configuration.bb_portal.ApplicationConfiguration.instance_name_authorizer:type_name -> buildbarn.configuration.auth.AuthorizerConfiguration
+    0,  // 6: buildbarn.configuration.bb_portal.ApplicationConfiguration.build_queue_state_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    0,  // 7: buildbarn.configuration.bb_portal.ApplicationConfiguration.action_cache_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    0,  // 8: buildbarn.configuration.bb_portal.ApplicationConfiguration.content_addressable_storage_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    0,  // 9: buildbarn.configuration.bb_portal.ApplicationConfiguration.initial_size_class_cache_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    0,  // 10: buildbarn.configuration.bb_portal.ApplicationConfiguration.file_system_access_cache_proxy:type_name -> buildbarn.configuration.bb_portal.GrpcWebProxyConfiguration
+    7,  // 11: buildbarn.configuration.bb_portal.ApplicationConfiguration.serve_files_cas_configuration:type_name -> buildbarn.configuration.blobstore.BlobAccessConfiguration
+    12, // [12:12] is the sub-list for method output_type
+    12, // [12:12] is the sub-list for method input_type
+    12, // [12:12] is the sub-list for extension type_name
+    12, // [12:12] is the sub-list for extension extendee
+    0,  // [0:12] is the sub-list for field type_name
 }
 
 func init() { file_pkg_proto_configuration_bb_portal_bb_portal_proto_init() }
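Everything above in `bb_portal.pb.go` is `protoc-gen-go` bookkeeping regenerated from the `.proto` file below; the sub-list bounds in `file_..._depIdxs` move from 7 to 12 because five of the six new fields are message-typed and each contributes a `type_name` edge. One practical consequence for callers: the generated getters are nil-safe, so the optional proxies can be probed without guarding the containing message. A small, hypothetical sketch:

```go
package main

import (
    "fmt"

    "github.com/buildbarn/bb-portal/pkg/proto/configuration/bb_portal"
)

func main() {
    // Generated getters tolerate nil receivers, so optional proxies can
    // be probed without first checking the containing message.
    var cfg *bb_portal.ApplicationConfiguration // nil: nothing configured
    if cfg.GetActionCacheProxy() == nil {
        fmt.Println("ActionCacheProxy is not configured")
    }
}
```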
diff --git a/pkg/proto/configuration/bb_portal/bb_portal.proto b/pkg/proto/configuration/bb_portal/bb_portal.proto
index a8b99e2..c4c3780 100644
--- a/pkg/proto/configuration/bb_portal/bb_portal.proto
+++ b/pkg/proto/configuration/bb_portal/bb_portal.proto
@@ -3,6 +3,7 @@ syntax = "proto3";
 package buildbarn.configuration.bb_portal;
 
 import "pkg/proto/configuration/auth/auth.proto";
+import "pkg/proto/configuration/blobstore/blobstore.proto";
 import "pkg/proto/configuration/global/global.proto";
 import "pkg/proto/configuration/grpc/grpc.proto";
 import "pkg/proto/configuration/http/http.proto";
@@ -39,4 +40,15 @@ message ApplicationConfiguration {
   // Configures the gRPC-Web proxies that are used from the web UI to
   // communicate with the Buildbarn.
   GrpcWebProxyConfiguration build_queue_state_proxy = 5;
+  GrpcWebProxyConfiguration action_cache_proxy = 6;
+  GrpcWebProxyConfiguration content_addressable_storage_proxy = 7;
+  GrpcWebProxyConfiguration initial_size_class_cache_proxy = 8;
+  GrpcWebProxyConfiguration file_system_access_cache_proxy = 9;
+
+  // Configuration for serving files from the Content Addressable Storage.
+  buildbarn.configuration.blobstore.BlobAccessConfiguration
+      serve_files_cas_configuration = 10;
+
+  // Maximum size of Protobuf messages to unmarshal, in bytes.
+  int64 maximum_message_size_bytes = 11;
 }
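Putting the new schema to work, here is a minimal sketch of an `ApplicationConfiguration` built with the generated Go types. The choice of a gRPC blobstore backend, the `BlobAccessConfiguration_Grpc` wrapper, the address, and the 16 MiB limit are all assumptions for illustration; a real deployment would express this in the portal's configuration file rather than in Go:

```go
package main

import (
    "github.com/buildbarn/bb-portal/pkg/proto/configuration/bb_portal"
    "github.com/buildbarn/bb-storage/pkg/proto/configuration/blobstore"
    "github.com/buildbarn/bb-storage/pkg/proto/configuration/grpc"
)

// exampleConfiguration sketches how the new fields fit together. The
// backend choice and address are placeholders, not the patch's defaults.
func exampleConfiguration() *bb_portal.ApplicationConfiguration {
    return &bb_portal.ApplicationConfiguration{
        // Optional gRPC-Web proxies used by the web UI (client and
        // http_servers omitted here for brevity).
        ActionCacheProxy:               &bb_portal.GrpcWebProxyConfiguration{},
        ContentAddressableStorageProxy: &bb_portal.GrpcWebProxyConfiguration{},
        // Serve raw CAS contents over HTTP as well; assumes a gRPC
        // blobstore backend pointing at a storage frontend.
        ServeFilesCasConfiguration: &blobstore.BlobAccessConfiguration{
            Backend: &blobstore.BlobAccessConfiguration_Grpc{
                Grpc: &grpc.ClientConfiguration{Address: "storage.example.com:8980"},
            },
        },
        // Upper bound for unmarshaled Protobuf messages (16 MiB here).
        MaximumMessageSizeBytes: 16 * 1024 * 1024,
    }
}

func main() { _ = exampleConfiguration() }
```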