add an alternative to executing the image build every time
why?:

Some CI systems cannot get past the Docker Hub rate limits. We want to use
`kiln test` for the csb tiles, but we run it on the tpe / runway Concourse
instances, which cannot reliably reach Docker Hub because of rate limiting
(even with authenticated pulls).

We can make the `kiln test` execution work by pre-loading the images used as
FROM stages in `internal/test/Dockerfile` into the Docker host we start in the
Concourse task: we pull the images from an accessible registry, retag them to
match the expected FROM args, and the implicit image build then succeeds.

It would be nicer to avoid the build stage entirely and let `kiln test`
consume a provided image instead, so it can run offline.
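For context, the change boils down to handing the Docker daemon a pre-exported image tarball instead of building one on the spot. Below is a minimal, self-contained sketch of that flow using the Docker Go client; the client options, image tag, and tarball name are illustrative assumptions and not part of this commit.

package main

import (
	"context"
	"fmt"
	"io"
	"os"

	"github.com/docker/docker/client"
)

func main() {
	ctx := context.Background()

	// Connect to the local Docker daemon.
	docker, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}

	// Open a previously exported image tarball, e.g. one produced with
	// `docker save` on a machine that can still reach a registry.
	// "image.tar" is an illustrative name.
	imageReader, err := os.Open("image.tar")
	if err != nil {
		panic(err)
	}
	defer imageReader.Close()

	// Load the tarball into the daemon instead of building the image,
	// so no registry access (and no Docker Hub rate limit) is involved.
	resp, err := docker.ImageLoad(ctx, imageReader, true)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out))
}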
nouseforaname committed Jun 6, 2024
1 parent 757c9ec commit 0963425
Showing 4 changed files with 246 additions and 12 deletions.
1 change: 1 addition & 0 deletions internal/test/assets/alpine.tgz
@@ -0,0 +1 @@

48 changes: 36 additions & 12 deletions internal/test/container.go
@@ -124,6 +124,7 @@ func (configuration Configuration) commands() ([]string, error) {
 //counterfeiter:generate -o ./fakes/moby_client.go --fake-name MobyClient . mobyClient
 type mobyClient interface {
 	DialHijack(ctx context.Context, url, proto string, meta map[string][]string) (net.Conn, error)
+	ImageLoad(ctx context.Context, input io.Reader, quiet bool) (types.ImageLoadResponse, error)
 	ImageBuild(ctx context.Context, buildContext io.Reader, options types.ImageBuildOptions) (types.ImageBuildResponse, error)
 	Ping(ctx context.Context) (types.Ping, error)
 	ContainerCreate(ctx context.Context, config *container.Config, hostConfig *container.HostConfig, networkingConfig *network.NetworkingConfig, platform *specV1.Platform, containerName string) (container.CreateResponse, error)
@@ -141,22 +142,45 @@ func runTestWithSession(ctx context.Context, logger *log.Logger, w io.Writer, do
 	}
 
 	var dockerfileTarball bytes.Buffer
-	if err := createDockerfileTarball(tar.NewWriter(&dockerfileTarball), dockerfile); err != nil {
+	if err = createDockerfileTarball(tar.NewWriter(&dockerfileTarball), dockerfile); err != nil {
 		return err
 	}
 
-	logger.Println("creating test image")
-	imageBuildResult, err := dockerDaemon.ImageBuild(ctx, &dockerfileTarball, types.ImageBuildOptions{
-		Tags:      []string{"kiln_test_dependencies:vmware"},
-		Version:   types.BuilderBuildKit,
-		SessionID: sessionID,
-	})
-	if err != nil {
-		return fmt.Errorf("failed to build image: %w", err)
-	}
+	if configuration.ImagePath == "" {
+		logger.Println("creating test image")
+		imageBuildResult, err := dockerDaemon.ImageBuild(ctx, &dockerfileTarball, types.ImageBuildOptions{
+			Tags:      []string{"kiln_test_dependencies:vmware"},
+			Version:   types.BuilderBuildKit,
+			SessionID: sessionID,
+		})
+		if err != nil {
+			return fmt.Errorf("failed to build image: %w", err)
+		}
+		if err = checkSSHPrivateKeyError(imageBuildResult.Body); err != nil {
+			return err
+		}
+	} else {
+		logger.Println("loading test image")
+		imageReader, err := os.Open(configuration.ImagePath)
+		if err != nil {
+			return fmt.Errorf("failed to read image '%s': %w", configuration.ImagePath, err)
+		}
 
-	if err := checkSSHPrivateKeyError(imageBuildResult.Body); err != nil {
-		return err
+		loadResponse, err := dockerDaemon.ImageLoad(
+			ctx,
+			imageReader,
+			true,
+		)
+		if err != nil {
+			return fmt.Errorf("failed to import image: %w", err)
+		}
+
+		respBytes, err := io.ReadAll(loadResponse.Body)
+		defer loadResponse.Body.Close()
+		if err != nil {
+			return fmt.Errorf(`failed to parse load image response: %w`, err)
+		}
+		logger.Printf("loaded image %s: \n%s\n", configuration.ImagePath, string(respBytes))
 	}
 
 	parentDir := path.Dir(configuration.AbsoluteTileDirectory)
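The tarball consumed through ImagePath has to be produced ahead of time on a host that can still reach a registry. Not part of this commit, but as a companion sketch, the same Docker Go client can export an image (the equivalent of `docker save`); the tag and output path below are assumptions based on the build tag used above.

package main

import (
	"context"
	"fmt"
	"io"
	"os"

	"github.com/docker/docker/client"
)

// saveImage streams the named image out of the daemon into a tarball that can
// later be handed to the ImagePath branch above instead of rebuilding it.
func saveImage(ctx context.Context, docker *client.Client, tag, outPath string) error {
	rc, err := docker.ImageSave(ctx, []string{tag})
	if err != nil {
		return fmt.Errorf("failed to save image %q: %w", tag, err)
	}
	defer rc.Close()

	f, err := os.Create(outPath)
	if err != nil {
		return err
	}
	defer f.Close()

	_, err = io.Copy(f, rc)
	return err
}

func main() {
	docker, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}
	// The tag matches what the build path above produces; the output file
	// name is an illustrative assumption.
	if err := saveImage(context.Background(), docker, "kiln_test_dependencies:vmware", "kiln-test-image.tar"); err != nil {
		panic(err)
	}
}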
41 changes: 41 additions & 0 deletions internal/test/container_test.go
@@ -108,6 +108,43 @@ func Test_configureSession(t *testing.T) {
 	})
 }
 
+func Test_loadImage(t *testing.T) {
+	absoluteTileDirectory := filepath.Join(t.TempDir(), "test")
+	logger := log.New(io.Discard, "", 0)
+	t.Run("when loading a provided test image with a wrong path", func(t *testing.T) {
+		ctx := context.Background()
+		out := bytes.Buffer{}
+		configuration := Configuration{
+			AbsoluteTileDirectory: absoluteTileDirectory,
+			ImagePath:             "non-existing",
+		}
+		client := runTestWithSessionHelper(t, "", container.WaitResponse{
+			StatusCode: 0,
+		})
+
+		err := runTestWithSession(ctx, logger, &out, client, configuration)("some-session-id")
+		require.ErrorContains(t, err, "failed to read image 'non-existing': open non-existing: no such file or directory")
+
+	})
+	t.Run(`when loading a provided test image with an existing path`, func(t *testing.T) {
+		ctx := context.Background()
+		out := bytes.Buffer{}
+
+		configuration := Configuration{
+			AbsoluteTileDirectory: absoluteTileDirectory,
+			ImagePath:             `assets/alpine.tgz`,
+		}
+
+		client := runTestWithSessionHelper(t, "", container.WaitResponse{
+			StatusCode: 0,
+		})
+
+		err := runTestWithSession(ctx, logger, &out, client, configuration)("some-session-id")
+		require.NoError(t, err)
+
+	})
+}
+
 func Test_runTestWithSession(t *testing.T) {
 	absoluteTileDirectory := filepath.Join(t.TempDir(), "test")
 	logger := log.New(io.Discard, "", 0)
@@ -200,6 +237,10 @@ func runTestWithSessionHelper(t *testing.T, logs string, response container.Wait
 	client.ImageBuildReturns(types.ImageBuildResponse{
 		Body: io.NopCloser(strings.NewReader("")),
 	}, nil)
+	client.ImageLoadReturns(types.ImageLoadResponse{
+		Body: io.NopCloser(strings.NewReader("")),
+	}, nil)
+
 	client.ContainerStartReturns(nil)
 	client.ContainerLogsReturns(io.NopCloser(strings.NewReader(logs)), nil)
 
168 changes: 168 additions & 0 deletions internal/test/fakes/moby_client.go

Some generated files are not rendered by default.
