diff --git a/README.md b/README.md index f083d4a3..0ba4903a 100644 --- a/README.md +++ b/README.md @@ -150,7 +150,13 @@ Manifest is loaded as **template**, so you can use variables, Go **range** and * // [SINGLE TESTCASE]: See below for more information // We also support the external loading of a complete test: - "@pathToTest.json" + "@pathToTest.json", + + // By prefixing it with a number, the test tool runs that many instances of + // the included test file in parallel to each other. + // + // Only tests directly included by the manifest are allowed to run in parallel. + "5@pathToTestsThatShouldRunInParallel.json" ] } ``` @@ -160,8 +166,9 @@ Manifest is loaded as **template**, so you can use variables, Go **range** and * ### manifest.json ```yaml { - // Define if the testuite should continue even if this test fails. (default:false) + // Define if the test suite should continue even if this test fails. (default: false) "continue_on_failure": true, + // Name to identify this single test. Is important for the log. Try to give an explaning name "name": "Testname", @@ -413,6 +420,33 @@ However that one will be stripped before parsing the template, which would be ju ** Unlike with delimiters, external tests/requests/responses don't inherit those removals, and need to be specified per file. +## Run tests in parallel +The tool is able to run several instances of the same tests in parallel. You activate this +mechanism by including an external test file with `N@pathtofile.json`, where N +is the number of parallel "clones" you want to have of the included tests. + +The included tests themselves are still run serially; only the entire set of +tests runs in parallel for the specified number of replications. + +This is useful, e.g., for stress-testing an API. + +Only tests directly included by a manifest are allowed to run in parallel. + +Using "0@file.json" will not run that specific test. + +```yaml +{ + "name": "Binary Comparison", + "request": { + "endpoint": "suggest", + "method": "GET" + }, + + // Path to binary file with N@ + "response": "123@simple.bin" +} +``` + ## Binary data comparison The tool is able to do a comparison with a binary file. Here we take a MD5 hash of the file and and then later compare @@ -2261,6 +2295,10 @@ Removes from **key** from **url**'s query, returns the **url** with the **key** Returns the **value** from the **url**'s query for **key**. In case of an error, an empty string is returned. Unparsable urls are ignored and an empty string is returned. +## `parallel_run_idx` +Returns the index of the parallel run that the template is executed in, or -1 if it is not executed +within a parallel run. + # HTTP Server The apitest tool includes an HTTP Server. It can be used to serve files from the local disk temporarily. The HTTP Server can run in test mode. In this mode, the apitest tool does not run any tests, but starts the HTTP Server in the foreground, until CTRL-C in pressed.
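To make the parallel-run semantics described above concrete: the `api_testsuite.go` changes below fan the included test file out into N goroutines and count successful runs. The following is a minimal, self-contained Go sketch of that pattern; `runParallel` and `runOnce` are illustrative names only, not part of the apitest codebase.

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// runParallel fans a single included test file out into n parallel runs and
// reports overall success, mirroring the WaitGroup/atomic.Uint32 pattern used
// in api_testsuite.go. runOnce stands in for the real per-run work (template
// rendering plus running the file's test cases serially).
func runParallel(n int, runOnce func(runIdx int) bool) bool {
	var successCount atomic.Uint32
	var wg sync.WaitGroup

	wg.Add(n)
	for runIdx := 0; runIdx < n; runIdx++ {
		runIdx := runIdx // capture the loop variable for the goroutine
		go func() {
			defer wg.Done()
			if runOnce(runIdx) {
				successCount.Add(1)
			}
		}()
	}
	wg.Wait()

	// The inclusion only passes if every parallel clone passed.
	return successCount.Load() == uint32(n)
}

func main() {
	ok := runParallel(5, func(runIdx int) bool {
		fmt.Println("parallel run", runIdx) // exposed to templates as parallel_run_idx
		return true
	})
	fmt.Println("all runs passed:", ok)
}
```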
diff --git a/api_testsuite.go b/api_testsuite.go index 6d9cbae6..5be3eb70 100644 --- a/api_testsuite.go +++ b/api_testsuite.go @@ -9,6 +9,8 @@ import ( "os" "path/filepath" "strconv" + "sync" + "sync/atomic" "time" "github.com/pkg/errors" @@ -45,7 +47,7 @@ type Suite struct { manifestPath string reporterRoot *report.ReportElement index int - serverURL string + serverURL *url.URL httpServer http.Server httpServerProxy *httpproxy.Proxy httpServerDir string @@ -149,6 +151,12 @@ func NewTestSuite(config TestToolConfig, manifestPath string, manifestDir string //Append suite manifest path to name, so we know in an automatic setup where the test is loaded from suite.Name = fmt.Sprintf("%s (%s)", suite.Name, manifestPath) + // Parse serverURL + suite.serverURL, err = url.Parse(suite.Config.ServerURL) + if err != nil { + return nil, fmt.Errorf("can not load server url : %s", err) + } + // init store err = suite.datastore.SetMap(suite.Store) if err != nil { @@ -179,8 +187,17 @@ func (ats *Suite) Run() bool { success := true for k, v := range ats.Tests { child := r.NewChild(strconv.Itoa(k)) - sTestSuccess := ats.parseAndRunTest(v, ats.manifestDir, ats.manifestPath, child, ats.loader) + + sTestSuccess := ats.parseAndRunTest( + v, + ats.manifestPath, + child, + ats.loader, + true, // parallel exec allowed for top-level tests + ) + child.Leave(sTestSuccess) + if !sTestSuccess { success = false break @@ -213,59 +230,106 @@ type TestContainer struct { Path string } -func (ats *Suite) parseAndRunTest(v any, manifestDir, testFilePath string, r *report.ReportElement, rootLoader template.Loader) bool { - //Init variables - // logrus.Warnf("Test %s, Prev delimiters: %#v", testFilePath, rootLoader.Delimiters) +func (ats *Suite) buildLoader(rootLoader template.Loader, parallelRunIdx int) template.Loader { loader := template.NewLoader(ats.datastore) loader.Delimiters = rootLoader.Delimiters loader.HTTPServerHost = ats.HTTPServerHost - serverURL, err := url.Parse(ats.Config.ServerURL) - if err != nil { - logrus.Error(fmt.Errorf("can not load server url into test (%s): %s", testFilePath, err)) - return false - } - loader.ServerURL = serverURL + loader.ServerURL = ats.serverURL loader.OAuthClient = ats.Config.OAuthClient - //Get the Manifest with @ logic - fileh, testObj, err := template.LoadManifestDataAsRawJson(v, manifestDir) - dir := filepath.Dir(fileh) - if fileh != "" { - testFilePath = filepath.Join(filepath.Dir(testFilePath), fileh) + if rootLoader.ParallelRunIdx < 0 { + loader.ParallelRunIdx = parallelRunIdx + } else { + loader.ParallelRunIdx = rootLoader.ParallelRunIdx } + + return loader +} + +func (ats *Suite) parseAndRunTest( + v any, testFilePath string, r *report.ReportElement, rootLoader template.Loader, + allowParallelExec bool, +) bool { + parallelRuns := 1 + + // Get the Manifest with @ logic + referencedPathSpec, testRaw, err := template.LoadManifestDataAsRawJson(v, filepath.Dir(testFilePath)) if err != nil { r.SaveToReportLog(err.Error()) logrus.Error(fmt.Errorf("can not LoadManifestDataAsRawJson (%s): %s", testFilePath, err)) return false } + if referencedPathSpec != nil { + testFilePath = filepath.Join(filepath.Dir(testFilePath), referencedPathSpec.Path) + parallelRuns = referencedPathSpec.ParallelRuns + } + + // If parallel runs are requested, check that they're actually allowed + if parallelRuns > 1 && !allowParallelExec { + logrus.Error(fmt.Errorf("parallel runs are not allowed in nested tests (%s)", testFilePath)) + return false + } + + // Execute test cases + var successCount 
atomic.Uint32 + var waitGroup sync.WaitGroup + + waitGroup.Add(parallelRuns) + + for runIdx := range parallelRuns { + go ats.testGoroutine( + &waitGroup, &successCount, testFilePath, r, rootLoader, + runIdx, testRaw, + ) + } + + waitGroup.Wait() + + return successCount.Load() == uint32(parallelRuns) +} + +func (ats *Suite) testGoroutine( + waitGroup *sync.WaitGroup, successCount *atomic.Uint32, + testFilePath string, r *report.ReportElement, rootLoader template.Loader, + runIdx int, testRaw json.RawMessage, +) { + defer waitGroup.Done() + + testFileDir := filepath.Dir(testFilePath) - // Parse as template always - testObj, err = loader.Render(testObj, filepath.Join(manifestDir, dir), nil) + // Build template loader + loader := ats.buildLoader(rootLoader, runIdx) + + // Parse testRaw as template + testRendered, err := loader.Render(testRaw, testFileDir, nil) if err != nil { r.SaveToReportLog(err.Error()) logrus.Error(fmt.Errorf("can not render template (%s): %s", testFilePath, err)) - return false + + // note that successCount is not incremented + return } // Build list of test cases var testCases []json.RawMessage - err = util.Unmarshal(testObj, &testCases) + err = util.Unmarshal(testRendered, &testCases) if err != nil { // Input could not be deserialized into list, try to deserialize into single object var singleTest json.RawMessage - err = util.Unmarshal(testObj, &singleTest) + err = util.Unmarshal(testRendered, &singleTest) if err != nil { // Malformed json r.SaveToReportLog(err.Error()) logrus.Error(fmt.Errorf("can not unmarshal (%s): %s", testFilePath, err)) - return false + + // note that successCount is not incremented + return } testCases = []json.RawMessage{singleTest} } - // Execute test cases - for i, testCase := range testCases { + for testIdx, testCase := range testCases { var success bool // If testCase can be unmarshalled as string, we may have a @@ -276,34 +340,38 @@ func (ats *Suite) parseAndRunTest(v any, manifestDir, testFilePath string, r *re // Recurse if the testCase points to another file using @ notation success = ats.parseAndRunTest( testCaseStr, - filepath.Join(manifestDir, dir), testFilePath, r, loader, + false, // no parallel exec allowed in nested tests ) } else { // Otherwise simply run the literal test case success = ats.runLiteralTest( TestContainer{ CaseByte: testCase, - Path: filepath.Join(manifestDir, dir), + Path: testFileDir, }, r, testFilePath, loader, - i, + runIdx*len(testCases)+testIdx, ) } if !success { - return false + // note that successCount is not incremented + return } } - return true + successCount.Add(1) } -func (ats *Suite) runLiteralTest(tc TestContainer, r *report.ReportElement, testFilePath string, loader template.Loader, k int) bool { +func (ats *Suite) runLiteralTest( + tc TestContainer, r *report.ReportElement, testFilePath string, loader template.Loader, + index int, +) bool { r.SetName(testFilePath) var test Case @@ -320,7 +388,7 @@ func (ats *Suite) runLiteralTest(tc TestContainer, r *report.ReportElement, test test.loader = loader test.manifestDir = tc.Path test.suiteIndex = ats.index - test.index = k + test.index = index test.dataStore = ats.datastore test.standardHeader = ats.StandardHeader test.standardHeaderFromStore = ats.StandardHeaderFromStore diff --git a/config.go b/config.go index 3687d636..72fa4b44 100644 --- a/config.go +++ b/config.go @@ -75,6 +75,9 @@ func NewTestToolConfig(serverURL string, rootDirectory []string, logNetwork bool LogShort: logShort, OAuthClient: Config.Apitest.OAuthClient, } + + 
config.fillInOAuthClientNames() + err = config.extractTestDirectories() return config, err } @@ -116,3 +119,14 @@ func (config *TestToolConfig) extractTestDirectories() error { } return nil } + +// fillInOAuthClientNames fills in the Client field of loaded OAuthClientConfig +// structs, which the user may have left unset in the config yaml file. +func (config *TestToolConfig) fillInOAuthClientNames() { + for key, clientConfig := range config.OAuthClient { + if clientConfig.Client == "" { + clientConfig.Client = key + config.OAuthClient[key] = clientConfig + } + } +} diff --git a/pkg/lib/api/build_policies.go b/pkg/lib/api/build_policies.go index d60d977e..271a6262 100644 --- a/pkg/lib/api/build_policies.go +++ b/pkg/lib/api/build_policies.go @@ -24,47 +24,56 @@ func buildMultipart(request Request) (additionalHeaders map[string]string, body if ok { f, ok := val.(util.JsonString) if !ok { - return additionalHeaders, body, fmt.Errorf("file:filename should be a string") + return nil, nil, fmt.Errorf("file:filename should be a string") } replaceFilename = &f } additionalHeaders["Content-Type"] = w.FormDataContentType() - for key, val := range request.Body.(map[string]any) { + createPart := func(key string, val any) error { if key == "file:filename" { - continue + return nil } - pathSpec, ok := val.(util.JsonString) + rawPathSpec, ok := val.(util.JsonString) if !ok { - return additionalHeaders, body, fmt.Errorf("pathSpec should be a string") + return fmt.Errorf("pathSpec should be a string") } - if !util.IsPathSpec(pathSpec) { - return additionalHeaders, body, fmt.Errorf("pathSpec %s is not valid", pathSpec) + pathSpec, err := util.ParsePathSpec(rawPathSpec) + if err != nil { + return fmt.Errorf("pathSpec %s is not valid: %w", rawPathSpec, err) } - var err error - - _, file, err := util.OpenFileOrUrl(pathSpec, request.ManifestDir) + file, err := util.OpenFileOrUrl(pathSpec.Path, request.ManifestDir) if err != nil { - return additionalHeaders, nil, err + return err } defer file.Close() var part io.Writer if replaceFilename == nil { - part, err = w.CreateFormFile(key, pathSpec[1:]) + part, err = w.CreateFormFile(key, pathSpec.Path) } else { part, err = w.CreateFormFile(key, *replaceFilename) } if err != nil { - return additionalHeaders, nil, err + return err } if _, err := io.Copy(part, file); err != nil { - return additionalHeaders, nil, err + return err + } + + return nil + } + + for key, val := range request.Body.(map[string]any) { + err = createPart(key, val) + if err != nil { + return nil, nil, err } } + err = w.Close() body = bytes.NewBuffer(buf.Bytes()) @@ -99,25 +108,31 @@ func buildRegular(request Request) (additionalHeaders map[string]string, body io } else { bodyBytes, err := json.Marshal(request.Body) if err != nil { - return additionalHeaders, body, fmt.Errorf("error marshaling request body: %s", err) + return nil, nil, fmt.Errorf("error marshaling request body: %s", err) } body = bytes.NewBuffer(bodyBytes) } return additionalHeaders, body, nil } +// buildFile opens a file for use with buildPolicy. +// WARNING: This returns a file handle that must be closed! 
func buildFile(req Request) (map[string]string, io.Reader, error) { - headers := map[string]string{} if req.BodyFile == "" { return nil, nil, errors.New(`Request.buildFile: Missing "body_file"`) } - _, file, err := util.OpenFileOrUrl(req.BodyFile, req.ManifestDir) + path := req.BodyFile + pathSpec, err := util.ParsePathSpec(req.BodyFile) + if err == nil && pathSpec != nil { // we unwrap the path, if an @-notation path spec was passed into body_file + path = pathSpec.Path + } + + file, err := util.OpenFileOrUrl(path, req.ManifestDir) if err != nil { return nil, nil, err } - return headers, file, err } diff --git a/pkg/lib/api/request.go b/pkg/lib/api/request.go index e3055f04..5d3e1d8a 100755 --- a/pkg/lib/api/request.go +++ b/pkg/lib/api/request.go @@ -88,6 +88,10 @@ func (request Request) buildHttpRequest() (req *http.Request, err error) { return nil, errors.Wrapf(err, "Unable to buildHttpRequest with URL %q", requestUrl) } + // Note that buildPolicy may return a file handle that needs to be + // closed. According to standard library documentation, the NewRequest + // call below will take into account if body also happens to implement + // io.Closer. additionalHeaders, body, err := request.buildPolicy(request) if err != nil { return req, fmt.Errorf("error executing buildpolicy: %s", err) diff --git a/pkg/lib/api/request_test.go b/pkg/lib/api/request_test.go index 8381647c..d761ac04 100755 --- a/pkg/lib/api/request_test.go +++ b/pkg/lib/api/request_test.go @@ -3,7 +3,6 @@ package api import ( "fmt" "io" - "io/ioutil" "net/http" "strings" "testing" @@ -32,7 +31,7 @@ func TestRequestBuildHttp(t *testing.T) { go_test_utils.ExpectNoError(t, err, fmt.Sprintf("error building http-request: %s", err)) go_test_utils.AssertStringEquals(t, httpRequest.Header.Get("mock-header"), "application/mock") - assertBody, err := ioutil.ReadAll(httpRequest.Body) + assertBody, err := io.ReadAll(httpRequest.Body) go_test_utils.ExpectNoError(t, err, fmt.Sprintf("error reading http-request body: %s", err)) go_test_utils.AssertStringEquals(t, string(assertBody), "mock_body") diff --git a/pkg/lib/datastore/datastore.go b/pkg/lib/datastore/datastore.go index f00b16c4..f7453e96 100755 --- a/pkg/lib/datastore/datastore.go +++ b/pkg/lib/datastore/datastore.go @@ -16,7 +16,7 @@ type Datastore struct { storage map[string]any // custom storage responseJson []string // store the responses logDatastore bool - lock *sync.Mutex + lock *sync.RWMutex } func NewStore(logDatastore bool) *Datastore { @@ -24,7 +24,7 @@ func NewStore(logDatastore bool) *Datastore { ds.storage = make(map[string]any, 0) ds.responseJson = make([]string, 0) ds.logDatastore = logDatastore - ds.lock = &sync.Mutex{} + ds.lock = &sync.RWMutex{} return &ds } @@ -84,11 +84,17 @@ func (ds *Datastore) Delete(k string) { // We store the response func (ds *Datastore) UpdateLastResponse(s string) { + ds.lock.Lock() + defer ds.lock.Unlock() + ds.responseJson[len(ds.responseJson)-1] = s } // We store the response func (ds *Datastore) AppendResponse(s string) { + ds.lock.Lock() + defer ds.lock.Unlock() + ds.responseJson = append(ds.responseJson, s) } @@ -167,6 +173,9 @@ func (ds *Datastore) Set(index string, value any) error { } func (ds Datastore) Get(index string) (res any, err error) { + ds.lock.RLock() + defer ds.lock.RUnlock() + // strings are evalulated as int, so // that we can support "-" notations diff --git a/pkg/lib/template/template_funcs.go b/pkg/lib/template/template_funcs.go index 1eb046c5..833f0d00 100644 --- a/pkg/lib/template/template_funcs.go +++ 
b/pkg/lib/template/template_funcs.go @@ -3,7 +3,7 @@ package template import ( "encoding/json" "fmt" - "io/ioutil" + "io" "path/filepath" "reflect" "strconv" @@ -323,12 +323,13 @@ func divide(b, a any) (any, error) { } func fileReadInternal(pathOrURL, rootDir string) ([]byte, error) { - _, file, err := util.OpenFileOrUrl(pathOrURL, rootDir) + file, err := util.OpenFileOrUrl(pathOrURL, rootDir) if err != nil { return nil, errors.Wrapf(err, "fileReadInternal: %q", pathOrURL) } + defer file.Close() - data, err := ioutil.ReadAll(file) + data, err := io.ReadAll(file) if err != nil { return nil, errors.Wrapf(err, "fileReadInternal: %q", pathOrURL) } diff --git a/pkg/lib/template/template_loader.go b/pkg/lib/template/template_loader.go index 9fd41a58..b85b279e 100644 --- a/pkg/lib/template/template_loader.go +++ b/pkg/lib/template/template_loader.go @@ -23,7 +23,6 @@ import ( "github.com/programmfabrik/apitest/pkg/lib/util" - "io/ioutil" "path/filepath" _ "github.com/mattn/go-sqlite3" @@ -49,10 +48,13 @@ type Loader struct { ServerURL *url.URL OAuthClient util.OAuthClientsConfig Delimiters delimiters + + // ParallelRunIdx is the index of the Parallel Run that this Loader is used in + ParallelRunIdx int } func NewLoader(datastore *datastore.Datastore) Loader { - return Loader{datastore: datastore} + return Loader{datastore: datastore, ParallelRunIdx: -1} } // Render loads and executes a manifest template. @@ -111,12 +113,7 @@ func (loader *Loader) Render( return strings.Split(s, sep) }, "md5sum": func(path string) (string, error) { - _, file, err := util.OpenFileOrUrl(path, rootDir) - if err != nil { - return "", err - } - - fileBytes, err := ioutil.ReadAll(file) + fileBytes, err := fileReadInternal(path, rootDir) if err != nil { return "", err } @@ -383,7 +380,7 @@ func (loader *Loader) Render( if !ok { return nil, errors.Errorf("OAuth client %q not configured", client) } - oAuthClient.Client = client + return oAuthClient.GetPasswordCredentialsAuthToken(login, password) }, @@ -392,7 +389,7 @@ func (loader *Loader) Render( if !ok { return nil, errors.Errorf("OAuth client %q not configured", client) } - oAuthClient.Client = client + return oAuthClient.GetClientCredentialsAuthToken() }, "oauth2_code_token": func(client string, params ...string) (tok *oauth2.Token, err error) { @@ -400,7 +397,7 @@ func (loader *Loader) Render( if !ok { return nil, errors.Errorf("OAuth client %q not configured", client) } - oAuthClient.Client = client + return oAuthClient.GetCodeAuthToken(params...) }, "oauth2_implicit_token": func(client string, params ...string) (tok *oauth2.Token, err error) { @@ -408,7 +405,7 @@ func (loader *Loader) Render( if !ok { return nil, errors.Errorf("OAuth client %q not configured", client) } - oAuthClient.Client = client + return oAuthClient.GetAuthToken(params...) 
}, "oauth2_client": func(client string) (c *util.OAuthClientConfig, err error) { @@ -416,7 +413,7 @@ func (loader *Loader) Render( if !ok { return nil, errors.Errorf("OAuth client %s not configured", client) } - oAuthClient.Client = client + return &oAuthClient, nil }, "oauth2_basic_auth": func(client string) (string, error) { @@ -424,7 +421,7 @@ func (loader *Loader) Render( if !ok { return "", errors.Errorf("OAuth client %s not configured", client) } - oAuthClient.Client = client + return "Basic " + base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", oAuthClient.Client, oAuthClient.Secret))), nil }, "query_escape": func(in string) string { @@ -489,6 +486,11 @@ func (loader *Loader) Render( q := u.Query() return q.Get(qKey) }, + // parallel_run_idx returns the index of the Parallel Run that the current template + // is rendered in. + "parallel_run_idx": func() int { + return loader.ParallelRunIdx + }, } tmpl, err := template. New("tmpl"). diff --git a/pkg/lib/template/util.go b/pkg/lib/template/util.go index 9b914da5..f899d476 100644 --- a/pkg/lib/template/util.go +++ b/pkg/lib/template/util.go @@ -3,43 +3,26 @@ package template import ( "encoding/json" "fmt" - "io/ioutil" "github.com/programmfabrik/apitest/pkg/lib/util" ) -func loadFileFromPathSpec(pathSpec, manifestDir string) (string, []byte, error) { - if !util.IsPathSpec(pathSpec) { - return "", nil, fmt.Errorf("spec was expected to be path spec, got %s instead", pathSpec) - } - - filepath, requestFile, err := util.OpenFileOrUrl(pathSpec, manifestDir) - if err != nil { - return "", nil, fmt.Errorf("error opening path: %s", err) - } - - defer requestFile.Close() - requestTmpl, err := ioutil.ReadAll(requestFile) - - if err != nil { - return "", nil, fmt.Errorf("error loading file %s: %s", requestFile, err) - } - - return filepath, requestTmpl, nil -} - -func LoadManifestDataAsObject(data any, manifestDir string, loader Loader) (filepath string, res any, err error) { +func LoadManifestDataAsObject(data any, manifestDir string, loader Loader) (pathSpec *util.PathSpec, res any, err error) { switch typedData := data.(type) { case string: - filepath, requestTmpl, err := loadFileFromPathSpec(typedData, manifestDir) + pathSpec, err = util.ParsePathSpec(typedData) + if err != nil { + return nil, res, fmt.Errorf("error parsing pathSpec: %w", err) + } + requestTmpl, err := pathSpec.LoadContents(manifestDir) if err != nil { - return "", res, fmt.Errorf("error loading fileFromPathSpec: %s", err) + return nil, res, fmt.Errorf("error loading fileFromPathSpec: %s", err) } // We have json, and load it thereby into our apitest structure requestBytes, err := loader.Render(requestTmpl, manifestDir, nil) if err != nil { - return "", res, fmt.Errorf("error rendering request: %s", err) + return nil, res, fmt.Errorf("error rendering request: %s", err) } var jsonObject util.JsonObject @@ -48,41 +31,45 @@ func LoadManifestDataAsObject(data any, manifestDir string, loader Loader) (file if err = util.Unmarshal(requestBytes, &jsonObject); err != nil { if err = util.Unmarshal(requestBytes, &jsonArray); err == nil { - return filepath, jsonArray, nil + return pathSpec, jsonArray, nil } - return "", res, fmt.Errorf("error unmarshalling: %s", err) + return nil, res, fmt.Errorf("error unmarshalling: %s", err) } - return filepath, jsonObject, nil + return pathSpec, jsonObject, nil case util.JsonObject: - return "", typedData, nil + return nil, typedData, nil case util.JsonArray: - return "", typedData, nil + return nil, typedData, nil default: - return "", 
res, fmt.Errorf("specification needs to be string[@...] or jsonObject but is: %s", data) + return nil, res, fmt.Errorf("specification needs to be string[@...] or jsonObject but is: %s", data) } } -func LoadManifestDataAsRawJson(data any, manifestDir string) (filepath string, res json.RawMessage, err error) { +func LoadManifestDataAsRawJson(data any, manifestDir string) (pathSpec *util.PathSpec, res json.RawMessage, err error) { switch typedData := data.(type) { case []byte: err = res.UnmarshalJSON(typedData) return case string: - filepath, res, err := loadFileFromPathSpec(typedData, manifestDir) + pathSpec, err = util.ParsePathSpec(typedData) + if err != nil { + return nil, res, fmt.Errorf("error parsing pathSpec: %w", err) + } + res, err := pathSpec.LoadContents(manifestDir) if err != nil { - return "", res, fmt.Errorf("error loading fileFromPathSpec: %s", err) + return nil, res, fmt.Errorf("error loading fileFromPathSpec: %s", err) } - return filepath, res, nil + return pathSpec, res, nil case util.JsonObject, util.JsonArray: jsonMar, err := json.Marshal(typedData) if err != nil { - return "", res, fmt.Errorf("error marshaling: %s", err) + return nil, res, fmt.Errorf("error marshaling: %s", err) } if err = util.Unmarshal(jsonMar, &res); err != nil { - return "", res, fmt.Errorf("error unmarshalling: %s", err) + return nil, res, fmt.Errorf("error unmarshalling: %s", err) } - return "", res, nil + return nil, res, nil default: - return "", res, fmt.Errorf("specification needs to be string[@...] or jsonObject but is: %s", data) + return nil, res, fmt.Errorf("specification needs to be string[@...] or jsonObject but is: %s", data) } } diff --git a/pkg/lib/util/file.go b/pkg/lib/util/file.go index d760e7b4..b2f79840 100644 --- a/pkg/lib/util/file.go +++ b/pkg/lib/util/file.go @@ -16,23 +16,13 @@ var c = &http.Client{ } // OpenFileOrUrl opens either a local file or gives the resp.Body from a remote file -func OpenFileOrUrl(path, rootDir string) (string, io.ReadCloser, error) { - if strings.HasPrefix(path, "@") { - path = path[1:] - } else if strings.HasPrefix(path, "p@") { - // p@ -> parallel tests - path = path[2:] - } else if IsParallelPathSpec(path) { - // pN@ -> N parallel repetitions of tests - _, path = GetParallelPathSpec(path) - } - +func OpenFileOrUrl(path, rootDir string) (io.ReadCloser, error) { if strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://") { io, err := openRemoteFile(path) - return path, io, err + return io, err } else { io, err := openLocalFile(path, rootDir) - return path, io, err + return io, err } } diff --git a/pkg/lib/util/file_test.go b/pkg/lib/util/file_test.go index 2bb96001..cca1ab14 100644 --- a/pkg/lib/util/file_test.go +++ b/pkg/lib/util/file_test.go @@ -3,7 +3,7 @@ package util import ( "crypto/md5" "fmt" - "io/ioutil" + "io" "net/http" "net/http/httptest" "path/filepath" @@ -13,147 +13,12 @@ import ( "github.com/spf13/afero" ) -type testParallelPathSpecStruct struct { - pathSpec string - expIsPath bool - expIsParallel bool - expPath string - expParallelRepititions int -} - type testOpenFileStruct struct { filename string expError error expHash [16]byte } -func TestGetParallelPathSpec(t *testing.T) { - - tests := []testParallelPathSpecStruct{ - { - pathSpec: "\"", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "[]", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "{}", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "p", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: 
"@", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "1@", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "x@", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "p@", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "@path", - expIsPath: true, - expIsParallel: false, - }, - { - pathSpec: "1@a", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "x@a", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "p1@", - expIsPath: false, - expIsParallel: false, - }, - { - pathSpec: "p1@path", - expIsPath: true, - expIsParallel: true, - expPath: "path", - expParallelRepititions: 1, - }, - { - pathSpec: "p10@path", - expIsPath: true, - expIsParallel: true, - expPath: "path", - expParallelRepititions: 10, - }, - { - pathSpec: "p01@path", - expIsPath: true, - expIsParallel: true, - expPath: "path", - expParallelRepititions: 1, - }, - { - pathSpec: "@path", - expIsPath: true, - expIsParallel: false, - }, - { - pathSpec: "@../path", - expIsPath: true, - expIsParallel: false, - }, - } - - for _, v := range tests { - t.Run(fmt.Sprintf("pathSpec:'%s'", v.pathSpec), func(t *testing.T) { - isPathSpec := IsPathSpec(v.pathSpec) - isParallelPathSpec := IsParallelPathSpec(v.pathSpec) - if isPathSpec != v.expIsPath { - t.Errorf("IsPathSpec: Got %v != %v Exp", isPathSpec, v.expIsPath) - } - if isParallelPathSpec != v.expIsParallel { - t.Errorf("IsParallelPathSpec: Got %v != %v Exp", isParallelPathSpec, v.expIsParallel) - } - - if v.expIsPath { - // the path must also be recognized as a path in case it has trailing quotes - if !IsPathSpec(fmt.Sprintf("\"%s\"", v.pathSpec)) { - t.Errorf("IsPathSpec (with trailing \"): Got %v != %v Exp", isPathSpec, v.expIsPath) - } - } - - if v.expIsParallel { - parallelRepititions, path := GetParallelPathSpec(v.pathSpec) - if parallelRepititions != v.expParallelRepititions { - t.Errorf("ParallelRepititions: Got '%d' != '%d' Exp", parallelRepititions, v.expParallelRepititions) - } - if path != v.expPath { - t.Errorf("Path: Got '%s' != '%s' Exp", path, v.expPath) - } - } - }) - } -} - func TestOpenFileOrUrl(t *testing.T) { filesystem.Fs = afero.NewMemMapFs() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -192,13 +57,14 @@ func TestOpenFileOrUrl(t *testing.T) { for _, v := range tests { t.Run(fmt.Sprintf("%s", v.filename), func(t *testing.T) { - _, io, err := OpenFileOrUrl(v.filename, "") + file, err := OpenFileOrUrl(v.filename, "") if err != nil { if err.Error() != v.expError.Error() { t.Errorf("Got '%s' != '%s' Exp", err, v.expError) } } else { - data, err := ioutil.ReadAll(io) + defer file.Close() + data, err := io.ReadAll(file) if err != nil { t.Fatal(err) } @@ -223,7 +89,8 @@ func TestOpenLocalFile(t *testing.T) { if err != nil { t.Fatal("Root File: ", err) } - rootFile, err := ioutil.ReadAll(reader) + defer reader.Close() + rootFile, err := io.ReadAll(reader) if err != nil { t.Fatal("Root File: ", err) } @@ -235,7 +102,8 @@ func TestOpenLocalFile(t *testing.T) { if err != nil { t.Fatal("Manifest file: ", err) } - manifestFile, err := ioutil.ReadAll(reader) + defer reader.Close() + manifestFile, err := io.ReadAll(reader) if err != nil { t.Fatal("Manifest file: ", err) } @@ -248,7 +116,8 @@ func TestOpenLocalFile(t *testing.T) { if err != nil { t.Fatal("Binary file: ", err) } - binaryFile, err := ioutil.ReadAll(reader) + defer reader.Close() + binaryFile, err := io.ReadAll(reader) if err != nil { t.Fatal("Binary file: ", err) } diff --git 
a/pkg/lib/util/path_util.go b/pkg/lib/util/path_util.go index 02031598..f203aadb 100644 --- a/pkg/lib/util/path_util.go +++ b/pkg/lib/util/path_util.go @@ -1,57 +1,70 @@ package util import ( - "regexp" + "fmt" + "io" "strconv" "strings" ) -/* -throughout this file we assume 'manifestDir' to be an absolute path -*/ +// PathSpec is a path specifier for including tests within manifests. +type PathSpec struct { + // ParallelRuns matches the number of parallel runs specified + // in a path spec like "5@foo.json" + ParallelRuns int -func IsPathSpec(pathSpec string) bool { - if len(pathSpec) < 3 { - return false - } - if strings.HasPrefix(pathSpec, "@") { - return true - } - if strings.HasPrefix(pathSpec, "p@") { - return true - } - // pathSpec could have trailing quotes - if strings.HasPrefix(pathSpec, "\"@") { - return true + // Path matches the literal path, e.g. foo.json in "@foo.json" + Path string +} + +// ParsePathSpec tries to parse the given string into a PathSpec. +// +// The string takes the format "[n]@file.json". Invalid path specs +// result in an error. +func ParsePathSpec(s string) (*PathSpec, error) { + var ( + ok bool + err error + parallelRuns string + spec PathSpec + ) + + parallelRuns, spec.Path, ok = strings.Cut(s, "@") + if parallelRuns != "" { + spec.ParallelRuns, err = strconv.Atoi(parallelRuns) + if err != nil { + return nil, fmt.Errorf("error parsing ParallelRuns of path spec %q: %w", s, err) + } + } else { + spec.ParallelRuns = 1 } - if strings.HasPrefix(pathSpec, "\"p@") { - return true + + if !ok || spec.Path == "" || spec.ParallelRuns < 0 { + return nil, fmt.Errorf("invalid path spec %q", s) } - return IsParallelPathSpec(pathSpec) + return &spec, err } -func IsParallelPathSpec(pathSpec string) bool { - n, _ := GetParallelPathSpec(pathSpec) - return n > 0 +// IsPathSpec is a wrapper around ParsePathSpec that discards the parsed PathSpec. +// It's useful for chaining within boolean expressions. +func IsPathSpec(s string) bool { + _, err := ParsePathSpec(s) + return err == nil } -func GetParallelPathSpec(pathSpec string) (parallelRepititions int, parsedPath string) { - regex := *regexp.MustCompile(`^\"{0,1}p(\d+)@(.+)\"{0,1}$`) - res := regex.FindAllStringSubmatch(pathSpec, -1) - - if len(res) != 1 { - return 0, "" - } - if len(res[0]) != 3 { - return 0, "" +// Load loads the contents of the file pointed to by the PathSpec into a byte array. 
+func (ps PathSpec) LoadContents(manifestDir string) ([]byte, error) { + requestFile, err := OpenFileOrUrl(ps.Path, manifestDir) + if err != nil { + return nil, fmt.Errorf("error opening path: %w", err) } + defer requestFile.Close() - parsedPath = res[0][2] - parallelRepititions, err := strconv.Atoi(res[0][1]) + contents, err := io.ReadAll(requestFile) if err != nil { - return 0, parsedPath + return nil, fmt.Errorf("error loading file at %q: %w", ps, err) } - return parallelRepititions, parsedPath + return contents, nil } diff --git a/pkg/lib/util/path_util_test.go b/pkg/lib/util/path_util_test.go new file mode 100644 index 00000000..32bfc762 --- /dev/null +++ b/pkg/lib/util/path_util_test.go @@ -0,0 +1,74 @@ +package util + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestParsePathSpec(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + testCases := []struct { + s string + expected PathSpec + }{ + { + s: "@foo.json", + expected: PathSpec{ + ParallelRuns: 1, + Path: "foo.json", + }, + }, + { + s: "5@bar.json", + expected: PathSpec{ + ParallelRuns: 5, + Path: "bar.json", + }, + }, + { + s: "123@baz.json", + expected: PathSpec{ + ParallelRuns: 123, + Path: "baz.json", + }, + }, + { + s: "0@foobar.json", + expected: PathSpec{ + ParallelRuns: 0, + Path: "foobar.json", + }, + }, + } + + for i := range testCases { + testCase := testCases[i] + + t.Run(testCase.s, func(t *testing.T) { + actual, err := ParsePathSpec(testCase.s) + require.NoError(t, err) + require.Equal(t, testCase.expected, *actual) + }) + } + }) + + t.Run("invalid path specs are detected", func(t *testing.T) { + testCases := []string{ + "", // empty + "foo@bar.baz", "1.23@foo.json", // non-digit parallel runs + "p@old.syntax", "p5@old.syntax", "p123@old.syntax", // old syntax + "-1@foo.json", "-123@foo.json", // negative parallel runs + } + + for _, testCase := range testCases { + s := testCase + + t.Run(s, func(t *testing.T) { + actual, err := ParsePathSpec(s) + require.Error(t, err) + require.Nil(t, actual) + }) + } + }) +} diff --git a/test/parallel/direct/check_collected_responses.json b/test/parallel/direct/check_collected_responses.json new file mode 100644 index 00000000..1712aaab --- /dev/null +++ b/test/parallel/direct/check_collected_responses.json @@ -0,0 +1,26 @@ +[ + { + "name": "bounce-json: bounce collected responses from N={{datastore `n_parallel` }} parallel runs: {{ datastore `responses` }}", + "request": { + "server_url": "http://localhost{{ datastore `local_port` }}", + "endpoint": "bounce-json", + "method": "POST", + "body": { + "responses": {{ datastore "responses" | marshal }} + } + }, + "response": { + "statuscode": 200, + "body": { + "body": { + "responses": [ + {{ range $idx, $n := N (datastore "n_parallel") }} + {{ if gt $idx 0 }}, {{ end }} + {{ $idx }} + {{ end }} + ] + } + } + } + } +] diff --git a/test/parallel/direct/manifest.json b/test/parallel/direct/manifest.json new file mode 100644 index 00000000..78c8ad73 --- /dev/null +++ b/test/parallel/direct/manifest.json @@ -0,0 +1,21 @@ +{{ $local_port := ":9999" }} +{{ $n_parallel := 5 }} +{ + "http_server": { + "addr": "{{ $local_port }}", + "dir": "../_res", + "testmode": false + }, + "name": "parallel run of N={{ $n_parallel }} parallel runs", + "tests": [ + { + "name": "port {{ $local_port }}", + "store": { + "n_parallel": {{ $n_parallel }}, + "local_port": {{ $local_port | marshal }} + } + } + , "{{ $n_parallel }}@parallel.json" + , "@check_collected_responses.json" + ] +} diff --git 
a/test/parallel/direct/parallel.json b/test/parallel/direct/parallel.json new file mode 100644 index 00000000..9097c081 --- /dev/null +++ b/test/parallel/direct/parallel.json @@ -0,0 +1,24 @@ +[ + { + "name": "bounce-json: bounce n={{ parallel_run_idx }}", + "request": { + "server_url": "http://localhost{{ datastore `local_port` }}", + "endpoint": "bounce-json", + "method": "POST", + "body": { + "n": {{ parallel_run_idx }} + } + }, + "response": { + "statuscode": 200, + "body": { + "body": { + "n": {{ parallel_run_idx }} + } + } + }, + "store_response_qjson": { + "responses[]": "body.body.n" + } + } +] diff --git a/test/parallel/indirect/check_collected_responses.json b/test/parallel/indirect/check_collected_responses.json new file mode 100644 index 00000000..1712aaab --- /dev/null +++ b/test/parallel/indirect/check_collected_responses.json @@ -0,0 +1,26 @@ +[ + { + "name": "bounce-json: bounce collected responses from N={{datastore `n_parallel` }} parallel runs: {{ datastore `responses` }}", + "request": { + "server_url": "http://localhost{{ datastore `local_port` }}", + "endpoint": "bounce-json", + "method": "POST", + "body": { + "responses": {{ datastore "responses" | marshal }} + } + }, + "response": { + "statuscode": 200, + "body": { + "body": { + "responses": [ + {{ range $idx, $n := N (datastore "n_parallel") }} + {{ if gt $idx 0 }}, {{ end }} + {{ $idx }} + {{ end }} + ] + } + } + } + } +] diff --git a/test/parallel/indirect/manifest.json b/test/parallel/indirect/manifest.json new file mode 100644 index 00000000..95a37b9c --- /dev/null +++ b/test/parallel/indirect/manifest.json @@ -0,0 +1,21 @@ +{{ $local_port := ":9999" }} +{{ $n_parallel := 5 }} +{ + "http_server": { + "addr": "{{ $local_port }}", + "dir": "../_res", + "testmode": false + }, + "name": "parallel run of N={{ $n_parallel }} parallel runs (indirect)", + "tests": [ + { + "name": "port {{ $local_port }}", + "store": { + "n_parallel": {{ $n_parallel }}, + "local_port": {{ $local_port | marshal }} + } + } + , "{{ $n_parallel }}@parallel.json" + , "@check_collected_responses.json" + ] +} diff --git a/test/parallel/indirect/parallel.json b/test/parallel/indirect/parallel.json new file mode 100644 index 00000000..9ad95ca9 --- /dev/null +++ b/test/parallel/indirect/parallel.json @@ -0,0 +1 @@ +"@parallel_indirect.json" diff --git a/test/parallel/indirect/parallel_case.json b/test/parallel/indirect/parallel_case.json new file mode 100644 index 00000000..9a8ce7c6 --- /dev/null +++ b/test/parallel/indirect/parallel_case.json @@ -0,0 +1,22 @@ +{ + "name": "bounce-json: bounce n={{ parallel_run_idx }}", + "request": { + "server_url": "http://localhost{{ datastore `local_port` }}", + "endpoint": "bounce-json", + "method": "POST", + "body": { + "n": {{ parallel_run_idx }} + } + }, + "response": { + "statuscode": 200, + "body": { + "body": { + "n": {{ parallel_run_idx }} + } + } + }, + "store_response_qjson": { + "responses[]": "body.body.n" + } +} diff --git a/test/parallel/indirect/parallel_indirect.json b/test/parallel/indirect/parallel_indirect.json new file mode 100644 index 00000000..eef091a6 --- /dev/null +++ b/test/parallel/indirect/parallel_indirect.json @@ -0,0 +1 @@ +"@parallel_case.json" diff --git a/test/proxy/read_from_proxies.json b/test/proxy/read_from_proxies.json index c0de5a93..bab82a6b 100644 --- a/test/proxy/read_from_proxies.json +++ b/test/proxy/read_from_proxies.json @@ -50,7 +50,7 @@ "type": "binary" }, "body": { - "md5sum": {{ md5sum "@../_res/assets/camera.jpg" | marshal }} + "md5sum": {{ md5sum 
"../_res/assets/camera.jpg" | marshal }} } } }, @@ -82,4 +82,4 @@ } } {{ end }} -] \ No newline at end of file +]