diff --git a/.obsidian/core-plugins.json b/.obsidian/core-plugins.json index 96869dfd..086a8e4e 100644 --- a/.obsidian/core-plugins.json +++ b/.obsidian/core-plugins.json @@ -4,6 +4,7 @@ "switcher", "graph", "backlink", + "canvas", "outgoing-link", "tag-pane", "page-preview", diff --git a/docs/automate/cmemc-command-line-interface/command-reference/admin/client/index.md b/docs/automate/cmemc-command-line-interface/command-reference/admin/client/index.md new file mode 100644 index 00000000..eb66189e --- /dev/null +++ b/docs/automate/cmemc-command-line-interface/command-reference/admin/client/index.md @@ -0,0 +1,87 @@ +--- +title: "cmemc: Command Group - admin client" +description: "List client accounts, get or generate client account secrets." +icon: material/account-cog +tags: + - Keycloak + - Security + - cmemc +--- +# admin client Command Group + + +List client accounts, get or generate client account secrets. + +This command group is an opinionated interface to the Keycloak realm of your Corporate Memory instance. In order to be able to use the commands in this group, the configured cmemc connection account needs to be equipped with the `manage-clients` role in the used realm. + +Client accounts are identified by a client ID which is unique in the scope of the used realm. + +In case your Corporate Memory deployment does not use the default deployment layout, the following additional config variables can be used in your connection configuration: ``KEYCLOAK_BASE_URI`` defaults to `{`CMEM_BASE_URI`}/auth` and locates your Keycloak deployment; ``KEYCLOAK_REALM_ID`` defaults to `cmem` and identifies the used realm. + + +## admin client list + +List client accounts. + +```shell-session title="Usage" +$ cmemc admin client list [OPTIONS] +``` + + + + +Outputs a list of client accounts, which can be used to get an overview as well as a reference for the other commands of the `admin client` command group. + +!!! note + The list command only outputs clients which have a client secret. Use the `--raw` option to get a JSON description of all clients. + + + + +??? info "Options" + ```text + + --raw Outputs raw JSON. + --id-only Lists only Client ID. This is useful for piping the IDs into + other commands. + ``` + +## admin client secret + +Get or generate a new secret for a client account. + +```shell-session title="Usage" +$ cmemc admin client secret [OPTIONS] CLIENT_ID +``` + + + + +This command retrieves or generates a new secret for a client account from a realm. + + + +??? info "Options" + ```text + + --generate Generate a new secret + --output Display client secret + ``` + +## admin client open + +Open clients in the browser. + +```shell-session title="Usage" +$ cmemc admin client open [CLIENT_IDS]... +``` + + + + +With this command, you can open a client in the keycloak web interface in your browser. + +The command accepts multiple client IDs which results in opening multiple browser tabs. + + + diff --git a/docs/automate/cmemc-command-line-interface/command-reference/admin/user/index.md b/docs/automate/cmemc-command-line-interface/command-reference/admin/user/index.md index 637d25b6..d89c92a0 100644 --- a/docs/automate/cmemc-command-line-interface/command-reference/admin/user/index.md +++ b/docs/automate/cmemc-command-line-interface/command-reference/admin/user/index.md @@ -133,10 +133,14 @@ With this command, the password of a user account can be changed. The default ex ??? info "Options" ```text - --value TEXT With this option, the new password can be set in a non- - interactive way. 
+ --value TEXT With this option, the new password can be set in a non- + interactive way. - --temporary If enabled, the user must change the password on next login. + --temporary If enabled, the user must change the password on next + login. + + --request-change If enabled, will send a email to user to reset the + password. ``` ## admin user open diff --git a/docs/automate/cmemc-command-line-interface/command-reference/index.md b/docs/automate/cmemc-command-line-interface/command-reference/index.md index f1c05694..64db829f 100644 --- a/docs/automate/cmemc-command-line-interface/command-reference/index.md +++ b/docs/automate/cmemc-command-line-interface/command-reference/index.md @@ -20,6 +20,9 @@ tags: | ------------: | :------ | :---------- | | [admin](admin/index.md) | [status](admin/index.md#admin-status) | Output health and version information. | | [admin](admin/index.md) | [token](admin/index.md#admin-token) | Fetch and output an access token. | +| [admin client](admin/client/index.md) | [list](admin/client/index.md#admin-client-list) | List client accounts. | +| [admin client](admin/client/index.md) | [secret](admin/client/index.md#admin-client-secret) | Get or generate a new secret for a client account. | +| [admin client](admin/client/index.md) | [open](admin/client/index.md#admin-client-open) | Open clients in the browser. | | [admin metrics](admin/metrics/index.md) | [get](admin/metrics/index.md#admin-metrics-get) | Get sample data of a metric. | | [admin metrics](admin/metrics/index.md) | [inspect](admin/metrics/index.md#admin-metrics-inspect) | Inspect a metric. | | [admin metrics](admin/metrics/index.md) | [list](admin/metrics/index.md#admin-metrics-list) | List metrics for a specific job. | diff --git a/docs/automate/cmemc-command-line-interface/command-reference/project/index.md b/docs/automate/cmemc-command-line-interface/command-reference/project/index.md index f3627cd7..6acafda6 100644 --- a/docs/automate/cmemc-command-line-interface/command-reference/project/index.md +++ b/docs/automate/cmemc-command-line-interface/command-reference/project/index.md @@ -179,7 +179,7 @@ This command deletes existing data integration projects from Corporate Memory. Create projects. ```shell-session title="Usage" -$ cmemc project create PROJECT_IDS... +$ cmemc project create [OPTIONS] PROJECT_IDS... ``` @@ -193,6 +193,16 @@ This command creates one or more new projects. Existing projects will not be ove +??? info "Options" + ```text + + --from-transformation TEXT This option can be used to explicitly create the + link specification, which is internally executed + when using the mapping suggestion of a + transformation task. You need the task ID of the + transformation task. + ``` + ## project reload Reload projects from the workspace provider. diff --git a/docs/automate/cmemc-command-line-interface/command-reference/workflow/index.md b/docs/automate/cmemc-command-line-interface/command-reference/workflow/index.md index ff730ab4..fafa5d28 100644 --- a/docs/automate/cmemc-command-line-interface/command-reference/workflow/index.md +++ b/docs/automate/cmemc-command-line-interface/command-reference/workflow/index.md @@ -65,32 +65,30 @@ With this command, you can execute a workflow that uses variable datasets as inp ??? info "Options" ```text - -i, --input FILE From which file the input is taken: note - that the maximum file size to upload is - limited to a server configured value. If the + -i, --input FILE From which file the input is taken. 
If the workflow has no defined variable input - dataset, this can be ignored. + dataset, this option is not allowed. - -o, --output FILE To which file the result is written to: use + -o, --output FILE To which file the result is written to. Use '-' in order to output the result to stdout. If the workflow has no defined variable - output dataset, this can be ignored. Please - note that the io command will not warn you - on overwriting existing output files. + output dataset, this option is not allowed. + Please note that the io command will not + warn you on overwriting existing output + files. - --input-mimetype [guess|application/xml|application/json|text/csv] + --input-mimetype [application/x-plugin-csv|application/x-plugin-json|application/xml|application/x-plugin-excel|application/octet-stream|application/x-plugin-multiCsv|text/plain|guess] Which input format should be processed: If not given, cmemc will try to guess the mime type based on the file extension or will - fail + fail. - --output-mimetype [guess|application/xml|application/json|application/n-triples|application/vnd.openxmlformats-officedocument.spreadsheetml.sheet|text/csv] + --output-mimetype [application/x-plugin-csv|application/x-plugin-excel|application/n-triples|application/n-triples|application/x-plugin-json|application/xml|guess] Which output format should be requested: If not given, cmemc will try to guess the mime type based on the file extension or will fail. In case of an output to stdout, a - default mime type will be used (currently - xml). + default mime type will be used (JSON). --autoconfig / --no-autoconfig Setup auto configuration of input datasets, e.g. in order to process CSV files with diff --git a/docs/automate/cmemc-command-line-interface/invocation/docker-image/index.md b/docs/automate/cmemc-command-line-interface/invocation/docker-image/index.md index a54d4b03..87867f33 100644 --- a/docs/automate/cmemc-command-line-interface/invocation/docker-image/index.md +++ b/docs/automate/cmemc-command-line-interface/invocation/docker-image/index.md @@ -16,16 +16,16 @@ This is especially needed if you want to use cmemc in orchestrations. The following image - tag combinations are available for public use: -- `docker-registry.eccenca.com/eccenca-cmemc:v22.1` - a specific release +- `docker-registry.eccenca.com/eccenca-cmemc:v23.2` - a specific release - `docker-registry.eccenca.com/eccenca-cmemc:latest` - same as the latest release ``` shell-session title="Image retrieval and check cmemc version" -$ docker run -it --rm docker-registry.eccenca.com/eccenca-cmemc:v22.1 --version -Unable to find image 'docker-registry.eccenca.com/eccenca-cmemc:v22.1' locally -v22.1: Pulling from eccenca-cmemc -Digest: sha256:29bdd320e02f1b7758df22528740964225b62530c73c773a55c36c0e9e18b647 -Status: Downloaded newer image for docker-registry.eccenca.com/eccenca-cmemc:v22.1 -cmemc, version v22.1.1, running under python 3.9.13 +$ docker run -it --rm docker-registry.eccenca.com/eccenca-cmemc:v23.2 --version +Unable to find image 'docker-registry.eccenca.com/eccenca-cmemc:v23.2' locally +v23.2: Pulling from eccenca-cmemc +Digest: sha256:.... 
+Status: Downloaded newer image for docker-registry.eccenca.com/eccenca-cmemc:v23.2 +cmemc, version v23.2 running under python 3.11.4 ``` ## Volumes diff --git a/docs/automate/cmemc-command-line-interface/invocation/github-action/index.md b/docs/automate/cmemc-command-line-interface/invocation/github-action/index.md index 53554a15..807d9448 100644 --- a/docs/automate/cmemc-command-line-interface/invocation/github-action/index.md +++ b/docs/automate/cmemc-command-line-interface/invocation/github-action/index.md @@ -15,14 +15,14 @@ In order to control eccenca Corporate Memory instances from within Github Action ## Runner Provisioning Providing a working cmemc command is simple. -You just need to install a python environment suitable to run cmemc (currently `3.9`). +You just need to install a python environment suitable to run cmemc (currently `3.11`). This can be done with the [setup-python](https://github.com/actions/setup-python) action. After that, simply use `pip` to install cmemc: ```yaml title="Partial github action yaml showing cmemc provisioning" - uses: actions/setup-python@v4 with: - python-version: '3.9' + python-version: '3.11' - name: install cmemc run: | pip install -q cmem-cmemc diff --git a/docs/automate/processing-data-with-variable-input-workflows/index.md b/docs/automate/processing-data-with-variable-input-workflows/index.md index d58549e6..0dd234cf 100644 --- a/docs/automate/processing-data-with-variable-input-workflows/index.md +++ b/docs/automate/processing-data-with-variable-input-workflows/index.md @@ -18,7 +18,7 @@ The workflow is then called via an HTTP REST call (or via [cmemc](../cmemc-comm This allows for solving all kinds of [☆ Automation](../) tasks when you need to process lots of small data snippets or similar. -!!! Abstract +!!! Abstract The complete tutorial is available as a [project file](tutorial-varinput.project.zip). You can import this project @@ -35,7 +35,7 @@ First, install all required ontologies/vocabularies which are needed for mapping In this tutorial, we need the _Schema.org_ and the _RDFS_ vocabulary. Press the (toggle switch) button on the right to install them. -![Vocabulary catalog](pdwviw-vocab-catalog.png) +![Vocabulary catalog](pdwviw-vocab-catalog.png){ class="bordered" } ## 2 Create a new project @@ -43,7 +43,7 @@ Second, create in the tab **DATA INTEGRATION** a new project. Provide it with a The project will include everything you need to build a workflow for extracting Feed XML data, transforming it into RDF, and loading it into a Knowledge Graph. -![Create new project dialog](pdwviw-create-new-project.png) +![Create new project dialog](pdwviw-create-new-project.png){ class="bordered" } ## 3 Create an (example) feed dataset and target graph dataset @@ -54,7 +54,7 @@ For this tutorial, you may take this file: [feed.xml](feed.xml)(1) 1. Original feed source was: `https://www.ecdc.europa.eu/en/taxonomy/term/2942/feed` -![Create XML dataset dialog](pdwviw-create-xml-dataset.png) +![Create XML dataset dialog](pdwviw-create-xml-dataset.png){ class="bordered" } ## 4 Create the feed transformation @@ -64,14 +64,14 @@ The mappings are based on classes and properties defined by the _Schema.org_ and In case you need help with mapping data from XML to RDF, feel free to visit your respective tutorial: [Lift data from JSON and XML sources](../../build/lift-data-from-json-and-xml-sources/). 
-![Feed transformation](pdwviw-feed-transformation.png) +![Feed transformation](pdwviw-feed-transformation.png){ class="bordered" } ## 5 Create the variable input and workflow Create a new workflow in your project. Move the **input** XML feed dataset and the **Feed Data** Graph into the workflow editor and connect them with your created **Transform feed**. -![Workflow with variable input](pdwviw-variable-input-workflow.png) +![Workflow with variable input](pdwviw-variable-input-workflow.png){ class="bordered" } ## 6 Use `cmemc` to feed data into the workflow @@ -107,4 +107,4 @@ http://feeds.bbci.co.uk/news/rss.xml In **EXPLORATION**, you can study the ingested feed data in your Knowledge Graph. -![Explore the result](pdwviw-review-knowledge-graph.png) +![Explore the result](pdwviw-review-knowledge-graph.png){ class="bordered" } diff --git a/docs/automate/processing-data-with-variable-input-workflows/pdwviw-review-knowledge-graph.png b/docs/automate/processing-data-with-variable-input-workflows/pdwviw-review-knowledge-graph.png index d3ea3ac6..7f4347b4 100644 Binary files a/docs/automate/processing-data-with-variable-input-workflows/pdwviw-review-knowledge-graph.png and b/docs/automate/processing-data-with-variable-input-workflows/pdwviw-review-knowledge-graph.png differ diff --git a/docs/automate/processing-data-with-variable-input-workflows/pdwviw-vocab-catalog.png b/docs/automate/processing-data-with-variable-input-workflows/pdwviw-vocab-catalog.png index 6187c91e..0b7d23a9 100644 Binary files a/docs/automate/processing-data-with-variable-input-workflows/pdwviw-vocab-catalog.png and b/docs/automate/processing-data-with-variable-input-workflows/pdwviw-vocab-catalog.png differ diff --git a/docs/build/.pages b/docs/build/.pages index 7c735aed..5e97a12c 100644 --- a/docs/build/.pages +++ b/docs/build/.pages @@ -12,4 +12,4 @@ nav: - Connect to Snowflake: snowflake-tutorial - Active learning: active-learning - Worspace-Tutorial: workspace-tutorial - + - Link Intrusion Detection Systems to Open-Source INTelligence: tutorial-how-to-link-ids-to-osint diff --git a/docs/build/define-prefixes-namespaces/DM-query-editor.png b/docs/build/define-prefixes-namespaces/DM-query-editor.png index a53e638f..045a1804 100644 Binary files a/docs/build/define-prefixes-namespaces/DM-query-editor.png and b/docs/build/define-prefixes-namespaces/DM-query-editor.png differ diff --git a/docs/build/define-prefixes-namespaces/DM-turtle-editor.png b/docs/build/define-prefixes-namespaces/DM-turtle-editor.png index 06f9173a..023ebfd0 100644 Binary files a/docs/build/define-prefixes-namespaces/DM-turtle-editor.png and b/docs/build/define-prefixes-namespaces/DM-turtle-editor.png differ diff --git a/docs/build/define-prefixes-namespaces/index.md b/docs/build/define-prefixes-namespaces/index.md index 265ff91a..9cf2aa33 100644 --- a/docs/build/define-prefixes-namespaces/index.md +++ b/docs/build/define-prefixes-namespaces/index.md @@ -13,8 +13,8 @@ Namespace declarations allow for the abbreviation of IRIs by using a prefixed re For example, after defining a namespace with the values -- **prefix name** = `cohw`, and the -- **namespace IRI** = `https://data.company.org/hardware/` +- **prefix name** = `cohw`, and the +- **namespace IRI** = `https://data.company.org/hardware/` you can use the term `cohw:test` as an abbreviation for the full IRI `https://data.company.org/hardware/test`. 
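As a minimal illustration (only the `cohw` declaration is taken from the example above; the class and property names are hypothetical), the same abbreviation written out in Turtle looks like this:

```turtle
# Declare the namespace once ...
@prefix cohw: <https://data.company.org/hardware/> .

# ... then use prefixed names instead of full IRIs:
cohw:test a cohw:HardwareItem ;        # cohw:HardwareItem is a hypothetical class
    cohw:partOf cohw:test-bench-1 .    # expands to https://data.company.org/hardware/test-bench-1
```

In SPARQL, the equivalent declaration is `PREFIX cohw: <https://data.company.org/hardware/>`.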
@@ -22,15 +22,14 @@ This is particularly useful when you have to write source code in Turtle and SPA ## Managing Namespace Declarations - ### Using the Vocabulary Catalog After installing a vocabulary from the [Vocabulary Catalog](/explore-and-author/vocabulary-catalog), the vocabulary namespace declaration is automatically added to all integration projects. In order to get the **prefix name** and the **namespace IRI** from the vocabulary graph, the following terms from the [VANN vocabulary](https://vocab.org/vann/) need to be used on the Ontology resource. -- [vann:preferredNamespacePrefix](https://vocab.org/vann/#preferredNamespacePrefix) - to specify the **prefix name** -- [vann:preferredNamespaceUri](https://vocab.org/vann/#preferredNamespaceUri) - to specify the **namespace IRI** +- [vann:preferredNamespacePrefix](https://vocab.org/vann/#preferredNamespacePrefix) - to specify the **prefix name** +- [vann:preferredNamespaceUri](https://vocab.org/vann/#preferredNamespaceUri) - to specify the **namespace IRI** In the Explore area, an Ontology with a correct namespace declaration looks like this: @@ -43,7 +42,7 @@ The `vocabulary` command group of [cmemc](../../automate/cmemc-command-line-inte Beginning with v22.2, this command has an additional option `--namespace` which you can use to set a vocabulary namespace even if the vocabulary does not include the data needed for autodiscovery: ```shell-session -$ cmemc vocabulary import my-ont.ttl --namespace myo https//example.org/my/` +cmemc vocabulary import my-ont.ttl --namespace myo https//example.org/my/` ``` ### Using the Project Configuration @@ -52,10 +51,10 @@ In addition to the used vocabulary namespace declarations, you may want to add w Such organization use cases include: -- Namespaces per class / resource type: - - **prefix name** = `persons`, **namespace IRI** = `https://example.org/data/persons/` -- Namespaces per data owner or origin: - - **prefix name** = `sales`, **namespace IRI** = `https://example.org/data/sales/` +- Namespaces per class / resource type: + - **prefix name** = `persons`, **namespace IRI** = `https://example.org/data/persons/` +- Namespaces per data owner or origin: + - **prefix name** = `sales`, **namespace IRI** = `https://example.org/data/sales/` Prefixes in Data Integration are defined on a project basis. When creating a new project, a list of well-know prefixes is already declared. @@ -69,8 +68,8 @@ By using the **Edit Prefix Settings** button in this Configuration area, you wil In this dialog, you are able to -- Delete a namespace declaration → **Delete Prefix** -- Add a new namespace declaration → **Add** +- Delete a namespace declaration → **Delete Prefix** +- Add a new namespace declaration → **Add** ## Validating Namespace Declarations @@ -78,6 +77,6 @@ After adding namespace declarations to a project you are able to use the abbrevi ![Prefixes displayed in the mapping dialogs, e.g. 
property selection.](prefix-displayed-in-mapping.png) -![Query Editor](DM-query-editor.png) +![Query Editor](DM-query-editor.png){ class="bordered" } -![Turtle Editor](DM-turtle-editor.png) +![Turtle Editor](DM-turtle-editor.png){ class="bordered" } diff --git a/docs/build/lift-data-from-json-and-xml-sources/index.md b/docs/build/lift-data-from-json-and-xml-sources/index.md index 49ddb5a1..04cddc4d 100644 --- a/docs/build/lift-data-from-json-and-xml-sources/index.md +++ b/docs/build/lift-data-from-json-and-xml-sources/index.md @@ -30,11 +30,11 @@ The documentation consists of the following steps, which are described in detail The following material is used in this tutorial: -- Sample vocabulary describing the data in the JSON and XML files: [products_vocabulary.nt](products_vocabulary.nt) +- Sample vocabulary describing the data in the JSON and XML files: [products_vocabulary.nt](products_vocabulary.nt) - ![Visualization of the "Products Vocabulary".](products-vocab-xml+json.png) + ![Visualization of the "Products Vocabulary".](products-vocab-xml+json.png){ class="bordered" } -- Sample JSON file: [services.json](services.json) +- Sample JSON file: [services.json](services.json) ```json [ @@ -56,7 +56,8 @@ The following material is used in this tutorial: ] ``` -- Sample XML file: [orgmap.xml](orgmap.xml) +- Sample XML file: [orgmap.xml](orgmap.xml) + ```xml @@ -110,19 +111,19 @@ The vocabulary contains the classes and properties needed to map the source data 1. In Corporate Memory, click **Vocabularies** in the navigation under **EXPLORE** on the left side of the page. - ![Menu entry EXPLORE > Vocabularies](menu-explore-vocabularies.png){width="30%"} + ![Menu entry EXPLORE > Vocabularies](menu-explore-vocabularies.png){ class="bordered" width="30%" } 2. Click **Register new vocabulary** on the top right of the **Vocabulary catalog** page in Corporate Memory. - ![Vocabularies Catalog](vocab-catalog-xml+json.png) + ![Vocabularies Catalog](vocab-catalog-xml+json.png){ class="bordered" } 3. Define a **Name**, a **Graph URI** and a **Description** of the vocabulary. _In this example we will use:_ - - Name: _**Product Vocabulary**_ - - Graph URI: _**http://ld.company.org/prod-vocab/**_ - - Description: _**Example vocabulary modeled to describe relations between products and services.**_ + - Name: _**Product Vocabulary**_ + - Graph URI: _****_ + - Description: _**Example vocabulary modeled to describe relations between products and services.**_ - ![Dialog to register a new vocabulary.](dialog-register-new-vocabulary.png){width="50%"} + ![Dialog to register a new vocabulary.](dialog-register-new-vocabulary.png){ class="bordered" width="50%" } 4. Click **REGISTER**. @@ -136,11 +137,11 @@ To add the data files, click Projects under BUILD in the navigation on the left 2. In **Create new item** window, select **JSON** and click Add. - ![Dialog to create new JSON dataset](create-dataset-JSON.png) + ![Dialog to create new JSON dataset](create-dataset-JSON.png){ class="bordered" } 3. Define a **Label** for the dataset and upload the [services.json](services.json) file. You can leave all the other fields at default values. - ![Dialog to create new JSON dataset](dialog-create-new-json-dataset.png){width="45%"} ![Dialog to create new JSON dataset](dialog-create-new-json-dataset-2.png){width="45%"} + ![Dialog to create new JSON dataset](dialog-create-new-json-dataset.png){ class="bordered" width="45%"} ![Dialog to create new JSON dataset](dialog-create-new-json-dataset-2.png){ class="bordered" width="45%"} 4. 
Click **Create**. @@ -148,11 +149,11 @@ To add the data files, click Projects under BUILD in the navigation on the left 1. Press the **Create** button and select XML - ![Dialog to create new XML dataset](create-dataset-XML.png) + ![Dialog to create new XML dataset](create-dataset-XML.png){ class="bordered" } 2. Define a **Label** for the dataset and upload the [orgmap.xml](orgmap.xml) example file. You can leave all the other fields at default values. - ![Dialog to create new XML dataset](dialog-create-new-xml-dataset.png) + ![Dialog to create new XML dataset](dialog-create-new-xml-dataset.png){ class="bordered" } 3. Click **Create**. @@ -162,7 +163,7 @@ To add the data files, click Projects under BUILD in the navigation on the left 2. In **Create new item** window, select **Knowledge Graph** and click **Add**. The Create new item of type Knowledge Graph window appears. - ![Dialog to create new Knowledge Graph dataset](create-dataset-KG.png) + ![Dialog to create new Knowledge Graph dataset](create-dataset-KG.png){ class="bordered" } 3. Fill in the required details such as Label and Description. @@ -173,7 +174,7 @@ To add the data files, click Projects under BUILD in the navigation on the left - Name: _**Service Knowledge Graph**_ - Graph: _**http://ld.company.org/prod-instances/**_ - ![Dialog to create new Knowledge Graph dataset](create-new-kg-for-json.png) + ![Dialog to create new Knowledge Graph dataset](create-new-kg-for-json.png){ class="bordered" } === "XML" @@ -182,7 +183,7 @@ To add the data files, click Projects under BUILD in the navigation on the left - Name: _**Organization Knowledge Graph**_ - Graph: _**http://ld.company.org/organization-data/**_ - ![Dialog to create new Knowledge Graph dataset](create-new-kg-for-xml.png) + ![Dialog to create new Knowledge Graph dataset](create-new-kg-for-xml.png){ class="bordered" } ## 4 Create a Transformation @@ -192,7 +193,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran 2. On the **Create New Item** window, select **Transform** and click **Add** to create a new transformation. - ![Create new Transformation](create-new-tf.png) + ![Create new Transformation](create-new-tf.png){ class="bordered" } 3. In the **Create new item of type Transform** window, enter the required fields. @@ -205,7 +206,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran - Select the Source Dataset: _**Services JSON**_ - Select the Output Dataset: _**Service_Knowledge_Graph**_ - ![Dialog to create new Transformation](create-new-tf-for-json.png) + ![Dialog to create new Transformation](create-new-tf-for-json.png){ class="bordered" } Click **Create**. @@ -219,7 +220,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran - Type: _**dept**_ (define the Source Type, which defines the XML element that should be iterated when creating resources) - Select the Output Dataset: _**Organization_Knowledge_Graph**_ - ![Dialog to create new Transformation](create-new-tf-for-xml.png) + ![Dialog to create new Transformation](create-new-tf-for-xml.png){ class="bordered" } Click **Create**. @@ -227,7 +228,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran 5. Click **Edit** to create a base mapping. - ![Mapping header configuration.](mapping-configuration-header.png) + ![Mapping header configuration.](mapping-configuration-header.png){ class="bordered" } 6. 
Define the **Target entity type** from the vocabulary, the **URI pattern** and a **Label** for the mapping. @@ -280,13 +281,13 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran 7. Evaluate your mapping by pressing on the ![Button expand](button-expand-xml+json.png) button in the **Examples of target data** property to see at most three generated base URIs. - ![Preview of mapping results](mapping-value-preview.png) + ![Preview of mapping results](mapping-value-preview.png){ class="bordered" } We have now created the Service entities in the Knowledge Graph. Next we will now add the name of our entity. 8. Click the circular blue **+** icon on the lower right and select **Add value mapping**. - ![Add a mapping rule](service-mapping-add-rule.png) + ![Add a mapping rule](service-mapping-add-rule.png){ class="bordered" } === "JSON" @@ -298,7 +299,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran - which corresponds to the following element in the json-file: _[ {"ProductManager": {  "name": "Corinna Ludwig"} ... } ...]_ - An optional Label: _**has Product Manager**_ - ![Configuration of a mapping rule](mapping-rule-config-json.png) + ![Configuration of a mapping rule](mapping-rule-config-json.png){ class="bordered" } Click **Save**. @@ -312,7 +313,7 @@ The transformation defines how an input dataset (e.g.: JSON or XML) will be tran - which corresponds to the `department name` attribute in the XML file - An optional Label: _**department name**_ - ![Configuration of a mapping rule](mapping-rule-config-xml.png) + ![Configuration of a mapping rule](mapping-rule-config-xml.png){ class="bordered" } Click **Save**. @@ -320,17 +321,17 @@ By clicking on the ![Button expand](button-expand-xml+json.png) button in the ** === "JSON" - ![Mapping result](mapping-result-json.png) + ![Mapping result](mapping-result-json.png){ class="bordered" } === "XML" - ![Mapping result](mapping-result-xml.png) + ![Mapping result](mapping-result-xml.png){ class="bordered" } ## 5 Evaluate a Transformation Click **Transform evaluation** to evaluate the transformed entities. -![Transformation evaluation view](xml+json-transform-evaluation.png) +![Transformation evaluation view](xml+json-transform-evaluation.png){ class="bordered" } ## 6 Build the Knowledge Graph @@ -338,13 +339,13 @@ Click **Transform evaluation** to evaluate the transformed entities. 2. Press the ![Button play](button-play-xml+json.png) button and validate the results. In this example, 9x Service entities were created in our Knowledge Graph based on the mapping. 3. You can click **Knowledge Graphs** under **EXPLORE** to (re-)view of the created Knowledge Graphs 4. Enter the following URIs in the Enter search term for JSON and XML respectively. 
- - JSON / Service: _**http://ld.company.org/prod-instances/**_ - - XML / Department: _**http://ld.company.org/organization-data/**_ + - JSON / Service: _****_ + - XML / Department: _****_ === "JSON" - ![Service KG](services-kg.png) + ![Service KG](kg-services.png){ class="bordered" } === "XML" - ![Organization KG](kg-organization.png) + ![Organization KG](kg-organization.png){ class="bordered" } diff --git a/docs/build/lift-data-from-json-and-xml-sources/kg-organization.png b/docs/build/lift-data-from-json-and-xml-sources/kg-organization.png index 51d06af2..49a219b3 100644 Binary files a/docs/build/lift-data-from-json-and-xml-sources/kg-organization.png and b/docs/build/lift-data-from-json-and-xml-sources/kg-organization.png differ diff --git a/docs/build/lift-data-from-json-and-xml-sources/kg-services.png b/docs/build/lift-data-from-json-and-xml-sources/kg-services.png new file mode 100644 index 00000000..c3c984e7 Binary files /dev/null and b/docs/build/lift-data-from-json-and-xml-sources/kg-services.png differ diff --git a/docs/build/lift-data-from-json-and-xml-sources/menu-explore-vocabularies.png b/docs/build/lift-data-from-json-and-xml-sources/menu-explore-vocabularies.png index 673f5299..55906885 100644 Binary files a/docs/build/lift-data-from-json-and-xml-sources/menu-explore-vocabularies.png and b/docs/build/lift-data-from-json-and-xml-sources/menu-explore-vocabularies.png differ diff --git a/docs/build/lift-data-from-json-and-xml-sources/services-kg.png b/docs/build/lift-data-from-json-and-xml-sources/services-kg.png deleted file mode 100644 index 4e45b854..00000000 Binary files a/docs/build/lift-data-from-json-and-xml-sources/services-kg.png and /dev/null differ diff --git a/docs/build/lift-data-from-json-and-xml-sources/vocab-catalog-xml+json.png b/docs/build/lift-data-from-json-and-xml-sources/vocab-catalog-xml+json.png index 64d91204..abfd1caf 100644 Binary files a/docs/build/lift-data-from-json-and-xml-sources/vocab-catalog-xml+json.png and b/docs/build/lift-data-from-json-and-xml-sources/vocab-catalog-xml+json.png differ diff --git a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/explore-vocab-catalog.png b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/explore-vocab-catalog.png index 7ccabc94..55906885 100644 Binary files a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/explore-vocab-catalog.png and b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/explore-vocab-catalog.png differ diff --git a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/index.md b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/index.md index 481941a9..0cdfaa9c 100644 --- a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/index.md +++ b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/index.md @@ -38,11 +38,11 @@ The documentation consists of the following steps, which are described in detail The following material is used in this tutorial, you should download the files and have them at hand throughout the tutorial: -- Sample vocabulary which describes the data in the CSV files: [products_vocabulary.nt](products_vocabulary.nt) +- Sample vocabulary which describes the data in the CSV files: [products_vocabulary.nt](products_vocabulary.nt) - ![](products-vocab.png) + ![](products-vocab.png){ class="bordered" } -- Sample CSV file: [services.csv](services.csv) +- Sample CSV file: 
[services.csv](services.csv) !!! info @@ -52,7 +52,7 @@ The following material is used in this tutorial, you should download the files a | I241-8776317 | Component Confabulation | Z249-1364492, L557-1467804, C721-7900144, ... | Corinna.Ludwig@company.org | 1082,00 EUR | | … | … | … | … | … | -- Sample Excel file: [products.xlsx](products.xlsx) +- Sample Excel file: [products.xlsx](products.xlsx) !!! info @@ -72,11 +72,11 @@ The vocabulary contains the classes and properties needed to map the data into t 1. In Corporate Memory, click Vocabularies under **EXPLORE** in the navigation on the left side of the page. - ![](explore-vocab-catalog.png){width="30%"} + ![](explore-vocab-catalog.png){ class="bordered" width="30%" } 2. Click **Register new vocabulary** on the top right. - ![](vocab-catalog.png) + ![](vocab-catalog.png){ class="bordered" } 3. Define a **Name**, a **Graph URI** and a **Description** of the vocabulary. _In this example we will use:_ @@ -85,7 +85,7 @@ The vocabulary contains the classes and properties needed to map the data into t - Description: _**Example vocabulary modeled to describe relations between products and services.**_ - Vocabulary File: Browse in your filesystem for the **[products_vocabulary.nt](products_vocabulary.nt)** file and select it to be uploaded. - ![](register-new-vocab.png){width="50%"} + ![](register-new-vocab.png){ class="bordered" width="50%" } === "cmemc" @@ -101,7 +101,7 @@ The vocabulary contains the classes and properties needed to map the data into t 1. In Corporate Memory, click Projects under **BUILD** in the navigation on the left side of the page. - ![](menu-build-projects.png){width="30%"} + ![](menu-build-projects.png){ class="bordered" width="30%" } 2. Click **Create** at the top of the page.  @@ -115,11 +115,11 @@ The vocabulary contains the classes and properties needed to map the data into t 7. In the **Create new item** dialog, select **CSV**. - ![](build-dataset-types-csv.png) + ![](build-dataset-types-csv.png){ class="bordered" } 8. Fill out a label and upload the **[services.csv](services.csv) sample file**. - ![](create new-dataset-csv.png) + ![](create new-dataset-csv.png){ class="bordered" } 9. Click **Create**.** Leave all other parameters at their default values. @@ -131,7 +131,7 @@ The vocabulary contains the classes and properties needed to map the data into t 1. In the project, Click **Create** and select the **JDBC endpoint** type. - ![](build-dataset-types-jdbc.png) + ![](build-dataset-types-jdbc.png){ class="bordered" } 2. Define a **Label** for the dataset, specify the **JDBC Driver connection URL**, the **table** name and the **user** and **password** to connect to the database. _In this example we will use:_ @@ -141,7 +141,7 @@ The vocabulary contains the classes and properties needed to map the data into t - username: _**root**_ - password: _**\*\*\*\***_ - ![](create new-dataset-jdbc.png) + ![](create new-dataset-jdbc.png){ class="bordered" } The general form of the JDBC connection string is: @@ -186,15 +186,15 @@ To validate that the input data is correct, you can preview the data table in Co 1. On the dataset page, press the **Load preview** button - ![](dataset-services.png) + ![](dataset-services.png){ class="bordered" } 2. Once the preview is loaded, you can view a couple of rows to check that your data is accessible. - ![](dataset-services-preview.png) + ![](dataset-services-preview.png){ class="bordered" } 3. Optionally, you can click **start profiling** and explore statistics about the dataset. 
- ![](dataset-services-profiling.png) + ![](dataset-services-profiling.png){ class="bordered" } --- @@ -204,14 +204,14 @@ To validate that the input data is correct, you can preview the data table in Co 2. In Create new item window, select Knowledge Graph and click Add. - ![](build-dataset-types-kg.png) + ![](build-dataset-types-kg.png){ class="bordered" } 3. The Create new item of type Knowledge Graph window appears. 4. Define a **Label** for the Knowledge Graph and provide a **graph** uri. Leave all the other parameters at the default values. _In this example we will use:_ - - Label: _**Service Knowledge Graph**_ - - Graph: _**http://ld.company.org/prod-instances/**_ + - Label: _**Service Knowledge Graph**_ + - Graph: _****_ 5. Click **Create**. @@ -227,35 +227,35 @@ The transformation defines how an input dataset (e.g. CSV) will be transformed i 3. Fill out the the details leaving the **target vocabularies** field at its default value **all installed vocabularies,** which will enable us to create a transformation to the previously installed products vocabulary. _In this example we will use:_ - - Name: _**Lift Service Database**_ - - _In the section **INPUT TASK** in the field **Dataset**, select the previously created dataset: **Services** (Input Dataset)._ - - _Select the previously created dataset as the Output Dataset: **Service Knowledge Graph**_ + - Name: _**Lift Service Database**_ + - _In the section **INPUT TASK** in the field **Dataset**, select the previously created dataset: **Services** (Input Dataset)._ + - _Select the previously created dataset as the Output Dataset: **Service Knowledge Graph**_ 4. In the main area you will find the **Mapping editor**. - ![](services-mapping.png) + ![](services-mapping.png){ class="bordered" } 5. Click **Mapping** in the main area to expand its menu. - ![](mapping-header.png) + ![](mapping-header.png){ class="bordered" } 6. Click **Edit** to create a base mapping. - ![](services-mapping-rule.png) + ![](services-mapping-rule.png){ class="bordered" } 7. Define the **Target entity type** from the vocabulary, the **URI pattern** and a **label** for the mapping. _In this example we will use:_ - - Target entity type: _**Service**_ - - URI pattern: + - Target entity type: _**Service**_ + - URI pattern: - - Click **Create custom pattern** - - Insert _**http://ld.company.org/prod-inst/{ServiceID}**_ - - where _http://ld.company.org/prod-inst/_ is a common prefix for the instances in this use case, - - and _{ServiceID}_ is a placeholder that will resolve to the column of that name + - Click **Create custom pattern** + - Insert _****_ + - where __ is a common prefix for the instances in this use case, + - and _{ServiceID}_ is a placeholder that will resolve to the column of that name - - _An optional Label: **Service**_ + - _An optional Label: **Service**_ - ![](services-mapping-class.png) + ![](services-mapping-class.png){ class="bordered" } 8. Click **Save** _Example RDF triple in our Knowledge Graph based on the mapping definition:_ @@ -265,22 +265,22 @@ The transformation defines how an input dataset (e.g. CSV) will be transformed i 9. Evaluate your mapping by clicking the Expand ![](button-expand.png) button in the **Examples of target data** property to see at most three generated base URIs. - ![](mapping-inline-preview.png) + ![](mapping-inline-preview.png){ class="bordered" } We have now created the Service entities in the Knowledge Graph. As a next step, we will add the name of the Service entity. 10. 
Press the circular **Blue + button** on the lower right and select **Add value mapping**. - ![](services-mapping-add-rule.png) + ![](services-mapping-add-rule.png){ class="bordered" } 11. Define the **Target property**, the **Data type**, the **Value path** (column name) and a **Label** for your value mapping. _In this example we will use:_ - - Target Property: **_name_** - - Data type: _**StringValueType**_ - - Value path: _**ServiceName**_ (which corresponds to the column of that name) - - An optional Label: _**service name**_ + - Target Property: **_name_** + - Data type: _**StringValueType**_ + - Value path: _**ServiceName**_ (which corresponds to the column of that name) + - An optional Label: _**service name**_ - ![](services-mapping-rule-edit.png) + ![](services-mapping-rule-edit.png){ class="bordered" } 12. Click **Save.** @@ -290,7 +290,7 @@ The transformation defines how an input dataset (e.g. CSV) will be transformed i Go the **Transform evaluation** tab of your transformation to view a list of generated entities. By clicking one of the generated entities, more details are provided. -![](mapping-evaluation.png) +![](mapping-evaluation.png){ class="bordered" } --- @@ -298,12 +298,12 @@ Go the **Transform evaluation** tab of your transformation to view a list of gen 1. Go into the mapping and visit the **Transform execution** tab. - ![](mapping-execution.png) + ![](mapping-execution.png){ class="bordered" } 2. Press the ![](button-play.png) button and validate the results. In this example, 9x Service triples were created in our Knowledge Graph based on the mapping. - ![](mapping-execution-result.png) + ![](mapping-execution-result.png){ class="bordered" } -3. Finally you can use the DataManager **Knowledge Graphs** module to (re-)view of the created Knowledge Graph: http://ld.company.org/prod-instances/ +3. Finally you can use the DataManager **Knowledge Graphs** module to (re-)view of the created Knowledge Graph: - ![](kg-result.png) + ![](kg-result.png){ class="bordered" } diff --git a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/kg-result.png b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/kg-result.png index e48c2027..4081406c 100644 Binary files a/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/kg-result.png and b/docs/build/lift-data-from-tabular-data-such-as-csv-xslx-or-database-tables/kg-result.png differ diff --git a/docs/build/loading-jdbc-datasets-incrementally/graph-turtle-view.png b/docs/build/loading-jdbc-datasets-incrementally/graph-turtle-view.png index 705cc0d4..3080e850 100644 Binary files a/docs/build/loading-jdbc-datasets-incrementally/graph-turtle-view.png and b/docs/build/loading-jdbc-datasets-incrementally/graph-turtle-view.png differ diff --git a/docs/build/loading-jdbc-datasets-incrementally/index.md b/docs/build/loading-jdbc-datasets-incrementally/index.md index e8fc81e9..71f0536e 100644 --- a/docs/build/loading-jdbc-datasets-incrementally/index.md +++ b/docs/build/loading-jdbc-datasets-incrementally/index.md @@ -36,13 +36,13 @@ To extract data from a relational database, you need to first register a **JDBC 1. In Corporate Memory, click Projects under **BUILD** in the navigation on the left side of the page. - ![Menu BUILD > Projects](incr-jdbc-menu-build-projects.png) + ![Menu BUILD > Projects](incr-jdbc-menu-build-projects.png){ class="bordered" } 2. Click **Create** at the top of the page. 3. In the Create new item window, select Project and click Add. 
The Create new item of type Project window appears. 4. In the Create new item window, select Dataset under **Item Type**, search for JDBC endpoint, and click **Add**. - ![Create JDBC Dataset](create-dataset-JDBC.png) + ![Create JDBC Dataset](create-dataset-JDBC.png){ class="bordered" } 5. Provide the required configuration details for the JDBC endpoint: - **Label**: Provide a table name. @@ -55,9 +55,9 @@ To extract data from a relational database, you need to first register a **JDBC - **User**: Provide the user name which is allowed to access the database. - **Password**: Provide the user password that is allowed to access the database. -![Configuration of a JDBC dataset](configure-JDBC-1.png) +![Configuration of a JDBC dataset](configure-JDBC-1.png){ class="bordered" } -![Configuration of a JDBC dataset](configure-JDBC-2.png) +![Configuration of a JDBC dataset](configure-JDBC-2.png){ class="bordered" } ## 2 Create a Metadata Graph @@ -75,7 +75,7 @@ In our example, the JDBC Endpoint IRI looks like this: _ + +

+ +*Figure 1. We need to imagine an interface where analysts can list the IoCs during the incident and read all their documentations.* + +The classic Splunk interface is a set of panels, like "static table" panel. This table panel can show a table of cells and also one cell with a text via in input a Splunk Search Processing Language (SPL). With the plugin [Linked Data App](../link-IDS-event-to-KG/eccenca_commands.tar.gz) ([tutorial page](../link-IDS-event-to-KG/index.md)), we can insert a SPARQL query and select the part of your knowledge graph to print (figure 2). + +![](demo-splunk-with-kg-edit.png) + +*Figure 2. An analyst can insert a SPARQL query with Splunk token in input of one "static table" panel of his dashboard with the plugin "Linked Data App"* + +The first dashboard to do for our use cases is the list of IoCs with classic SPL queries of analysts via a static table and allow the analyst to select one IoC via a click in the table. The dashboard with this selected row can save the ID of IoC in a global variable for the other panels in the same dashboard ([a Splunk token](https://docs.splunk.com/Documentation/Splunk/9.0.5/Viz/tokens)). When this variable (Splunk token) is changed by the user, Splunk is able to recalculate automatically the queries with this variable in the other static tables. So with this mechanism, we can print the details in the knowledge graph (with SPARQL queries) and the IoC statistics in the Splunk indexes (with SPL queries) around of one selected IoC. With these knowledge about Splunk dashboard, we proposed to analysts a first naive interface in the figure 3. + +
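+A minimal sketch of this token mechanism (simplified from the full dashboard XML shown later on this page; the `ioc` index name and the reduced panel layout are only illustrative):
+
+```xml
+<!-- Panel 1: clicking a row stores the selected rule name in the dashboard-wide token -->
+<table>
+  <search>
+    <query>| tstats count where index=ioc by RuleTitle</query>
+  </search>
+  <drilldown>
+    <set token="selected_rule">$click.value$</set>
+  </drilldown>
+</table>
+
+<!-- Panel 2: any panel that references $selected_rule$ is re-run when the token changes -->
+<table>
+  <search>
+    <query>| sparql query="... VALUES ?title { \"$selected_rule$\" } ..."</query>
+  </search>
+</table>
+```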

+ +

+ +*Figure 3. Imagine the expected Splunk dashboard with its interaction* + +Here, the figure 3 is nice but before this first schema during the project, there are a lot of shemas and all were minimalist and ugly often only on a whiteboard. This type schema before the technical feasibility is only to validate the objective with the analysts before starting the development. During the technical feasibility, we can decrease/increase step-by-step your objectives to show finally a first result in figure 4 in a real dashboard. + +![](../link-IDS-event-to-KG/demo_ld_without_html.png) + +*Figure 4. First interface with only SPARQL queries in SPLUNK static tables.* + +## Technical feasibility with the available information + +It's not really technical to check if the data is available or not, but without technical knowledge, it's hard to evaluate the difficulty to link each id to their instance in your RDF knowledge graph. +In this tutorial, we learn to use Corporate Memory of Eccenca to transform these IDs to IRI to import properly these ID with the other data necessary to build these interface. + +After research and one meeting with analysts, we have chosen the datasets of Mitre Attack, the datasets of IoC rules (Sigma and Hayabusa) in Github and of course, the IoCs data already in the Splunk indexes. + +![](alert_data_of_ioc.png) + +*Figure 5. Define the information available in alerts of IDS and in OSINT to link these information.* + +The Splunk indexes of IoCs are selected by the analysts in the dashboard via the component [multiselect input](https://docs.splunk.com/Documentation/Splunk/9.0.5/Viz/FormEditor#Multiselect) in the form part of dashboard (the form part inits other Splunk tokens). We have choosen the IDs to link these data and the figure 5 resumes how we are going to link these data via Corporate Memory of Eccenca. + +## Understand the base of Splunk dashboards + +A Splunk dashboard is coded in XML. The Splunk user can modify a dashboard via a no-code interface or directly in the code. + +A user can clone any dashboard before modifying it. + +For example, in our dashboard, you can find: + +- the root element `form`, +- the definition of input component to select the Splunk indexes by the user and +- the table panel to execute a SPL query and show the result in a table + +```xml +
+ +... + + + index=" + " + OR + index + index + + | eventcount summarize=false index=* +| search NOT index IN ("history", "cim_modactions", "summary") +| dedup index +| fields index + 0 + + + all + * + +... + + + + + | tstats count where $selected_index$ ld.ld_source_type=hayabusa Level!=info $level$ by RuleTitle +| rename RuleTitle as "Rule name" +| sort - count + 0 + + + + + + $click.value$ + +
+
+ ... +
+... +
+``` + +To read this code, you need to know the [Splunk concept "token"](https://docs.splunk.com/Documentation/Splunk/9.0.5/Viz/tokens). Quickly, a Splunk token is a global variable in the dashboard between all the components of dashboard. + +In this example, the token "selected_index" is defined by the input component and reuse it in the SPL query of the table panel. When the user click on a label in this panel, this selected `RuleTitle` is saved in the token `selected_rule` and the panel which uses this token, are refreshed. So, we use tokens in the SPARQL queries to refresh automatically the SPARQL results inside dashboards. For example, this SPARQL query prints three columns "Source","Description" and "MitreID", only if the user initializes before the token `selected_rule` in another panel of dashboard: + +```xml + + + Rule's sources + + + | sparql +query="prefix ctis: <https://github.com/SigmaHQ/sigma-specification/blob/main/Sigma_specification.md#> +prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> +prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> +prefix xsd: <http://www.w3.org/2001/XMLSchema#> + +SELECT DISTINCT (STRBEFORE(STRAFTER(STR(?link),\"https://github.com/\"),\"/\") as ?Source) (?comment as ?Description) ?link (?mitreID as ?MitreID) +FROM <http://example.com/rule> +WHERE { + VALUES ?title { \"$selected_rule$\" } + + ?ruleHayabusa a ctis:Rule ; + rdfs:label ?title ; + rdfs:comment ?comment ; + rdfs:seeAlso ?referenceLink; + rdfs:isDefinedBy ?link ; + ctis:filename ?filename . + OPTIONAL { + ?ruleHayabusa ctis:mitreAttackTechniqueId ?mitreID . + } +}" +... + + ... + ["Source","Description","MitreID"] +
+
+
+```
+
+!!! Tip
+
+    The Splunk editor offers little support for SPARQL. Develop your SPARQL query in the Corporate Memory SPARQL editor first. Then, as you can see in this example, you have to insert (ideally automatically) a `\` before every `"` in the query before copying the SPARQL into a Splunk query. Once the plain SPARQL query works in the dashboard, you can insert the tokens.
+
+!!! Warning
+
+    You can follow the tutorials about MITRE ATT&CK and the rules without using Splunk. If you want to execute the dashboards against your own knowledge graph, you need to adapt the SPL queries of the dashboards to your data in Splunk. We do not share the Splunk indexes in this tutorial.
+
+## Technical feasibility with the Splunk dashboard
+
+During our project, we implemented the SPARQL command needed to execute a SPARQL query inside an SPL query, and also several scripts that extend the dashboard panels. For example, printing HTML text and opening an external web page from a dashboard are both problematic out of the box. Before starting the knowledge graph, we need to know whether Splunk requires a specific output syntax. So we looked for the simplest way to print the HTML literals of our knowledge graph together with their links, found one, and implemented small JavaScript scripts to solve these problems. These scripts are imported via the header of the dashboard XML and called from the XML of the static table panel. You can see the final dashboard with the MITRE description rendered as HTML (MITRE uses Markdown in these datasets, but we show how to convert Markdown links to HTML). These scripts are included in the [Linked Data App](../link-IDS-event-to-KG/eccenca_commands.tar.gz) ([tutorial page](../link-IDS-event-to-KG/index.md)).
+
+![](../link-IDS-event-to-KG/demo_ld.png)
+
+*Figure 6. With an external JavaScript script, static tables support HTML and the user can open external links.*
+
+## Conclusion: starting to specify the necessary RDF models for these interfaces
+
+With the interfaces, the available data and their links in mind, the analyst can now sketch the RDF models of the concepts (for example, figures 7 and 8) needed in the future knowledge graph to generate the expected dashboards. These RDF models evolve together with the interfaces and with future RDF standards of the cyber security world. With Corporate Memory, after each evolution of your models, you can rebuild your knowledge graph from scratch whenever you want. Several versions of an RDF model can coexist in your knowledge graphs, so you can update your dashboards progressively without interrupting the old dashboards.
+
+![](../lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-model-course-of-action.png)
+*Figure 7. RDF model of the MITRE concept "course of action" in our future knowledge graph.*
+
+![](../lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rdf-model-rule.png)
+*Figure 8. 
RDF model of concept "IoC Rule" in our future knowledge graph.* + +--- + +Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md) + +Next chapter: [Build a Knowledge Graph from MITRE ATT&CK® datasets](../lift-data-from-STIX-2.1-data-of-mitre-attack/index.md) + +Previous chapter: [Specify the dashboards of use cases before the RDF models](../define-the-interfaces/index.md) diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/interface.png b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/interface.png new file mode 100644 index 00000000..11645a30 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/interface.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/use_cases_with_interfaces.png b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/use_cases_with_interfaces.png new file mode 100644 index 00000000..25852b71 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-interfaces/use_cases_with_interfaces.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/advanced_use_cases.png b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/advanced_use_cases.png new file mode 100644 index 00000000..be3251fc Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/advanced_use_cases.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/basic_use_cases.png b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/basic_use_cases.png new file mode 100644 index 00000000..4f5e8fae Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/basic_use_cases.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/diagram.drawio b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/diagram.drawio new file mode 100644 index 00000000..07621d1e --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/diagram.drawio @@ -0,0 +1 @@ 
+7V1bc5vIEv41qvNkirnPPDp2spvaeNcbpzZ7HrEY2xwjoQXky/76M8NNwIwkJAGSnaQqiRgQkrq/7uk7E3Qxe/kl9hYPV5Evwwl0/ZcJupxAiCDD6j+98pqvQI5QvnIfB36+5q4WboJ/Zb4IytVl4MukcWEaRWEaLIpFkC9Oo/lcTtPGmhfH0XPzsrso9BsLC+9eNu6uF26mXiiNy74HfvqQr3JSu/pXGdw/lJ8M3OLMzCsvLhaSB8+PnmtL6OMEXcRRlOavZi8XMtTUa9Ll05qz1ReL5Tzt8gZy5d//z/vt+V8a3/01T9lL6PEzUXy5Jy9cFr/4ZhEu54/Fd05fS0Kor7/QL5ez8FPszdTLD88PQSpvFt5Urz8rAKi1h3QWqiOgTxf0gupD1ImSSkgf3UbLuS/9L7flxU8yTgNF9vMwuJ+rtVng+/qjP3jFQijv1Ls/JOrjgvn9l+zokqiV4geo98uXtaQBFcEVVGU0k2n8qi4p3nDGKM3fU8D0DOOCMM8rplNSXPRQZ7jLiyu9Amn31e1XzFAvCn7swhuwljeut1hkaI4zpM29e0UTfb/5k0zS4N5Lg2ieZAv6YvX3t18mkIaagrfqPfRev4ru1Ak5ncr51OuT35QPye8kjaPHShhhXwggvIUA14IAxE0ECDIYAOAGACi2JZrDcu4vokD9yjcjsL6XPEi/uE0fvEOuaPKu0rk13gGGHWKRX06G4h5azz1NgtvIi/3kB2YbI27JkZJxiAiDcRi5NsZxIQZiHIDC4JzBJnUfZYKs404f5MG8iWpmwXS5S9UJQ3tQR98Xies/Pv758PdZ+O0vRaHF9b9n+J2YCgYPLJzaCbUAcsteASzWAhVD7RUdbYX0Qep/l2kUB144mkkAKHqDJgGGJqctdiER0KagKB5KEMGGneUimil70H87MmnZVg4TUMiZwTbXpjyhdV8BbDC2bTDn3olBcKBqRW2+AWGzByxcI3wwa8Dc9i5rzHLTSEcfYumlWrOaGlU7X9Nlkkaztn/WJ6cxaXCanKCLbWpTZDHTFRdM7kJ3KCMdEIO7H1+82SLUzMw2w0oyrdzNd1XN4wf1dc9C+ZQFwAbjMyBDSnQ/fHYdwcTqT4Pngnbl+GBa2JTnnTj+HKgvnrNc0Xi6DJXg6ysf59FzKP17fZt7HQwtb/fHzeffv1lv9RR42Y4dL6I4Vx9XchZl1MziNulDkDSstf5wxBshGtyzvmjaY71tD5A72NghCDERxVcX1kGFhrPIDEx9/l0dn8+98DXZGJo5nyrm1oj9xbuV4XWUBJnmQJe3Uaq3DpMbadTibbRMw2AuL6pAuNsT3Tmgbc0thKm50Zi+KDDj1l8CTenCw/kcXWjR8ZdxFhhVWgcBTY/8bDCfBr6miP4BmTir9UBrcl8m0zhYFOHTPXz+QyGOHWEYrxbvEtio3Uck2k5tE98fX+R0WZo8JXVvrs+//vlFrf2zlPpjqxNVGPrmWp+OpbrAIhfDExhy6DCDwMhC4PI31wk8nGtADQJfx1lQt8BzDZdNkmdbjEL7fzTc46XexUYnKqHEglobUeGoOoJtIepVkMa5OlBm3Kyw1dySoseQfyslgSUTMTIlzYDoV+lpqyeaV9T8Lm+b0CyQG02XM0WSkrzekxeE3m1mbnnK5khbeJ4X6vso2hcaBoawwphZI9JDkR+am10TyGrff0xq0M2VgbyTihpTeRwguwaQuW0fc8fEMTT3sQ2EzNTDaZKyyjk2dqxRSWnGIJUUKz9K/erMzWkeKYsrXPqyWrxvXFIctUhbs23bxm9lFYfaXP7gTR/vY+2+XEShtqYv51qNoA9y7p/rGhC1Ei3kPF/5FOhfmlnHjQhWnPs/2wzn1IvvZdph28l8wfo7k2gZT+VWo3Y9ElzHBQg0kABN7weUPI+l8k2Dp+aXsAGh+MTrPI28Ah40gQfcVfqjvFX+u66rJPSndTdEFiTbbpiTeMMNywuju7tENq7JsFyR7gB4m1GCdwVvher49e/6wX/1gUPKw8uX+snL121i0RHcXeSHWuVnrVgoqRCkGWU6G1QsgA3FeH+xwK7thm3NfQpCYQZLfwrFYUIhOgpFaQK+NaHoUSREO3d+ChJh+u3vSyJegvTv4ox+XZMHdbQSB30wpjSAn9JwitJgBlzelTSsRfZ2m8YO2C4yge3IHgK2EGBLggUeYPALZLkh7mbu94VKZDr9l7UMX5a/z0qo81Bgs4C6AOP8NlnY4NhbJheeYqUbBg4xtZihNq0JXDBYpZtphH6PgywTkcdwLq4+Xl1oNmUB81pocXIBJ+dFvUaZuZ3kaYm5fG4z3+D18BWfUOAWtW0RM2IJ8/SRmLAS27RvNhM7J68vQ5nTdn4CZEVljHEFYgtZqYWshAxEVm6Q9UauiBrmmcwsCDmNZotlKuOsmWO2CJU6yOsNglXkfZXOHJ22OifP8bpqD4DMco9KMdQJ3Udi3l6Ev5HQWZqhTAGrX600eEZqdK6PZRxEfgZiv8aI0yMyPzaRLYWNl5s0QI7snAW6L+kIBMWO297ZzI1tZCpaqnrXKAXDSmnk2VeEbVc3JKmXLvfLYRxKcOiU3UwlwZlZ2mkl+GDtVZb8cFXAkMj4KZjKNQXyr8oAVv/dBXHGE1tvXcaJ/LqmDZkodXIbvRhMGLsm6DCGcmNLtZgqQ1UEWblpTUi1gb7y/QpvsEYrm69n+Gt7hTUmHZ3ENemfeqvBBvk41PtjwijQpe0qzM6en+VmHHSLWOzh99kBYYtWvy1AiKMCgrsGD0W7xqEzICgTDmEIUaGMQgFxq99aCNdhGHMiXMI5Y6MihZlezo2yX6aruoBAAeFFZ/8LYzBf9UKlQ/XWXFR91yqHGu1Wpbd5HPeHQeRw1iQ3ZJbop23zHUxdM3Pz/bqcnwzJzBYiavEZbRRDQ/mMzOY0tlC6KlMx4hxHIGJLxqFbUfV4RDQdwosoXM408n79dqVNv69qU1BkjCsh1lZzMM0Udl62tvDm0qzGH6EjGLSsLmqtqLIUrg4mxvyHKAPadddf00jGzH1/A0g3ZLMIapUBla7MEFkB7JpZAdEGVGc7ADJLjsFty/u4ySw7sm0VQKdhPlrsxw0O7vHsR9oE6Rkvu792Rg1o7yWcio5VY33hQZjuxEn1+Bzoz8Pm1mIJ3I7qzQP3h8gl97KxsK77CtmqDvrbNCh3iBBId2YDxlkzZH0GAXGYYBwRwhElEOyrGKBADmOUMaE+CLqGngDqezBK9fQfSBhDrS7xgXUGcE1z8yeKNycRtqKYjoli6uBa4qU1ImzvYAgUwGHKUtfDDBhAouWdA6SEBwoFacAFcmF72xwatZZ2v4s4S9ysDYtn3uZy4WeuZjPgXUtHrAmne6HRM5+s7ZbOE85verNl7eD5sWPnwNIUb9B4jIlXTcLYvNuhJl6toYvNChnbCdg4G3C7EwDWUH0kLwBw5sC2w9curOjuCOiapJYtMbYnAIAlEvdz2NaBst9yQIhlYhMcV/gt3YLt/LBlm6rtaOXOl1i3vixDrLczs8rQuKdGkwwT+fwgYzOW+6
Z3Pxujx938Sp1+ekq+0jTbtfw6qo+k5RHgjmIRoBgDF6FW5Icy5e4hlyofzeUCkL29PWWPtwrha8NJxlL/0FT/5/6TN59mdXll+qBelpNXnp5aNqEliJaRLiNbW6XYd/SW5UsqdTb2rTjLVXuZbuR1+aTRY0bxZHOTWXZ0LeMgq1HcWdd0sBP5Rry4jkJI01G1FHD11kGDUXOXaFeBddUZuAlyhLtFgMbMNABLh8IfGlvV8Je8XcGfmJNfysTvcSusLfWUR6/8Bcjc1i/KMY06khBGz8W8vkw7m2SszSSpKJ1kGfdM0Zf1wUegNtWx1TXlwHbKc9vAkuEqK9GBQ/ZWYxVXXPqxpyoisaEEXJlWBs/tYxXhcCw382PrypeT1ySVehzqIo70UwyUG1WI4KqQ2Wh1mMv0OYqz6SxFiVVUDhVLFnIa3GVGVVW0n5/MvnetheIIssocBtbyDXfUkoM9YwQg05gt9h5zklNTZP9Z6ifXfNg097S8ZrJ7k0XLdj6SmkVr1Sy2qtmBpkKtqSfZzVx+c8mlNeMYttjJk73c742dox28720VO4Ly1uAeYMKnt7QV2Zi2ajcLdne/+ea0FdwrbTWqqY1NU/tdycx+6N84GrsH9APanuxum6rXm+/oKvuYuNUf2Phowh3hUrWMOaKMI7hvMApj7mDBMMOUI5fwVlyKO4xTJiihGLuoDOR1loVX668fUjJMd+ldSUY1ysRRrJ/Ux5kgwiebBprog/0DL9vlrHSWOgjathDN2IJGnNbmAtTWwyFiSvmrswKRFu53kC7mqLdTQZSsUtr8VQABBxOgn8cGscCCw5OXrn6705qhRNYMJTJYxRaPFUvcPrRk2E62ZtBv78E7VCjvTWHMRQhDULUlVYYUgw7nFEDCKIKCjZyHwKdQobyxVnZrhxs6btaKu20Ttt2j2L3hEZkPi8K6bV+pQKg72tSuMzY++m2AXLljpspBYFiNsxfiNli1R1RNbcTt32ILTcShIyPONg6vJ8SRJuKwGBhy25GETmuXq9KnuyMJWB50d2QkmYV3+zyl53vwGPhe2uvTjYd9qNq6R+f0EIZu+sEr/DSenLO6rveH4q3htFki/6WWKwi97PXMC5/Vzy0qqVbjThJNFg2BLEeg2b2QfmDye/hwMRVKYlpSxGxj2m3FTcNl4ogZ8LKQ92kZzmXs3QZhkAYyOVESM1cYY3usLbDK8LI0wQ5I5B8kduI6QpTDX/Nt2MVs59hJfVsHormtU1JZlkeyJK1tuhv6MDdEY1wAmmgdcuYsRk1z4EwwJS2IA4ohxtSIkexQdNe+8c6jZ6vq7eb7Boy3kHfel16PZoqGRDKORopmbny84uFN7hi0on5DhjIN6aFEOAxzJigiyKga22ECLnWU8UQZJIK4CIn2p+hApkCAqr9aVHcUrDEkyYwydZyS1sX4fgP16u1RZwJhyxiTcUvWySn0JR04nIAc133WncGAQ4ZcDhBFVRdALTADHEQIIxQJgklbNrt718aoPAUgoWxGgnWihHfLDPYnz5bmpT/0GNT3OcjA6iCMLKy2JvCxhdX+1WhnacXHnUVnmSeIEHNA6047COXmzA4WytU5WmanxOIJIoasSUEfbyYhAwfhgDscCgQJAmRVslS3z7iyMpGASHAXdbPP+sOBpRH/2+f3q6ttIcmedLU6jCNdjLpikH5I+1XkS33F/wE= \ No newline at end of file diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/index.md new file mode 100644 index 00000000..b8a332cb --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/index.md @@ -0,0 +1,122 @@ +# Define the need, the expected result and the use cases + +A typical mistake when staring to work on a complex ontology is the absence of a real use case and a lack of understanding of the cyber analysts requirements. + +In this part, we explains how to: + +1. Clarify a need +2. Propose an expected result +3. Define an use case to implement + +## Define the need of precise users + +There are three things to know to describe a need: + +1. For who +2. What +3. The context + +With our customers, we claim a knowledge graph can reduce the work of analysts in the Cybersecurity. So, one of our customer wanted to help their IN Analysts with the software Corporate Memory of eccenca. +After several meeting, we defined the need. + +### For who + +!!! example "For who" + + The future use case is to help the IN Analysts (Incident Responders) but also SOC Analysts (Security Operation Center). + +Each field of activity is vast and brings together very different professions. Cybersecurity is no exception. It is necessary to organize work meetings with end users even if they have no knowledge of knowledge graphs for the moment. So you can ask them about their trades and identify what they want. However, keep in mind the most famous quote attributed to Ford: "If I had asked people what they wanted, they would have said faster horses." During the meetings, the question of "What" is not about the final result or the method to do it. The question is "what do you think to need?". 
After only, we can start to imagine how a knowledge graph can help or not. + +### What + +The conclusion of first meetings was: + +!!! example "What" + + The analysts want a solution to link the heterogenous data of different indexes in Splunk to improve their capacity to analyze an incident via their Splunk dashboards. + +Of course, during the meetings, a lot of needs are explained by the analysts but you need to choose the need to resolve according to the context. + +### The context + +During the meetings, we try to understand the "why" of their needs and their feeling about the knowledge graph technology. + +!!! example "Context of the analyst job" + + Today, a IN Analysts (Incident Responders) switches between the timeline of alerts’ messages and an autonomous research in his event store (like Splunk). During this research of evidences for the last incident, there are often new unknown alerts’ messages of recent indicators of compromise rules. Each analyst for each unknown message will try to understand the reason of this message before investigating in the metadata behind this new alert. A professional analyst takes between 30s and 2 minutes to open all references in his browser about a new alert. + +Feeling of the analyst about the knowledge graph technology: + +!!! example "Feeling of the analyst about the knowledge graph technology" + + Today, typically, the IN Analysts have the skills necessary to write NoSQL requests to query their event store. + As eccenca is going to create RDF graphs of OSINT, IN analysts will need to write SPARQL queries to jointly query IDS and knowledge graphs. + SPARQL is an untypical skill among IN Analysis, thus, they need to make the investment to gain this knowledge. + Like every investment a payoff is expected, in this case we see a significant reduction of the time spent by the analyst. + Only after this demonstration, the IN Analysts will try to learn maybe to write SPARQL queries in their dashboards or they will create a new job in their team to do these new dashboards. + +Very quickly (in a lot of customer meetings), the skills are the first problem to work with a knowledge graph and the no interoperability of a lot of knowledge graph on the market does not help to resolve this problem. To onboard the future final users with the technologies of Linked Data, a minimal and simple need is often the best way to start to think "graph". Only after, we can propose more complex graphs to resolve other needs and only after, we can do to understand the objectives to build/use in their domain a global ontology. + +## Propose an expected result + +Here, the context shows this project is an exploration with like deliverable probably a demonstrator. So, your project started like a exploration with a lot of meetings to understand how these analysts work concretely. We choose a public dataset of one previous incident with the analysts. After, they create the Splunk indexes with these datasets. With these Splunk indexes near of the reality, we can learn alone to use Splunk in order to understand the desire to use this tool to build their dashboards with a knowledge graph. + +During this step of R&D, we are developing the tools to request a SPARQL service via an authentification OAuth directly by Splunk dashboard. You will install these Splunk apps step-by-step during this tutorial or you can download their "tar.gz" in SPlunk now: + +- "[Linked Data App](../link-IDS-event-to-KG/eccenca_commands.tar.gz)" to install the SPARQL command. 
You need to configure it with the details in his README file or in [its tutorial page](../link-IDS-event-to-KG/index.md). +- "[Investigate lateral movements with a knowledge graph](../link-IDS-event-to-KG-via-cmem/eccenca_poc_investigate.tar.gz)" to install our demonstrator to connect SPLUNK to CMEM. You need to read his README or [its tutorial page](../link-IDS-event-to-KG/index.md) to understand this example. + +After several propositions, analysts oriented the implementation of our first dashboards and finally, we showed clearly a benefice to use a knowledge graph via these results. + +In this tutorial, we study only this first result: + +!!! example "Expected result" + A knowledge graph will reduce the time required to research details on the Web of each new alerts in the IDS of IN Analysts. + To achieve such savings we aggregate all links of sources and references about alerts in the Security information and event management (SIEM) in a knowledge graph. + Analyst are able to read the Mitre information directly in his timeline (e.g. in SPLUNK) and to access all references about an alert from this central place. + + ![](slide_result_expected.png) + + *Figure 1. Example of expected results for analysts during the task to understand the meaning and relevance of new alerts in their IDS.* + +When we know the waited results, we can imagine the necessary use cases. + +## First use cases + +We know the final need, the expected results and the limits of knowledge graph with the Linked Data technologies in Splunk. So, we can define the probable use cases to implement and all the actors who will interact with these use cases. + +![](basic_use_cases.png) + +*Figure 2. UML use cases to resolve this basic need and several use cases with Wikidata to show the interoperability of knowledge graphs with the Linked data technologies. Each bubble in this type of schema is a use case.* + +With a simple UML schema of use cases, you can delimited each use case, their priorities and their tasks for the next step, ie. specify the essential interfaces to limit the complexity of future RDF graph. + +In this tutorial, after to test this first result, we claim a knowledge graph can reduce the time required to research details on the Web of each new alerts in the IDS of IN Analysts by 50 to 95%, see figure 1. The next step is to specify the interfaces before starting to think about the graphs. + +## Advanced use cases + +Another result of this project was to resolve this other need: + +!!! example "For who" + IN analysts +!!! example "What" + Calculate and manage their investigations' knowledge graphs of high-level and low-level directly in Splunk +!!! example "Context" + Linking IDS events to a knowledge graph can be complex. + This is for several reasons like labels/IDs/structures of the same resources can be different. + Corporate Memory provides advanced capabilities to perform this in an automatic way. + To use these tools, we need to export the data of SPLUNK to Corporate Memory. + Analysts need to export data from SPLUNK to Corporate Memory on the fly and execute Corporate Memory workflows with reconciling complex data automatically and SPARQL update queries directly triggered via their SPLUNK dashboards. 
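    For illustration only, the kind of update such a dashboard could trigger might look like the following sketch. Everything in it is a hypothetical placeholder (the `inv` prefix, the investigation graph IRI and the event IRI); it is not part of the demonstrator and only shows the shape of a dashboard-triggered SPARQL Update:

    ```sparql
    # Sketch only: attach a Splunk event to a MITRE technique inside an
    # investigation graph. All IRIs below are hypothetical placeholders.
    PREFIX inv: <https://example.org/investigation#>

    INSERT DATA {
      GRAPH <https://example.org/investigations/case-42> {
        <https://example.org/splunk/event/1234> inv:relatedTechnique
          <https://example.org/technique/T1490> .
      }
    }
    ```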
+ + For the moment, we are searching the best way to resolve this need but a demonstrator to manage several investigations in the same knowledge graphs is available with several examples of dasboards in the Splunk app "[Investigate lateral movements with a knowledge graph](../link-IDS-event-to-KG-via-cmem/eccenca_poc_investigate.tar.gz)" ([tutorial page](../link-IDS-event-to-KG/index.md)). This need is for advanced users of Corporate Memory and it may be proposed in a future tutorial. + +![](advanced_use_cases.png) + +*Figure 3. UML use cases to resolve this avanced need.* + +--- + +Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md) + +Next chapter: [Specify the dashboards of use cases before the RDF models](../define-the-interfaces/index.md) + diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/slide_result_expected.png b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/slide_result_expected.png new file mode 100644 index 00000000..7f59ab89 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/define-the-need/slide_result_expected.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/index.md new file mode 100644 index 00000000..9b48f7d7 --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/index.md @@ -0,0 +1,44 @@ +--- +icon: material/table +subtitle: such as STIX +tags: + - ExpertTutorial +--- + +# How to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT) + +## Plan of tutorial + +1. Introduction, level, material +2. [Define the need, the expected result and the use case](define-the-need/index.md) +3. [Specify the dashboards before the RDF models](define-the-interfaces/index.md) +4. Build Knowledge Graphs from: + 1. [STIX 2.1 data such as the MITRE ATT&CK® datasets](lift-data-from-STIX-2.1-data-of-mitre-attack/index.md) + 2. [Indicators of compromise rules, like Hayabusa and Sigma rules](lift-data-from-YAML-data-of-hayabusa-sigma/index.md) +5. Link IDS event to a knowledge graph in dashboards via: + 1. [Queries](link-IDS-event-to-KG/index.md) + 2. [Inferences](link-IDS-event-to-KG-via-cmem/index.md) (for the advanced users of Corporate Memory) + +## Introduction + +“Everything as code” has become the status quo among leading organizations adopting DevSecOps and SRE practices, and yet, monitoring and observability have lagged behind the advancements made in application and infrastructure delivery for the Cyber Investigation Analysts. +“Investigating as code” is not just automated installation and configuration of agents, plugins, and exporters to collect the alerts of Indicator of Compromise (IoC) — it encompasses the conception of a custom dashboard to navigate in gigabytes of event data linked to open data of Open-Source INTelligence (OSINT). +However, traces and knowledge about attacks are heterogeneous due to fragmented cyber communities. +This fact prevents rapid development and makes the “investigating as code” a difficult and tedious task, including cybersecurity event browsing. + +This tutorial is going to demonstrate solutions that exist to browse the knowledge of an attack like on the Web with Linked Data technology. +It also includes the development of new custom links between events and knowledge, ie. inferences. 
+ +This self-service monitoring/alerting and now inferencing allows analysts breaking data silos which enables by continuous improvement the linking of Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT). + +## Level + +This tutorial is also suitable for beginners. +Simple examples will allow you to discover Linked Data technologies. + +## Material + +eccenca will offer an online sandbox running an instance of Corporate Memory. +You can also install Corporate Memory Control ([cmemc](https://eccenca.com/go/cmemc)) on your computer to test the example of bash scripts in this tutorial. +For the part "Link IDS event to a knowledge graph in dashboards", you need to have a Splunk instance where you can install the Splunk apps of this tutorial. + diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-collect_IDs.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-collect_IDs.gif new file mode 100644 index 00000000..383d76f4 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-collect_IDs.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset-result.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset-result.png new file mode 100644 index 00000000..717bda94 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset-result.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset.gif new file mode 100644 index 00000000..bb03fb04 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-RDF-dataset.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-prefix.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-prefix.gif new file mode 100644 index 00000000..729ec803 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-prefix.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-project.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-project.gif new file mode 100644 index 00000000..7fd695fe Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-project.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-transformer.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-transformer.gif new file mode 100644 index 00000000..da9c5357 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-transformer.gif differ diff --git 
a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-workflow.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-workflow.gif new file mode 100644 index 00000000..1505384c Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-create-workflow.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-graph-navigation.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-graph-navigation.gif new file mode 100644 index 00000000..4aea5aac Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-graph-navigation.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-STIX.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-STIX.png new file mode 100644 index 00000000..83a8a548 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-STIX.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-gen.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-gen.png new file mode 100644 index 00000000..b1ea9137 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-gen.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-global.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-global.png new file mode 100644 index 00000000..3a572219 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-ex-workflow-global.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-objects.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-objects.gif new file mode 100644 index 00000000..0c0e237a Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-objects.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-properties.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-properties.gif new file mode 100644 index 00000000..9599bbdc Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-properties.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-rdfslabel.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-rdfslabel.png new file mode 100644 index 00000000..387e1dcc Binary files /dev/null and 
b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-rdfslabel.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-references.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-references.gif new file mode 100644 index 00000000..fde7c2d3 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-references.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-root-object.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-root-object.gif new file mode 100644 index 00000000..7415a0df Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-extract-root-object.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON-result.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON-result.png new file mode 100644 index 00000000..48e9c0ac Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON-result.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON.gif new file mode 100644 index 00000000..e98e108e Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-import-JSON.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.gif new file mode 100644 index 00000000..d3404d58 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.png new file mode 100644 index 00000000..92eb1930 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-regex-replace.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-see-steps-during-a-transformation.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-see-steps-during-a-transformation.png new file mode 100644 index 00000000..b448bf37 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-see-steps-during-a-transformation.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-all_datasets.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-all_datasets.gif new file mode 100644 
index 00000000..930d27fa Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-all_datasets.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-query.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-query.gif new file mode 100644 index 00000000..abab579e Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-query.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-task.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-task.gif new file mode 100644 index 00000000..97606a23 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-task.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-void.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-void.gif new file mode 100644 index 00000000..e12c76fa Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-sparql-void.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-cmemc-activity.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-cmemc-activity.png new file mode 100644 index 00000000..7ac4172a Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-cmemc-activity.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-extract-rdfslabel.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-extract-rdfslabel.png new file mode 100644 index 00000000..00e8c500 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-extract-rdfslabel.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-transformer.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-transformer.png new file mode 100644 index 00000000..57483198 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-transformer.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-worflow.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-worflow.png new file mode 100644 index 00000000..c7a004ac Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-success-worflow.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-warning-bad-uri.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-warning-bad-uri.png new 
file mode 100644 index 00000000..f65d5273 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-warning-bad-uri.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-workflow-import.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-workflow-import.png new file mode 100644 index 00000000..1f39fc80 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-1-workflow-import.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-2-fix-url.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-2-fix-url.png new file mode 100644 index 00000000..1874a56f Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/23-2-fix-url.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/MITREATTCK_tutorial.zip b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/MITREATTCK_tutorial.zip new file mode 100644 index 00000000..94ee2f6a Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/MITREATTCK_tutorial.zip differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_model_and_pattern.drawio b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_model_and_pattern.drawio new file mode 100644 index 00000000..6e7e8bd6 --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_model_and_pattern.drawio @@ -0,0 +1 @@ 
+7Vhfc9s2DP8se/Dd+kCfqP96TNKkfehuvcvt1j5SEiWzoUSFomN7n36QSNmiZNdOm2R32/xgiQAIgPgBMOiFd1NtP0jSrH4TOeUL18m3C+/9wnVdx/Xg0VF2mhI5riaUkuWahA+Ee/YXNUTHUNcsp60lqITgijU2MRN1TTNl0YiUYmOLFYLbVhtS0hnhPiN8Tv2T5WqlqXHgHOgfKStXg2XsGE5FBmFDaFckF5sRybtdeDdSCKXfqu0N5V3whrjofXcnuHvHJK3VJRtuP3mfv2zQlfhaP+bV7f327o9HZLQ8Eb42B164IQd9121D6s5rtTOhCB/XnavX1lvZPVdKdWhcdbbdu5Kp1TpdZqKCRcWUpIgoRbIHWOoX1Cq2RTlRZNGlh7YHXG1S60zXdc4pQjjxsZcHFIURDZBP/BgRP4OvInXCNEoCP6cmvntPXTg45Acsrjcrpuh9Q7KOs4EU7d2tOKwwvBaiVibncAhrwllZwyKDmFLZezUN8hAxKhXdjkgm6B+oqKiSOxAxXOQmgd5jSsAPTUZsDgnlD1myGiVTZGjE5HC5132AuYuxRvoZqCfPRx0Oq5AJDwDtcFocT4ZMMQIiv6ffunJ0Q1J1Ma/TttFCx8B+OQBfAi8cu0s/GX0iCz4vmsOHB4jH8PnBK8E39MYRfmQewjq/6vpfl82ctC3L7MhJARVGOzPOc+LYirXM6PmOoogsqTqfgzS3+u8clVGUgyM1MtAk5USxJ7trH4u8sfBZMDjZHvTQLtHEm5SePrbZNG6yEz0Y24rieKJIx2WmqM+D/al/IjVON/ScPV3Yz4cdqZxSwKdezUuX7ev03SH2BosgOdJ3Paj1eJ5WU9hernbd/wE6BVA4BGcMkB+9MUDeDKAXmW8uR3eLtNZMcA6/oUzUMAYVQYEDz0NFiCPkZyRGCUl9FBKYsMOoIA5O/i1ZEU2ywguOZEWwTPy3zAr/+SMT4CeknpZkmf4KroFtZ3i868PldCFFBakY32nRj5Q/UcUyMuK3fcw7Lg6b7ZihjXacWsiK8BHviUhG4AkYEbWWtD0jl5HmlMjGhLhj+o6jOZwqAB3B8TNWl/OdQjYrUhuVrqadnCI1h8FEUhs7znDQnqMkqCpA+2CnppoLSdHf8UZGNkLmtlt7XXCS9IGBuk5nq6R4oMiklSWXQhWX/YSEJii6AdYAjl/ejTzNaSYk6atWrVj2UNPWuMdqBrMxPy47QvK7ciN3LLmCC6KmwclZ23CyG8Q5A4br/MKqRkhF6u8O8GIY4C8b2H9q2gTQ5e5Lx1wGw/Krke0X77fWamdWrz+lDr8GZ8fUoXs5SydKQvvCgGf96xUnWezZE2jgTK4hl46ys/FpqujEKHtQNAiKomjp64y70fPb8n/nJgu/pEvPGX3si2zo/+MX2XgG35tdZM+XfnRh6b9RXQOYUJNwU41gDvRgBLLBDP2lH+AkSqLYi90o+bGiD8JoGY8yBltGMJ7ei3/4OgvLw1+fWvzwB7J3+zc=7R1bc5u4+rech0zbMwOjK4jHJm22Z6ed02l6ZnefdmSDbTYYvEBu++uPBAiDwAlxgOCWTKcGSQih736RdIYvtve/xHy3+RK5XnCGgHt/hj+cIYQAwuJHljzkJTZAecE69t28CO4Lrvx/vKIQFKU3vusltYZpFAWpv6sXLqMw9JZprYzHcXRXb7aKgvpbd3ztNQquljxolv7mu+kmL2UU7Ms/ef56o94MQVGz5apxUZBsuBvdVYrwxzN8EUdRml9t7y+8QE6empf8ucsDteXAYi9Muzzw1f771w+fLpdr9M9ff2w/X/H/2rdG0cstD26KDz5DViD6O3f9Wzno9KGYCevvGznS89rVuvjNnljEeokYTNaNKkW1HpEYnwCjuDm/2/ipd7XjS1lzJzBJlG3SbSDuoLhcRWFaoAa0xD0P/HUobpbi07247Lo6F+rDvDj17itFxdz84kVbL40fRJOi1sAK5QpMhRggk+ZFd3vQYwXPTQXsrCjjBbaty+73ABEXBUyeAR80w0fBBzlUgw8irw4ffBA+yY6HHQG0SVPJzd7LoaDLtZ9ubhbmMtrKEflp7Bk8TfnyWtzmF0aS+veGy1N+hnBRtBO/Xhwaxoq6zPNcyyCMOwZZuiuDY9c2XLhaccdZYhfxCvzzYZ4EAmCgEShUFFsBP2kDvz0U+MnzwS++NjWK+REQB4G3aseKZerzrEkrrOpg7w9yPQAKEWBS29n/sTrYAGiCDSriroKN0IHARp8PtmUURHEOsXi9eCu+QLwaqJ932cwBOZnGim/94CFv+skLbr3UX/JKfZLNtqyF1u6+WpG/VNaEUbzlQaXulsc+F78Cb3h6E3vJE+2WfHeoyV0xw7KSiC/IagJP4pEhPn/ph+vmk1G82/Cw6BLlZQcxOa/xQzfDIVkF1IdmNWksulqJ3tV7Qi+vFQiRKWyVl9xFsVsfVtmX+JLFtS+6k30maRxde0aBUrV2C0Eo6zi6CV1DgyKiMAdg9eJdZaSut4xinvpRaKQbf3kdekkxPD/0BX0G7W0rkHy0XWU4tXarIOKpPjmun+wC/qCaB76oQOBf/nYXxSkPH2Ui6cPO68z0Q/e9VJwl+w54kvjLOsfIZtKT5AVa+IcAefzwe1GZ3fwhb4SgLm4/3FcrPzyou3s//b14g7z+o1K+f0TeqCcOcqokuomX3tNCO+Xx2kuf5u6eWzMRmnyvwsdoi/RRZbEXCLDf1g2LNt5WvOFr5IsvK9kqpNiEUGOljibZ8m8vnqwaA1pnqNmXGGi9r3x+Gn1lHLf8+uOZsDWg7PxPuPEXvvzeq4ck9YQmBb4JshMy7KErGdTRfIJS1GrqvqXVXEU/PJTyY7cAcBK85ODsP0nx1qQo3tb1XdQjtevK1cDUznqk9uMkus4jYnclZXnAF9Jl1VE2CsT4nD1QQ+OGuSPJXGh9wfuiYuu7ruzjXGhu/j98kfUnUXUn5zuDAD0/ox86sZJHiVHnL6VnrHjpWdX51MZ3gIkczdJWgvCF6GzoDhZW7yFarRJvEORzDiLfIS9KZ0nzJBa342pV3dO7vIjCxHcFHkmFcbkJJSLlcBQ6bpDI4kiiROzdetn8phsv1w8FjKWaLNT1lRyLF9/6Sy/J1HfZwBNavlA7Vf3KD7JKP7yNhJniZpfyOSUy40JkmuLvLPe7JjlRBf61fOX73e5ztLyWQ714e+GnmVabf+BvQv2XujwCF9F2y0NXXv769eLjt+/vsu4OEFwnx9TUZTOELX4p0iIodPWtN9msSO2EhHNvarszKSFuERNT22IAEcdBNrJ1VGGmrHYIxqKBo0nlrhKegBYRb48r4uHhYMJryfjC/HW9ZBn7u5z5WXwrOUS4SHZ1jnOikr8k9T5EP7br+KmI7lgiKLpBo4l6eDhi8nKzsu6oz12wZuaiN6N4nZXqoZZcgv99IyXt5XdIJMcf2Pneg4CDbewENd22aNRgC2yLtvQq1IYXTgo/JyKdDEsPetIjbcyGdl+Ga8YSPyMEY0RzLw558GfsrTyBJJl2fTICJTN1z0u5ep
GLVTEYfJn9PUPmKErsQ+ZQ6vSDy7YWILZHEzptAaWJMCbFcJ7kTHhanAlRbBJoMwApcIiFcR22BBHTcWwAmdCZGUPHMS3S4hkjbGS1uU9HeK9qc8ntxPibXO5UuF5XlqZouA+WBlC/anQpYx1NJxuNw7U5++eQ+Rwy//FC5veJIIT3YnIyV2o3tiZH+SqGGqHApAzs/0idQbRFCdmYQUJ4ROBnZhwz45gZx7CMQzCNegijzjfaIhjj8o3DMbuXW/P1rNrTyAchNjVB9c+qQwzjpnsOtrnnBgMZGjzmNIIVDKcVPTKwbZlMI05Kj4wSCRu/pTcwrr1bov6Afrp8Qv4M+dY7Ycv1gL+uFFOd7FrUZ3gIgnqkW61PeTGW6+JnLLtWsaheFtj0mAMiI01loGkRRGvzr91SQNCMYnElyqRGK36AmKnLuzzXQvSdZ1oYfHGTeK6xeChknBcnZoazL8q6mNZqEOQgk2h5F5jYr74iCA0epBo/80J9wpPSU6UQT0V6CrXJBBYGRKhHju0gC9URhhDpRWaAiFpiObo/rbNcRdDEoKJM25oZTgSy2loMfGgxe0Q4bDa8Z8P7BA3vQum7iYM6hpfSrZvCN1k9sKum12dQdqAIRj2KZ4wXokVHrPl7Vl7QiNpaX3pZD2qYviabgKb+RdscR2Qw/etwOHWKyd6P5Emfdo600HlMoqlblDZ9VMiWEYwmggznpppDmbNi9FMpRnVv2OAK0g9r3HZdHVhqUcBBdceW8peNZf7ayKQYMWg7ADOme9kIxSZzKLMhsy2HWkcav7YtrF8tiYrCzgbvvsMRtMC2aPREQhRdM4jVkKfiY6HUJJQiiqGDCYPM0XwfQkk0bYIBYgAjGxJyHJpRZkLMhNQklEHEkOb6I5ia2KaQEoaBXHDTCfd6w6vTWmH4PdpGIV/zXOpdycV9b3MTRd7/ysMbnmEQst7JulYd9X1mtYiLk1/NR/YKqMImq22tA2ghImsoLRUPHkydsqSdWBDWEryF2eLPsjGDWDdrAMImwA5jlsy3Qs6RghRR2yTa8sCGGTQwI1NyfHrJyD/0Gj7cZ5C27zV8JRXUnxhOS1MrESpIWKycm5rkcKR+XcnJ0RYplCtwK3zKaeFT+s47/UmRI/YnnH0ds6/jBH0dJ5Z9KRmHziyaOueoKZd4jhjPzGJmFtNjFs2Nj1+bU8wbqM6cYuYUk+MUBEFTqBHln+6UJ6/NN45Y6jzzjZlvzHxj6P3s6KOrSNvc4eMyjhH3mt07wMs9qF91O7tphfuUCCn3WNFh3tXb7WgdWXpG/tCe7h8gFAwnhRqGA0xmQ8EaoEUsRohmtshAsIMZFayDWBQDLT2g+0ZT+p5VdGzUGXJtahHu+O/iL3mc1bGRjldMCTUgs03iVFdD1MEFWqTJuOd/KCZWASA/OeLH0wqSPkmUXekb6hvJ0ZFjoORwDPRQMsdCKMtCi6+olcIeeGsY1fJCvR3EnoGzPTPbM0/n/exib+XfSyRUZ0uBUsJk+P3USWTiVzyaL3JYyEKeyJA5uvzf1ftfPppb90yu8Ch6fDQHaKanmZ5+HHqK3VXWWtfWWsirpK67uzvzDhcbB8NsB5ZLeTjqpVChRH9G8hCm/N4QsJ1paqapn5KmkkMyqp2IUHayYL7KLqOg5cbb8pl8ZvL5Ccknd0C/QCShLH50+fuXz1fPo6OjvNo5TNQWN6634jdZ51W7Gpbdv/AgWdbcBanNxV1uqTVKxrfKKh7Sq/RNbVt+Co4kCPS8B9y2vQvELa4jXB7d3T+cmjmVfTuPej1EM39qgGM0lcP86XM0JxbCkGn2yKE2tgijgGh5eJghE9rYptSxKQYOOs6NhW1gMnI4N5hhoqPo0H6tZn7fdLyenTfomxgySZYED58Ehh1bh3JnP6h+3Dmmjb6Gxphmntc3bxdIGYDAFx4LDUzKpcAPr+WSsIWc9k/fv3zel+nH4Xzz1pL5t+ol9OKMnr/NllJ8zG+y/z+If/9+py4v3u5biKuLd6r2XdvL8rG2GBFcYoNQlcRHvTlDBL2pot6bRcDD6ze6tiP9wtU+xJRyvY1GTcmG7+RlGKUeyoyIjHo+LxQ5PaUV5bSFJAvXV6WkkWysFq+Ukv/FYreM6Jab8bXs6dFKUoPtqUba8oYOM67CSHB5ssk4FaxPqiz/yqUNE2YlSLBmpYD+VnwiarC6o2WyvPvqxb6YCrm66IVC1+nKJqe1ExvWmRm0jwwPlQcUHTwVd2im2MxF+a4O+RLFA6xUWvlBUDmsZ7XyrOWyYTOJGtd2FuBR/HpOLiEz9Twg1LbHMIGmWvE4Si4QGTxxY/xzpxVOTYVakcwHI4gyQbeYAT0BHVFkWoRYgCGbMUa1Xbw6L3iFcg/yioqM6q8hcGTK7jOvYz6k+hHSnfIp1UjrdrxDdhT1zunRs695ir7mnzU9Ojsrgx0UU21a0ajZ0bSZsHSZnfi+n/cse+PP8sSLEhoSt8S0xnyZHyTv5wfCSyLg1bPi8yPmsx1W89PnFzd+4JYVuXzKzHrNBq+qxtmYvgh8Wpd9y5JsA5nXNpxfRcdumNoUNlFpXFObomdp18OY2q1ubDSUH1sp309q6YrOJqKlU+fxI3xsoaRbENgYOhRaR25KY+sJ/3DkU+XpEVtYdA6LHbP76slb9nJnVuZUzC59mSBoYUJyj78xd2mlbZsRTNvK7y9gpnB+IowGAnn0VMUdoLmFKOrFHWBAIF5TXTigIebYS4Ron3uGz/6AR4h89ge0Ts+8XHr2B8z+gMn5A4QkrB3FqImpMv7zav6AlghVZqYXGoqAVrYVOMisfR4k0qDPDfm6fJGYlgX1i3w8uRns+U2agTvN6LTFy2C0eBlmE3+PHVZHE18PkPaHHs8LoI1o4tuPm/gTCcGrfeCfVuOnFdUrV2SWjktydHKSrUfhpYEoFHaKKEHYVhv4jqWotwXuRsbo4/GJdl3xOzF8QhYzITwoBi09ett5p3eATIoqy8e16LOlnx00MHYphn2a2KUOND417OqTWzHETOXY3jst+sIiudookprOvrlQVjZfIlea3B//Dw==7V1bk5u4Ev4t52Gqdh+gdEOXx5kks7tVySZ1Zk+d7FMKG2xTweDFTDzOr1+Jm0EID/YYm0k8lYqNJAS0ur+v1S3hG/xm+fRb4q4WH2LPD28Q8J5u8NsbhBBAWH6okm1eYmHB85J5Enh5GdwVPATf/aIQFKWPgeevGw3TOA7TYNUsnMZR5E/TRpmbJPGm2WwWh82rrty53yp4mLphu/T/gZcu8lLugF35734wX5RXhqCoWbpl46JgvXC9eFMrwu9u8JskjtP82/LpjR8q6ZVyyc+776itbizxo7TPCR/urX++fGUfl8Ht9l0yoYB++W4VvXxzw8figYubTbelBOZJ/LhqX6w8009S/8k0FO6k7GH3tFJP/Hjpp8lWtivOshAvJLSttAbZTl602ckciqLZoiZvxsqWbjHQ8+oCO1nIL4U4DhANMoiGhvK6d17wrSEi+s+jGsS7xrd58ZmdMUn0EnlLWTdlqSZ0eX9Sw
+XB3WYRpP7Dyp2qmo20Mlm2SJfyUd5C+XUWR2lhNZDKYzcM5pE8mMqB8pOq6/rI7VUEfTjbw1YfEubYglLqcEE4x4Cx9ggRYdOBRghfR2ifYXEbEU4wg5gjRAmBDSuD2KakbWSXHVFyHdF9IwqJDTDGBAIHQioQwU3gZLYY24g6zxPMkHI/WMQWYsAGmqnwksMbckU24QZZgoEkSQeU5AkEJ/XRFvU/1pQh4wYZCscgwKEInXWCy3rlRkZ0mcZhLBXrVlYm88kv8gnkpUH58WsmOaCEac3cZRBu86a/++E3Pw2mbq1+nUlb1UK6eqpX5BdVNVGcLN2wVvfNTQJXfko9d9PHRPnBe9tN3VVXk00hYVVJ5BNkNaGfSsOx5ONPg2jePjNOVgs3KrpEeZlUh9QqLO8262OW1mqCyMt0SFWB8kGzmjSRXc1k7+V1Ij+vlQqRuem1i2zixGveVtWXfJLJ10B2p/pcp0n81bcKlWq0m7jTr8p1jTxLG0XkwHwA619+rd2p50/jxE2DOLLSRTD9Gvnr4vaCKEiDUjp629pI7m1Xu51Gu1kYu6kuHC9Yr0J3WzYPA1mBwH+C5SpOUjcyU+BUdirPSLcrv8Z9uZZ3kV/k3arpkoLT0F2vg2kTMTJJ+sq8gAE/5JAn289FZXbwtzqQvnlx+PapXvl2Wx49Benn4grq+9+18t0p6qA8oxOp1vFjMvWfZ6LUTeb+PlYucNb3GhPDvX6wY5iZlGWJH8ph/9acTpqwrbjCpziQT7Zz4rCt5pTVn85MUKObXAxFJ/XZoNavRajNRbMzoUNvLqtWZxn6VpI4HpD5wd5eJ/h0OYGdyG6Gh7o9trrE92mQht0W1cvJbNrR+Giak/bMuwrG1PUbD+XniB5+zkXAanjo4eOAnp0unBJc6HnBpdTZQ9y9bt/mKG9CR5DEmyk/InQnKkrak5elLrzPTmhoeGvqoxBAepzhbVGxDDxP9XEnvcbgexGLU1q8UgLPhsC5u3He9kKZvXb64gAgsJEoJwJlmJichDwrJag0utlDPJut/WG0zxRYHTW3ef56mgQr5aG+coZj+qALA6UBYRNkYLWhJp/QFE7+SWitNIZL81qJCo5NCcJQMAcAhhjW1AVz22GUC4IxERUyHcqBGCvPXeNAvbOhObA7Qn4pDiympk20cZcKFqLJetWEmVfKjJWtn4IaMWt66GXW48XUSDXdPBs1Hh7kvzA1LoM08b88Jt1O27D5hROQIhI2cLQBp4aAbM1xNyHv6WmxT37gRbR4Bnoj46I3iPV8Ojt2HtfOzGs9DU1gpqTHsQTWQUWyuZ9Ebvgl8We+1JKpv35FlJRNJu8qVHuTg5q8GXyf/R3AWqUpnoK1HEecRpnLHHWpgZoDNSBNmdJFI0GmEnGehSY8Ls9b+tY2UZlA6ABBKNbS15AQWwgG5IxNOJyj40ALOy3HW6rkmR3v7tD2hR3vCu3k/bdR7rWgXl9IK234FJAG0ECOuBYo1dR+QIQzRdpPRa6Zz2y5aSp1t68iXTpyhDGwHV5LtmncA0zLQNA5cyPIFNR+baRU6t1ISMki7WwFQuzIsI+FhaE33W0ZmH4q1R/Qbc4F8iVyl/4rJpIO97laf9GLZiqrPAXNQMCbNINOo+VYoxmth+FopoSoMWJWX8hCI5viI0FsQDEgEGPBBGrG8hCkypHmgMhKQkWJzAdjmYA2Bp1rBRT52Yy1qs8FckcEt6/r+a7r+V7ver5aBFoLP/ej29GycF+eLZlk1NM5rVu9iwGJtjux8nKv7wbfV/HRvakQXZsumPTQo9cQEcO+MoSR7eA2Vw+2kB+Zch4nToDpqahB1OCghUyXntwzXRmEYTJPTfsRBtODI5IbVwfm6sC8XgemGTMY3JEZ+6TS6TmprJwWIFBzFl8GB8407cREUSXikAmAOddCCggybHPhcAY5o8Kh+MhpJydy3qlncPAxc81d32dwvwZPGO5W+O0W9VU7Zvas8PPc9SLrFL5Ir/uuh8A91fpcyyGQYxPHQQ6GAhOup1sEETYjGEhvESMGybGxEghtTqHg2CFS/SFDenLTcWzMHOgQjgF2mLbhduhgyavba7NzM3+YlclUpZc0WHMMXqg494TkB9xy05uER7LnZrd4S8IEZ/KPMswhLhepV9uoCbYBFlxiDaAOEhqM9IUrCVE2YbVUp7YIWugr/gbGJ2xKbI5iwcQPvVK5svwxr1TWVHw4D670OWtK+BckZZxgREQi3Zj23gLaDm2Z3pikp6JP9zYe1JLeNZxxDWf8iOGMp7WnohlposTXkwTUXV4ELAg2gUXb7eQGsBhsIRO+Jm+vYHEFi/GBhfPMEkh4aeAwJTlPOlc9bbRsLPNKfU8Q0Sd4/QNdeiYNnHuuOPiesTMHTNG4VAVSYHMGpWFDSignRN84SIQtMHek4RPqYKC9VKG3IiFdJzE4syJ1J2C7gqITyYWSpGusIen+F8uqlxfsNYi7Aq/uytVdeT5+vkr8WfCklDAPYt2o6E0RvMr0e5GmxXDfy3/zIF08TuxpvFR2pHZuyE95qvx/EsYTVeiuVYwK3f/v4fa3d/bSu1H5jaLHHitPrvZ0tacfwZ4Sb5a11gPBBvOqrGuz2dgbbMeJdP/voRBCfgDFZpIDZX/Wehul7pMlx/ZqU1eb+iltat3FUWYjkmIAyojk491nFjRd+Ev3aj5X8/kJzSePL72AklD2Vtn7zx/ePxxmR0cFrfIxKTfaef7Mfcw6r8/B9067D3iXO6asekf7vhUXGBum03SwENYRLyI/AJS6Itwd+e1nltQMeUctsKi9yL91t/FjsvateGa50/pKoN2vPHz8b7vslE9x6AbZDsHmW+Ktlauw1fAYfUPE64W7Ul+jOPVR9iRZROv9pDS05+wwtzWkluDoCw/SWDUu1ydUD3qi6HIzmsRMLxwG0PgaKqgHiE5nk6b1cd0xxYKdGnHCmmxV+adigFUJArhCvvJXpNSYNaOQxpeh50ukBngdOu0ZnCzRaiTRSYxJ8/3VTW3iUnMwhoBhKBxIj1wdRTixCaxdhOpXwTY9725XbFqjNy6I/vjprz8+/nn7/ufCNIsAanM9RI7bTgaExMaGH9/RX852uh9TOuxlIYNiGrjp+IGHQio1WCOswrxPfhJIWSh9eyHaleYzFhRjmsJwiGynlnY99u0j1a623StnT5VTUXPt6scK8+a733zE7/4F \ No newline at end of file diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_tree_objects.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_tree_objects.png new file mode 100644 index 00000000..ae02a9cc Binary files /dev/null and 
b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/RDF_tree_objects.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/index.md new file mode 100644 index 00000000..97d2960e --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/index.md @@ -0,0 +1,800 @@ +# Build a Knowledge Graph from STIX 2.1 data such as the MITRE ATT&CK® datasets
+
+## Introduction
+
+MITRE ATT&CK is a globally-accessible knowledge base of adversary tactics and techniques based on real-world observations. The ATT&CK knowledge base is used as a foundation for the development of specific threat models and methodologies in the private sector, in government, and in the cybersecurity product and service community.
+
+The MITRE ATT&CK datasets in STIX 2.1 JSON collections are here:
+
+* [enterprise-attack.json](https://github.com/mitre-attack/attack-stix-data/tree/master/enterprise-attack/enterprise-attack.json)
+* [mobile-attack.json](https://github.com/mitre-attack/attack-stix-data/blob/master/mobile-attack/mobile-attack.json)
+* [ics-attack.json](https://github.com/mitre-attack/attack-stix-data/blob/master/ics-attack/ics-attack.json)
+
+[Structured Threat Information Expression (STIX™)](https://oasis-open.github.io/cti-documentation/stix/intro.html) is a language and serialization format used to exchange cyber threat intelligence (CTI).
+
+The "ontology" of MITRE ATT&CK with STIX is here: [https://github.com/mitre/cti/blob/master/USAGE.md](https://github.com/mitre/cti/blob/master/USAGE.md)
+
+The objective of this tutorial is not to focus on ontologies. In our use case, we only need to extract a few pieces of metadata. If the STIX community wants to solve these problems, that will be the moment to define a solid ontology. If you are new to Linked Data technologies, you first have to learn to generate a functional knowledge graph for your own needs before building a perfect ontology for everybody. Once you master ontologies, you can modify this first ontology and refresh your knowledge graph whenever you want with Corporate Memory.
+
+This tutorial is written so that you gradually acquire all the skills necessary to build a knowledge graph from scratch with Corporate Memory and update it automatically via Corporate Memory Control (cmemc).
+This tutorial must be completed in order.
+
+Labs:
+
+1. Create a new project for your knowledge graph in your Sandbox
+2. Import the datasets to convert to RDF
+3. Create the named graphs of your knowledge graph
+4. Create an RDF transformer for STIX 2.1
+5. Create the workflow to transform all STIX datasets to RDF
+6. Create the global named graph of your knowledge graph
+7. Test the SPARQL query to obtain the name, the description and the references of a Mitre tag
+8. (optional) Create the VoID description of the knowledge graph
+9. (optional) Refresh your knowledge graph automatically
+
+You can improve this first knowledge graph with these exercises:
+
+1. Create an inference in your knowledge graph via a SPARQL Update query
+2. Create another knowledge graph for CAPEC linked to MITRE ATT&CK
+
+## Labs
+
+### Create your sandbox
+
+You need to create your Corporate Memory sandbox, which will hold the temporary knowledge graph for this tutorial.
+ +Follow the instructions here: [https://eccenca.my](https://eccenca.my/) + +### Create a project + +For each type of dataset, you can create an new project with all the tools necessary to convert this dataset in a knowledge graph. + +Create a new project, reproduce the demonstration in the following video: + +* Title: MITRE ATT&CK® + +* Description: MITRE ATT&CK® is a globally-accessible knowledge base of adversary tactics and techniques based on real-world observations. + +![](23-1-create-project.gif) + +### Import datasets + +MITRE ATT&CK® has 3 domains: [Entreprise](https://attack.mitre.org/techniques/enterprise/), [Mobile](https://attack.mitre.org/techniques/mobile/) and [ICS](https://attack.mitre.org/techniques/ics/). + +Each domain dataset is saved in GitHub: + +* [enterprise-attack.json](https://github.com/mitre-attack/attack-stix-data/tree/master/enterprise-attack/enterprise-attack.json) +* [mobile-attack.json](https://github.com/mitre-attack/attack-stix-data/blob/master/mobile-attack/mobile-attack.json) +* [ics-attack.json](https://github.com/mitre-attack/attack-stix-data/blob/master/ics-attack/ics-attack.json) + +1. Download these 3 files +2. Create for each JSON file, a JSON dataset: +![](23-1-import-JSON.gif) + +!!! Tip + + Give a short name at each dataset/transformer/etc in Corporate Memory to recognize it easily in the workflow view. For example, we will use "MA Entreprise (JSON)" like label and "MITRE ATT&CK® Entreprise dataset STIX 2.1" like description for the Entreprise dataset and so "MA Mobile (JSON)" for Mobile, "MA ICS (JSON)" for ICS, etc. + +!!! Success + + Now, you can see these JSON datasets in Corporate Memory: + ![](23-1-import-JSON-result.png) + +### Create named graphs + +!!! Info + + A knowledge graph is an abstract concept. Concretly in a triplestore or a RDF graph database via Corporate Memory, the database saves each RDF triple of graph in a named graph or RDF dataset in Corporate Memory. A graph named is a set of triples. So, a knowledge graph can be composed by one or several named graphs. + +!!! Tip + + A named graph can be modify without affecting the other named graphs. Each dataset of Mitre can be updated at any moment, so we are going to create a specific named graph for each Mitre dataset to simplify the update of each dataset in your knowledge graph. + + A good practice is to name the named graph by the URI of its real source on the Web, so the labels and graph names of your RDF datasets can be: + + * Entreprise domain + + - Label: MA Entreprise (knowledge graph) + - Graph name: https://github.com/mitre-attack/attack-stix-data/raw/master/enterprise-attack/enterprise-attack.json + + * Mobile domain + + - Label: MA Mobile (knowledge graph) + - Graph name: https://github.com/mitre-attack/attack-stix-data/raw/master/mobile-attack/mobile-attack.json + + * ICS domain + + - Label: MA ICS (knowledge graph) + - Graph name: https://github.com/mitre-attack/attack-stix-data/raw/master/ics-attack/ics-attack.json + +Create one RDF dataset for each Mitre dataset: + +1. Add component "Knowledge Graph" +2. Put a label +3. Put a URI of named graph +4. Enable "Clear graph before workflow execution" + +![](23-1-create-RDF-dataset.gif) + +!!! Success + + Now, you can see these RDF datasets in Corporate Memory: + ![](23-1-create-RDF-dataset-result.png) + +!!! 
Tip
+
+    The consequence of the option "Clear graph before workflow execution" is that the named graph is deleted (with all its triples) before receiving new triples whenever you use this named graph as an output in a workflow, and also in the transformer task (in the next step).
+
+    Use this option only for graphs that are generated automatically by Corporate Memory.
+
+### Create a transformer
+
+!!! Tip
+
+    There is no wrong way to build a knowledge graph, but there are knowledge graphs that are useless, or very hard to use, for the analysts and developers in their missions.
+
+    As long as it does not answer all the queries needed in those missions, your knowledge graph will continue to evolve to satisfy the needs of your users.
+
+    With Corporate Memory, you can progressively develop your RDFS vocabularies or OWL ontologies to describe your knowledge graph.
+
+    If it is your first knowledge graph, the best way to start is with RDFS vocabularies, because you can develop them the way you develop classes and their instances in an object-oriented language. It is essentially the same way of describing the world. Of course, there are differences, but you can start a first functional knowledge graph without being an expert.
+
+    Here, you will create all the classes and attributes necessary for your use case. No more, no less. So, we add each STIX object to your knowledge base with its STIX type, its label, its description and its references. Each reference can have a URL, a label, a description and an external ID, like a Mitre ID or a CAPEC ID.
+
+    In UML, you can represent your target model like this: here is an RDF model describing an instance of type "course-of-action" in MITRE ATT&CK (you can download the [drawio file of the schemas](./RDF_model_and_pattern.drawio)).
+
+    ![RDF model to describe an instance of type "course-of-action" in MITRE ATT&CK](rdf-model-course-of-action.png)
+
+    The SPARQL query for this model can be specified in UML with an RDF pattern: here is an RDF pattern to select the "course-of-action" objects with a known Mitre ID.
+
+    ![RDF pattern to select the "course-of-action" objects with a known Mitre ID](rdf-pattern-to-select-a-course-of-action-with-a-mitre-tag.png)
+
+    In the absence of an official vocabulary with an official prefix, we use the Web documentation of the datasets: [https://github.com/mitre/cti/blob/master/USAGE.md](https://github.com/mitre/cti/blob/master/USAGE.md)
+
+    So, to make a prefix, we chose a short name, for example "ctia", and the IRI is built from the Web address of this documentation with a `#` at the end (to link to the anchors of attributes in the Web page, if they exist):
+
+    ```turtle
+    prefix ctia: <https://github.com/mitre/cti/blob/master/USAGE.md#>
+    ```
+
+1. Create the prefix of your vocabulary:
+
+    ```turtle
+    prefix ctia: <https://github.com/mitre/cti/blob/master/USAGE.md#>
+    ```
+
+![](23-1-create-prefix.gif)
+
+2. Create the (Mitre) STIX 2.1 transformer
+
+This transformer will be a component of your workflow. You can reuse it in several workflows in other projects. To create a new transformer, you need to provide:
+
+* Label: STIX 2.1 transformer
+* Input: MA Entreprise (JSON)
+* Output: MA Entreprise (knowledge graph)
+
+![](23-1-create-transformer.gif)
+
+!!! Tip
+
+    In your use case, only this transformer builds this named graph, so there is no consequence for the final knowledge graph when we test the transformer on this graph (it is automatically cleared after each execution of the transformer).
However, a good practice is to create a tempory graph in ouput for each transformer, so your final knowledge graph is not affected during the modification of your transformer before executing the workflows with this transformer. In this case, you need to hide this tempory graph of your users. + + You can create a transformer for several syntaxes in input: JSON, XML, CSV, etc. If your format does not exist in Corporate Memory, you can convert your data in JSON before importing this data in Corporate Memory. + +!!! Info + + STIX gives the possibility to extend its syntaxes. Mitre uses this possibility. So, in theory, if we need to import all the data, we can extend this transformer at all STIX attributes and add the Mitre attributes described in its [documentation](https://github.com/mitre/cti/blob/master/USAGE.md). + +3. Study the tree of STIX data + +```json +{ + "type": "bundle", + "id": "bundle--19413d5e-67e5-4a48-a4c8-afb06b7954de", + "spec_version": "2.1", + "objects": [ + { + "type": "x-mitre-collection", + "id": "x-mitre-collection--1f5f1533-f617-4ca8-9ab4-6a02367fa019", + "name": "Enterprise ATT&CK", + "description": "ATT&CK for Enterprise provides a knowledge base of real-world adversary behavior targeting traditional enterprise networks. ATT&CK for Enterprise covers the following platforms: Windows, macOS, Linux, PRE, Office 365, Google Workspace, IaaS, Network, and Containers.", + ... + }, + { + "id": "attack-pattern--0042a9f5-f053-4769-b3ef-9ad018dfa298", + "type": "attack-pattern", + "name": "Extra Window Memory Injection", + "description": "Adversaries may inject malicious code..." , + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1055.011", + "url": "https://attack.mitre.org/techniques/T1055/011" + }, + { + "url": "https://msdn.microsoft.com/library/windows/desktop/ms633574.aspx", + "description": "Microsoft. (n.d.). About Window Classes. Retrieved December 16, 2017.", + "source_name": "Microsoft Window Classes" + },... +``` + +To extract STIX objects with its type, its label, its description and its references, we need to navigate via a root object of type "bundle" before touching the STIX objects. Each object has an ID, we suppose unique in all Mitre datasets to generate IRI of all objects. We use your prefix ctia to build the class name and the properties of your RDFS vocabulary. Here, we build the vocabulary of manner agile for your use case because Mitre had not proposed a RDFS vocabulary for its datasets. + +4. Create the root object and give it an unique IRI: + +* RDF type: ctia:Object +* IRI pattern: + +![](23-1-extract-root-object.gif) + +!!! Tip + + You can develop an IRI from scratch in the IRI formula editor, like here or directly in the form and improve it after, if necessary (see an example in the next step). + + The important is to test the result in the evaluation view. + +!!! Success + + During the development of a transformer, you can test your transformation and check all the steps. + + ![](23-1-see-steps-during-a-transformation.png) + +5. Link the sub-objects to their root: + +* Value path: objects + +with their IRI and the property ctia:object: + +* RDF property: ctia:object +* RDF type: ctia:Object +* IRI pattern: + +![](23-1-extract-objects.gif) + +!!! Tip + + The RDFS classes start by an uppercase and the property by a lowercase and apply the camel case notation, if possible. The objective is to create cool IRI, ie. lisible IDs for humans and unique on the Web. 
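    For illustration only, the target model described above leads to triples of the shape below, written here as a SPARQL `INSERT DATA` block. The subject IRI (`urn:example:...`) is a hypothetical placeholder, not the IRI pattern of your transformer; the values are taken from the JSON excerpt above:

    ```sparql
    # Sketch only: the kind of triples the transformer produces, following the
    # convention above (class ctia:Object in uppercase, properties in lowercase).
    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
    PREFIX ctia: <https://github.com/mitre/cti/blob/master/USAGE.md#>

    INSERT DATA {
      <urn:example:attack-pattern--0042a9f5-f053-4769-b3ef-9ad018dfa298>
        a ctia:Object ;
        ctia:type ctia:attack-pattern ;
        rdfs:label "Extra Window Memory Injection" ;
        ctia:description "Adversaries may inject malicious code..." .
    }
    ```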
+
+    There are exceptions, like Wikidata, which prefers to use a number for its IRIs but with an explicit label in all languages.
+
+    Moreover, if there is no clear ontology in your domain, the best approach is to take the parameter names of the source (here JSON). So we will use properties like `ctia:external_id` with an underscore, because that is the convention of Mitre in its datasets. If Mitre later defines a better RDF ontology, you will simply modify your transformer to respect their new ontology.
+
+!!! Tip
+
+    You could limit the number of objects to import by adding conditions in the formula editor on the "type" field of the objects, for example.
+
+6. Now extract their type, label and description with these properties, for example:
+
+* ctia:type
+    * RDF type: URI
+    * Via the "value formula editor" create the IRI: `https://github.com/mitre/cti/blob/master/USAGE.md#{type}`
+* rdfs:label
+    * value path: name
+    * RDF type: String
+* ctia:description
+    * value path: description
+    * RDF type: String
+
+![](23-1-extract-properties.gif)
+
+!!! Tip
+
+    The STIX type does not follow camel case and does not start with an uppercase letter. For this reason, we prefer to create a specific property ctia:type.
+
+    You can reuse a vocabulary already available in Corporate Memory (like rdfs), but you are also free to develop a new vocabulary on the fly with your prefixes.
+
+!!! Success
+
+    When you test your transformer, you can see the future instances in your knowledge graph:
+    ![](23-1-success-transformer.png)
+
+7. At the end of the last step, we saw that the dataset uses Markdown syntax to define Web links. In the SPLUNK interface, we need HTML syntax. Modify the formula for the description with the "regex replace" operator.
+
+* Regex: `\[([^\[\]]*)\]\(([^\(\)]*)\)`
+* Replace: `<a href="$2">$1</a>`
+
+![](23-1-regex-replace.gif)
+
+!!! Success
+    In the "value formula editor", you can immediately check the result of your formula.
+    ![](23-1-regex-replace.png)
+
+!!! Tip
+
+    At any moment, you may modify your vocabulary according to needs that you discover during development. In that case, you need to modify this transformer and relaunch all the workflows that use it.
+
+!!! Tip
+
+    Regular expressions are often necessary in the components of the "value formula editor". The website [regex101](https://regex101.com/) will help you develop and debug them.
+
+8. Using the same method, we link the reference objects to their STIX objects:
+
+* via the property: `ctia:external_references`
+* Type: ctia:Reference
+* value path: external_references
+* IRI of each object: its own URL ()
+
+The ctia:Reference object has these properties:
+
+* ctia:source_name
+* ctia:description
+* ctia:url
+* ctia:external_id
+
+![](23-1-extract-references.gif)
+
+!!! Tip
+
+    Sometimes, some URLs are not correct. You can use the "Fix URI" component to fix the classic problems.
+
+    ![](23-2-fix-url.png)
+
+!!! Warning
+
+    When you build a transformer on a dataset, you quickly see the limits of the data. For example with Mitre, several references are a set of citations without a URL.
+
+    ![](23-1-warning-bad-uri.png)
+
+    For example, references with this description: `(Citation: Palo Alto menuPass Feb 2017)(Citation: DOJ APT10 Dec 2018)(Citation: District Court of NY APT10 Indictment December 2018)`
+
+    The URL for the majority of citations can be found elsewhere in the dataset, but we would need a first pass to correctly link the citations to their URLs, as sketched below.
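    A minimal sketch of what such a first pass could look for, assuming only the `ctia` properties produced by this transformer; the query is illustrative and is not part of this tutorial's workflows:

    ```sparql
    # Sketch only: find references that have no URL but whose description
    # contains a "(Citation: X)" marker matching the source_name X of another
    # reference that does carry a URL.
    PREFIX ctia: <https://github.com/mitre/cti/blob/master/USAGE.md#>

    SELECT ?ref ?sourceName ?url
    WHERE {
      ?object ctia:external_references ?ref .
      ?ref ctia:description ?citations .
      FILTER NOT EXISTS { ?ref ctia:url ?missing }

      ?other ctia:source_name ?sourceName ;
             ctia:url ?url .
      FILTER ( CONTAINS(?citations, CONCAT("(Citation: ", ?sourceName, ")")) )
    }
    ```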
+
+    Moreover, we can also find citations directly in the description of several objects, but without a URL and without their references in their JSON tree.
+
+    This is a simple tutorial, so we do not try to fix this citation problem for the moment; if you would like a tutorial that fixes it, leave a comment on this page.
+
+!!! Success
+
+    To test your transformer, you need to develop one or several SPARQL queries with the RDF pattern that you will use in your use case. You develop this query in the SPARQL editor:
+
+    ```sparql
+    #Test 1 transformer STIX 2.1
+
+    PREFIX rdfs:
+    PREFIX ctia:
+
+    SELECT
+    ?title ?description
+    (GROUP_CONCAT(?link; separator="
") as ?references) + FROM + WHERE { + { + ?resource ctia:type ctia:course-of-action . + } union { + ?resource ctia:type ctia:attack-pattern . + } + + ?resource rdfs:label ?title ; + ctia:description ?description ; + ctia:external_references ?mitre_url . + + ?mitre_url ctia:external_id "T1490" ; + ctia:source_name "mitre-attack" . + + OPTIONAL { + ?resource ctia:external_references [ + ctia:url ?reference_url ; + ctia:source_name ?reference_label ; + ctia:description ?reference_description + ] . + BIND( CONCAT("",?reference_label,": ",?reference_description ,"") as ?link) + } + } + GROUP BY ?title ?description + ``` + + ![](23-1-sparql-query.gif) + +9. During the building of interfaces, we saw the same MITRE ID of IoC rules is used by the concepts of tactic, mitigation, technique,... In the final interface, we will print properly the label of each concept for the same Mitre ID, like "Technique TXX" or "Mitigation TXX". + +!!! Tip + + Moreover, Corporate Memory indexes some specific properties automatically, like rdfs:label. Without this property, it's not easy to find the objects by a search by text. To facilite the research of references, like the mitre id, you are adding the property rdfs:label to reference objects. + +So, we add a new property `rdfs:label` to object `ctia:Reference`. If the reference is not a Mitre ID, we will copy the source_name else we will extract the type of concept in the URL and concat his Mitre ID: + +* In the transformer STIX, add the property rdfs:label (type string) to object `ctia:Reference`. + +![](rdf-model-course-of-action.png) + +* Customize the value of label, like in this RDF model: (try to do this rule alone before to look at this possible response) + +![](23-1-extract-rdfslabel.png) + +!!! Success + + You can test the result when you search the Mitre ID via the explorer of knowledge graph "MA Entreprise": + + + +### Create a workflow + +You have now a STIX transformer. We are building here a workflow to apply this transformer for all datasets in same time. + +1. Create a workflow with a name, for example "MITRE ATT&CK® workflow" +2. Insert the input JSON dataset +3. Insert the output RDF dataset +3. Insert the transformer +4. Link the three components +5. Execute the workflow to test it +6. Save it + + ![](23-1-create-workflow.gif) + +7. Do the same operations for the two other datasets. + +!!! Success + + At the end, the workflow looks like that: + + ![](23-1-success-worflow.png) + +### Create a global named graph + +To simplify the requests by a SPARQL query on your knowledge graph, we are offering the possibility to request all data of these 3 datasets in same time. + +We are showing the "SPARQL tasks", another important feature available in Corporate Memory. More precisely, we will work with the SPARQL Update task with Jinja template. + +!!! Note + + Jinja is a text-based template language and thus can be used to generate any markup as well as source code, like SPARQL. Corporate Memory gives the possibility to insert the name of named graph in a SPARQL query according to its position in the worflow to execute. + + For example, `$outputProperties.uri("graph")` inserts the name of graph connected to the output of the task in the workflow and `$inputProperties.uri("graph")` inserts the name of graph connected to the input. It's very practice to do repetive tasks, like to calculate the VoiD description at each update of graph. + +1. 
Create a "Knowledge Graph" dataset (ie, a RDF dataset) + + * Label: MITRE ATT&CK® (knowledge graph) + * URI (name of graph): + * Enable "Clear graph before workflow execution" + +2. Create a "SPARQL Update query" task without missing to enable the Jinja Template + + * Label: Import graph + +```sparql +PREFIX owl: + +INSERT DATA { + GRAPH $outputProperties.uri("graph") { + $outputProperties.uri("graph") + owl:imports $inputProperties.uri("graph") . + } +} +``` + +!!! Note + + In this query, Jinja replace $outputProperties.uri("graph") and $inputProperties.uri("graph") according to our workflow so the final code executed of this query is, for example: + + ```sparql + PREFIX owl: + + INSERT DATA { + GRAPH { + + owl:imports . + } + } + ``` + +!!! Success + + In the Turtle view of RDF dataset "MITRE ATT&CK®", you can see the triples inserted by your SPARQL query. + + ```turtle + + owl:imports ; + owl:imports ; + owl:imports + . + ``` + +1. In the same workflow add one SPARQL task for each RDF datasets and in output add the RDF dataset "MITRE ATT&CK®". Execute it and save it. + + ![](23-1-sparql-task.gif) + +!!! Success + + ![](23-1-workflow-import.png) + + In the Turtle view of RDF dataset "MITRE ATT&CK®", you can see the triples inserted by your SPARQL query. + + ```turtle + + owl:imports ; + owl:imports ; + owl:imports + . + ``` + +### Test your final SPARQL query + +Now, you can request all the datasets in same time through the named graph `https://attack.mitre.org` to respond at the final query of our use case: + +```sparql +#Test 2 final query + +PREFIX rdfs: +PREFIX ctia: + +SELECT +?title ?description +(GROUP_CONCAT( distinct ?link; separator="
") as ?references) +FROM +WHERE { + { + ?resource ctia:type ctia:course-of-action . + } union { + ?resource ctia:type ctia:attack-pattern . + } + + ?resource rdfs:label ?title ; + ctia:description ?description ; + ctia:external_references ?mitre_url . + + ?mitre_url ctia:external_id "T1490" ; + ctia:source_name "mitre-attack" . + + OPTIONAL { + ?resource ctia:external_references [ + ctia:url ?reference_url ; + ctia:source_name ?reference_label ; + ctia:description ?reference_description + ] . + BIND( CONCAT("",?reference_label,": ",?reference_description ,"") as ?link) + } +} +GROUP BY ?title ?description +``` + +!!! Success + + ![](23-1-sparql-all_datasets.gif) + +### Create the Void description + +In theory, RDF datasets in the Linked Open Data have to have a [VoID](https://www.w3.org/TR/void/) description with their statistics. The objective is to catalog automatically these datasets. + +!!! Info + + [VoID](https://www.w3.org/TR/void/) is an RDF Schema vocabulary for expressing metadata about RDF datasets. It is intended as a bridge between the publishers and users of RDF data. + +Here, we are creating a new SPARQL Update task to calculate and insert automatically the statistics of our global graph and add a [VoID](https://www.w3.org/TR/void/) description. + +1. In the same workflow, insert a new SPARQL Update task with this query to calculate the statistics: + +* label: Calculate VoID + +```sparql +PREFIX rdfs: +PREFIX dcterms: +prefix void: + +INSERT +{ + GRAPH $outputProperties.uri("graph") { + $outputProperties.uri("graph") a void:Dataset; + rdfs:label "MITRE ATT&CK®"; + rdfs:comment "MITRE ATT&CK® is a globally-accessible knowledge base of adversary tactics and techniques based on real-world observations."; + void:triples ?triples ; + void:entities ?entities . + } +} +USING $outputProperties.uri("graph") +WHERE { + { + SELECT (COUNT(DISTINCT ?resource) as ?entities) + WHERE { + ?resource a ?class . + } + } + { + SELECT (COUNT(?s) as ?triples) + WHERE { + ?s ?p ?o . + } + } +} +``` + +!!! Tip + + This query uses the variable `$outputProperties.uri("graph")` (Jinja template). If the name of graph changes, the code of the query stays stable in your workflow. + +![](23-1-sparql-void.gif) + +!!! Success + + The final triples in the graph `https://attack.mitre.org`after this worflow. + + ```turtle + prefix owl: + prefix rdf: + prefix rdfs: + prefix xsd: + + + rdf:type ; + rdfs:comment "MITRE ATT&CK® is a globally-accessible knowledge base of adversary tactics and techniques based on real-world observations." ; + rdfs:label "MITRE ATT&CK®" ; + 28081 ; + 150120 ; + owl:imports + , + , + . + ``` + +### Refresh all automatically + +The datasets of Mitre are updated regularly. You may want to update them automatically via a command line in a bash file. In this script, we use CMEM. + +1. [Install CMEMC - a Command Line Interface of CMEM](/automate/cmemc-command-line-interface/installation/) + +2. Open your config file: + +```bash +cmemc config edit +``` + +3. Insert your sandbox in your CMEMC config, example with a password grant type: + +```bash +[johndo.eccenca.my] +CMEM_BASE_URI=https://johndo.eccenca.my/ +OAUTH_GRANT_TYPE=password +OAUTH_CLIENT_ID=cmemc +OAUTH_USER=johndo@example.com +OAUTH_PASSWORD=XXXXXXXXX +``` + +You need to replace "johndo" by other thing, "" by your login (email) in the sandbox and XXXXXXXXX by your password. Save the file (with VI, :wq). + +!!! 
Tip + + Immediatly, in the file ~/.bashrc, you can specify your sandbox like your instance by default for CMEMC with this line: + + ```bash + export CMEMC_CONNECTION=johndo.eccenca.my + ``` + +Test: + +```bash +cmemc graph list +# or cmemc -c johndo.eccenca.my graph list +``` + +If you can connect it, you can see your knowledge graph "" in the list. + +4. You need to know the IDs of your JSON datasets IDs and your workflow ID to implement the command lines with the tool [Corporate Memory Console]() ( + +![](23-1-collect_IDs.gif) + +For example in my demo the JSON datasets and the workflow have these IDs: + +``` +MITREATTCK_3dc114458dfd4c57:MAEntrepriseJSON_14f0f94ed5de5daa +MITREATTCK_3dc114458dfd4c57:MAICSJSON_e024c6433ed523e1 +MITREATTCK_3dc114458dfd4c57:MAMobileJSON_3f890442dad17750 + +MITREATTCK_3dc114458dfd4c57:MITREATTCKworkflow_0b8fa5454ef21a00 +``` + +5. You can now import the file directly of Mitre repository on GitHub and import the files in the sandbox and execute your workflow. + +```bash +wget https://raw.githubusercontent.com/mitre-attack/attack-stix-data/master/enterprise-attack/enterprise-attack.json +wget https://raw.githubusercontent.com/mitre-attack/attack-stix-data/master/mobile-attack/mobile-attack.json +wget https://raw.githubusercontent.com/mitre-attack/attack-stix-data/master/ics-attack/ics-attack.json + +cmemc dataset download --replace MITREATTCK_3dc114458dfd4c57:MAEntrepriseJSON_14f0f94ed5de5daa enterprise-attack.json +cmemc dataset download --replace MITREATTCK_3dc114458dfd4c57:MAMobileJSON_3f890442dad17750 mobile-attack.json +cmemc dataset download --replace MITREATTCK_3dc114458dfd4c57:MAICSJSON_e024c6433ed523e1 ics-attack.json +cmemc workflow execute --wait MITREATTCK_3dc114458dfd4c57:MITREATTCKworkflow_0b8fa5454ef21a00 +``` + +!!! Success + + You can see the result in the shell but also via the "Activities Board". It's useful to follow the errors of your workflows, if you execute a script via a Linux Cron, for example. + + ![](23-1-success-cmemc-activity.png) + +!!! Tip + + With these command lines, you can now start a cron every day to check the Mitre updates and start refreshing your datasets. + +## Exercices + +### Create inferences + +After this tutorial, you want probably to navigate in your new knowledge graph between the relationships of Objects STIX. Before, you need to create inferences of these STIX "relationships" in your knowledge graph via a SPARQL Update query. + +1. In the STIX transformer, import also the fields: `ctia:source_ref`, `ctia:target_ref` and `ctia:relationship_type`. + +2. Create a new SPARQL Update task "convert STIX relationships to rdf statements" with this code: + +```sparql +PREFIX ctia: + +INSERT { + GRAPH $outputProperties.uri("graph") { + ?sourceIRI ?propertyIRI ?targetIRI . + } +} +WHERE { + GRAPH $inputProperties.uri("graph") { + ?relationship + ctia:type ctia:relationship ; + ctia:source_ref ?source ; + ctia:target_ref ?target ; + ctia:relationship_type ?property . + } + + BIND (IRI(CONCAT("https://github.com/mitre-attack/attack-stix-data#",?source)) as ?sourceIRI) + BIND (IRI(CONCAT("https://github.com/mitre/cti/blob/master/USAGE.md#",?property)) as ?propertyIRI) + BIND (IRI(CONCAT("https://github.com/mitre-attack/attack-stix-data#",?target)) as ?targetIRI) +} +``` + +This SPARQL query create explicitly the STIX links in the knowledge graph. Here, we create a new inference via a simple query. + +3. Create a new Knowledge graph dataset "STIX inferences" with this IRI: + +!!! 
Tip
+
+    Always separate the facts extracted from the raw data and the inferences calculated from other graphs. This way, you can recalculate your inferences without rebuilding the whole knowledge graph.
+
+4. Split the workflow in two workflows:
+
+    * "Transform all STIX data to RDF", which produces the RDF triples from which the inferences are calculated afterwards
+    ![](23-1-ex-workflow-STIX.png)
+
+    * "Assemble the global knowledge graph", which imports all the graphs of the projects
+    ![](23-1-ex-workflow-gen.png)
+
+5. Create a new workflow "MITRE ATT&CK® workflow" in which you insert the other workflows, like this:
+
+    ![](23-1-ex-workflow-global.png)
+
+!!! Success
+
+    You can now navigate in your first knowledge graph:
+    ![](23-1-ex-graph-navigation.gif)
+
+### Reconcile automatically the STIX concepts via the Linking tasks
+
+[The "Linking task"](/build/active-learning) is very useful to reconcile instances of concepts across your graphs when their labels were entered manually with slight differences. For example, you can reconcile the tools, malware, etc. of different STIX documents.
+
+1. Read the documentation of the ["Linking task"](/build/active-learning)
+
+2. Use the JSON of the [STIX report](https://oasis-open.github.io/cti-documentation/examples/example_json/apt1.json) of [Mandiant's APT1 Report](https://oasis-open.github.io/cti-documentation/stix/examples.html) to reconcile the STIX tools in this report with the tools in the Mitre knowledge graph, using your STIX transformer and a Linking task.
+
+### Add the CAPEC dataset
+
+The Common Attack Pattern Enumeration and Classification (CAPEC™) effort provides a publicly available catalog of common attack patterns that helps users understand how adversaries exploit weaknesses in applications and other cyber-enabled capabilities.
+
+* Dataset: [https://github.com/mitre/cti/blob/master/capec/2.1/stix-capec.json](https://github.com/mitre/cti/blob/master/capec/2.1/stix-capec.json)
+* The CAPEC "ontology": [https://github.com/mitre/cti/blob/master/USAGE-CAPEC.md](https://github.com/mitre/cti/blob/master/USAGE-CAPEC.md)
+
+1. Import the CAPEC dataset into Corporate Memory
+2. Create the named graph for CAPEC
+3. In the MITRE ATT&CK workflows, also generate the CAPEC dataset
+4. Modify the transformer to support the references from the MITRE datasets to the CAPEC dataset.
+
+## Conclusion
+
+STIX uses JSON syntax and can therefore be converted to RDF via Corporate Memory. Here, we have only extracted the few fields that are useful for our use case; if you want to import all the data, you will need to import the other STIX 2.1 properties and the extended properties used in the Mitre datasets, and convert the other STIX relationships to RDF statements (as in the exercise "Create inferences"; see the sketch below).
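+
+If you do the exercise "Create inferences", a quick way to check the result is to count the generated statements per relationship type. The query below is only a sketch: the named graph IRI is a placeholder, because the IRI of the "STIX inferences" dataset is the one you chose yourself in the exercise.
+
+```sparql
+# Count the inferred statements per relationship type.
+# <http://example.com/stix-inferences> is a placeholder:
+# replace it with the IRI you gave to the "STIX inferences" dataset.
+SELECT ?relation (COUNT(*) AS ?statements)
+FROM <http://example.com/stix-inferences>
+WHERE {
+  ?source ?relation ?target .
+}
+GROUP BY ?relation
+ORDER BY DESC(?statements)
+```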
+ +## Ressources + +* [RDF schemas (Model, pattern, etc)](RDF_model_and_pattern.drawio) +* [Archive of CMEM project](MITREATTCK_tutorial.zip) + +--- + +Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md) + +Next chapter: [Build a Knowledge Graph of compromise rules, like Hayabusa and Sigma rules](../lift-data-from-YAML-data-of-hayabusa-sigma/index.md) + +Previous chapter: [Specify the dashboards before the RDF models](../define-the-interfaces/index.md) diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-model-course-of-action.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-model-course-of-action.png new file mode 100644 index 00000000..513e2198 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-model-course-of-action.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-pattern-to-select-a-course-of-action-with-a-mitre-tag.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-pattern-to-select-a-course-of-action-with-a-mitre-tag.png new file mode 100644 index 00000000..dcac0534 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-STIX-2.1-data-of-mitre-attack/rdf-pattern-to-select-a-course-of-action-with-a-mitre-tag.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-json-dataset.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-json-dataset.gif new file mode 100644 index 00000000..f730ec4f Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-json-dataset.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-prefix-ctis.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-prefix-ctis.gif new file mode 100644 index 00000000..af8655db Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-prefix-ctis.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-worflow.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-worflow.gif new file mode 100644 index 00000000..f17ca223 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-add-worflow.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-formula-mitreid.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-formula-mitreid.png new file mode 100644 index 00000000..a500354d Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-formula-mitreid.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-id-worflow.gif b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-id-worflow.gif new file mode 100644 index 00000000..748bba00 Binary files 
/dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-id-worflow.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-iri-rule.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-iri-rule.png new file mode 100644 index 00000000..41f0e921 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-iri-rule.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rdf-model-rule.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rdf-model-rule.png new file mode 100644 index 00000000..c964855c Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rdf-model-rule.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rules-isdefinedby.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rules-isdefinedby.png new file mode 100644 index 00000000..b4e2f134 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-rules-isdefinedby.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule.png new file mode 100644 index 00000000..2ba668c0 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule2.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule2.png new file mode 100644 index 00000000..161bc69b Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-extract-rule2.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-workflow.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-workflow.png new file mode 100644 index 00000000..5914fe05 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-success-workflow.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-workflow-allow-replacement.png b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-workflow-allow-replacement.png new file mode 100644 index 00000000..3461147f Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/23-1-workflow-allow-replacement.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/RDF_model_and_pattern.drawio b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/RDF_model_and_pattern.drawio new file mode 100644 index 00000000..6d91febb --- /dev/null +++ 
b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/RDF_model_and_pattern.drawio @@ -0,0 +1 @@ +7VxJk9u4Ff4tOXTVzIEsLFyPvbjtJLN4YpfHyaULIkEJZS4aEmq18uvzwE0kRaolNdmW09JBIkFsBL63fA+Aruht9PQ+ZcvFr4nPwyuC/KcrendFCMWuCz8qZVOkYGKbRco8FX6Ztk34JP7Ly0RUpq6Ez7NWRpkkoRTLdqKXxDH3ZCuNpWmybmcLkrDd6pLN+U7CJ4+Fu6l/Cl8uilTHRNv0D1zMF1XLGJVPIlZlLhOyBfOTdSOJvruit2mSyOIqerrloRq9alyKcvcDT+uOpTyWhxTg6W/vswdK/qTv7W8frv+41iKskaKWRxauyhe+IlYI9d344lF1Wm7KkbD+Wqme3gRJLLUsn6dryIDxEib7ZvscrubqdyGlmqJr1SFyz/8KQzHTU85gDLmfeJkuEpUew1fIJM8kXLCYhRspvAyuPcOmxKJEcz3sa4brIM0NmKNh33U8lwYudri+kFFYdXiWvqi/VTXQdP7qVSpp1UpgTAF6cHOzXgjJPy2Zp56sAf75S0N/6B0u2y3hjNU9C8U8hhsPpounddXN+Sun9JGnkj81ksr5fM+TiMt0A1nKp8QosVVJl0HLhPUWq4ZTpi0aOK3SWCke87ruLYLgogTREYCiRwNqxrxv8zRZxb7mJWGSFvOUzmc/ERN0BXQDNS9+zocO5dMasEiEm6LAbRJnSQjvk2eE6lmkZqRs5DZZpYIrhPzG192HRZEoiZOsmM66gQZuSIkbFIqYa9VY5o/cPSKwlYAnaDPkupdEcJeuYBTI/TJNvAcPpEKKJH5Yi/hh5vncF/JhBnrhAZRa8CChHE/1zRbpU0CUTARRrdZ/JUZdow+i5i5Erakgah0NUXhXqZWjo2Y85IHcq0QAR/ERYBexkIKFvVXeKAORwzsQ81WaIwXuP+eoEPEcrr8IBt83BXL0d1/fNYBS9OQoFZd3j6sRRMdhyBoHMtTsIMY0dxBD7R6lRqdCjN2DmK7Qxf61cjeU+IAWyoTXHqf2oHZHDQYr3XwtH+Y3/1Y3ulnd3j01H95tyrvB0c5A33l8zyuVelqydM7l88LC/ZabtDt3jbkxe6amSks5GHvx2Hau+uarbOFjImLZMHimpTtux+a5nWkv3r0s2XSIupXhjvW0sNmuqRidnZpyDNXvfjqsnEFFNKg/BjUR2mdGcWU9txc/9yqb1A+UzxayGQ8HlUgX+QCNX/ICLcDvGBOlAAQ41tflg0j4vqrjJuVgZtksr0+BeqnGO58B8+bKvDtIyewV267mqdlC2WjLIe/TSEgnrmu2wKIZowBa62BQw267iiQIMj4J+tzpzOAdlzC6mdIvC3Bz0CpT30lQJ5ReTj4TUcRiP6d1uT5SZi3XJ6BO6uyFDfQ6NtBnkunKFi5E3hb3FrH4a6VK5AlJIBW/QFJEPCu6oRqabXJ6Fq4BPyqXao5JCfLD06wYevhS9QMrSVdePr3QI6bKzXiQ5MVCtoq9RWGAUwa+Z6Tq05ts5Ie2uhjhXUet1+x29e9oZhcfz06H9eOxPPAFLtzBbbU58hwgv5qVBOGTmEfswx9K+aor+J2FyUwNI8uUPi0ohCLKwBp8FeZQdYzChBUp4VlW85KX8JQT3M8tfdnDpvvkYwwh6JAVjN0etoJ6hMCeTAj6GPUbcT4rBXAm3icm4H06buPTBotNTvREsb3r1lpdRE3sjGJjOn+gdCtFdscDEXP/ZnOoLnh93zL3fW9qHX9bqHjoDL3PP4e7n7XkjuF/mqY7EobbODPoa/mbFbl6i4qMnpciMwykU2Q7CJvINSxKO3rMNXTXtREGXWc6DjlNqVldeu2Yr0uv8YSBPiAFSqNFQqb8OucOnyv28Xf/fJXboZqrEtUxNBcidgsHldC8EMMUt2ql1qvpsb5o4DOcoRORQUUYpvoZWtL4wMNHrqa+n1VY1YpE8aBoVD2JkzRqUZFHlgoGvwAgJlcpz57J57HlUJZ1Y+FDOcLFkgiXAFVNrZ4oQrxTMkmXC6DIxQNSpA0ztfwJkJoco+oRql40fyIV2wYSHlXtxOWKDejOfLm30cg6Sf12t+q64E1m3wRUp+oEnp9841qpfVv5ToqrFT31uZcUwQpNLoT3LQZW1cMWu3kbM7k33yD7DBMmu4Pji2wZsk2VXS1iwcXfRLRMUsnifi33lIEgXMPg5GGOw8KBqpffJYBhk+6ywS5161sKnWzVAJ8Q370oiouiuCiKiRUFdnV3mMX3xT1fVW9UvGjyTTmtgGPKfY/FLN14al8Ok0kkPA0SAfcs2oYfA0pdbvvM9pEZWMi0Az/wfJ/aBpkFXkAtM+CcMLeuQwUmPxeLdMWvHtUO8lvcskM7rIj27Ydwe/A22ZYd0men3ggxr171TIg5plg3qGEbloMsEzttjoNN19VtgyLAAgU1ZhinMXNimLqqA1uG6WDiEKtD1JGpU9vEpuFQRE27Q60mpu10V/193i7yn9GKmYqioManM1m4R7D75JqYwyh52Va84b14F//z4n9e/M/v5n92XIDatH8vj5O+4Q1u5+UAELKzEkjqIwVH2/ndZUWCzE5l41lz86P75fdV+h8NZwb6x/Xnx3+y33u3Tu7nMsN+vgLVQYTB2iEMKGBeu8zWZg0tV2acX4dZ0tAnRQeOIx+vG+Q3yvv70jrffeGpD7SuV5T264IRYv8mcjqxfzKKlGidvXivF/ynw7vmjt+zObCo9K/8bMCBmy9P4bYj2THNNtpxlLaqcXrir9jt2ehvTOUAVw54Y7LYyHZtevtUIe5MDBTuxi8MfKJ1wk43EtI98TEx0TQqxB9xVgncdOAPDYcWmMhPmtZMLx3rSZgUvjCpC5N6/szQMuWBeFIgLCzKVeNAXPhMHLiz8RSGknsiAESWW0HrbagirnI/tPLk8V1at7fXWbpI20Xa/n+kDXhDnrtEfjzLim3JPcJXy956vdbXVE/SOdzg3Ie6R8rWgX8F9WnZJpbsSYO5vcjURabepEwNWrB+IYJhUGuNKhx/n0uQt+DqAMVFfC7i8+bEpwiMv8AkkXxd6/7rr798Ok6OToq2F3NS7bn3ecBWdeSvuTI/RvSi/oOBmsn2BCwMh+oY77LpMf6coDde2rf34ywi8fxJyLyYbhtOeV+UdG2jvN8WVTfNkh95KmCAVKjzleL6ZxY2MQwdGa5LTGpgipHR2XKPkKkT6jrYIbZFTLcTEjs44m8BXM1tQM4xO810F5snjv6fsAL9Oifc1bFjngfzf5wz7nv1xRmfccdtrFeSOWqwvndsJjzRdtqB2B/vUHg32kt6zsPSqdaqeyd19FNkAwbxGXM4qgHbh94zMWAUdaP1px523fmjgZ2z0xMbpeFjYYNGaXBH08TWqozfBiLkMYsOXxWc3lwdaJ5GO0imIR0j2x4Fy90d31anhuks0gmrx5cdeZcwwmVH3rRuj
klx+48d2voB090/oBvJ51ExyvofYAsts/0jXfrufw== \ No newline at end of file diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules.sh b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules.sh new file mode 100755 index 00000000..e9436fbb --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules.sh @@ -0,0 +1,13 @@ +#!/bin/bash -x + +output_dir_rules=/home/karima/datasets/rules + +mkdir -p ${output_dir_rules} +cd ${output_dir_rules} +git clone --depth 1 https://github.com/Yamato-Security/hayabusa-rules +git clone --depth 1 https://github.com/SigmaHQ/sigma + +for file in $(find . -name '*.yml'); do + [ -f "$file" ] || break + yq ".rulePath = \"${file}\"" -o=json $file > ${file}.json +done diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules2.sh b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules2.sh new file mode 100755 index 00000000..febc7882 --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/importRules2.sh @@ -0,0 +1,20 @@ +#!/bin/bash -x + +output_dir_rules=/home/karima/datasets/rules + +mkdir -p ${output_dir_rules} +cd ${output_dir_rules} +git clone --depth 1 https://github.com/Yamato-Security/hayabusa-rules +git clone --depth 1 https://github.com/SigmaHQ/sigma + +for file in $(find . -name '*.yml'); do + [ -f "$file" ] || break + yq ".rulePath = \"${file}\"" -o=json $file > ${file}.json +done + +cmemc graph delete http://example.com/rule + +for file in $(find . -name '*.json'); do + [ -f "$file" ] || break + cmemc workflow io RulesHayabusaSigma_671e1f43d94bbc36:Importrules_6ccbc14b656c75c9 -i ${file} +done \ No newline at end of file diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/index.md new file mode 100644 index 00000000..f5b7a87f --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma/index.md @@ -0,0 +1,245 @@ +# Build the Knowledge Graph from indicators of compromise rules, like Hayabusa and Sigma rules + +## Introduction + +There are a lot of sources to download the indicators of compromise rules to detect a possible future incident. + +There are rules for Host-based intrusion detection systems (HIDS) with Hayabusa/Sigma, for example, and Network intrusion detection systems (NIDS) with Suricata/Zeek for example. + +Here, we are working with the Hayabusa/Sigma rules available via GitHub: + +- [https://github.com/Yamato-Security/hayabusa-rules](https://github.com/Yamato-Security/hayabusa-rules) +- [https://github.com/SigmaHQ/sigma](https://github.com/Yamato-Security/hayabusa-rules) + +The problem of interoperability, here, is the YAML format of files, their random position in their folders in their Github projets. Moreover, the same rule can exist in different projects but in this tutorial, we will not fix this problem and we consider the IRI rule is their Web address. In Corporate Memory, we would fix that with the Linked Tool, we will study this tool in a next part of this tutorial. + +To build this knowledge graph of rules, we need to: + +1. Create a JSON dataset with all rules +2. Build the transformer of a JSON rule to RDF +3. 
Build a workflow to insert each rule in an unique knowledge graph +4. Use this workflow in CMEMC to import automatically all rules + +## Import the JSON datasets + +The YAML syntax is used to define each rule and there is one file by rule. + +Corporate Memory doesn't support YAML (for the moment) but you can convert the files in JSON with this bash where you need to install git and [yq](https://github.com/mikefarah/yq). + +Moreover, we use yq to add the field `rulePath` in each file with their paths in their repositories to have the possibility to rebuild their positions on the Web and so allowing the analyst to click directly on this link to read the details and may be, modify this rule. + +At the end of this bash, you will have a tree of JSON files and we will apply a workflow on each file. + +!!! Tip + + Don't forget to replace the final folder before using this bash. + +```bash +#!/bin/bash -x + +output_dir_rules=/home/karima/datasets/rules + +mkdir -p ${output_dir_rules} +cd ${output_dir_rules} +git clone --depth 1 https://github.com/Yamato-Security/hayabusa-rules +git clone --depth 1 https://github.com/SigmaHQ/sigma + +for file in $(find . -name '*.yml'); do + [ -f "$file" ] || break + yq ".rulePath = \"${file}\"" -o=json $file > ${file}.json + +done +``` + +We can test this script: + +```bash +cd ~/git/tutorial-how-to-link-ids-to-osint/docs/build/tutorial-how-to-link-ids-to-osint/lift-data-from-YAML-data-of-hayabusa-sigma +chmod +x importRules.sh +./importRules.sh +``` + +For example, the file [proc_creation_win_bcdedit_boot_conf_tamper.yml](https://github.com/SigmaHQ/sigma/blob/master/rules/windows/process_creation/proc_creation_win_bcdedit_boot_conf_tamper.yml) will become this JSON file: + +```json +{ + "title": "Boot Configuration Tampering Via Bcdedit.EXE", + "id": "1444443e-6757-43e4-9ea4-c8fc705f79a2", + "status": "stable", + "description": "Detects the use of the bcdedit command to tamper with the boot configuration data. This technique is often times used by malware or attackers as a destructive way before launching ransomware.", + "references": [ + "https://github.com/redcanaryco/atomic-red-team/blob/f339e7da7d05f6057fdfcdd3742bfcf365fee2a9/atomics/T1490/T1490.md", + "https://eqllib.readthedocs.io/en/latest/analytics/c4732632-9c1d-4980-9fa8-1d98c93f918e.html" + ], + ... + "tags": [ + "attack.impact", + "attack.t1490" + ], + ... + "level": "high", + "rulePath": "./sigma/rules/windows/process_creation/proc_creation_win_bcdedit_boot_conf_tamper.yml" +} +``` + +## Create the knowledge graph + +The collected rules are from Sigma and Hayabusa repositories. [Hayabusa "are trying to make this rules as close to sigma rules as possible"](https://github.com/Yamato-Security/hayabusa-rules#rule-file-format). In your use case, we need properties defined by Sigma and which also exist in Hayabusa rules. The day where there will be a official RDF vocabulary to define a rule, we will use it. Waiting, your minimal vocabulary is "defined" here: [https://github.com/SigmaHQ/sigma-specification/blob/main/Sigma_specification.md#](https://github.com/SigmaHQ/sigma-specification/blob/main/Sigma_specification.md#). We use this address for the prefix of your RDF vocabulary for your use case. + +The filename of the same rule between repositories does not change. So, we are making the IRI of rules with their filename and a arbitrary IRI, like "". 
However, we want to give the possibility to open the original YAML rule directly via SPLUNK, so we add the property `rdfs:isDefinedBy` to associate the rule Web URLs to a rule. +We will not use the guid id or Web address of the rule in its IRI because rules are often duplicate between the repositories and the filename and the title seem to be the used IDs of rules in Splunk and not the guid id. + +This new transformer are building the following RDF model for your use case: + +![](23-1-rdf-model-rule.png) + +1. Create a new project to build the knowledge graph of "Rules Hayabusa Sigma" + +2. In this project, create a RDF dataset "Rules Hayabusa Sigma" in Corporate Memory for all rules with the named graph: `http://example.com/rule` + +3. Create a JSON dataset "Rule example (JSON)" in Corporate Memory with one example of rule: + + ![](23-1-add-json-dataset.gif) + +4. Create the prefix of your vocabulary: + + ```turtle + prefix ctis: + ``` + + ![](23-1-add-prefix-ctis.gif) + +5. Create the transformer for "SIGMA Hayabusa rule" to build this RDF model. + +Rule object: + +- type: `ctis:Rule` + +- IRI: concatenation of "" with the result of this regular expression `^.*?([^\/]*)$` on the rule path + +![](23-1-iri-rule.png) + +- property `ctis:filename` with the result of this regular expression `^.*?([^\/]*)$` on the value path `rulePath` +- property `rdfs:label` with the value path `title` +- property `rdfs:comment` with the value path `description` +- property `rdfs:seeAlso` with the value path `references` +- property `ctis:mitreAttackTechniqueId` is building with this formula with the value path `tags` + - Filter by regex: `^attack\.t\d+$` + - Regex replace `attack\.t` by `T` + +![](23-1-formula-mitreid.png) + +- property `rdfs:isDefinedBy` on the value path `rulePath` is building with this formula to link the rules to their Web addresses. + - Add two "Regex replace" + - replace `\./hayabusa-rules/` by `https://github.com/Yamato-Security/hayabusa-rules/blob/main/` + - replace `\./sigma/` by `https://github.com/SigmaHQ/sigma/blob/master/` + +![](23-1-rules-isdefinedby.png) + +So the rulepath `./sigma/rules/windows/process_creation/proc_creation_win_bcdedit_boot_conf_tamper.yml` becomes the link `https://github.com/SigmaHQ/sigma/blob/master/rules/windows/process_creation/proc_creation_win_bcdedit_boot_conf_tamper.yml` and `./hayabusa-rules/hayabusa/sysmon/Sysmon_15_Info_ADS-Created.yml`becomes `https://github.com/Yamato-Security/hayabusa-rules/blob/main/hayabusa/sysmon/Sysmon_11_Med_FileCreated_RuleAlert.yml` + +!!! Tips + + To test your transformer, you can use the tab "Transform execution". Here, the knowledge graph will not be cleared after each workflow or execution to test your transformer because the option "clear graph before workflow" is disabled. However during the steps to build this transformer, you can enable tempory this option to see and test the final transformer. + You need only to disable this option when your transformer is finished. + +!!! Success + + Your example of rule exists now in your knowledge graph: + ![](23-1-success-extract-rule2.png) + ![](23-1-success-extract-rule.png) + +6. Make the workflow "Import rules" with one input + + ![](23-1-success-workflow.png) + +And don't forget to allow the replacement of JSON dataset because it allows to replace this specific JSON by all other rules during the execution of this worflow. + +![](23-1-workflow-allow-replacement.png) + +![](23-1-add-worflow.gif) + +7. Copy the workflow ID + + ![](23-1-id-worflow.gif) + +!!! 
Success + + In this example the ID of workflow is `RulesHayabusaSigma_671e1f43d94bbc36:Importrules_6ccbc14b656c75c9` + +## Apply the worflow to all files + +We modify the first bash where we add the line to clear the knowledge graph before importing all rules via our worflow where we subtitute the JSON dataset in input by the rules' files. + +!!! Tip + + Don't forget to replace the worflow ID and the final folder before using this bash. + +!!! Tip + + CMEMC config file need to be correctly configurated before to execute this bash, like in the previous tutorial. + + For example: + + ```bash + [johndo.eccenca.my] + CMEM_BASE_URI=https://johndo.eccenca.my/ + OAUTH_GRANT_TYPE=password + OAUTH_CLIENT_ID=cmemc + OAUTH_USER=johndo@example.com + OAUTH_PASSWORD=XXXXXXXXX + ``` + You need to replace "johndo" by other thing, "johndo@example.com" by your login (email) in the sandbox and XXXXXXXXX by your password. Save the file (with VI, :wq). + + Don't forget to specify the config by default to use by CMEMC. + + ```bash + export CMEMC_CONNECTION=johndo.eccenca.my + ``` + +```bash +#!/bin/bash -x + +output_dir_rules=/home/karima/datasets/rules + +mkdir -p ${output_dir_rules} +cd ${output_dir_rules} +git clone --depth 1 https://github.com/Yamato-Security/hayabusa-rules +git clone --depth 1 https://github.com/SigmaHQ/sigma + +for file in $(find . -name '*.yml'); do + [ -f "$file" ] || break + yq ".rulePath = \"${file}\"" -o=json $file > ${file}.json +done + +cmemc graph delete http://example.com/rule + +for file in $(find . -name '*.json'); do + [ -f "$file" ] || break + cmemc workflow io RulesHayabusaSigma_671e1f43d94bbc36:Importrules_6ccbc14b656c75c9 -i ${file} +done +``` + +We can test this script: + +```bash +./importRules2.sh +``` + +## Conclusion + +Here, we learnt how to generate a knowledge graph with files in input with Corporate Memory to prepare the worflow and cmemc to execute this worklow on all files. + +## Ressources + +- [RDF schemas (Model, pattern, etc)](RDF_model_and_pattern.drawio) +- [script 1](importRules.sh) +- [script 2](importRules2.sh) + +--- + +Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md) + +Next chapter: [Link IDS event to a knowledge graph in dashboards via queries](../link-IDS-event-to-KG/index.md) + +Previous chapter: [Build a Knowledge Graph of MITRE ATT&CK® datasets](../lift-data-from-STIX-2.1-data-of-mitre-attack/index.md) diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/eccenca_poc_investigate.tar.gz b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/eccenca_poc_investigate.tar.gz new file mode 100644 index 00000000..d126d98c Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/eccenca_poc_investigate.tar.gz differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/index.md new file mode 100644 index 00000000..44586067 --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/index.md @@ -0,0 +1,230 @@ +# Link IDS event to a knowledge graph via advanced tools + +## Introduction + +In this tutorial, we are using the Splunk app "Investigate lateral movements with a knowledge graph", like example. 
This app contains several dashboards to help analysts navigate the Hayabusa or Sigma alerts before searching in the Suricata and Sysmon alerts. We hope this method can "Accelerate Cyber Threat Hunting".
+
+In the demo of this Splunk app in video 1, the user selects the data of one investigation via Splunk and generates a bash script that exports these data via the Splunk API into temporary graphs in Corporate Memory, one set of graphs per investigation.
+
+![](splunk-app-demo-poc-app.gif)
+
+*Video 1: Splunk dashboards of the Splunk app "Investigate lateral movements with a knowledge graph"*
+
+In this page, we cover the following:
+
+1. Install the Splunk app "Investigate lateral movements with a knowledge graph"
+2. "Accelerate Cyber Threat Hunting" with dashboards
+3. Manage the graphs of your application
+4. Export data in Splunk to Corporate Memory
+5. Reasoning with data in Corporate Memory via Splunk
+6. Reconcile automatically the complex data via Linking tasks
+
+## Install the Splunk app "Investigate lateral movements with a knowledge graph"
+
+This tutorial describes the method used in the app "[Investigate lateral movements with a knowledge graph](eccenca_poc_investigate.tar.gz)". You can install it and modify the source code. The queries and workflows used in its dashboards are not shared, because they are built against Splunk indexes which are not shared either. However, you can modify the dashboards and build your own queries and workflows in your sandbox.
+
+This app is not directly connected to your Corporate Memory instance. The custom REST endpoint of this app, implemented in the file "Investigation.py", generates bash scripts for CMEMC that create or delete the temporary graphs (and temporary folders) of each investigation. Once the script is generated for an investigation, the script "createInvestigation.sh" or "deleteInvestigation.sh", called by a cron service on your Splunk server, executes it, and CMEMC uses its default configuration on this server to connect to your Corporate Memory instance. A minimal sketch of such a generated script is shown at the end of this section.
+
+Position of these scripts in the folders of this app:
+
+```
++---bin
+|   Investigation.py
++---cmem
+|   createInvestigation.sh
+|   deleteInvestigation.sh
+```
+
+Moreover, a settings file is necessary to insert the Splunk credentials (token, IP, port) and to specify the folders used to manage the files of the investigations on the server, i.e. the generated scripts and the exported raw Splunk data. Read the file "README.md" for more information.
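+
+As an illustration, here is a minimal sketch of what such a generated per-investigation script could look like. It is only an assumption about the general shape: the Splunk URL, token, folder, SPL query and workflow ID are placeholders, and the real scripts generated by "Investigation.py" depend on your own indexes and workflows.
+
+```bash
+#!/bin/bash
+# Hypothetical sketch of a generated per-investigation script.
+# All values below are placeholders.
+SPLUNK_API="https://splunk.example.com:8089"   # Splunk management port
+TOKEN="XXXXXXXXX"                              # Splunk authentication token
+WORKDIR="/opt/investigations/inv-42"           # temporary folder of this investigation
+
+mkdir -p "${WORKDIR}"
+
+# 1. Export the selected events through the Splunk REST API
+curl -s -k -H "Authorization: Bearer ${TOKEN}" \
+    "${SPLUNK_API}/services/search/jobs/export" \
+    --data-urlencode search='search index=main sourcetype=hayabusa Level!=info | table RuleTitle RuleFile' \
+    -d output_mode=json > "${WORKDIR}/hayabusa.json"
+
+# 2. Push the exported raw data through a Corporate Memory workflow
+#    that fills the temporary graph of this investigation
+cmemc workflow io PoC21:investigation-hayabusa -i "${WORKDIR}/hayabusa.json"
+```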
+
+## "Accelerate Cyber Threat Hunting" with dashboards
+
+In this Proof of Concept, we have implemented two types of investigation:
+
+- high-level, with the alert data of Zeek and Hayabusa/Sigma
+- low-level, with the data of Suricata and Sysmon
+
+For each type of investigation, an analyst selects and navigates the data with two dashboards:
+
+- one dashboard (see figure 1) to select the data to transfer to the knowledge graph: typeA_request.xml
+- one dashboard (see figures 2, 3 and 4) to navigate in the knowledge graph: typeA_dashboard.xml
+
+![](poc-app_request.png)
+
+*Figure 1: Splunk dashboard to select the data before executing a high-level investigation*
+
+![](poc-app_high_level_computers.png)
+
+*Figure 2: High-level investigation dashboard with the list of computers implicated in the incident*
+
+![](poc-app_high_level_period.png)
+
+*Figure 3: High-level investigation dashboard with the panel to select a specific period during an incident according to IoC details*
+
+![](poc-app_low_level.png)
+
+*Figure 4: Low-level investigation dashboard with the command lines of the Windows processes that raised Suricata alerts during the period selected by the analyst in a high-level investigation dashboard*
+
+The idea is that the analyst can run a high-level investigation without using a lot of resources (a small graph), and when they want to see the suspicious processes on one computer during a specific period, they can request a low-level investigation with a maximum of details.
+
+To follow the calculation of investigations and free memory when an investigation is closed, we developed another dashboard, "investigation_list.xml" (see figure 5). This dashboard shows the status of the investigations currently in the knowledge graph. Each investigation is saved in temporary graphs, and the analyst can create and delete them directly in Splunk.
+
+![](poc-app_list_investigations.png)
+
+*Figure 5: The dashboard "investigation list" shows all the temporary graphs currently in the knowledge graph. The analyst can open an investigation, see the SPL query generated when the investigation was created, and delete it at any time.*
+
+With these interfaces to manage and calculate different investigations with different levels of detail, we imagined a first method to "follow lateral movements" (see figure 6) in order to understand the objectives of the incident. We hope this PoC will "Accelerate Cyber Threat Hunting".
+
+![](poc-app_hunt.png)
+
+*Figure 6: The analyst can select a computer and a period to analyze the suspicious processes implicated in Suricata alerts. In this way, an analyst can follow the "lateral movements" and see the command lines executed by these suspicious processes.*
+
+## Manage the graphs of your application
+
+A knowledge graph in Corporate Memory is a set of named RDF graphs. Each named graph can be managed separately, and each named graph can serve a very different purpose. In this PoC, when a new investigation is created, the app first creates the named graph "investigation", which links all the other temporary named graphs of this investigation with the property "owl:imports". So, when an analyst wants to delete an investigation, the app can find all the temporary graphs of this investigation and simply delete them. Moreover, the analyst can query all these temporary graphs with SPARQL through the single named graph "investigation" (thanks to "owl:imports"), so the complexity of the temporary graphs is invisible to the final user, as the sketch below illustrates.
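+
+As a sketch, with a hypothetical investigation graph IRI, a single SPARQL query over that graph is enough to reach the data of all its temporary graphs:
+
+```sparql
+PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+
+# <http://example.com/investigation/inv-42> is a placeholder IRI for the
+# "investigation" graph; because it owl:imports the temporary graphs of
+# the investigation, one FROM clause is enough to query all of them.
+SELECT ?resource ?label
+FROM <http://example.com/investigation/inv-42>
+WHERE {
+  ?resource rdfs:label ?label .
+}
+LIMIT 100
+```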
+
+A novice error is to save all the input data triples and their inferences in the same graph. Each upload of facts, each calculated inference, etc. should be stored in its own named graph, because during the life of the data (and during development) you want to be able to restore one named graph easily without rebuilding all the other graphs.
+
+This way of managing the graphs has been applied in this app, so an analyst can declare in the dashboard code the data to upload from Splunk to the knowledge graph with the tokens "source_x", for example:
+
+```xml
+  {
+    "index": $selected_index|s$,
+    "search": "sauron_metadata.sauron_source_type=hayabusa Level!=info | table RuleTitle RuleFile | dedup RuleTitle RuleFile",
+    "workflowID": "b5deffdd-f4b9-4d1a-8ea0-9b3410d915e7_PoC21:investigation-hayabusa"
+  }
+```
+
+This token contains a JSON object where:
+
+- "index" is the list of Splunk indexes of the SPL query
+- "search" is the second part of the SPL query
+- "workflowID" is the ID of the workflow in Corporate Memory that converts the raw Splunk data to RDF in a temporary graph
+
+An analyst can import as many sources as needed with several tokens, i.e. "source_1", "source_2", etc.
+
+There is no consensus about how to calculate inferences on the RDF data of a knowledge graph. To simplify, one inference is a set of triples in a graph obtained by calculations over other triples taken as input. These calculations are only possible after all the sources have been imported into the knowledge graph; here, once all temporary graphs are created, the analyst can apply multiple calculations on these temporary graphs. Each calculation creates new triples in new temporary graphs, always associated with the same investigation. We created the token "inferences" to insert in the dashboard the JSON array of inferences to calculate once all data have been imported from Splunk. For example:
+
+```xml
+  [
+    {
+      "comment": "POC 2.1: inference 1: Resolve hostname in Zeek Notice with Zeek DNS",
+      "command": "query",
+      "queryIRI": "https://ns.eccenca.com/data/queries/d063d87e-9122-41a3-84e9-4a05c2d0766e",
+      "inferenceID": "1"
+    },
+    {
+      "comment": "Linking hostname to prepare to calculate computers",
+      "command": "workflow",
+      "workflowID": "b5deffdd-f4b9-4d1a-8ea0-9b3410d915e7_PoC21:WORFLOW_CONSOLIDATION_HOSTNAME_bebe8b4a7f975e90"
+    },
+    {
+      "comment": "POC 2.1: inference 2: Calculate computers",
+      "command": "query",
+      "queryIRI": "https://ns.eccenca.com/data/queries/10cd6a60-c5d4-444c-8a09-6dc63f51576f",
+      "inferenceID": "2"
+    },
+    ...
+  ]
+```
+
+We use two ways to calculate new inferences:
+
+- when the inference is simple to calculate with SPARQL, we use a SPARQL Update query with parameters in Corporate Memory (like "inferenceID", used to build the name of the destination temporary graph). "queryIRI" is the IRI of the query in the query catalog of Corporate Memory.
+- when the inference is complex to calculate, we use a Corporate Memory workflow.
+
+With these tokens "source_1", "source_2", etc. and "inferences" in the dashboard, the app can generate a bash script for CMEMC.
+
+We now explain how to insert these tokens in a dashboard.
+
+## Export data in Splunk to Corporate Memory
+
+In the previous chapter, we explained the role of the token "source_x", which creates a new temporary graph with Splunk data. This token contains SPL queries with other external tokens.
To calculate correctly these SPL queries, you need to "set" the token "source_x" in the xml element `change` of last input component of these external tokens, like in this example with the external token "selected_index": + +```xml + + + index=" + " + OR + index + index + + | eventcount summarize=false index=* +| search NOT index IN ("history", "cim_modactions", "summary") +| dedup index +| fields index + 0 + + + all + * + + { + "index": $selected_index|s$, + "search": "sauron_metadata.sauron_source_type=hayabusa Level!=info | table RuleTitle RuleFile | dedup RuleTitle RuleFile", + "workflowID": "b5deffdd-f4b9-4d1a-8ea0-9b3410d915e7_PoC21:investigation-hayabusa" + } + + +``` + +Here the example is simple with one external token, but when you have several external tokens in the token "source_x", there are often problems to generate a complet SPL query (to check the SPL query, you need to use the Javascript console via the Javascript "investigation.js" to read the final SPL queries in the tokens "source_x"). + +You can add in your dashboard several tokens "source_1", "source_2", "source_3", etc. Each source generates a part of final bash script where a curl command requests the Splunk API and a CMEMC command executes the specified worflow via its ID in the token. With this method, the analyst is free to import any Splunk data and choose any CMEM workflow according to these data. The SPL queries are executed by the Splunk API (via curl), so the analyst is free to use all commands supported in Splunk to select the data. For example, with low-level investigation, these SPL queries contains also the selected period by the analyst to limit the quantity of data to import. + +## Reasoning with data in Corporate Memory via Splunk + +When all the sources are imported in the knowlege graph, the app will use the token "inferences" to execute the last calculations to do on the knowledge graphs. Here, there is not SPL query to calculate so we "set" simply the token "inferences" in the xml element `init`: + +```xml + + + [ +... + { + "comment": "Linking hostname to prepare to calculate computers", + "command": "workflow", + "workflowID": "b5deffdd-f4b9-4d1a-8ea0-9b3410d915e7_PoC21:WORFLOW_CONSOLIDATION_HOSTNAME_bebe8b4a7f975e90" + }, + { + "comment": "POC 2.1: inference 2: Calculate computers", + "command": "query", + "queryIRI": "https://ns.eccenca.com/data/queries/10cd6a60-c5d4-444c-8a09-6dc63f51576f", + "inferenceID": "2" + }, +... + ] + + +``` + +If you want to reuse your own algorithm to calculate new inferences, you can develop your own [plugin](develop/python-plugins/) in Corporate Memory and call it in a workflow that you can call in a Splunk dashboard. + +## Reconcile automatically the complex data via a Linking task + +In the previous chapter, you saw it is possible to call a workflow in a Splunk dashboard. A workflow is a set of components available in Corporate Memory, like transformer, tasks (SPARQL update query), etc. + +One component is **very useful** to reconcile the instance of concepts in your graphs like the concept of tools of different STIX documents or again, the computer according to their hostnames in the logs of systems and networks, etc. This component is the Linking task. + +To use it: + +1. Read the documentation of ["Linking task"](/build/active-learning) to learn to use it + +2. 
[Do the exercice "Reconcile automatically the STIX concepts via the Linking tasks" in the page of tutorial about Mitre's datasets](../lift-data-from-STIX-2.1-data-of-mitre-attack/#reconcile-automatically-the-stix-concepts-via-the-linking-tasks) + +## Conclusion + +RDF is often considerated like too simple to manage the complexe knowledge but the reality is this simplicity is the core to manage all type of complexity. This abstraction is rarely simple for the novices but when you know use RDF properly with professional tools like Corporate Memory, you will make all type of applications, like applications in the cybersecurity. + +Often with this tutorial, the analysts said there is already in SIEM (like Splunk) a lot of tools to hunt via the data. It's truth. There are a lot of tools in the SIEM to help the analysts. The difference with Corporate Memory and the Linked Data technologies, **YOU ARE FREE** because you can calculate your own inferences on your data, you can connect your SIEM with your knowledge graphs and ofcourse, your knowledge graphs is INTEROPERABLE, ie. you can connect your structured data with other tools in your information systems. Moreover, Corporate Memory can support several databases, so you are able to choose the best RDF database according the size of your investigations and the type of calculations to do. + +Now, you know what is it possible to do simply an app in the CyberSecurity with the Linked Data technologies and Corporate Memory. + +--- + +Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md) + +Previous chapter: [Link IDS event to a knowledge graph in dashboards via queries](../link-IDS-event-to-KG/index.md) diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_computers.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_computers.png new file mode 100644 index 00000000..71ca1174 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_computers.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_period.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_period.png new file mode 100644 index 00000000..cd87507c Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_high_level_period.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_hunt.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_hunt.png new file mode 100644 index 00000000..c6ddfd4f Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_hunt.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_list_investigations.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_list_investigations.png new file mode 100644 index 00000000..362d85db Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_list_investigations.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_low_level.png 
b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_low_level.png new file mode 100644 index 00000000..b260ae08 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_low_level.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_request.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_request.png new file mode 100644 index 00000000..85111ec5 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/poc-app_request.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/splunk-app-demo-poc-app.gif b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/splunk-app-demo-poc-app.gif new file mode 100644 index 00000000..06d3020b Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG-via-cmem/splunk-app-demo-poc-app.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld.png new file mode 100644 index 00000000..8f4a7a81 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld_without_html.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld_without_html.png new file mode 100644 index 00000000..98d587f0 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/demo_ld_without_html.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/eccenca_commands.tar.gz b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/eccenca_commands.tar.gz new file mode 100644 index 00000000..1e24e899 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/eccenca_commands.tar.gz differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/index.md b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/index.md new file mode 100644 index 00000000..98d7fabc --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/index.md @@ -0,0 +1,421 @@ +# Link IDS event to a knowledge graph in dashboards via SPARQL queries + +## Introduction + +In this tutorial, we are using the Linked Data App for Splunk. This app contains the SPARQL command necessary to dasboards to help the analysts to understand the Hayabusa and Sigma alerts before searching manually in the data via a SPL (Search Processing Language) query in Splunk. + +In the demo of this app in the video 1, the user selects the indexes of his investigation and select an alert message to open its sources on the Web before searching manually via the Splunk interfaces. Splunk, automatically, refreshes the SPARQL queries in the dashboard after each interaction of user. + +![](splunk-app-demo-LD-app.gif) + +*Video 1: Splunk dashboards of the Linked Data App* + +In this tutorial, we learn to: + +1. Install the "Linked Data App" for this tutorial +2. Configure the SPARQL endpoint of your sandbox +3. Add other SPARQL endpoints +4. 
Build an example dashboard with your private knowledge graphs
+
+## Install the "Linked Data App" in Splunk for this tutorial
+
+The "Linked Data App" extends the Splunk Search Processing Language (SPL) to support the [SPARQL protocol](https://www.w3.org/TR/sparql11-protocol/).
+
+1. Download the tar.gz: [Linked Data App](../link-IDS-event-to-KG/eccenca_commands.tar.gz)
+
+2. Open the Apps window in Splunk via the "tools" icon (see figure 1)
+
+![](splunk_apps_menu.png)
+
+*Figure 1: At the top of the list of installed Splunk apps, click the "tools" icon to open the window for managing your apps*
+
+3. Upload the app in Splunk (see video 2)
+
+![](splunk-app-install.gif)
+
+*Video 2: Once you have downloaded the tar.gz of the "Linked Data App", you can upload it manually directly in Splunk.*
+
+!!! Tip
+
+    The dependencies of this app are already bundled in its tar.gz, but you can update them yourself with the following commands:
+
+    ```bash
+    cd eccenca_commands
+    pip install sparqlwrapper -t bin --upgrade
+    pip install splunk-sdk -t bin --upgrade
+    ```
+
+## Configure the SPARQL endpoint of your sandbox
+
+When you built your knowledge graphs in the previous pages, you used the eccenca sandbox. To use this structured data in Splunk dashboards, you need to connect the SPARQL endpoint of your sandbox to Splunk via the "Linked Data App".
+
+After the installation, this app is located in the folder `etc/apps/eccenca_commands` of the Splunk directory.
+
+1. Create the file `settings.conf`:
+
+```bash
+cd etc/apps/eccenca_commands
+cp default/settings_template_sandbox.conf default/settings.conf
+vi default/settings.conf
+```
+
+You will find an example configuration for the eccenca sandbox SPARQL endpoint in the file `default/settings_template_sandbox.conf` (and another example using an OAuth2 secret ID in the file `default/settings_template_oauth_secret_id.conf`).
+
+2. Insert your credentials into the file `settings.conf`, i.e. replace `johndo` with the name of your sandbox (endpointRead, token_endpoint), `johndo@example.com` with your email and `XXXXXXXXX` with your password. Do not change the parameters OAUTH_CLIENT_ID and OAUTH_GRANT_TYPE.
+
+```ini
+[config:default]
+# replace johndo.eccenca.my by your sandbox
+endpointRead=https://johndo.eccenca.my/dataplatform/proxy/default/sparql
+accessMethod=oauth2
+# replace johndo.eccenca.my by your sandbox
+token_endpoint=https://johndo.eccenca.my/auth/realms/cmem/protocol/openid-connect/token
+OAUTH_CLIENT_ID=cmemc
+OAUTH_GRANT_TYPE=password
+# replace johndo@example.com by your email
+OAUTH_USER=johndo@example.com
+# insert your password
+OAUTH_PASSWORD=XXXXXXXXX
+```
+
+3. Restart your Splunk instance (via the administration interface)
+
+4. Test your sandbox endpoint in Splunk with this SPL query:
+
+```
+| sparql
+    query="
+    select *
+    where {
+        ?s ?p ?v
+    }
+    LIMIT 10
+    "
+```
+
+## Add other SPARQL endpoints
+
+To add a new SPARQL endpoint, add these two lines to your `settings.conf` file, replacing `wikidata` with the name of the new public endpoint and `https://query.wikidata.org/sparql` with the URL of the endpoint.
+
+```ini
+[config:wikidata]
+endpointRead=https://query.wikidata.org/sparql
+```
+
+Restart your Splunk instance and then query the new endpoint in Splunk, using the `config` parameter (here `wikidata`) to select the configuration to use from the file `settings.conf`:
+
+```
+| sparql
+    config="wikidata"
+    query="
+    select *
+    where {
+        ?s ?p ?v
+    }
+    LIMIT 10
+    "
+```
+
+!!!
Tip
+
+    You can clone the dashboards of this app to view and modify the SPARQL examples that use Wikidata.
+
+## An example of a dashboard with your private knowledge graphs
+
+To work, our example dashboard needs Splunk indexes of IoCs. We cannot share our indexes, but you can adapt our example with your own SPL queries according to your Splunk indexes.
+
+![](demo_ld.png)
+
+*Figure 2: Dashboard with SPARQL commands and the script `table_html.js` to render the HTML and to open the Web pages of the alerts' references*
+
+!!! Tip
+
+    The SPARQL command follows the Splunk logic of exposing all metadata of the SPARQL response (literal types, etc.). In a dashboard, however, a static table panel should usually show only the columns from the header of your SPARQL query. Inside the XML element `table`, you can select the output columns via the XML element `fields`:
+
+    ```xml
+    ["Source","Description","MitreID"]
+    ```
+
+    Of course, you can also do this via the SPL query.
+
+The XML source of the dashboard shown in figure 2 is listed below:
+
+```xml
+
+ + + + +
+ + + index=" + " + OR + index + index + + | eventcount summarize=false index=* +| search NOT index IN ("history", "cim_modactions", "summary") +| dedup index +| fields index + 0 + + + all + * + + + + critical + high + medium + low + critical,medium,high,low + Level=" + " + OR + +
+ + + + + | tstats count where $selected_index$ ld_metadata.ld_source_type=hayabusa Level=low + 0 + + + + + + + + + + + + + + | tstats count where $selected_index$ ld_metadata.ld_source_type=hayabusa Level=medium + 0 + + + + + + + + + + + + + + | tstats count where $selected_index$ ld_metadata.ld_source_type=hayabusa Level=high + 0 + + + + + + + + + + + + + + | tstats count where $selected_index$ ld_metadata.ld_source_type=hayabusa Level=critical + 0 + + + + + + + + + + + + + + + + | tstats count where $selected_index$ ld_metadata.ld_source_type=hayabusa Level!=info $level$ by RuleTitle +| rename RuleTitle as "Rule name" +| sort - count + 0 + + + + + + $click.value$ + +
+
+
+ + + Rule's sources + + + | sparql +query="prefix ctis: <https://github.com/SigmaHQ/sigma-specification/blob/main/Sigma_specification.md#> +prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> +prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> +prefix xsd: <http://www.w3.org/2001/XMLSchema#> + +SELECT DISTINCT (STRBEFORE(STRAFTER(STR(?link),\"https://github.com/\"),\"/\") as ?Source) (?comment as ?Description) ?link (?mitreID as ?MitreID) +FROM <http://example.com/rule> +WHERE { + VALUES ?title { \"$selected_rule$\" } + + ?ruleHayabusa a ctis:Rule ; + rdfs:label ?title ; + rdfs:comment ?comment ; + rdfs:seeAlso ?referenceLink; + rdfs:isDefinedBy ?link ; + ctis:filename ?filename . + OPTIONAL { + ?ruleHayabusa ctis:mitreAttackTechniqueId ?mitreID . + } +}" + -24h@h + now + + $result.MitreID$ + + + + + $row.link|n$ + + + + ["Source","Description","MitreID"] +
+
+ + Rule's references + + + | sparql +query="prefix ctis: <https://github.com/SigmaHQ/sigma-specification/blob/main/Sigma_specification.md#> +prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> +prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> +prefix xsd: <http://www.w3.org/2001/XMLSchema#> + +SELECT DISTINCT (GROUP_CONCAT(STRBEFORE(STRAFTER(STR(?link),\"https://github.com/\"),\"/\"); separator=', ') as ?Source) (?referenceLink as ?Reference) +FROM <http://example.com/rule> +WHERE { + VALUES ?title { \"$selected_rule$\" } + + ?ruleHayabusa a ctis:Rule ; + rdfs:label ?title ; + rdfs:comment ?comment ; + rdfs:seeAlso ?referenceLink; + rdfs:isDefinedBy ?link ; + ctis:filename ?filename . +} +GROUP BY ?referenceLink" + -24h@h + now + + + + + ["Source","Reference"] + + $row.Reference|n$ + +
+
+
+ + + + Mitre description in relation with this rule + + | sparql +query=" +PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> +PREFIX ctia: <https://github.com/mitre/cti/blob/master/USAGE.md#> + +SELECT +(CONCAT (\"<b>\",?title,\"</b>\",\"<br/>\",?description,\"<br/><br/>\",GROUP_CONCAT( distinct ?link; separator=\"<br/>\")) as ?html) +FROM <https://github.com/mitre-attack/attack-stix-data/raw/master/enterprise-attack/enterprise-attack.json> +WHERE { + + { + ?resource ctia:type ctia:course-of-action . + } union { + ?resource ctia:type ctia:attack-pattern . + } + + ?resource rdfs:label ?title ; + ctia:description ?description ; + ctia:external_references ?mitre_url . + + ?mitre_url ctia:external_id \"$MitreID$\" ; + ctia:source_name \"mitre-attack\" . + + OPTIONAL { + ?resource ctia:external_references [ + ctia:url ?reference_url ; + ctia:source_name ?reference_label ; + ctia:description ?reference_description + ] . + BIND( CONCAT(\"<a href='\",STR(?reference_url),\"'>\",?reference_label,\": \",?reference_description ,\"</a>\") as ?link) + } + +} +GROUP BY ?title ?description" + -24h@h + now + + + + ["html"] +
+
+
+ + + + + $selected_index$ ld_metadata.ld_source_type=hayabusa RuleTitle="$selected_rule$" +| strcat Channel " type " EventID event_source +| table RecordID, Timestamp, event_source, Computer, Details + 0 + + + + + search?q=$selected_index$%20ld_metadata.ld_source_type%3Devtx%20EventRecordID%3D$click.value$&earliest=0&latest= + +
+
+
+
+```
+
+## Conclusion
+
+In the "Linked Data App", we implemented a simple SPARQL command for querying the Linked Open Data as well as your private knowledge graphs.
+
+The Linked Data technologies make it possible to push Open-Source INTelligence (OSINT) into the Linked Open Data, and this will simplify the work of analysts in their SIEM, whether that is Splunk or another product.
+
+In the previous pages of this tutorial, you learned to create new classes and new properties in your knowledge graphs for Mitre Attack or IoC rules whenever you feel the need to do so. This natural behavior of the "Everything as code" approach creates a natural entropy in the global ontology of the cyber domain, and the first victims of this entropy are the analysts. This problem can be resolved if analysts work together to build their cyber ontologies with the Linked Data technology, just as the Wikipedia contributors built Wikidata. It is only a matter of willpower and of the skills you have now acquired through this tutorial.
+
+On the next page, we use advanced tools in Corporate Memory to "Accelerate Cyber Threat Hunting".
+
+---
+
+Tutorial: [how to link Intrusion Detection Systems (IDS) to Open-Source INTelligence (OSINT)](../index.md)
+
+Next chapter: [Link IDS event to a knowledge graph in dashboards via inferences](../link-IDS-event-to-KG-via-cmem/index.md) (for the advanced users of Corporate Memory)
+
+Previous chapter: [Build a Knowledge Graph from indicators of compromise rules, like Hayabusa and Sigma rules](../lift-data-from-YAML-data-of-hayabusa-sigma/index.md)
diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-demo-LD-app.gif b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-demo-LD-app.gif new file mode 100644 index 00000000..cb0c6773 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-demo-LD-app.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-install.gif b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-install.gif new file mode 100644 index 00000000..faa28636 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk-app-install.gif differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk_apps_menu.png b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk_apps_menu.png new file mode 100644 index 00000000..b3dc24e8 Binary files /dev/null and b/docs/build/tutorial-how-to-link-ids-to-osint/link-IDS-event-to-KG/splunk_apps_menu.png differ diff --git a/docs/build/tutorial-how-to-link-ids-to-osint/usecase.drawio b/docs/build/tutorial-how-to-link-ids-to-osint/usecase.drawio new file mode 100644 index 00000000..1a59a86e --- /dev/null +++ b/docs/build/tutorial-how-to-link-ids-to-osint/usecase.drawio @@ -0,0 +1 @@
+7V1Zd6M4Fv4t85Bzuh7wwRhvj1nKXenpdNdUMlU1TzkChK0OIBfIcdy/fq4WMKuNHewmFXxyYiMJId373UVXCxeDa//l1xAtF3fUwd6FoTsvF4ObC8MwRlMDvnjKRqX0xyplHhJHpvW3Cffkb6wSdZW6Ig6OMgUZpR4jy2yiTYMA2yyThsKQrrPFXOpln7pEc1xIuLeRV0z9Rhy2kKmTob5N/4TJfBE/ua+rHB/FhVVCtEAOXaeSBh8vBtchpUz+8l+uscepF9NF3jeryE0aFuKA1bkhuv0ReH/Tr8uV+fFr8O9Pvw6evmiqlmfkrVSHVWPZJqYAtHvJfwaUYWj0lUVXgYOd3y0PEvuQsF4Qhu+XyOal1oADSFswP86OJE+NIfx+xiEjQNxLj8wDSGSUF0bqyoae4BASPGRh7wrZT/OQP+yaejQULQigpiuXBkwBpW/CdZEQca/gafgllaQI8yumPmbhBoqoXG3am6Y/k5GsQaFWG0wUE9dbDAxjiC5S/E+4jRTu5smztqyBH4o7B3DKKOHUyGOcwEsUZFg2+rHioLpa0ogwQoG0l5AfYg8x8owFxUBeBE1Fxtz6ZQTNhnbpo4n8Ho8/yHKc2JqLfOJtZGmoHvmca+oh90tvFTxBxmeo3qWhDz/vaEDz5WS9twEIakShKJIJ1/Iyklf5m25CCkQx9HsURDvrhRxk0/JKrukqJIArQ/8Dr8tvT4rwCx+eEkk4JxSQIOb975vLl3SGpDrPCaDzyEvlPaOQIPgGeCO2Crka21nORsuqImuFMZ5pgooROR5mIC8abysJ5sU7abhccMKJDEOmgUAwTQncpajDZakcAqIdqOfocUdFDguhKsFf9ZxA0QdEQmjZ1EPWNHSyzUrqgp5YTwSq43VGLKRPWFNClSlnJdKv5aBqDIeST+kfH1ItdbBNQ8SBr7EFsZ8CHKnmkQDkIaZOvmyKkzvLpZqTLjdKADWaq28hnVZYKptVqCpUM05qArUhRT37gKPEvylolDD/H+RBbXTtAOhusO8TubryXFc17FIvlTrpPavrU4nhPmGr1tJ1Le1xGu+ADkhCBla0LL8+Ssm8wX6/qX7uUuL7aaDbIKtRpiNPeCP02DP2uPd/MNAPcCphsKdoIL/0NO/StBzHtDy2R1qAfFzszKXnzQj2nNvApVWM5w+qw/szYu0AEiuw9Y3kh1FbQHQH1Lq2jMepcUaOaLsoz0TzQ24JC8To9Xp7+l+R3GnqTlP/s/1snc5t3nS0TVNfw2gk4LHD43h+YmRubUTLGvZTGJwt74+1NfebCEYBnbnpzE1nbjpzU8fcUH+54nMN7VTrnb05qb3ZMv9Yg+NvbFVJz6M22ue3dKanMz2tND1dTOrspufjMw7Y7U0Vz7twVE5bb+l1rLLud9q5086ddu60cy3tzF5mxMOdeq6tnhOCHaufocO3f3z+7wN8i0mD2R2xQxpRl2nfSMDXF2gywPP45xLLBQDI62F4cKfYO8X+FhV761T0zx/x+Z3T5liOd+GeN2yhYs4fa54WAM7O0HSGpk2G5icxIDWNxN7ensOAfOGrT53qwI1K5k89q6qOiZiJkAwGtmmazj5bIxt7Thbv3mfQEpt0tKlJYeToYBX/dOamMzdvTiSaNyrVur2m4WhPAK3KqKw8vCvc9OahefzQpvVda4fUVRiiLa6Oj8rtcWiOdU1cd6LzTbJVD7ReZcWWIbUf7RCLMOHjmgSPlu1gh7BHi1L2aNPAfWTAOBz2Nn46IGCVWNky/6izyZ1NfjPaobPJx9nkB8I6o/wWu9YOsas2yjGw3plVvuKnXIjdoy6Zr+QcHlw/CEMsOqp/JQj+X0lj3fv4/WNnmzvb3Gol0c0DvsF5wAfi44gPAI7lemtt89sQmn92LjDN/WNNMFBvoOmmxo8U4iZzcDkw4Z/R7w34AT0XxpVIFMa0s0qdVTqzgLVnfPUqS/Rqu3HGEaPnPPqYIUecklJndrDmlN1r1MDx83wHzdiVCtS5If/6Fr8OLKqZpzmp6VDF35D9OB9S9+v7Q693wX5Xv5ypUB+N9auBgXP2FA2rxIc4YBz8E0Kj03Bn1XD7Vr8cltxMd9/zaWfd4ZTd4ZS1DkZ0PYpYnjgOiZYe2sTFPQIZhv4v4i9pyFDAdlqBw2xMJ9GdRHcSfYbjZpsIvHXS2UlnJ52NSufWc25oEMar2WOQc+f486Zf7D2tn7ej6cP1k5Py4+P0TcPoDYsH6sevTsgcqD9KijZ+pP6g8kj92tPrkwo23YNe4RP9OnX5P+FYxaeU6cgD2km162GbYa7UrE1S7hPaIGsVIQ7xVWYZSnaUW87f9JsXhETl355APC+XVHj/Qv41DT5xHP6YUvCE8n0QkKDH+FGNyuNp0hCezGEeT2bx9QwDswRNg1O9ncHcByVX0myLlK84dFCADgyVGBV4WzCmzMUM/uaELVZWz6Zgf2f3ZO6jT//h6oH/4t1DkTj+aLZHJ+10/ssiLg55flUvzkszLlzgSMzWcpM5/OKLR8E+JOtHUyTaFfQqrp15RauOXsBaEQisSBasOpkmUY5EpR6p1hl6hZbhZa4T4g908bmofFWMg120Shh35rfFmEZRHZnTEnVknkodDfepo/LgLXxct1Xxb5ZbiGntQ3g8LDmN0J5ofVuZjL7ST0xNHDTlcTZSyy6WcJFuDH8OjuyQLCUn9mlD63D8jN0GG3uDGTiCfFTLFtwFXEWJ7ygTlA0QjfF9FHCXkVH+T0BM6BpQNUlxiUc7h0c+Q93juFwQ8SxsLwLyY8XvEAnUZZiXY3yZjGxG4pvCQHINSlOMJLnKYgyULg4jqb7gH68fg6FY2SKwAC1C/D4LwziRX3poFdgLKQx8+Eh9WV8pb945eEPsYjBQNufCYdhtimq5aSu9POm8UqJx81nt6IbYscE7DDc89jNDjPrE1uSJQg99YX/ld893zkqnYrPxD88jVg+cPPAPsEPtqEd4m7nwzTzEQIx4DwLkbcB/4p6pDf1AE63vTCf2dOBO+xPcE/5Zs2LySr++NfLTkPOB5mn5q+V7lDvYjZGxJtykdu4RH5xo1rJGnU8lu26D/iyTKqS+Ni6lulBHrQnMGT1zom8/k/htrclrL8dlAxmzNx2WBOr6pxrMjLp3lIrw6GBL+J3vJZ2UjTRPxp1xkTvbhXAnsa+q0MujT1iIHx3qI8JnhJK7pQ8/bLAVKkXwfhmSCGtSmWUKn6rHwxsxs3F6ihKnQMTUBP4qhG5TVwOLwmc+NMsyhtZkOtYs1x5rZt+YaJPB1NGGBp6OR6ZjuWh0Dvo0OtKtJg4oaRyCR/aY8c1PBbnziNAq9Ko5fnb3PusjK29ByHiPhnOunAjjk6eAvmjr01cZ5PSU7ukpGYF42Pgxtz68gqJvzPMRLNgqvAMcoPPyIJHQXXrsjfJgD9h3Er8hjdSsFWp0XNnEWczAsWmWmJnZ9Mzi/tMMo87VlbdlERKDYHl03vtraYPDDPYAfkEa3Au9nen97TSepgKkkcYn0bGjWRstCVZmYiZ1bMZPzexsiLwtTH+gPg3QXC
0Ju+fx6l8kn/n1byhYITEsMkYfeN63ePZWv1aMh5+XlgpdZ/dPvG92H+SjnI3dpfz77fP1xy8PNTj3k7AtY1wLvTnviOdsw82WAfE2WBBLTLHdbyKG+dJVfp7nMxba5i4ZexyiT05JvnZqb76ElzhiRlJOL4q3o/GZSBZST8w68mnLZYj5e30OVM/tmRMf37wijFu2juUUAUOzJGBYEtEdDoc9vSSia+qnihlOKpenVOnsV+y9T6+WLiylLlvKzffBaIvUXf3pmcYxza57qxdnyUyhZof6KgajRYy8aHJ3+yxE6wNWENZtUklgdVZI6/0V1VnLUeIJ1FvvduAETJUknyjUbw5KJLdsjevwVFI7LUjtvVxQqpYpx+tVxFKSeI2zWHciF6V8wxb35f+8v/3j4cM7Xr5cWA0/LOFsv4SzJ1u9HD8sxdqbtHchOXvHlQPnJuJLeGwiF66TLYOzcHiv/B0O6vB3Uj6LejoW9wssLrAIB85lGNI1pzU//oTYWYYVSTlTe8FuYgtVX00CgcPNd15XTx+N4oT/yYTpME64eVGPk1eb9NVnHBKgDsfEzU7Ny1A4x2wHdRQnsDPHO1mc2a1SrXq3u+tSjShjqXrCZ0qE4U82zOQQZBg5WMihvLpti4xCTcM9K5UHeWMhKVWoWEAuocMrUGi0FYUaoE43szA0B+N9MHwhTGLYMMfqWt47GcbX21v5RfrOPH4PA7XEwH5Q7wX/uFXgn+aw358cif3RJFtTfgXEqaFetrusFVDXe6NpBun9Y5TtFvpTI4P88bh/MPKPB7lRE+Rmq0AOHldexY+OhLmp9/qT0Thew5VfwQXZupmyALm9lKcWguK+uAfhKiYbiNUKcrGgfU3DJ9eTwWDhVl7TcMl32vI77rBPBYfk8MISIw4RS7Jp8CwHG8oNfbj9Lsb3MNZnSeVZFzXJFM/5cjNLykW8ylVA3I0YxXjyVsLHt/zFPrpHnnBS3e1NlPeOL9X4tZXub63dUnt0zGRfFEt12mjAl+6P89agZGtV6U7PvDg150gX91YpSHMOK0AtV6Hc/rsmokZLbLEIIt7x3KDJJYGIkDooWlgUhU4HnUagM81r2H6/CB2jbMv5+HDo8MAY33KTUp5Aw8UddTAv8X8=7Vxbc9o4FP41zKQPML5gkjwCSbqXdKaTdDbdfdkRtgBNZYvKAkJ//Z4jyWAbzB2SdqHtgI5lXc75zlV2a343fv0oyWj4SUSU1zwneq35dzXP892WB19ImRnKjXNrCAPJIkNyF4Rn9oNaomOpYxbRtNBRCcEVGxWJoUgSGqoCjUgppsVufcGLs47IgC4RnkPCl6kvLFJDu4vAWdB/o2wwzGZ2HXslJllnS0iHJBLTHMm/r/ldKYQyv+LXLuXIvIwv5r6HiqvzhUmaqG1umI3ayW16/Y/4g9KPf47+/f7y8KNuR5kQPrYbfhnSBJk8pMjBhPBZqnD1lAN7U6TBPzkG/njdeb+IpMOeIDLCa7QvaTqk6fyqgC+JszAK4jDMULOMw4q+wvo7QxVzILjwM1VSfKNdwYUESiIS6NnpM85LJMLZIIFmCByACfzOhErFQHZteyFmUYTTdKZDpujziIQ45xSQCjQpxklEkTkOtDjpUd4h4beBpmczRbRPxhzX1xeJeiAx4wjkv6iMgDeWbFHr3th2dnPN8x39yfrZTVuVwNXS10ppunOMgHJREVMlZ9DF3lB3Hd8Cy2qW17Lt6QKnfkYb5jCa0YhVjcF88AV64IcF0A5g8pbAVPNamnkRmxSE3vo+Rth3Cr8G9lvf0SsT0hFJVo7RmwutHhrGt3GhCVOMcD2Ag8yvp1pKeM29Gb2unPxJw9pJSAxfLRIjTJJeOjKjXEiWtNTmhokPRkbHk1tPyIjK/OUJkVf1ep7+YTcRvwf+XUjvAVzbIiaEUdSGdQB5yWABTZu9jFpye+mUxZwY15bzDegrwiHj0SOZiTFa7FTBXrJWZygk+wH9ycJZEpl5IN8p9HjGO+2Y4JKhz+fMv7gl0ifyWuj4SFKVrUZwTkYp6+n14Y0xkQOWdIRSIradVjnYnEfPe8lWrejMFy428+ec9hcsO7KXdLPILeclg+YKL+lmEejR3aS/s5vc3g5WuVMUwMqhy1rQqrabh2pcecjfY4y9Pefq7vHxA3w/CoLx2HtwJxevcSH90qRzedmyIrWCIGhtmK4nV7hSY8DWe9j/gxE9wIg1Go3Tcv70/MntWg56V14QmCqE692YH04+HejbdB27LzL25Wk7WIPxnK5I+mwwlkQxgdN/AR2hkiUDvJ1h2aMTgoNiqnH/9f49KPFRDcLP4HTPZaVcN7jdMFuFYpwYuUtJS7BBk1ZqdHE/q8txu9bgbMxeWYGDOUNQpUfd7a65oDzZkBtJAm7vc10kHcKNFEbojARLlA7Bgw78Beh2nUZQC+6QbUHHXbThL3aXChQZlk+YzhEoZDJTitlMRwpFFOnN86wdk5at8pHqwthykmJTkixD2ZSR3JwqIWlWJiRbW/Q15ZZiOe+OKsK4riP3MKPFGjHDpikwU11BHnOsHq9NrYvKd8mtN+XWJ0mmfW9L6J4Ou8Em7IbzYvwCm1lZfvcApIx7bwPux5WKsGfQsUInnBy5er4rMcod7JgDHHCCNXOgk9ApXiPQ3RF9dNt4dEbBM69Rwy2cywYzckCBt4rzYGDsQZXZ6Til2Z4MoWf4qaERxyRBi6OE3j2yXqMZwDzvbuQUluQUgSdpoLyM9YI5hwn7PtZMTfWEyrCbxfokDJaBE/VQn2LCp6B12AunIwptCZWp0Q8jkAhclhyHik2w35TgfT3aF/o2TsYJGDINEkmSVMRmvP1Ck20529hZ7sdXo6FSePDbRqPhPQxAUONeA+QIDUmjEOIkOQtBmA8m2amKH5fUZOeY8vjArdTYvfVzTZZ33i0djIOi2Ol3zlmvISkBr0MjEaYNtlnmv558z6+HW89Y7YfekvVH8EPlob+4zdt16XRF9eQkUvo9GbKe9mzPs1RRMIrOEw0xqULv8Qny3YF1XzspSYVpWRLxGynTVo8VFKOfkTl40IoGbNE+ta298PliooOljVkui3TAYqIPSLtNoAKpu86wdFQzknRCEzXfe8RSSIFN7KB3BTuasFAHKTociSARs0GOvg75hL7IkongEx3FaBalGcakxVhD21/9eFZqahqcfdOsHY0eBUY4QL3qMmVAqHf3wpIIn9HCQFgHY/jzj8/d+6cvH3L1yvNx9Yn2KSREyA/s+OYBzxoG/SrxjS0AyIzzB+vgkUp2+zvH3ZC7TQEOctaW/tSKCT7Q+wH+qf2vSnGlp+zMZ87MfUp0zaOU6LzgVHWO1hlrdG0ODExrhay5VJ7boH6X6tw7qM61tq3Oud6pUHt9OGqXTzicvmH2gpYdkRx+crtitpUFRGtxtj6zOmABZuD67jnGIedTTnY6Nf/hrc5/PMfz606zjujUTzq3fXAfbc9t+N7ykivOIveqHR5bdL+BQuzI5GOET9seDO5xaL35wH+Tqb4cBh7/MLB1lEjDP5XJvrmY7ONZ7
Z/HaAc7Ge2f1mK/88c2DjTqF+v9xtZ7xStY57Xet3sdh1cpWBFrl0jhXWHt9q2xlr0gUQBbCRY0idr4Fi60Qk7SlIVFZhVfwtziHctmrfodS+CmnH3FsRq3zWZG+FsTHH9OuMsSftOa5VufwcwCd/BtUkN8ZcqMeN10bdsMeHMd2PZiPGzMco3yaIY9NFp65biU5AMLxVjaiGntSySKyAFdh6GKulYeNCswk9Ek5UQ/gpBf2yog2Rk+o9blahKlikQd/GXDdWF4+3FvikOabdtR8u8ylwb2A5BocLv4BMV5/BZcz3284jSGa0vTaC2YM+kAxVh+q/r9KEbglhTDvd5HMbaH8kaIVpi5M0G0FRQxulTC3RaTSyP5Zcu7N+zw6Zb5/xNgui/+twX//j8= \ No newline at end of file diff --git a/docs/deploy-and-configure/configuration/dataintegration/index.md b/docs/deploy-and-configure/configuration/dataintegration/index.md index a3cb7870..9611a8ae 100644 --- a/docs/deploy-and-configure/configuration/dataintegration/index.md +++ b/docs/deploy-and-configure/configuration/dataintegration/index.md @@ -356,6 +356,37 @@ workspace.repository.projectS3 = { # /path/to/my-workspace/ } ``` +For this S3 plugin make sure the account has at least these permissions attached: + +``` json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "VisualEditor0", + "Effect": "Allow", + "Action": "s3:ListBucket", + "Resource": "arn:aws:s3:::" + }, + { + "Sid": "VisualEditor1", + "Effect": "Allow", + "Action": "s3:ListAllMyBuckets", + "Resource": "*" + }, + { + "Sid": "VisualEditor2", + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::/*" + } + ] +} +``` ### S3 Bucket, Shared Directory - s3 diff --git a/docs/deploy-and-configure/installation/22-1-cmem-login-page.png b/docs/deploy-and-configure/installation/22-1-cmem-login-page.png deleted file mode 100644 index c258d011..00000000 Binary files a/docs/deploy-and-configure/installation/22-1-cmem-login-page.png and /dev/null differ diff --git a/docs/deploy-and-configure/installation/22-1-successful-login.png b/docs/deploy-and-configure/installation/22-1-successful-login.png deleted file mode 100644 index c09a5b1a..00000000 Binary files a/docs/deploy-and-configure/installation/22-1-successful-login.png and /dev/null differ diff --git a/docs/deploy-and-configure/installation/scenario-local-installation/index.md b/docs/deploy-and-configure/installation/scenario-local-installation/index.md index e7cdb028..01effd92 100644 --- a/docs/deploy-and-configure/installation/scenario-local-installation/index.md +++ b/docs/deploy-and-configure/installation/scenario-local-installation/index.md @@ -9,10 +9,10 @@ The code examples in this section assumes that you have POSIX-compliant shell (l ## Requirements -- Access credentials to eccenca Artifactory and eccenca Docker Registry → [contact us to get yours](https://eccenca.com/en/contact) -- [docker](https://www.docker.com/) and [docker-compose](https://docs.docker.com/compose/install/) (v1) installed locally -- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) installed locally -- At least 4 CPUs and 12GB of RAM (recommended: 16GB) dedicated to docker +- Access credentials to eccenca Artifactory and eccenca Docker Registry → [contact us to get yours](https://eccenca.com/en/contact) +- [docker](https://www.docker.com/) and [docker-compose](https://docs.docker.com/compose/install/) (v1) installed locally +- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) installed locally +- At least 4 CPUs and 12GB of RAM (recommended: 16GB) dedicated to docker ## Setup & Check Installation Environment @@ -153,16 +153,8 @@ Run make logs to see log output Open your browser and navigate to  -![cmem-login-page](../22-1-cmem-login-page.png) - -Click CONTINUE WITH LOGIN and use one of these 
default accounts: - | account | password | description | | ------- | -------- | ------------------------------------------------------------------------------------------- | | `admin` | `admin` | Is member of the global admin group (can see and do anything) | -| `user` | `user` | Is member of the local user group (can not change access conditions or see internal graphs) | - -![successful-login](../22-1-successful-login.png) After successful login, you will see Corporate Memory interface. You can now proceed to the :arrow_right:[Getting Started](../../../getting-started/index.md) section. - diff --git a/docs/deploy-and-configure/installation/scenario-redhat-enterprise-linux-7/index.md b/docs/deploy-and-configure/installation/scenario-redhat-enterprise-linux-7/index.md index 8fc18e10..cf9ec38c 100644 --- a/docs/deploy-and-configure/installation/scenario-redhat-enterprise-linux-7/index.md +++ b/docs/deploy-and-configure/installation/scenario-redhat-enterprise-linux-7/index.md @@ -9,9 +9,9 @@ This page describes a docker-compose based orchestration running on RedHat Enter ## Requirements -- [Virtualbox](https://www.oracle.com/virtualization/technologies/vm/downloads/virtualbox-downloads.html) and [vagrant](https://www.vagrantup.com/downloads.html) installed locally -- Terminal with ssh client installed locally -- POSIX-compatible command line interface (Linux, macOS or WSL for Windows) +- [Virtualbox](https://www.oracle.com/virtualization/technologies/vm/downloads/virtualbox-downloads.html) and [vagrant](https://www.vagrantup.com/downloads.html) installed locally +- Terminal with ssh client installed locally +- POSIX-compatible command line interface (Linux, macOS or WSL for Windows) ## Provisioning @@ -253,15 +253,8 @@ echo "10.10.10.10 corporate.memory" >> /etc/hosts Open your browser and navigate to [https://corporate.memory] -![cmem-login-page](../22-1-cmem-login-page.png) - -Click CONTINUE WITH LOGIN and use one of these default accounts: - | account | password | description | | ------- | -------- | ------------------------------------------------------------------------------------------- | | `admin` | `admin` | Is member of the global admin group (can see and do anything) | -| `user` | `user` | Is member of the local user group (can not change access conditions or see internal graphs) | - -![successful-login](../22-1-successful-login.png) After successful login, you will see Corporate Memory interface. You can now proceed to the :arrow_right:[Getting Started](../../../getting-started/index.md) section. 
diff --git a/docs/deploy-and-configure/installation/scenario-single-node-cloud-installation/index.md b/docs/deploy-and-configure/installation/scenario-single-node-cloud-installation/index.md index d7ac5cca..c50b5059 100644 --- a/docs/deploy-and-configure/installation/scenario-single-node-cloud-installation/index.md +++ b/docs/deploy-and-configure/installation/scenario-single-node-cloud-installation/index.md @@ -9,10 +9,10 @@ This page describes a docker-compose based orchestration running on a server ins ## Requirements -- ssh access to a server instance (Debian 11) with a public IP address -- A resolvable domain name to this server -- Terminal with ssh client installed locally -- An eccenca partner account for the docker registry as well as the release artifact area +- ssh access to a server instance (Debian 11) with a public IP address +- A resolvable domain name to this server +- Terminal with ssh client installed locally +- An eccenca partner account for the docker registry as well as the release artifact area ## Server Provisioning @@ -89,10 +89,10 @@ $ vi prod.env In addition that, you need to remove the default config and link it to your prod.env ```shell-session -$ cd /opt/cmem-orchestration/environments +cd /opt/cmem-orchestration/environments -$ rm config.env -$ ln -s prod.env config.env +rm config.env +ln -s prod.env config.env ``` To see all available configuration options refer to [Docker Orchestration configuration](./../../configuration/docker-orchestration/index.md) page. @@ -100,8 +100,8 @@ To see all available configuration options refer to [Docker Orchestration confi Next, request SSL certificates from [letsencrypt](https://letsencrypt.org/) service: ```shell-session -$ cd /opt/cmem-orchestration -$ make letsencrypt-create +cd /opt/cmem-orchestration +make letsencrypt-create ``` Change `CMEM_BASE_URI` according to your `DEPLOYHOST`. @@ -121,16 +121,16 @@ EOF Finally deploy the Corporate Memory instance: ```shell-session -$ make clean-pull-start-bootstrap -$ make tutorials-import +make clean-pull-start-bootstrap +make tutorials-import ``` Optional: you can install cmem as a systemd service for this use these commands as root oder sudo: ```shell-session -$ cp /opt/cmem-orchestration/conf/systemd/cmem-orchestration.service /etc/systemd/system -$ systemctl enable cmem-orchestration -$ systemctl start cmem-orchestration +cp /opt/cmem-orchestration/conf/systemd/cmem-orchestration.service /etc/systemd/system +systemctl enable cmem-orchestration +systemctl start cmem-orchestration ``` ## Validation and Finalisation @@ -142,13 +142,9 @@ Click **CONTINUE WITH LOGIN** and use one of these default accounts: | account | password | description | | ------- | -------- | ------------------------------------------------------------------------------------------- | | `admin` | `admin` | Is member of the global admin group (can see and do anything) | -| `user` | `user` | Is member of the local user group (can not change access conditions or see internal graphs) | - -![successful-login](../22-1-successful-login.png) After successful login, you will see Corporate Memory interface. You can now proceed to the :material-arrow-right: [Getting Started](../../../getting-started/index.md) section. Do not forget to change the passwords of your deployment, especially if it is available from the public internet. For this, take a look at [Change Passwords and Keys](../../configuration/keycloak/change-passwords-and-keys/index.md). 
- diff --git a/docs/explore-and-author/.pages b/docs/explore-and-author/.pages index 1321dcec..4e6a1a64 100644 --- a/docs/explore-and-author/.pages +++ b/docs/explore-and-author/.pages @@ -1,5 +1,6 @@ nav: - Explore and Author: index.md + - Workspace Selection and Configuration: workspace-configuration - Graph Exploration: graph-exploration - Vocabulary Catalog: vocabulary-catalog - Thesauri Management: thesauri-management diff --git a/docs/explore-and-author/embedding-services-via-the-integrations-module/index.md b/docs/explore-and-author/embedding-services-via-the-integrations-module/index.md index f1fab72a..45ff2873 100644 --- a/docs/explore-and-author/embedding-services-via-the-integrations-module/index.md +++ b/docs/explore-and-author/embedding-services-via-the-integrations-module/index.md @@ -7,7 +7,7 @@ tags: A DataManager module is available that can be used to embed / integrate other web-services in Corporate Memory. The module can be used and configured globally or individually per workspace configuration. -[![](./integrations.png)](./integrations.png) +![image](integration.png){ class="bordered" } ## Activation and configuration in DataManager @@ -39,7 +39,7 @@ A restart of DataManager will be required in order for the configuration change ## Link Configuration in DataIntegration -The (module) link configuration in DataIntegration is managed in its own configuration. Thus, the following snippet from a `dataintegration.conf`  shows how to add the "INTEGRATINOS" link to the DataIntegrations menu: +The (module) link configuration in DataIntegration is managed in its own configuration. Thus, the following snippet from a `dataintegration.conf`  shows how to add the "INTEGRATIONS" link to the DataIntegrations menu: ``` js eccencaDataManager.moduleLinks = [ @@ -72,6 +72,7 @@ eccencaDataManager.moduleLinks = [ ``` !!! Note + The "`name"` and "`defaultLabel`" property should be aligned in the DataManager and DataIntegration configuration for consistency. A restart of DataIntegration will be required in order for the configuration change to become effective. @@ -80,4 +81,4 @@ A restart of DataIntegration will be required in order for the configuration cha A typical (eccenca) use case for the Integrations Module is to embed redash dashboards. In order show a dashboard in a Corporate Memory make sure your redash instance use the same protocol as your Corporate Memory instance (typically https). Then open the dashboard that should be embedded and click the sharing button ![](./share.png){ .off-glb width=32 }. In the dialog make sure "*Allow public access*" is enabled. Copy the "*Secret address*" and paste this address into the "`url`" property of a tab configuration, as shown above. 
-[![](./share_dashboard.png){ width=561 }](./share_dashboard.png) +![](./share_dashboard.png){ class="bordered" width=561 } diff --git a/docs/explore-and-author/embedding-services-via-the-integrations-module/integration.png b/docs/explore-and-author/embedding-services-via-the-integrations-module/integration.png new file mode 100644 index 00000000..8561586e Binary files /dev/null and b/docs/explore-and-author/embedding-services-via-the-integrations-module/integration.png differ diff --git a/docs/explore-and-author/embedding-services-via-the-integrations-module/integrations.png b/docs/explore-and-author/embedding-services-via-the-integrations-module/integrations.png deleted file mode 100644 index 7aa9e01f..00000000 Binary files a/docs/explore-and-author/embedding-services-via-the-integrations-module/integrations.png and /dev/null differ diff --git a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/index.md b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/index.md index eaa52930..2569af27 100644 --- a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/index.md +++ b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/index.md @@ -13,42 +13,42 @@ Working with shapes allows for creation of a customized Linked Data user interfa You can define forms using SHACL rules. The rules state: -1. What types of resources the form definition applies to. This is based on the `rdf:type` of a resource. -2. What fields are shown in the form in which order. Field contents are retrieved from properties connected to the resource. -3. Which other, linked resources are shown in the form. Linked resources can either be shown as links or as their full form. -4. Which texts are used to name and describe fields, as well as the tab in the user interface. +1. What types of resources the form definition applies to. This is based on the `rdf:type` of a resource. +2. What fields are shown in the form in which order. Field contents are retrieved from properties connected to the resource. +3. Which other, linked resources are shown in the form. Linked resources can either be shown as links or as their full form. +4. Which texts are used to name and describe fields, as well as the tab in the user interface. Forms are defined in the CMEM Shapes Catalog graph. The graph URI is `https://vocab.eccenca.com/shacl/`. Form definitions are twofold: -1. The form itself is defined as so called `NodeShape`. NodeShapes define which types of resources the form applies to (the target class), and which fields are shown in the form (the Properties). -2. The individual fields are defined as so called `PropertyShape`. PropertyShapes define which property is used to retrieve data for the field (the path), the name of the field, a description, its cardinality (min and max count), its position in the form (the order), and if it should always be shown. In case of object properties, it also defines the type of the linked resource (the class). The full list of features is described in [section PropertyShapes](#propertyshapes). +1. The form itself is defined as so called `NodeShape`. NodeShapes define which types of resources the form applies to (the target class), and which fields are shown in the form (the Properties). +2. The individual fields are defined as so called `PropertyShape`. 
PropertyShapes define which property is used to retrieve data for the field (the path), the name of the field, a description, its cardinality (min and max count), its position in the form (the order), and if it should always be shown. In case of object properties, it also defines the type of the linked resource (the class). The full list of features is described in [section PropertyShapes](#propertyshapes). To define a new form, for example for `foaf:Person` resources, navigate to the CMEM Shapes Catalog graph and select `NodeShape` in Navigation. The list of existing NodeShapes is shown. Click "Create a new SHACL Node shape" in the upper right to create a new NodeShape. Enter a name of the resource. An empty NodeShape resource is created and shown. -[![](./createNodeShape.png)](./createNodeShape.png) +![](./createNodeShape.png){ class="bordered" } To create the initial definition, click ![](./ic_mode_edit_black_18dp_1x.png){ .off-glb } (Edit). A form is shown to you with input fields Name, Property Shapes, Vocabulary, Target class and Statement Annotation. The initial definition requires the name, and the target class. Fields are attached to the form later. Target class in particular binds the form to the resources it should cover. The Target class field features an auto-complete that displays all classes stored in Corporate Memory. The example form should cover resources of the type `foaf:Person`, so enter `foaf:Person` in the Target class field. Click SAVE to save the NodeShape. -[![](./EditNodeShape.png)](./EditNodeShape.png) +![](./EditNodeShape.png){ class="bordered" } You have now created an "empty" form that covers `foaf:Person` resources with tab name "Person". Navigating to a `foaf:Person` resource, you see a new tab as defined. You can still see all properties of the resource in the PROPERTIES tab. -[![](./nodeshape.png)](./nodeshape.png) +![](/nodeshape.png){ class="bordered" } To define new fields, for example showing the email address of the person (defined as `foaf:mbox`), navigate to the CMEM Shapes Catalog graph and select `PropertyShape` in Navigation. The list of existing PropertyShapes is shown. Click CREATE NEW PROPERTYSHAPE in the upper right to create a new PropertyShape. Enter a name of the resource. An empty PropertyShape resource is created and shown. Edit the form using ![](./ic_mode_edit_black_18dp_1x.png). A form is shown with all relevant properties of a field definition. Required in this step are: -1. The name of the field, which will be displayed left of the data content or input field in the form. -2. The description, which will be displayed as tooltip on the question mark to the right of the name. -3. The path, which states which property the field represents. In this example, it is `foaf:mbox`. -4. The form the field should be shown in (Property of). The field provides an auto-complete, so just enter "Person" and select the NodeShape resource you defined in the previous step. +1. The name of the field, which will be displayed left of the data content or input field in the form. +2. The description, which will be displayed as tooltip on the question mark to the right of the name. +3. The path, which states which property the field represents. In this example, it is `foaf:mbox`. +4. The form the field should be shown in (Property of). The field provides an auto-complete, so just enter "Person" and select the NodeShape resource you defined in the previous step. Click SAVE after filling out the required fields. 
-[![](./nodeshapeedit.png)](./nodeshapeedit.png) +![](nodeshapeedit.png){ class="bordered" } ## Using forms @@ -60,7 +60,7 @@ While browsing your knowledge graph, you will always see your shape in action, w The next images demonstrate this behavior : -[![](./nodeshape.png)](./nodeshape.png) +![](./nodeshape.png){ class="bordered" } ### Creating new resources @@ -68,6 +68,6 @@ You can also create new resources by using a shaped form. One way to achieve thi The next images demonstrate this behaviour: -[![](./createsparqlquery.png)](./createsparqlquery.png) +![](./createsparqlquery.png){ class="bordered" } -[![](./createsparqlqueryeditor.png)](./createsparqlqueryeditor.png) +![](./createsparqlqueryeditor.png){ class="bordered" } diff --git a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshape.png b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshape.png index 6f889ae1..a41d95c2 100644 Binary files a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshape.png and b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshape.png differ diff --git a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshapeedit.png b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshapeedit.png index e1f9b4c6..22188cbc 100644 Binary files a/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshapeedit.png and b/docs/explore-and-author/graph-exploration/building-a-customized-user-interface/nodeshapeedit.png differ diff --git a/docs/explore-and-author/graph-exploration/graphoverview.png b/docs/explore-and-author/graph-exploration/graphoverview.png index 9f1feb20..06fbe554 100644 Binary files a/docs/explore-and-author/graph-exploration/graphoverview.png and b/docs/explore-and-author/graph-exploration/graphoverview.png differ diff --git a/docs/explore-and-author/graph-exploration/index.md b/docs/explore-and-author/graph-exploration/index.md index 847fe1b0..3403ca2c 100644 --- a/docs/explore-and-author/graph-exploration/index.md +++ b/docs/explore-and-author/graph-exploration/index.md @@ -20,7 +20,7 @@ The user interface of the Explore module shows the following main areas: - the main area, providing multiple views, depending on which resource has been selected. -1. If necessary, you can toggle the navigation area by using the +1. If necessary, you can toggle the navigation area by using the :eccenca-toggler-moveleft: (hide) and :eccenca-toggler-tree: (show) buttons. ## Graphs @@ -37,7 +37,7 @@ In the main area, the Metadata view of the selected graph appears, showing sev The Graphs are categorized into groups as follows: -- User: All graphs which represent user data (created manually or by build processes).  +- User: All graphs which represent user data (created manually or by build processes). - Vocabularies: All graphs containing vocabularies. - System: All graphs containing configuration data. - All @@ -56,16 +56,16 @@ To add a new graph to the Graphs list: - Click **Save** to create the new graph. -1. More concrete, you select a shape here. +1. More concrete, you select a shape here. This can be configured in the workspace configuration as well. ### :eccenca-item-download: Downloading a graph To download a graph from the Graphs list: -- In the **Graphs** list, click **:eccenca-item-download: Download graph** on the graph you want to download. 
-- A message box appears, stating that downloading can take a long time. -- Click **Download**. +- In the **Graphs** list, click **:eccenca-item-download: Download graph** on the graph you want to download. +- A message box appears, stating that downloading can take a long time. +- Click **Download**. ### :eccenca-item-edit: Managing a graph @@ -83,7 +83,7 @@ To update or replace data of a graph: - Click **Update** to start the upload process. -1. You can upload one of the following file formats: Turtle, N-Triples, RDF/XML, or JSON-LD. +1. You can upload one of the following file formats: Turtle, N-Triples, RDF/XML, or JSON-LD. To delete a graph, select **:eccenca-item-remove: Remove graph** on the graph you want to remove and confirm deletion process. @@ -103,7 +103,7 @@ To reset the results delete the keyword and press Enter. Select a class in the Navigation box to show all instances of this class in the main area. (1) { .annotate } -1. The table uses a default query to list all resources with a given class. +1. The table uses a default query to list all resources with a given class. This can be configured by adding a `shui:navigationListQuery` to the class shape. ### Instance Details @@ -111,7 +111,7 @@ Select a class in the Navigation box to show all instances of this class in th To open the Instance Details of a resource click on that resource in the Instance List. Resources are shown as grey chip buttons. -![](./instancedata.png) +![](instancedata.png){ class="bordered" } !!! warning inline end @@ -128,7 +128,7 @@ The availability of these views depends on the context and the resource type. The Resource tab provides a view based on the shapes of the selected resource. The details of the shaped view depends on the configuration. -![](./graphoverview.png) +![](graphoverview.png){ class="bordered" } #### Properties @@ -151,8 +151,7 @@ Click **SAVE** to save your changes. The Statistics tab indicates the number of classes, properties, entities and triples of the graph. -![](./statictics.png) - +![](./statictics.png){ class="bordered" } #### Graph @@ -162,7 +161,7 @@ The Statistics tab indicates the number of classes, properties, entities and t The Graph tab shows a visual graph representation of the graph. -![](./graphvisulization.png) +![](./graphvisulization.png){ class="bordered" } #### Vocab @@ -173,21 +172,20 @@ The Graph tab shows a visual graph representation of the graph. This tab shows a graph visualization of an installed vocabulary. It displays all classes showing the class-subclass.  You can open the class details and view the list of instances related to that class. It also allows you to copy the resource IRI. -![](./vocab.png) +![](vocab.png){ class="bordered" } #### References This tab shows all resources that link back to the selected resource. -[](./Reference.png) +![](./Reference.png){ class="bordered" } #### Turtle This tab shows the turtle RDF representation of the raw data representing the resource. You can use this tab to edit the selected resource: -- Enter your changes in turtle. -- Click **UPDATE** to save your changes. +- Enter your changes in turtle. +- Click **UPDATE** to save your changes. Deleting the entire turtle representation deletes the resource. 
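For orientation, the content of this tab is plain Turtle. A resource's representation typically looks similar to the following minimal sketch (the prefixes, IRIs and values are purely illustrative and not taken from an actual graph):

```turtle
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix ex:   <http://example.org/data/> .

# one resource with two property values and one link to another resource
ex:alice
    a foaf:Person ;
    foaf:name "Alice Smith" ;
    foaf:mbox <mailto:alice@example.org> ;
    foaf:knows ex:bob .
```

Any valid Turtle can be entered here; as noted above, removing all triples removes the resource itself.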
- diff --git a/docs/explore-and-author/graph-exploration/instancedata.png b/docs/explore-and-author/graph-exploration/instancedata.png index 5d004102..a4382da5 100644 Binary files a/docs/explore-and-author/graph-exploration/instancedata.png and b/docs/explore-and-author/graph-exploration/instancedata.png differ diff --git a/docs/explore-and-author/graph-exploration/statement-annotations/annotations.png b/docs/explore-and-author/graph-exploration/statement-annotations/annotations.png index f55fd6e2..3b619575 100644 Binary files a/docs/explore-and-author/graph-exploration/statement-annotations/annotations.png and b/docs/explore-and-author/graph-exploration/statement-annotations/annotations.png differ diff --git a/docs/explore-and-author/graph-exploration/statement-annotations/index.md b/docs/explore-and-author/graph-exploration/statement-annotations/index.md index b1dbc238..304ef0fb 100644 --- a/docs/explore-and-author/graph-exploration/statement-annotations/index.md +++ b/docs/explore-and-author/graph-exploration/statement-annotations/index.md @@ -17,7 +17,7 @@ Statement Annotations provide a way to express knowledge about statements. Typi If enabled on a specific type of statement or type of resource, you see a Statement Annotation text bubble beside every annotatable statement: -![](./statementannotationoveriew.png) +![](statementannotationoveriew.png){ class="bordered" } This bubble has different status: @@ -27,11 +27,11 @@ This bubble has different status: Clicking on one of the text bubbles opens the Statement Annotation dialog for this specific statement: -![](./createstatementannotations.png) +![](./createstatementannotations.png){ class="bordered" } In the Statement Annotation dialog, you can select the Statement Annotation Template and click **Create**. -![](./statementedit.png) +![](./statementedit.png){ class="bordered" } ## Setup @@ -41,25 +41,25 @@ In order to have a working Statement Annotation setup, the following steps need Create a new Graph, edit its metadata and change the type to Statement Annotation Graph. -![](./statementannotation.png) +![](statementannotation.png){ class="bordered" } ### Setup and import the Statement Annotation Graph in your data graph In your data graph, where the resources exist which you want to annotate, import the Statement Annotation Graph and select it as an Annotation Graph. -![](./annotations.png) +![](annotations.png){ class="bordered" } ### Create a shaped form which will be used to annotate statements In your Shape Catalog, select a Node Shape (or create one) which you want to use for statement annotations, and Enable Statement Annotation to true. -![](./setannotations.png) +![](setannotations.png){ class="bordered" } ### Allow statement annotations in your shaped forms on specific Classes or Properties Finally, select the Node Shape or Property Shape from your Shape Catalog, and enable annotations by setting the Enable option in the Statement Annotations group to true. -![](./setannotations.png) +![](setannotations.png){ class="bordered" } This will enable the feature on the statements of all resources shown with this Node Shape or on all statements shown with this Property Shape. @@ -71,7 +71,7 @@ These Annotation Resources are based on specific Shapes which are enabled as Sta Reification Resources as well as Annotation Resources are managed in a Statement Annotation Graph, which need to be configured on a Graph as well as imported to this Graph. 
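As a rough mental model only, classic RDF reification expresses the same schema in triples. The sketch below is generic and illustrative; the actual classes and properties used by Corporate Memory for Reification Resources and Annotation Resources may differ:

```turtle
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix ex:  <http://example.org/> .

# Reification Resource: stands for the annotated statement "ex:alice ex:worksFor ex:acme"
ex:statement-1
    a rdf:Statement ;
    rdf:subject   ex:alice ;
    rdf:predicate ex:worksFor ;
    rdf:object    ex:acme .

# Annotation Resource: attached to the Reification Resource, carries the annotation data
ex:annotation-1
    ex:aboutStatement ex:statement-1 ;
    ex:comment        "Needs to be verified." .
```

Both of these resources would live in the Statement Annotation Graph, while the annotated statement itself stays in the data graph.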
The following illustration depicts this schema with boxes and arrows: -![](20-10-StatementAnnotationSchema.png) +![](20-10-StatementAnnotationSchema.png){ class="bordered" } !!! note "Some notes on this:" diff --git a/docs/explore-and-author/graph-exploration/statement-annotations/setannotations.png b/docs/explore-and-author/graph-exploration/statement-annotations/setannotations.png index 1cea55aa..f8e37eb3 100644 Binary files a/docs/explore-and-author/graph-exploration/statement-annotations/setannotations.png and b/docs/explore-and-author/graph-exploration/statement-annotations/setannotations.png differ diff --git a/docs/explore-and-author/graph-exploration/statement-annotations/statementannotation.png b/docs/explore-and-author/graph-exploration/statement-annotations/statementannotation.png index ce065fda..fdb0be40 100644 Binary files a/docs/explore-and-author/graph-exploration/statement-annotations/statementannotation.png and b/docs/explore-and-author/graph-exploration/statement-annotations/statementannotation.png differ diff --git a/docs/explore-and-author/graph-exploration/statement-annotations/statementannotationoveriew.png b/docs/explore-and-author/graph-exploration/statement-annotations/statementannotationoveriew.png index 90e3d2bc..c50bfcb8 100644 Binary files a/docs/explore-and-author/graph-exploration/statement-annotations/statementannotationoveriew.png and b/docs/explore-and-author/graph-exploration/statement-annotations/statementannotationoveriew.png differ diff --git a/docs/explore-and-author/graph-exploration/vocab.png b/docs/explore-and-author/graph-exploration/vocab.png index 0d215277..2c11c677 100644 Binary files a/docs/explore-and-author/graph-exploration/vocab.png and b/docs/explore-and-author/graph-exploration/vocab.png differ diff --git a/docs/explore-and-author/index.md b/docs/explore-and-author/index.md index aa944c0f..ce83eca1 100644 --- a/docs/explore-and-author/index.md +++ b/docs/explore-and-author/index.md @@ -3,6 +3,7 @@ icon: material/star hide: - toc --- + !!! info inline end "" ![Your are here](overview-explore.drawio.png "You are here") diff --git a/docs/explore-and-author/query-module/Queries.png b/docs/explore-and-author/query-module/Queries.png index a008ecf7..26fedd42 100644 Binary files a/docs/explore-and-author/query-module/Queries.png and b/docs/explore-and-author/query-module/Queries.png differ diff --git a/docs/explore-and-author/query-module/index.md b/docs/explore-and-author/query-module/index.md index fec957ad..f73c53d6 100644 --- a/docs/explore-and-author/query-module/index.md +++ b/docs/explore-and-author/query-module/index.md @@ -20,10 +20,9 @@ The catalog lists all existing SPARQL queries including name, type and descripti Use the **:eccenca-operation-search: Search** bar in order to look for a specific query. -![](./Queries.png) - -Select the query from the Queries catalog, to open and load the query.  +![](Queries.png){ class="bordered" } +Select the query from the Queries catalog, to open and load the query. ## Query editor @@ -35,7 +34,7 @@ The query editor features SPARQL syntax highlighting and SPARQL validation, allo The Query editor allows to Run query, Download Results, Delete, Save and Save as Queries. -![](./QueryEditor.png) +![](./QueryEditor.png){ class="bordered" } ### Run a query @@ -44,9 +43,9 @@ The results are presented as a table with pagination. ### Export results -To export the full set of results without any limits in form of a CSV file click **:eccenca-item-download: Download result** on the top right.  
+To export the full set of results without any limits in form of a CSV file click **:eccenca-item-download: Download result** on the top right. -![](./QueriesResults.png) +![](./QueriesResults.png){ class="bordered" } !!! info @@ -58,7 +57,6 @@ To export the full set of results without any limits in form of a CSV file click To save a query in the Query catalog click **:material-floppy: Save**. This opens a dialog that allows you to overwrite the existing query. - ### Placeholders In addition to the standard SPARQL syntax, placeholders can be used to parametrize a query. @@ -67,7 +65,7 @@ Multiple placeholders can be defined by changing the name inside the brackets. When a query contains a placeholder, the placeholder list to the right of the query editor shows a field with its name. -![](./placeholder.png) +![](./placeholder.png){ class="bordered" } When running a query that contains placeholders, the query editor replaces the `{{placeholdername}}` string in the query with the respective string entered into the placeholder list. This is a direct string replacement, so placeholders can contain simple strings and literal values, URIs, variables or even sub queries. @@ -86,4 +84,3 @@ When you enter `Person` into the `class` placeholder field in the placeholde ```sparql SELECT * WHERE { ?classInstance a .} ``` - diff --git a/docs/explore-and-author/thesauri-management/CMEM-19-02-navigation-tree.png b/docs/explore-and-author/thesauri-management/CMEM-19-02-navigation-tree.png deleted file mode 100644 index 2d179333..00000000 Binary files a/docs/explore-and-author/thesauri-management/CMEM-19-02-navigation-tree.png and /dev/null differ diff --git a/docs/explore-and-author/thesauri-management/CMEM-22-2-thesaurus-project-catalog.png b/docs/explore-and-author/thesauri-management/CMEM-22-2-thesaurus-project-catalog.png deleted file mode 100644 index 8339d2bb..00000000 Binary files a/docs/explore-and-author/thesauri-management/CMEM-22-2-thesaurus-project-catalog.png and /dev/null differ diff --git a/docs/explore-and-author/thesauri-management/index.md b/docs/explore-and-author/thesauri-management/index.md index 51079a29..1be770be 100644 --- a/docs/explore-and-author/thesauri-management/index.md +++ b/docs/explore-and-author/thesauri-management/index.md @@ -18,8 +18,7 @@ You can think of these relations as a hierarchical tree representing the relatio In a concept scheme Industries, a top branch in this tree, as for example the sub-industry Industrials or Health Care, is called a top concept. All branches together belong to the concept scheme Industries. -![Navigation tree with detail view of the concept Airlines](./CMEM-19-02-navigation-tree.png "Navigation tree with detail view of the concept Airlines") - +![Navigation tree with detail view of the concept Airlines](./navigation-tree.png "Navigation tree with detail view of the concept Airlines"){ class="bordered" } !!! info @@ -43,7 +42,7 @@ In order to get more information on a thesaurus project and edit its metadata, c The view expands showing the project metadata. Click **:eccenca-item-edit: Edit** on the right side of the row to open the edit mode, enter your changes and click **SAVE**. -![Thesaurus project catalog](CMEM-22-2-thesaurus-project-catalog.png "Thesaurus project catalog") +![Thesaurus project catalog](thesaurus-project-catalog.png "Thesaurus project catalog"){ class="bordered" } To open the detail view of a thesaurus project, click the project name in the catalog. 
@@ -150,8 +149,8 @@ You can add, for example, a second broader concept for an existing concept or a To add relations, select the concept in the navigation tree. In the detail view, click **:eccenca-item-edit: Edit** to open the edit mode. -- To add an associative relation to another concept, enter the concept name in the field **Related concept**. -- To add a further broader relation, enter the name of the broader concept in the field **Broader concepts**. +- To add an associative relation to another concept, enter the concept name in the field **Related concept**. +- To add a further broader relation, enter the name of the broader concept in the field **Broader concepts**. You can only choose from existing concepts. Click **SAVE** to confirm your changes. @@ -169,4 +168,3 @@ When adding relations the inverse relation is automatically added, too. To remove concepts or concept schemes, select the resource in the navigation tree, click the context menu **:eccenca-item-moremenu: Show more options** and select the **Remove** option. Confirm the dialog and click **REMOVE**. - diff --git a/docs/explore-and-author/thesauri-management/navigation-tree.png b/docs/explore-and-author/thesauri-management/navigation-tree.png new file mode 100644 index 00000000..c59027b2 Binary files /dev/null and b/docs/explore-and-author/thesauri-management/navigation-tree.png differ diff --git a/docs/explore-and-author/thesauri-management/thesaurus-project-catalog.png b/docs/explore-and-author/thesauri-management/thesaurus-project-catalog.png new file mode 100644 index 00000000..e37107fa Binary files /dev/null and b/docs/explore-and-author/thesauri-management/thesaurus-project-catalog.png differ diff --git a/docs/explore-and-author/vocabulary-catalog/index.md b/docs/explore-and-author/vocabulary-catalog/index.md index 4888b4b0..93910118 100644 --- a/docs/explore-and-author/vocabulary-catalog/index.md +++ b/docs/explore-and-author/vocabulary-catalog/index.md @@ -10,7 +10,7 @@ tags: Vocabularies are the foundation for semantic data lifting activities.This module shows the list of all managed vocabularies in Corporate Memory that are accessible for the user. The table represents the list of known vocabularies. Installed vocabularies are indicated by the orange switch in the column `Installed`. 
-[![](./vocabulary.png)](./vocabulary.png) +![](vocabulary.png){ class="bordered" } ## Add new vocabulary @@ -26,8 +26,7 @@ Each table row provides a menu with more options clicking on ![](./ic_keyboard A vocabulary which is known and available but not installed, looks like this: -[![Example of extended information of uninstalled Vocabulary Catalog](./not_installed_vocab.png - "Example of extended information of uninstalled Vocabulary Catalog")](./not_installed_vocab.png) +![Example of extended information of uninstalled Vocabulary Catalog](./not_installed_vocab.png "Example of extended information of uninstalled Vocabulary Catalog"){ class="bordered" } Example of extended information of uninstalled Vocabulary Catalog @@ -36,7 +35,7 @@ Example of extended information of uninstalled Vocabulary Catalog A vocabulary which is installed looks like this -[![Example of extended information of installed Vocabulary Catalog](./installed_vocab.png "Example of extended information of installed Vocabulary Catalog")](./installed_vocab.png) +![Example of extended information of installed Vocabulary Catalog](./installed_vocab.png "Example of extended information of installed Vocabulary Catalog"){ class="bordered" } Example of extended information of installed Vocabulary Catalog diff --git a/docs/explore-and-author/vocabulary-catalog/vocabulary.png b/docs/explore-and-author/vocabulary-catalog/vocabulary.png index a01a17a0..3e88e8f2 100644 Binary files a/docs/explore-and-author/vocabulary-catalog/vocabulary.png and b/docs/explore-and-author/vocabulary-catalog/vocabulary.png differ diff --git a/docs/explore-and-author/workspace-configuration/add-workspace.png b/docs/explore-and-author/workspace-configuration/add-workspace.png new file mode 100644 index 00000000..3157aae1 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/add-workspace.png differ diff --git a/docs/explore-and-author/workspace-configuration/configuration.png b/docs/explore-and-author/workspace-configuration/configuration.png new file mode 100644 index 00000000..cc8c594f Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/configuration.png differ diff --git a/docs/explore-and-author/workspace-configuration/create-new-workspace.png b/docs/explore-and-author/workspace-configuration/create-new-workspace.png new file mode 100644 index 00000000..a1de6940 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/create-new-workspace.png differ diff --git a/docs/explore-and-author/workspace-configuration/delete-select.png b/docs/explore-and-author/workspace-configuration/delete-select.png new file mode 100644 index 00000000..91e4576d Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/delete-select.png differ diff --git a/docs/explore-and-author/workspace-configuration/delete-stepresult.png b/docs/explore-and-author/workspace-configuration/delete-stepresult.png new file mode 100644 index 00000000..09a9c746 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/delete-stepresult.png differ diff --git a/docs/explore-and-author/workspace-configuration/delete.png b/docs/explore-and-author/workspace-configuration/delete.png new file mode 100644 index 00000000..74553d03 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/delete.png differ diff --git a/docs/explore-and-author/workspace-configuration/details.png b/docs/explore-and-author/workspace-configuration/details.png new file mode 100644 index 00000000..c1058a04 
Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/details.png differ
diff --git a/docs/explore-and-author/workspace-configuration/enable.png b/docs/explore-and-author/workspace-configuration/enable.png
new file mode 100644
index 00000000..cd7d3a0f
Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/enable.png differ
diff --git a/docs/explore-and-author/workspace-configuration/index.md b/docs/explore-and-author/workspace-configuration/index.md
new file mode 100644
index 00000000..836488e1
--- /dev/null
+++ b/docs/explore-and-author/workspace-configuration/index.md
@@ -0,0 +1,92 @@
+---
+icon: eccenca/application-config
+status: new
+tags:
+  - KnowledgeGraph
+---
+
+# Workspaces
+
+The specific configuration of the application defines which options are available here, i.e. whether you can select one of several workspaces, access only a default workspace or are allowed to create your own workspaces.
+
+## Select a workspace
+
+To select a workspace, click on the **user icon** on the right side of the page.
+
+![User menu](user.png){ class="bordered" }
+
+Click the drop-down list and click the workspace you want to open.
+
+![Workspace selector](workspace.png){ class="bordered" }
+
+!!! success "Step Result"
+
+    The workspace opens and you can now enable or disable the modules and change the module configuration as required.
+
+    ![Configuration module de-/activation](enable.png){ class="bordered" }
+
+## Configure a workspace
+
+Click on the **user icon** on the right side of the page, then click on **Configuration**.
+
+![Configuration module](configuration.png){ class="bordered" }
+
+Click on **Workspace**, then select the workspace whose details you want to see.
+
+![Select workspace to configure](select-workspace.png){ class="bordered" }
+
+Click on the down arrow to expand **Workspace** and **DI Workspace Configuration** to see the configuration details as shown below.
+
+![Configuration module settings](details.png){ class="bordered" }
+
+## Add a Workspace
+
+Click on the **user icon** on the right side of the page, then click on **Configuration**.
+
+![Configuration module](configuration.png){ class="bordered" }
+
+Click on **Workspace** on the left side of the page, then click on **Create New Workspace**.
+
+![Create workspace](create-new-workspace.png){ class="bordered" }
+
+Type the **Id** and **Label**, then click on **Add**.
+
+![Add new workspace](add-workspace.png){ class="bordered" }
+
+!!! success "Step Result"
+
+    The workspace was created successfully and you can now enable or disable the modules and change the module configuration as required.
+
+    ![Workspace created](enable.png){ class="bordered" }
+
+!!! note
+
+    Refer to the system manual of eccenca DataManager to get more information on all the options that can be configured here.
+
+## Delete a workspace
+
+Click on the **user icon** on the right side of the page, then click on **Configuration**, then click on **Workspace**.
+
+![Create workspace](create-new-workspace.png){ class="bordered" }
+
+Select the workspace you want to delete from the drop-down list.
+
+![Select workspace to configure](select-workspace-1.png){ class="bordered" }
+
+Click on the **Delete Icon** on the right side of the page.
+
+![Delete selected workspace](delete.png){ class="bordered" }
+
+Click on **Delete**.
+
+![Delete confirmation dialog](delete-select.png){ class="bordered" }
+
+!!! success "Step Result"
+
+    The workspace has been deleted.
+ + ![Deletion result](delete-stepresult.png){ class="bordered" } + +!!! note + + When you delete a workspace, no graphs or Build projects are deleted. diff --git a/docs/explore-and-author/workspace-configuration/select-workspace-1.png b/docs/explore-and-author/workspace-configuration/select-workspace-1.png new file mode 100644 index 00000000..69769135 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/select-workspace-1.png differ diff --git a/docs/explore-and-author/workspace-configuration/select-workspace.png b/docs/explore-and-author/workspace-configuration/select-workspace.png new file mode 100644 index 00000000..0b0f952a Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/select-workspace.png differ diff --git a/docs/explore-and-author/workspace-configuration/user.png b/docs/explore-and-author/workspace-configuration/user.png new file mode 100644 index 00000000..6496b96e Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/user.png differ diff --git a/docs/explore-and-author/workspace-configuration/workspace.png b/docs/explore-and-author/workspace-configuration/workspace.png new file mode 100644 index 00000000..e949f687 Binary files /dev/null and b/docs/explore-and-author/workspace-configuration/workspace.png differ diff --git a/docs/getting-started/Explore.png b/docs/getting-started/Explore.png deleted file mode 100644 index c199a56c..00000000 Binary files a/docs/getting-started/Explore.png and /dev/null differ diff --git a/docs/getting-started/UserInterface.png b/docs/getting-started/UserInterface.png deleted file mode 100644 index 1f36b3e2..00000000 Binary files a/docs/getting-started/UserInterface.png and /dev/null differ diff --git a/docs/getting-started/Workspace.png b/docs/getting-started/Workspace.png deleted file mode 100644 index f28d0311..00000000 Binary files a/docs/getting-started/Workspace.png and /dev/null differ diff --git a/docs/getting-started/demograph.png b/docs/getting-started/demograph.png new file mode 100644 index 00000000..98c6266b Binary files /dev/null and b/docs/getting-started/demograph.png differ diff --git a/docs/getting-started/index.md b/docs/getting-started/index.md index 038f5a64..ef9cc549 100644 --- a/docs/getting-started/index.md +++ b/docs/getting-started/index.md @@ -24,28 +24,27 @@ eccenca Corporate Memory is a semantic data management software that accelerates The main features of Corporate Memory include: -- Flexible metadata and schema layer based on knowledge graphs -- Data virtualization and analytics -- Data integration and indexing -- Dataset and vocabulary management -- Thesaurus and taxonomy management -- Big data scalability -- Access control +- Flexible metadata and schema layer based on knowledge graphs +- Data virtualization and analytics +- Data integration and indexing +- Dataset and vocabulary management +- Thesaurus and taxonomy management +- Big data scalability +- Access control ### Minimal requirements For the best user experience, we recommend to use the newest version of Google Chrome or Mozilla Firefox. Corporate Memory is tested with the following browsers: -- Google Chrome 83 or later -- Mozilla Firefox 78 or later -- Microsoft Edge 83 (on Windows) or later +- Google Chrome 83 or later +- Mozilla Firefox 78 or later +- Microsoft Edge 83 (on Windows) or later ## Login and Logout To start eccenca Corporate Memory: 1. Enter the URL in your web browser. -1. Select your workspace and click **CONTINUE WITH LOGIN**. 2. 
Enter your credentials and click **LOG IN**. After you logged in to your Corporate Memory instance, the main application view appears. @@ -54,54 +53,9 @@ To log out, open the menu :material-dots-vertical: in the Module bar and click ## Workspaces -A workspace is an endpoint of an eccenca DataPlatform identified by a workspace name and the DataPlatform URL. The specific configuration of the application defines which options are available here, i.e. whether you can select one of several workspaces, access only a default workspace or are allowed to create own workspaces. -These options are configured by the system administrator. -For more information on workspace configuration refer to the system manual of eccenca DataManager. - -### Selecting a workspace - -To select an existing workspace open the drop-down list and click the workspace you want to open. -The name and the DataPlatform URL of the selected workspace are shown under **Workspace Configuration**. -Click **CONTINUE WITH LOGIN** and enter your credentials to log in. - -### Adding a new workspace - -!!! info - - Whether this option is available depends on the configuration of Corporate Memory that is defined by the system administrator. - -To add a new workspace, open the drop-down list on the Workspaces window and click **Add New Workspace**. - -![](Workspace.png) - -Under **Workspace Configuration** enter a Workspace Name and the DataPlatform URL. - -Click **SHOW OPTIONS** to display extended configuration options. - -!!! note - - Refer to the system manual of eccenca DataManager to get more information on all the options that can be configured here. - -Click on **CONTINUE WITH LOGIN** to save your entries. - - -### Deleting a workspace - -!!! note - - This option is only available for workspaces created by users themselves. - -To delete a workspace, select the workspace from the drop-down list on the Workspace screen and click **DELETE**. - -The workspace is removed from the drop-down list. - -!!! note - - When you delete a workspace, the graph data is not deleted. - -This section describes the main elements of the graphical user interface of eccenca Corporate Memory. +See the [workspaces](../explore-and-author/workspace-configuration/) section for more details. ## User interface and modules @@ -110,19 +64,19 @@ The user interface of Corporate Memory usually consists of two sections: 1. The module bar providing access to the various modules of Corporate Memory and to a menu with further options 1. The main section for operating the software functions -![](UserInterface.png) +![Graph statistics view](statistic.png) Each module provides a set of functionalities and views for specific use cases. To access a module, click the module name. The active module is highlighted. 
By default, Corporate Memory provides the following modules: -- EXPLORE - for Knowledge Graph browsing and exploration, specifically - - [Knowledge Graphs](../explore-and-author/index.md) - a generic and extensible RDF data browser and editor - - [Vocabularies](../explore-and-author/vocabulary-catalog/index.md) - for vocabulary management - - [Thesauri](../explore-and-author/thesauri-management/index.md) - for managing thesauri and taxonomies based on SKOS - - [Queries](../explore-and-author/query-module/index.md) - a SPARQL query interface -- [BUILD](../build/index.md) - for creating and integrating Knowledge Graphs, with specific links to +- EXPLORE - for Knowledge Graph browsing and exploration, specifically + - [Knowledge Graphs](../explore-and-author/) - a generic and extensible RDF data browser and editor + - [Vocabularies](../explore-and-author/vocabulary-catalog/) - for vocabulary management + - [Thesauri](../explore-and-author/thesauri-management/) - for managing thesauri and taxonomies based on SKOS + - [Queries](../explore-and-author/query-module/) - a SPARQL query interface +- [BUILD](../build/) - for creating and integrating Knowledge Graphs, with specific links to - Projects - the BUILD Projects level - Datasets - the Datasets across all BUILD Projects - Workflows - the Workflows across all BUILD Projects @@ -134,9 +88,9 @@ By default, Corporate Memory provides the following modules: Use the provided search field(s) in each module to search for specific keywords or strings in names and labels of resources. -The **EXPLORE** module provides more search fields (e.g. in the Graph box, Navigation box, etc.) where you can limit your search to specific graphs or resources. +The **Knowledge Graphs** module provides more search fields (e.g. in the Graph box, Navigation box, etc.) where you can limit your search to specific graphs or resources. -![](Explore.png) +![Explore graph sample](demograph.png) ### Settings menu for table views @@ -189,4 +143,3 @@ This section provides an overview of icons and their functionality in Corporate | ---- | ----------- | | :material-folder: | Object mappings. | | :material-file: | Value mappings. | - diff --git a/docs/getting-started/statistic.png b/docs/getting-started/statistic.png new file mode 100644 index 00000000..b329464b Binary files /dev/null and b/docs/getting-started/statistic.png differ diff --git a/docs/getting-started/with-your-sandbox/index.md b/docs/getting-started/with-your-sandbox/index.md index 3072cbfe..12a5231b 100644 --- a/docs/getting-started/with-your-sandbox/index.md +++ b/docs/getting-started/with-your-sandbox/index.md @@ -21,9 +21,11 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn --- - Learn how to get going with Corporate Memory in our Getting Started guide. Understand the user interface, the structure of the application and basic concepts. [:octicons-arrow-right-24: Getting started](../){target=_blank} + Learn how to get started with Corporate Memory in our Getting Started Guide. Understand the user interface, application structure, and basic concepts. - Community support for the sandbox is provided in this [:simple-github: forum](https://github.com/eccenca/documentation.eccenca.com/discussions){target=_blank}, use it seek for help, report issues or suggestions or discuss solution ideas. 
+    [:octicons-arrow-right-24: Getting Started](../){target=_blank}
+
+    Community support for the sandbox is provided in this [:simple-github: forum](https://github.com/eccenca/documentation.eccenca.com/discussions){target=_blank}, use it to seek help, report issues or suggestions, or discuss solution ideas.

    Find and contact us at: [:simple-github:](https://github.com/eccenca){target=_blank} • [:simple-twitter:](https://twitter.com/eccenca){target=_blank} • [:simple-linkedin:](https://de.linkedin.com/company/eccenca-gmbh){target=_blank} • [:octicons-mail-24:](mailto:info@eccenca.com)
@@ -31,15 +33,15 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    The sandbox includes a sample build project named _"Product Data Integration Demo"_ and the graphs produced by this project as well as an integration graph as the entry point: _"Products - Integration"_. Shacl shapes are provided for the Product Vocabulary. Those are used in the EasyNav module for visual exploration as well as in a customized workspace configuration called _"Product Data Integration"_ demonstrating how the user interface can be tailored.
+    The sandbox includes a sample build project named _"Product Data Integration Demo"_ and the graphs generated by that project, as well as an integration graph as an entry point: _"Products - Integration"_. SHACL shapes are provided for the product vocabulary. These are used in the EasyNav module for visual exploration as well as in a custom workspace configuration called _Product Data Integration_ to demonstrate how the user interface can be customized.

-- :superhero:{ .lg .middle } __KGC 2023 Masterclass Material__
+- :fontawesome-solid-graduation-cap:{ .lg .middle } __Masterclass Material__

    ---

-    A lists materials and resources to be used to replicate and follow the **The Knowledge Graph Conference 2023** Masterclass Session: _From zero to KG hero: boost your KG creation productivity with eccenca Corporate Memory_.
+    A list of materials and resources to reproduce and follow the masterclass session: _From Zero to KG Hero: Boosting Your KG Creation Productivity with eccenca Corporate Memory_. Originally presented at **The Knowledge Graph Conference 2023**. [Watch the recording on :simple-youtube:](https://youtu.be/qD-hge6gyIE){target=_blank}.

-    [:octicons-arrow-right-24: KG heros :superhero: this way :material-directions:](./material){target=_blank}
+    [:octicons-arrow-right-24: materials and resources](./material){target=_blank}

- :material-script-text-play-outline:{ .lg .middle } __Tutorials and Examples__
@@ -57,7 +59,7 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    _BUILD_ data product pipelines that turn you existing data points into Enterprise Knowledge Graphs. Use them to establish data product assembly lines in a visual intuitive and business user compatible fashion.
+    _BUILD_ data product pipelines that turn your existing data points into Enterprise Knowledge Graphs. Use them to create data product assembly lines in a visual, intuitive, and business-user friendly way.

    [:octicons-arrow-right-24: Learn more about _Build_](../../build){target=_blank}
@@ -65,7 +67,7 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    With _EXPLORE_, you can interact with and visualize your Knowledge Graph data in both list and detail views. Configure individualized frontend configurations to meet domain-specific needs without duplicating data.
+    With _EXPLORE_, you can interact with and visualize your Knowledge Graph data in both list and drill-down views. Configure custom front-end configurations to meet domain-specific needs without duplicating data.

    [:octicons-arrow-right-24: Learn more about _Explore_](/explore-and-author){target=_blank}
@@ -73,7 +75,7 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    The _CONSUME_ stage provides several standard APIs to retrieve data from your Knowledge Graph. We also offer native integrations with [PowerBI](/consume/consuming-graphs-in-power-bi){target=_blank} and [redash](https://redash.io/){target=_blank}, and [custom APIs](../../consume/provide-data-in-any-format-via-a-custom-api/){target=_blank} can be configured to provide data in any format.
+    The _CONSUME_ tier provides several standard APIs for retrieving data from your Knowledge Graph. We also provide native integrations with [PowerBI](/consume/consuming-graphs-in-power-bi){target=_blank} and [redash](https://redash.io/){target=_blank}. [Custom APIs](../../consume/provide-data-in-any-format-via-a-custom-api/){target=_blank} can be configured to provide data in any format.

    [:octicons-arrow-right-24: Learn more about _Consume_](/consume){target=_blank}
@@ -81,7 +83,7 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    With _AUTOMATE_, you can easily set up and automate processes in your Knowledge Graph. Our `cmemc` command line tool simplifies the management and migration of data and configurations in Corporate Memory. Learn about our vision of a DataOps process based on Corporate Memory. how to schedule workflows and using variable data inputs.
+    With _AUTOMATE_, you can easily set up and automate processes in your Knowledge Graph. Our `cmemc` command line tool simplifies the management and migration of data and configurations in Corporate Memory. Learn about our vision of a DataOps process based on Corporate Memory, how to schedule workflows, and how to use variable data inputs.

    [:octicons-arrow-right-24: Learn more about _Automate_](../../automate){target=_blank}
@@ -89,7 +91,7 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    Our Learning Management System contains courses and certifications for different target groups: e.g. for business, (linked data) consultants or DevOps.
+    Our Learning Management System includes courses and certifications for different audiences: e.g., business, (linked data) consultants, or DevOps.

    [:octicons-arrow-right-24: register at _eccenca LMS_](https://lms.eccenca.com/){target=_blank}
@@ -97,6 +99,6 @@ eccenca's Corporate Memory is a platform for creating and managing Enterprise Kn

    ---

-    eccenca's _Corporate Memory_ solution transforms background knowledge about products, processes, partners, people, policies, and data into understandable and executable containers. By automating decisions across hundreds of individual processes, Corporate Memory helps scale the use and reuse of knowledge, increasing knowledge worker productivity and efficacy. This builds a sustainable approach towards scaling decision automation and governance of AI.
+    eccenca's _Corporate Memory_ solution transforms background knowledge about products, processes, partners, people, policies, and data into understandable and executable containers.
By automating decisions across hundreds of individual processes, Corporate Memory helps scale the use and reuse of knowledge, increasing the productivity and effectiveness of knowledge workers. This provides a sustainable approach to scaling decision automation and AI governance. diff --git a/docs/getting-started/with-your-sandbox/material.md b/docs/getting-started/with-your-sandbox/material.md index e74d4f93..8f63fbe9 100644 --- a/docs/getting-started/with-your-sandbox/material.md +++ b/docs/getting-started/with-your-sandbox/material.md @@ -5,9 +5,9 @@ hide: - navigation --- -# KGC 2023 Masterclass - Material and Namespace Suggestions +# Masterclass - Material and Namespace Suggestions -This page lists materials and resources to be used to replicate and follow the **The Knowledge Graph Conference 2023** Masterclass Session _From zero to KG hero: boost your KG creation productivity with eccenca Corporate Memory_. +A list of materials and resources to reproduce and follow the masterclass (MC). !!! info "About Session" @@ -25,36 +25,36 @@ This page lists materials and resources to be used to replicate and follow the * | Dataset (CSV) | Service Products | [services.csv](./material/resources/services.csv){target=_blank} | | Dataset (JSON) | Supplier | [supplier.json](./material/resources/supplier.json){target=_blank} | | Dataset (XML) | Organizational Information | [orgmap.xml](./material/resources/orgmap.xml){target=_blank} | - | Vocabulary | Products Vocabulary* | [pv.ttl](./material/vocabs/pv.ttl){target=_blank} | + | Vocabulary* | Products Vocabulary | [pv.ttl](./material/vocabs/pv.ttl){target=_blank} | - *) FYI, vocabulary already installed + *) vocabulary already installed, attached for information purposes only. - ## Name(space) suggestions --- - | Type | Name | IRI | - | ------------- | ----------------------- | ------------------------------------ | - | Dataset (KG) | KGC Prod - Integration | `http://kgc.eccenca.com/prod-int/` | - | Dataset (KG) | KGC Prod - Hardware | `http://kgc.eccenca.com/prod-hw/` | - | Dataset (KG) | KGC Prod - Services | `http://kgc.eccenca.com/prod-srv/` | - | Dataset (KG) | KGC Prod - Supplier | `http://kgc.eccenca.com/prod-suppl/` | - | Dataset (KG) | KGC Prod - Organization | `http://kgc.eccenca.com/prod-org/` | - | Dataset (KG) | KGC Prod - Links | `http://kgc.eccenca.com/prod-links/` | - | Build Project | KGC Product Build Demo | | + | Type | Name | IRI | + | ------------- | ---------------------- | ----------------------------------- | + | Dataset (KG) | MC Prod - Integration | `http://mc.eccenca.com/prod-int/` | + | Dataset (KG) | MC Prod - Hardware | `http://mc.eccenca.com/prod-hw/` | + | Dataset (KG) | MC Prod - Services | `http://mc.eccenca.com/prod-srv/` | + | Dataset (KG) | MC Prod - Supplier | `http://mc.eccenca.com/prod-suppl/` | + | Dataset (KG) | MC Prod - Organization | `http://mc.eccenca.com/prod-org/` | + | Dataset (KG) | MC Prod - Links | `http://mc.eccenca.com/prod-links/` | + | Build Project | MC Product Build Demo | | - ## Resource IRI suggestions --- - | Type | IRI | - | ---------------- | --------------------------------------------------------------- | - | Department | `http://kgc.eccenca.com/prod-data/dept-{id}` | - | Employee | `http://kgc.eccenca.com/prod-data/empl-{email}` | - | Hardware | `http://kgc.eccenca.com/prod-data/hw-{id}` | - | Price | `http://kgc.eccenca.com/prod-data/price-{parent-id}-{currency}` | - | Product Category | `http://kgc.eccenca.com/prod-data/prod-cat-{name|uuid}` | - | Service | 
`http://kgc.eccenca.com/prod-data/srv-{id}` | - | Supplier | `http://kgc.eccenca.com/prod-data/suppl-{id}` | + | Type | IRI | + | ---------------- | -------------------------------------------------------------- | + | Department | `http://mc.eccenca.com/prod-data/dept-{id}` | + | Employee | `http://mc.eccenca.com/prod-data/empl-{email}` | + | Hardware | `http://mc.eccenca.com/prod-data/hw-{id}` | + | Price | `http://mc.eccenca.com/prod-data/price-{parent-id}-{currency}` | + | Product Category | `http://mc.eccenca.com/prod-data/prod-cat-{name|uuid}` | + | Service | `http://mc.eccenca.com/prod-data/srv-{id}` | + | Supplier | `http://mc.eccenca.com/prod-data/suppl-{id}` | diff --git a/docs/release-notes/.pages b/docs/release-notes/.pages index 1a9c07cf..be5aa4f0 100644 --- a/docs/release-notes/.pages +++ b/docs/release-notes/.pages @@ -1,4 +1,5 @@ nav: + - Corporate Memory 23.2: corporate-memory-23-2 - Corporate Memory 23.1.3: corporate-memory-23-1 - Corporate Memory 22.2.3: corporate-memory-22-2 - Corporate Memory 22.1: corporate-memory-22-1 diff --git a/docs/release-notes/corporate-memory-23-1/index.md b/docs/release-notes/corporate-memory-23-1/index.md index 8e2880fe..8eab0076 100644 --- a/docs/release-notes/corporate-memory-23-1/index.md +++ b/docs/release-notes/corporate-memory-23-1/index.md @@ -1,5 +1,4 @@ --- -status: new tags: - ReleaseNote --- diff --git a/docs/release-notes/corporate-memory-23-2/index.md b/docs/release-notes/corporate-memory-23-2/index.md new file mode 100644 index 00000000..b0aea701 --- /dev/null +++ b/docs/release-notes/corporate-memory-23-2/index.md @@ -0,0 +1,81 @@ +--- +status: new +tags: + - ReleaseNote +--- +# Corporate Memory 23.2 + +Corporate Memory 23.2 is the second major release in 2023. + +... images ... + +The highlights of this release are: + +- Build: + - Support for user managed **project variables** in dataset and task parameters. +- Explore: + - ... +- Automate: + - New **`admin client` command group** for managing client accounts in the Keycloak CMEM realm. + +This release delivers the following component versions: + +- eccenca DataPlatform v23.2 +- eccenca DataIntegration v23.2 +- eccenca DataIntegration Python Plugins v4.1.0 +- eccenca DataManager v23.2 +- eccenca Corporate Memory Control (cmemc) v23.2 + +More detailed release notes for these versions are listed below. + +## eccenca DataIntegration v23.2 + +... + +## eccenca DataIntegration Python Plugins v4.1.0 + +Corporate Memory v23.2 includes the DataIntegration Python Plugins support in version 4.1.0. + +v4.1.0 of eccenca DataIntegration Python Plugins adds the following new features: + +- use `post_resource` api in `write_to_dataset` function to update dataset file resource +- use cmempy 23.2 +- upgrade dependencies +- enforce usage of Python 3.11 + +## eccenca DataManager v23.2 + +... + +## eccenca DataPlatform v23.2 + +... 
+
+## eccenca Corporate Memory Control (cmemc) v23.2
+
+v23.2 of eccenca Corporate Memory Control adds the following new features:
+
+- `admin user password` command
+    - option `--request-change` added, to send an email to the user to reset the password
+- `dataset create` command
+    - add `readOnly` and `uriProperty` keys for the `-p/--parameter` option
+- `admin client` command group
+    - `list` command - List client accounts
+    - `open` command - Open clients in the browser
+    - `secret` command - Get or generate a new secret for a client account
+- `project create` command
+    - new option `--from-transformation` to create a mapping suggestion project
+
+### Changed
+
+- `dataset upload` command
+    - use new endpoint which is aware of read-only datasets
+- `workflow io` command
+    - use of extended io endpoint
+    - allows for uploading bigger files
+    - allows for more input and output mimetypes
+    - change default output to JSON
+
+## Migration Notes
+
+...
diff --git a/overrides/.icons/eccenca/README.md b/overrides/.icons/eccenca/README.md
index e3148391..d65dfd68 100644
--- a/overrides/.icons/eccenca/README.md
+++ b/overrides/.icons/eccenca/README.md
@@ -1,4 +1,3 @@
 # eccenca icon set

 Based on [@eccenca/gui-elements](https://github.com/eccenca/gui-elements/), this directory holds all icons used in eccenca Corporate Memory.
-
diff --git a/overrides/.icons/eccenca/application-config.svg b/overrides/.icons/eccenca/application-config.svg
new file mode 100644
index 00000000..1abd1c73
--- /dev/null
+++ b/overrides/.icons/eccenca/application-config.svg
@@ -0,0 +1 @@
+
diff --git a/poetry.lock b/poetry.lock
index e34a1f89..9b881682 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,100 +1,99 @@
-# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]] name = "aiohttp" -version = "3.8.4" +version = "3.8.5" description = "Async http client/server framework (asyncio)" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, - {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, - {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, - {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, - {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, - {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, - {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, - {file = 
"aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, - {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, - {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, - {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, - {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, - {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, + {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, + {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, + {file = 
"aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, + {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, + {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, + {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, + {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, + {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, + {file = 
"aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, + {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, + {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, + {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, ] [package.dependencies] @@ -113,7 +112,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -128,7 +126,6 @@ frozenlist = ">=1.1.0" name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -140,7 +137,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -159,7 +155,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -174,7 +169,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -193,7 +187,6 @@ lxml = ["lxml"] name = "bracex" version = "2.3.post1" description = "Bash style brace expander." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -203,14 +196,13 @@ files = [ [[package]] name = "cairocffi" -version = "1.6.0" +version = "1.6.1" description = "cffi-based cairo bindings for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cairocffi-1.6.0-py3-none-any.whl", hash = "sha256:fec979f3e904c1a38350b06dcc91d3c15f9a4954e618a068761622e3a0d5058c"}, - {file = "cairocffi-1.6.0.tar.gz", hash = "sha256:e7f21ea1546a5d490ceba77b6216c1393d7e0a7856e292864a88a7506092bf4d"}, + {file = "cairocffi-1.6.1-py3-none-any.whl", hash = "sha256:aa78ee52b9069d7475eeac457389b6275aa92111895d78fbaa2202a52dac112e"}, + {file = "cairocffi-1.6.1.tar.gz", hash = "sha256:78e6bbe47357640c453d0be929fa49cd05cce2e1286f3d2a1ca9cbda7efdb8b7"}, ] [package.dependencies] @@ -225,7 +217,6 @@ xcb = ["xcffib (>=1.4.0)"] name = "cairosvg" version = "2.7.0" description = "A Simple SVG Converter based on Cairo" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -246,21 +237,19 @@ test = ["flake8", "isort", "pytest"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -335,99 +324,97 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + 
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" +version = 
"8.1.6" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, ] [package.dependencies] @@ -437,7 +424,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -445,11 +431,21 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "cssselect" +version = "1.2.0" +description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, + {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, +] + [[package]] name = "cssselect2" version = "0.7.0" description = "CSS selectors for Python ElementTree" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -469,7 +465,6 @@ test = ["flake8", "isort", "pytest"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -479,93 +474,78 @@ files = [ [[package]] name = "frozenlist" -version = "1.3.3" +version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = 
"frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = 
"frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = 
"frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = 
"frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, ] [[package]] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
-category = "main" optional = false python-versions = "*" files = [ @@ -583,7 +563,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -596,14 +575,13 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.31" +version = "3.1.32" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, + {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, + {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, ] [package.dependencies] @@ -613,7 +591,6 @@ gitdb = ">=4.0.1,<5" name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -623,14 +600,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.7.0" +version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ] [package.dependencies] @@ -639,13 +615,12 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -663,7 +638,6 @@ i18n = ["Babel (>=2.7)"] name = "linkcheckmd" version = "1.4.0" description = "Check links for Markdown-based site" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -678,11 +652,117 @@ lint = ["flake8", "flake8-blind-except", "flake8-bugbear", "flake8-builtins", "m requests = ["requests"] tests = ["pytest"] +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + [[package]] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -696,11 +776,26 @@ importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] testing = ["coverage", "pyyaml"] +[[package]] +name = "markdown2" +version = "2.4.10" +description = "A fast and complete Python implementation of Markdown" +optional = false +python-versions = ">=3.5, <4" +files = [ + {file = "markdown2-2.4.10-py2.py3-none-any.whl", hash = "sha256:e6105800483783831f5dc54f827aa5b44eb137ecef5a70293d8ecfbb4109ecc6"}, + {file = "markdown2-2.4.10.tar.gz", hash = "sha256:cdba126d90dc3aef6f4070ac342f974d63f415678959329cc7909f96cc235d72"}, +] + +[package.extras] +all = ["pygments (>=2.7.3)", "wavedrom"] +code-syntax-highlighting = ["pygments (>=2.7.3)"] +wavedrom = ["wavedrom"] + [[package]] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -760,7 +855,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -772,7 +866,6 @@ files = [ name = "mike" version = "1.1.2" description = "Manage multiple versions of your MkDocs-powered documentation" -category = "main" optional = false python-versions = "*" files = [ @@ -792,14 +885,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.4.3" +version = "1.4.2" description = "Project documentation with Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.4.3-py3-none-any.whl", hash = "sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd"}, - {file = "mkdocs-1.4.3.tar.gz", hash = "sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57"}, + {file = "mkdocs-1.4.2-py3-none-any.whl", hash = "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"}, + {file = "mkdocs-1.4.2.tar.gz", hash = "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5"}, ] [package.dependencies] @@ -823,7 +915,6 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autolinks-plugin" version = "0.7.0" description = "An MkDocs plugin" -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -838,7 +929,6 @@ mkdocs = ">=1.2.3" name = "mkdocs-awesome-pages-plugin" version = "2.9.1" description = "An MkDocs plugin that simplifies configuring page titles and their order" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -855,7 +945,6 @@ wcmatch = ">=7" name = "mkdocs-git-revision-date-localized-plugin" version = "1.2.0" description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -873,7 +962,6 @@ pytz = "*" name = "mkdocs-glightbox" version = "0.3.4" description = "MkDocs plugin supports image lightbox with GLightbox." 
-category = "main" optional = false python-versions = "*" files = [ @@ -885,7 +973,6 @@ files = [ name = "mkdocs-macros-plugin" version = "0.7.0" description = "Unleash the power of MkDocs with macros and variables" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -907,7 +994,6 @@ test = ["mkdocs-include-markdown-plugin", "mkdocs-macros-test", "mkdocs-material name = "mkdocs-material" version = "9.1.4" description = "Documentation that simply works" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -930,7 +1016,6 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.1.1" description = "Extension pack for Python Markdown and MkDocs Material." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -940,13 +1025,12 @@ files = [ [[package]] name = "mkdocs-redirects" -version = "1.2.0" +version = "1.2.1" description = "A MkDocs plugin for dynamic page redirects to prevent broken links." -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "mkdocs-redirects-1.2.0.tar.gz", hash = "sha256:ddd38267d49fdfa19fb2f25b4aed2fb53f0496c818bf3018009c8eaf6676a327"}, + {file = "mkdocs-redirects-1.2.1.tar.gz", hash = "sha256:9420066d70e2a6bb357adf86e67023dcdca1857f97f07c7fe450f8f1fb42f861"}, ] [package.dependencies] @@ -959,14 +1043,13 @@ test = ["autoflake", "black", "isort", "pytest"] [[package]] name = "mkdocs-swagger-ui-tag" -version = "0.6.2" +version = "0.6.3" description = "A MkDocs plugin supports for add Swagger UI in page." -category = "main" optional = false python-versions = "*" files = [ - {file = "mkdocs-swagger-ui-tag-0.6.2.tar.gz", hash = "sha256:1ff7aaaf09599ed118a0a90964a89157e2b2ce4ac32918ab3030d272214d206a"}, - {file = "mkdocs_swagger_ui_tag-0.6.2-py3-none-any.whl", hash = "sha256:42fbb276263bc078ef2609973b745cafbf4291ac4e2b1010923ae0514bfb0363"}, + {file = "mkdocs-swagger-ui-tag-0.6.3.tar.gz", hash = "sha256:0a2ef6ebcdd9b14f6c83ddbe856998b8deece5434868183ce67d5c040f250449"}, + {file = "mkdocs_swagger_ui_tag-0.6.3-py3-none-any.whl", hash = "sha256:4d5aeb2e95325a375bcd745b2d05b5c985f12dcfd79b2230858e600688a628ef"}, ] [package.dependencies] @@ -976,7 +1059,6 @@ beautifulsoup4 = ">=4.11.1" name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1060,7 +1142,6 @@ files = [ name = "natsort" version = "8.4.0" description = "Simple yet flexible natural sorting in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1076,7 +1157,6 @@ icu = ["PyICU (>=1.0.0)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1084,11 +1164,20 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1164,11 +1253,25 @@ files = [ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "pipdeptree" +version = "2.12.0" +description = "Command line utility to show dependency tree of packages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pipdeptree-2.12.0-py3-none-any.whl", hash = "sha256:9b1f82b2a7b9755e0f8d97bdc461161dcfaf20844fd9b34df55d1cffa933925c"}, + {file = "pipdeptree-2.12.0.tar.gz", hash = "sha256:d58b34eca0092d56cba92961cee6edebef2beba56c88dcc11e411c753c155b86"}, +] + +[package.extras] +graphviz = ["graphviz (>=0.20.1)"] +test = ["covdefaults (>=2.3)", "diff-cover (>=7.7)", "pip (>=23.2)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "virtualenv (>=20.24,<21)"] + [[package]] name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1180,7 +1283,6 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1193,25 +1295,41 @@ plugins = ["importlib-metadata"] [[package]] name = "pymdown-extensions" -version = "10.0.1" +version = "10.1" description = "Extension pack for Python Markdown." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pymdown_extensions-10.0.1-py3-none-any.whl", hash = "sha256:ae66d84013c5d027ce055693e09a4628b67e9dec5bce05727e45b0918e36f274"}, - {file = "pymdown_extensions-10.0.1.tar.gz", hash = "sha256:b44e1093a43b8a975eae17b03c3a77aad4681b3b56fce60ce746dbef1944c8cb"}, + {file = "pymdown_extensions-10.1-py3-none-any.whl", hash = "sha256:ef25dbbae530e8f67575d222b75ff0649b1e841e22c2ae9a20bad9472c2207dc"}, + {file = "pymdown_extensions-10.1.tar.gz", hash = "sha256:508009b211373058debb8247e168de4cbcb91b1bff7b5e961b2c3e864e00b195"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" +[[package]] +name = "pyquery" +version = "2.0.0" +description = "A jquery-like library for python" +optional = false +python-versions = "*" +files = [ + {file = "pyquery-2.0.0-py3-none-any.whl", hash = "sha256:8dfc9b4b7c5f877d619bbae74b1898d5743f6ca248cfd5d72b504dd614da312f"}, + {file = "pyquery-2.0.0.tar.gz", hash = "sha256:963e8d4e90262ff6d8dec072ea97285dc374a2f69cad7776f4082abcf6a1d8ae"}, +] + +[package.dependencies] +cssselect = ">=1.2.0" +lxml = ">=2.1" + +[package.extras] +test = ["pytest", "pytest-cov", "requests", "webob", "webtest"] + [[package]] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1226,7 +1344,6 @@ six = ">=1.5" name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -1236,59 +1353,57 @@ files = [ [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - 
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1299,11 +1414,25 @@ files = [ [package.dependencies] pyyaml = "*" +[[package]] +name = "readtime" +version = "3.0.0" +description = "Calculates the time some text takes the average human to read, based on Medium's read time forumula" +optional = false +python-versions = "*" +files = [ + {file = "readtime-3.0.0.tar.gz", hash = "sha256:76c5a0d773ad49858c53b42ba3a942f62fbe20cc8c6f07875797ac7dc30963a9"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.0.1" +markdown2 = ">=2.4.3" +pyquery = ">=1.2" + [[package]] name = "regex" version = "2023.6.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1401,7 +1530,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1423,7 +1551,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1435,7 +1562,6 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1447,7 +1573,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1459,7 +1584,6 @@ files = [ name = "termcolor" version = "2.3.0" description = "ANSI color formatting for output in terminal" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1474,7 +1598,6 @@ tests = ["pytest", "pytest-cov"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1491,14 +1614,13 @@ test = ["flake8", "isort", "pytest"] [[package]] name = "urllib3" -version = "2.0.3" +version = "2.0.4" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, - {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, ] [package.extras] @@ -1511,7 +1633,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "verspec" version = "0.1.0" description = "Flexible version handling" -category = "main" optional = false python-versions = "*" files = [ @@ -1526,7 +1647,6 @@ test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1566,7 +1686,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcmatch" version = "8.4.1" description = "Wildcard/glob file name matcher." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1581,7 +1700,6 @@ bracex = ">=2.1.1" name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -1593,7 +1711,6 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1679,21 +1796,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, + {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "d5e662b470267f1bab297f8c01b57bc853f0d77c8cf137550b49279b996d387c" +content-hash = "f20fdcceb8b06db839b54092c3e1130a89b57724893d3a10f3d96928cc406d6b" diff --git a/pyproject.toml b/pyproject.toml index 24896442..116ec098 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,9 @@ dec-tool = 'tools:cli' python = "^3.8" Pillow = "^9.4.0" CairoSVG = "^2.7.0" -mkdocs = "^1.4.2" +# Using exact requirement of mkdocs to resolve attribute error +# https://github.com/squidfunk/mkdocs-material/issues/5772 +mkdocs = "==1.4.2" mike = "^1.1.2" mkdocs-git-revision-date-localized-plugin = "^1.2.0" mkdocs-awesome-pages-plugin = "^2.8.0" @@ -28,7 +30,6 @@ mkdocs-redirects = "^1.2.0" mkdocs-swagger-ui-tag = "^0.6.1" mkdocs-material = "9.1.4" - [tool.poetry.group.dev.dependencies] linkcheckmd = "^1.4.0"