diff --git a/docs/getting-started/command-line-interface.md b/docs/getting-started/command-line-interface.md index 06168fa..89fbc80 100644 --- a/docs/getting-started/command-line-interface.md +++ b/docs/getting-started/command-line-interface.md @@ -34,9 +34,5 @@ poetry install ## Usage -!!! warning "Coming Soon" - [//]: # (TODO) - For now, please use the built-in help to learn about the command line interface. - ``` - $ geneweaver --help - ``` \ No newline at end of file +!!! tip + Check out the command line interface [reference documentation.](/reference/command-line) \ No newline at end of file diff --git a/docs/getting-started/index.md b/docs/getting-started/index.md index e6622a9..20e75a7 100644 --- a/docs/getting-started/index.md +++ b/docs/getting-started/index.md @@ -8,7 +8,7 @@ For more in-depth information about the concepts presented in this section, plea the [Concepts](../concepts/index.md) section and the [Reference](../reference/index.md) section. -## User Specific Entrypoints +## Quick Start If you're already familiar with Geneweaver, and just need to find content specific to your use-case, you can use the following links to jump to the appropriate documentation. If you find that you need more information, head back to this website and keep reading! diff --git a/docs/index.md b/docs/index.md index b07c630..90f20c0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -12,82 +12,88 @@ **Web Application**: [https://www.geneweaver.org](https://www.geneweaver.org) -**Web Application Documentation**: -[https://geneweaver.org/help/](https://geneweaver.org/help/) - -**Project Documentation (this site)**: -[https://bergsalex.github.io/geneweaver-docs/](https://bergsalex.github.io/geneweaver-docs/) +**API Documentation**: +[https://geneweaver.jax.org/api/docs](https://geneweaver.jax.org/api/docs) --- +
+ +- :material-clock-fast:{ .lg .middle } __Get Going Quickly__ + + --- + + Architected to make it easy to get started, whichever way makes sense for you. + + [:octicons-arrow-right-24: Jump Into Using The Website](/getting-started/web-application/) + + [:octicons-arrow-right-24: Download the Geneweaver CLI](/reference/command-line/) + + [:octicons-arrow-right-24: Build With Our Python Packages](/reference/available-packages/) + +- :simple-jupyter:{ .lg .middle } __Example Workflows__ + + --- + + Follow real examples of how GeneWeaver's tools will be used to do analysis. + + [:octicons-arrow-right-24: NCI-60 Workflow](/tutorial/nci_60_example_01/) -## Components of GeneWeaver -GeneWeaver is two things: + [:octicons-arrow-right-24: Authenticating With The Client Library](/tutorial/geneweaver_client_login/) -1. A web application 🖥️, and -2. An ecosystem of software packages ⚙️. +- :material-api:{ .lg .middle } __API Standards__ -**Genomics Researchers** will find the web application useful as a fully integrated -platform for the analysis of heterogeneous functional genomics data. + --- -**Educators**, **students** and **software developers** will find the ecosystem of -software packages useful for the development of new tools and applications. + Developed to be consistent with our open API Standards. + + [:octicons-arrow-right-24: Standards Documentation](/reference/api-standards/) + +- :material-scale-balance:{ .lg .middle } __Open Source, Apache License 2.0__ + + --- + + GeneWeaver is open source and available on [GitHub](https://github.com/orgs/TheJacksonLaboratory/repositories?q=geneweaver). + + [:octicons-arrow-right-24: License](https://github.com/TheJacksonLaboratory/geneweaver-docs/blob/main/LICENSE) + +
+ + +## What Is GeneWeaver? +**[GeneWeaver](http://www.geneweaver.org)** is a web application for the integrated +cross-species analysis of functional genomics data from heterogeneous sources. The +application consists of a large database of gene sets curated from multiple public data +resources and curated submissions, along with a suite of analysis tools designed to +allow flexible, customized workflows through web-based interactive analysis or scripted +API driven analysis. Gene sets come from multiple widely studied species and include +ontology annotations, brain gene expression atlases, systems genetic study results, gene +regulatory information, pathway databases, drug interaction databases and many other +sources. Users can retrieve, store, analyze and share gene sets through a graded access +system. Gene sets and analysis results can be stored, shared and compared privately, +among user defined groups of investigators, and across all users. Analysis tools are +based on combinatorics and statistical methods for comparing, contrasting and +classifying gene sets based on their members. + +## What Is a Gene Set? +Each **"gene set"** contains a list of genomic features, free text descriptive content, +ontology annotations and gene association scores. Genomic features are mapped within and +across multiple species. Currently, 10 species are supported, *Mus musculus, Homo +sapiens, Rattus norvegicus, Danio rerio, Drosophilia melanogaster, Macaca mulatta, +Caenorhabditis elegans, Saccharomyces cervisiae, Gallus gallus, Canis familiaris.* +Additional species are added in response to community request. + +## What Can You Do With GeneWeaver? +**[GeneWeaver](http://www.geneweaver.org)** allows users to integrate these diverse +functional genomics data across species, tissue and experimental platform to address +questions about the relations among genes and biological functions. 
Applications include +the prioritization of gene-disease associations from multiple evidence sources, the +classification and comparison of biological functions based on biological substrates, +and the identification of similar genes based on function. Cross species analysis +enables the discovery of conserved mechanisms of biological functions, and the discovery +of divergent functions served by conserved biological mechanisms. ## What is this Documentation? -This documentation is intended to be the foundational documentation for the entire -Geneweaver software ecosystem. It provides an introduction to the system and ways of +This documentation provides an introduction to the system and ways of working with it, explanations of core concepts, tutorials of general utility, as well as detailed reference documentation for the various components and processes that make up the system. - -## GeneWeaver Use Cases -Your entrypoint to GeneWeaver will depend on who you are, and what you would like to -do with GeneWeaver. The following sections will help you find the right place to start -depending on our best guess as to your goals, based on who you are. Ultimately, you -will need to make your own decisions about what approach works best for you. - -=== "Researchers 🔬" - Geneweaver is a valuable resource for any genomics researchers looking to integrate and - analyze complex datasets in order to gain new insights into biological processes and - pathways. - - Geneweaver allows researchers to easily compare and combine datasets from various - sources and analyze the results in an interactive and user-friendly interface. The - system includes a large collection of publicly available datasets, including gene - expression data, protein-protein interactions, and gene ontology annotations, among - others. 
Geneweaver is a valuable resource for researchers in the field of genomics and - can be used to identify novel gene interactions and pathways, as well as to explore the - functional relationships between genes and biological processes. -=== "Educators & Students 📚" - For educators, GeneWeaver provides a comprehensive platform for integrating real world - data and open sources software into the classroom. GeneWeaver provides data, tools, - systems, and workflows that can be used to teach students about genomics, - bioinformatics, computer science and data science. - - GeneWeaver allows students and educators to integrate with a full cycle of development - and learning, starting with application to real-world concepts and data, and ending with - the development and contribution of new tools and applications. - - The following disciplines might find GeneWeaver useful: - - - **Computer Science** - - Algorithms - - Software Engineering - - Databases - - Data Science - - **Biology and Genomics** - - **Bioinformatics** - - **Mathematics** -=== "Software Developers 💻" - For software developers, GeneWeaver provides a suite of open source software - packages that can be used for the development of new tools and applications. - GeneWeaver is built on a collection of open source software - packages. A software developer could use all or some of that ecosystem to build new - applications and tools. - - Software developers can leverage the ecosystem of open source Python packages used - in Geneweaver.org through its package-based architecture to build new bioinformatics - tools and applications, contribute to existing packages, integrate with other tools, - and address specific needs or gaps in the ecosystem. 
- - - diff --git a/docs/reference/api-standards/api_monitoring.md b/docs/reference/api-standards/api_monitoring.md new file mode 100644 index 0000000..8e7712c --- /dev/null +++ b/docs/reference/api-standards/api_monitoring.md @@ -0,0 +1,35 @@ +# API Monitoring +> These guiding principles will help stability, uptime, and awareness for your applications. + +Actuator endpoints let you monitor and interact with your application. For language +specific tutorials (please see here)[/Development/Tutorials/Monitoring/endpoints]. + +## Monitoring +Our endpoints for monitoring and info will exist at + +``` +/api/monitor// +``` + +## Health Endpoint + +**Required** + + +This endpoint provides health information, mainly the status of the application. It will be the +primary target for monitoring services. + +Location: +``` +http://127.0.0.1/api/monitor/server/health +``` +Example Response: +```` +{"name":"Fancy Application","status":"UP","details":"Everything seems okay."} +```` + +| Status Text | Http Status Code | +| ----------- | ------------------------------------ | +| `UP` | 200 | +| `UNKNOWN` | 200 | +| `DOWN` | 503 | diff --git a/docs/reference/api-standards/api_versioning.md b/docs/reference/api-standards/api_versioning.md new file mode 100644 index 0000000..d84292c --- /dev/null +++ b/docs/reference/api-standards/api_versioning.md @@ -0,0 +1,152 @@ +# API Versioning and Backwards Compatibility + +!!! success "Overview" + In order to prioritize simplicity, reliability, and maintainability we seek to + **minimize the need for API versioning** _as much as possible_. + +API versioning is a strategy to ensure that changes to the structure, +features, or behavior of an API (Application Programming Interface) do not break +applications that depend on it. APIs allow different software applications to +communicate and interact with each other, so changes to an API can have broad impacts. + +!!! 
tip + If your service has a single client, **which you control**, then you can avoid + versioning altogether. You also **do not** need to version APIs which have not yet + been released. + +!!! warning + This document talks about versioning API deployments, a process that is separate + from application release versioning, which is described in the + [Software Versions Document](/Operations/Best_Practices/software_versions/). + + In the context of application release versioning, this document describes the + process of incrementing the **major** version number. For example, going from + `v1.0.0` to `v2.0.0`. + + +## Guiding Principles + +1. **Minimal versioning**: We avoid versioning unless absolutely necessary. This + approach reduces complexity, both for the team developing the API and for those who + consume it. We strive, whenever possible, to make changes that are backwards + compatible. + +2. **Independent changes for reproducible science**: To support reproducible science, + it's essential that we can modify APIs without impacting the analysis. This principle + allows us to evolve our systems independently, providing robustness and flexibility. + +When versioning becomes unavoidable due to changes that can break existing clients, we +should follow the strategies provided in +this [guide](https://www.xmatters.com/blog/blog-four-rest-api-versioning-strategies/). + +!!! danger + Versioning is not a substitute for good API design. We should always strive to + design APIs that are flexible and extensible, and that can evolve without breaking + existing integrations. **Incrementing a major version should be a last resort.** + + +## Backwards Compatibility + +Backwards compatibility is as important as versioning. By maintaining backwards +compatibility, we allow API consumers to upgrade at their own pace without breaking +their existing integrations. Strategies to ensure backwards compatibility include: + +1. 
**Adding new fields**: When adding new data, do so by adding new fields that older + clients will simply ignore. The simplest way to do this is to add new fields that + have a default value. + +2. **Avoid removing fields**: Removing data fields may break existing integrations. It's + safer to deprecate old fields and leave them in place, while encouraging clients to + use new fields. + +3. **Keeping old endpoints**: If creating a new version of an endpoint, consider keeping + the old one for a time to allow clients to transition gradually. + +4. **Communicating changes**: Whenever a change is made that affects the API, it's + important to inform consumers in advance to give them time to adapt. + + +## Service Information Endpoint + +A service information endpoint is a valuable tool in API design, especially in terms of +API versioning and backwards compatibility. The service information endpoint provides a +clear way for client applications to understand the current state of the service, its +capabilities, and its versioning details. This enables clients to interact more +intelligently and efficiently with the service, adjusting their behavior based on the +information returned by this endpoint. + +Here's how a service information endpoint contributes to API versioning and backward +compatibility: + +1. **Communicating Version Information**: + The version of the API is often part of the service information. This makes it clear + to any consumers of the API what version they are interfacing with. Consumers can + then decide how to handle any changes based on the version information. This is where + an application would provide its deployed + [Software Version](/Operations/Best_Practices/software_versions/) number. + +2. **Facilitating Backward Compatibility**: + By communicating what features, workflows, and filesystem protocols are supported by + the service, a client application can adjust its behavior to only use features + supported by the current API version. 
This allows older clients to still function + correctly, even if new features or changes have been introduced in the latest version + of the API. + +3. **Providing Useful Metadata**: + Additional information such as the name, description, environment, and contact + details helps clients understand more about the service. For instance, it can help + debug issues (e.g., if the environment is "test", the behavior might be different + from a production environment). + +4. **Allowing Intelligent Client Behavior**: + In a more advanced use case, a client application might use the service information + to decide between different APIs or services. + +## When We Need to Version + +Despite our minimal versioning philosophy, there may be situations that require it. +These could include major architectural changes, the addition of new features that +aren't compatible with the current API design, or the nature of the system that's being +changed. + +In these cases, we should follow the versioning strategies outlined below. + +### Versioning Strategies + +When we need to introduce versioning, you might consider adopting one of the following +strategies: + +1. **URI Versioning**: In this strategy, the version information is included in the URI + itself. This is very simple and straightforward, but it can break URI consistency and + may cause confusion. + + 1. **URI Path**: This option involves including the version number as part of + the URI path. This allows clients to cache resources easily, but requires + branching the entire API when incrementing the version. + + 2. **Query Parameters**: This option involves including the version number as a + query parameter. This is straightforward to implement, but can become + difficult to manage across multiple endpoints. It also requires maintaining + all versions of the endpoints in a single application build. + +2. 
**Request Header Versioning**: This approach keeps the URI clean and includes the + version information in the request header. This preserves URI consistency, but it + requires consumers to add version information to their requests. + +3. **Media Type Versioning (Accept Header)**: This strategy requires the client to + specify the version in the Accept header. It offers clean URIs, but it may increase + complexity for clients. + +4. **Hypermedia As The Engine Of Application State (HATEOAS)**: This is the most complex + strategy, but it provides a lot of flexibility. It allows API consumers to navigate + APIs through hypermedia links. + +Remember, the choice of versioning strategy should be informed by our guiding +principles, the needs of our API consumers, and the nature of the changes that require +versioning. You should pick the strategy that best fits the situation of your +application and it's needs, and prioritize minimizing API versioning as much as +possible. + +!!! tip + For more information on possible versioning strategies, see this + [guide](https://www.xmatters.com/blog/blog-four-rest-api-versioning-strategies/). \ No newline at end of file diff --git a/docs/reference/api-standards/dns_naming.md b/docs/reference/api-standards/dns_naming.md new file mode 100644 index 0000000..d893e6d --- /dev/null +++ b/docs/reference/api-standards/dns_naming.md @@ -0,0 +1,130 @@ +# DNS Naming + +DNS (Domain Name System) is a system that translates human-friendly domain names, like +`www.jax.org`, into the numerical IP addresses needed for devices to load Internet +resources. This conversion process is often referred to as DNS name resolution, and it's +carried out by DNS servers. DNS serves as a sort of phonebook for the Internet, allowing +us to use memorable domain names while computers and other devices can handle the +underlying IP addresses they correspond to. 
+ +Consistent DNS naming is important to maintaining API standards, both in terms of +operations and development. Consistent naming conventions enhance ease of use and +understanding of APIs, as intuitive and uniform naming conventions allow developers to +guess the correct endpoint, thereby reducing reliance on extensive documentation lookup. +Furthermore, DNS naming conventions enhances efficiency in both development and +maintenance as the risk of misconfiguration and mistakes drops when there +are standard conventions to follow in handling APIs. Another aspect is interoperability; +standard naming conventions ease integration with other systems and services as they +allow for a common language to be used. Finally, consistency in DNS naming aids in +service discovery, particularly in a microservices architecture in that it streamlines +categorization and comprehension of each service's purpose. + +## Naming Conventions +There is no single standard for DNS naming conventions that applications are _required_ +to follow, however, the following is the default convention that should be followed +unless there is a compelling reason to deviate from it. + +``` +-.. +``` + +Where most applications will be hosted on `.jax.org` +``` +-.jax.org +``` + +### Production DNS +For production environments, the application will also map a DNS name that **does not** +include the environment name. This is to allow for the use of the shorter DNS name +by most end users. + +``` +.. +``` + +Where most applications will be hosted on `.jax.org` +``` +.jax.org +``` + +### Frontend Clients and Backend APIs +One important consideration is to maintain a list that maps client applications (e.g., +web apps, CLI tools) to the backend services they employ. This also helps manage CORS +settings, if needed. This also helps to make it clear when specific frontend clients are +siblings of specific backend APIs. 
+ +Where possible, it is preferred that the frontend client uses the same DNS name as the +backend API. This allows for intuitive discovery of the backend API by the frontend +client, and by developers, quality assurance, and operations personnel. + +!!! tip "Micro-service Architectures" + In a world of micro-services it's likely that we would move away from a one-to-one + mapping between a UI and an API. In theory services will be for specific purposes, + while clients may become more broad and integrated, pulling from multiple APIs. + + Data or analytic API’s should also be concerned with serving up other programmatic + clients, like scripts or Jupyter notebooks developed by an analyst. + +### Versioning +Versioning should be handled by the application itself, and **not by the DNS name**. +This allows for the application to be updated to a new version without requiring any +changes to the DNS name. + +!!! tip "Versioning Not Required" + Not all APIs will require versioning, and those that do not should not feel + compelled to implement it. Often, versioning can be avoided by careful design of the + API. + + Read more about API Versioning in the API Standards group's + [versioning document](/Development/Best_Practices/API_Standards/api_versioning/). + + +### Environments +The environment name should be one of the following: + +- `dev` - Development environment +- `sqa` - Testing environment +- `stage` - Staging environment +- `prod` - Production environment + +It is highly recommended to deploy each of the above environments, especially if your +application will rely on other applications, or if other applications will rely on your +application. This allows for your application, and those that depend on it, to always +have a matching environment for each deployment environment. + + +#### Staging/Production +The staging and production environments are special environments that should be as close +to identical as possible. 
The only difference between the two should be the data that +they are using. This is to ensure that any issues that are found in staging can be +reproduced in production. + +Depending on your application deployment model, these two environments can be viewed as +the "Blue" and "Green" in a "Blue/Green" deployment model. + +??? note "Blue/Green Deployments" + Blue/Green deployments are a type of software release management strategy designed to + reduce downtime and risk by running two identical production environments known as + Blue and Green. + + - Blue environment: This is the live production environment that is currently serving + user traffic. + - Green environment: This is the clone of the production environment where you deploy + the new version of the application. + + Initially, the Blue environment is live, serving all user traffic. When a new version + of the software is ready for release, it is deployed to the Green environment. + + Once the new version is tested and ready to go live, the router is switched to direct + all incoming traffic to the Green environment. The Green environment then becomes the + live or active environment. + + The Blue environment, now idle, can be used for preparation for the next release. + + +## Additional Considerations + +1) Applications should be hosted on port 80/443 so that the user does **not** need to +specify a port. +2) It's important to publicize if the application is hosted on an internal-only +IP address as this will affect the ability of users to access the application. diff --git a/docs/reference/api-standards/error_handling.md b/docs/reference/api-standards/error_handling.md new file mode 100644 index 0000000..49ca1a0 --- /dev/null +++ b/docs/reference/api-standards/error_handling.md @@ -0,0 +1,219 @@ +# REST API Error Handling Best Practices + +Error handling is crucial for ensuring the reliability and usability of a REST API service. 
Proper error handling not only helps developers identify and fix issues but also provides clear and informative responses to clients so they may take corrective action. + +## Guidelines +### 1. Use Appropriate HTTP Status Codes + +HTTP status codes are essential for conveying the outcome of API requests and allow a high level grouping of error responses. Use them consistently to indicate the result of each API call, and avoid using obscure codes that API consumers may not be familiar with. Common, widely used error codes provide clear semantics and promote consistency and interoperability. These include: + +- **400 Bad Request**: Malformed request syntax or invalid parameters +- **401 Unauthorized**: Authentication required or invalid credentials +- **403 Forbidden**: The authenticated user does not have permission to perform the operation +- **404 Not Found**: Requested resource does not exist +- **422 Unprocessable Content**: The request is formed correctly but there are semantical errors in the payload +- **500 Internal Server Error**: Generic server-side error + +### 2. Provide Detailed Error Messages + +When an error occurs, include detailed error messages in the response. These messages should provide enough information for consumers to understand the cause of the error and take appropriate actions. However, be cautious not to expose sensitive information or specifics that could be exploited by malicious users. For example, do not include implementation details or detailed stack traces. +**Error messages should clarify the problem and communicate the intended functionality**. For example, if a type check fails on an API to fetch a study record by id, the message "Study ID must be an integer" clearly conveys what is expected. + +### 3. Implement Consistent Error Response Format + +Follow a consistent error response format across all API endpoints, and format error response payloads as JSON. 
This makes it easier for clients to parse error responses and handle them gracefully. Include fields **status, request_id, error_code, message, timestamp, and trace_id** in your error responses. + +```json +{ + "status": "", HTTP Status Code for the entire request + "request_id": "", Request identifier generated by the API service + "errors": [{ + "error_code": "", Error code (see below) + "message": "", Human readable error details + "timestamp": "", Date/time of the error + "trace_id": "" Pointer to the log trace + }] +} +``` + +### 4. Return an errors array +It is possible for multiple errors to occur within one transaction, so for consistency return an errors collection even if there is only one error. + +### 5. Log Errors for Debugging + +Log errors on the server-side to aid in debugging and monitoring. Include relevant details like error message, error code, request URL, source, user ID (if available), stack trace, and timestamp. Log errors at appropriate severity levels based on the error's impact, for example "Critical", "Error", "Warning", "Info", "Debug". If you would like to learn about BioConnect's logging service, please reference the documentation [here](https://docs.bioconnect.jax.org/core-modules/logging-service/#the-logging-service). + +### 6. Error Codes +Use meaningful error codes in addition to HTTP status codes to provide more specific information. CS is developing a standard list of error codes that may be used across all of our software applications, providing the following benefits: +1. **Clarity and specificity**: HTTP status codes are useful but can be generic and lack context. In addition to providing status codes, error codes allow us to convey more detailed information about the nature of the error. +2. 
**Consistency**: By aligning on a standard list of error codes, we can ensure that error responses are uniform and predictable, which will make it easier for front end developers to handle errors and provide consistent messaging to users. +3. **Error reporting and troubleshooting**: If errors are properly logged, developers will be able to aggregate log data based on error codes, providing insight into errors that occur most frequently. + + + +The following examples show error codes, in combination with status codes and error messages: +1. invalid parameter + +```json +{ + "status": "400", + "request_id": "37472a48-a34e-4813-b064-f863170f33fc", + "errors": { + "error_code": "parameter_invalid", + "message": "Cannot convert 'abc' to integer", + "timestamp": "2023-07-02T14:07:01", + "trace_id": "fb3a02ac6caa" + } +} +``` + +2. value too large for column in database + +```json +{ + "status": "400", + "request_id": "a83e5e07-06b2-44f9-a18a-8eefdc3f9bf8", + "error": { + "error_code": "value_invalid", + "message": "animal name must be 50 characters or less", + "timestamp": "2023-08-24T01:10:00", + "trace_id": "3ff84c1df586" + + } +} +``` + +Below is the current working list of error codes for CS Rest APIs: + +400-Bad Request + +- parameter_missing +- parameter_invalid +- parameter_length_exceeded +- header_invalid + +401-Unauthorized + +- api_key_required +- api_key_invalid +- credentials_required +- credentials_invalid + +403-Forbidden + +- privileges_insufficient +- request_limit_exceeded + +404-Not found + +- url_invalid + +408-Timeout exceeded + +- timeout_exceeded + +422-Unprocessable content + +- value_missing +- value_invalid +- value_length_exceeded + + +## Use Cases/Examples + +**Missing/Invalid Parameters** +Consider an API that returns a list of animals based on a weight min and max value. A validation check is performed to ensure that both parameters are present and numeric. 
If the min value type check validation fails, a status code of 400 is returned, along with a "parameter_invalid" error code and the message "min value must be numeric". If the parameter is missing all together, the following response is issued: + +``` + min = request.query_params.get('min_value', None) + if min is None: + return Response( + { + "status": "400", + "request_id": "7abfde24-b2e8-49f9-af5b-5f45bd56cce3", + "errors":[ + { + "error": { + "error_code": "parameter_missing", + "message": "Minimum weight value is required", + "timestamp": "2023-09-24T01:10:00", + "trace_id": "48785f565a27" + } + } + ] + } + ) +``` +This example is a good use case for aggregating and reporting on error data in order to improve user experience. If your error logs are showing high numbers of "parameter_invalid" codes for a certain API, perhaps you should look into front end validation and constraints in order to prevent them. + + +**Insufficient Privileges** +It is important to remember that errors involving privileges fall under the 403 status code rather than 401. In the case of user based actions, a 401 status code essentially means "I don't recognize you" and 403 means "I know who you are but you're not allowed to do this." + +A typical use case involving insufficient privileges is an attempt to update a record when the user does not have read/write access to the data. The API logic should perform an authorization check before an attempt to update the record, and if not authorized, the response should inform the user that their request has been denied due to insufficient privileges. + +``` +return Response( + { + "status": "403", + "request_id": "5bbca962-1f57-4016-bac5-5ae9a28d7d2e", + "errors":[ + { + "error_code": "privileges_insufficient", + "message": "You do not have sufficient privileges to update this record", + "timestamp": "2023-09-24T01:10:00", + "trace_id": "48785f565a27" + } + ] + } +) +``` + +!!! 
tip "Not all 403 errors are tied to authenticated users" + They could also be used for anonymous actions that can only be performed under certain circumstances (e.g. time-based restrictions). Another example is a server that only accepts requests from a predefined range of IP addresses. + + +**Database Errors** +Database errors can be due to authentication/authorization issues, lost connection, or data integrity issues such as constraint violations. Database libraries often have some built-in error handling that can be leveraged. When possible, you should catch and handle specific exceptions rather than generic "Exception" handlers. +Wrap database operations in try-except blocks to catch and handle exceptions, for example: +``` + try: + study = Study.objects.get(pk=study_id) + return Response({'identifier': study.identifier}) + except Study.DoesNotExist: + return Response( + { + "status": "404", + "request_id": "5bbca962-1f57-4016-bac5-5ae9a28d7d2e", + "errors":[ + { + ... + } + ] + } + ) +``` + +!!! tip "APIs should be idempotent" + There is no way to group requests together in a transaction in REST APIs. To compensate for this, you should endeavor to [make your APIs idempotent](https://restfulapi.net/idempotent-rest-apis/). With the exception of a POST request, sending the same request multiple times should produce the same result. To validate idempotency, implement test cases encompassing multiple invocations of the same request to ensure consistent outcomes irrespective of the number of executions. + +**Timeout** +Timeout errors can occur when there are network or service issues, or if a call to the API involves too much data. A retry mechanism may be used to handle timeout errors, although this would normally be handled in front end client code when a 504 (Gateway timeout) is received. If timeouts are occurring due to high server load, consider load balancing and scaling your application to ameliorate them. 
Also consider monitoring and alerts to notify you when timeouts become frequent. + +**Errors from 3rd Party Libraries** +When calling methods in a 3rd party library, you can follow best practices for handling specific errors provided you have visibility into the exceptions it may raise. Consider handling these errors with graceful degradation if it's possible for your application to continue to function when they occur, for example if you can't retrieve data use cached data as a fallback. If it's a 3rd party service and it's possible for it to be down, consider implementing a retry mechanism. +An API endpoint that makes a failed call to a 3rd party service should return a 200 response if the error was isolated to the 3rd party service. + +**Business Logic Failures** +If a request includes data that fails a back end business logic check, return a status code of 422 (Unprocessable Entity). This indicates that the syntax is correct but the value(s) prevented the call from being successfully processed. Include data in the response indicating the specific failure. For example, an endpoint that creates an animal record expects a date of birth but the date provided is in the future and fails a business rule check. A "value_invalid" error code and message stating "Date of birth cannot be in the future" allows the client to correct the error. + + + + + + + + + + + diff --git a/docs/reference/api-standards/http_standards.md b/docs/reference/api-standards/http_standards.md new file mode 100644 index 0000000..72d8b80 --- /dev/null +++ b/docs/reference/api-standards/http_standards.md @@ -0,0 +1,84 @@ + +Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol used for +communication between web servers and clients, allowing for the transfer of data and +requests for resources over the internet. 
This document summarizes two important +sources of information for HTTP standards: + +- The Mozilla Developer Network (MDN) HTTP Reference[^1] +- The HTTP Semantics RFC (RFC 9110)[^2] + +## HTTP Methods + +### Common Implementation Methods +The following methods are commonly used for the endpoints we create in our web APIs. +They are useful for creating semantically meaningful RestFUL APIs. + +| Method | Type | Description | +|---------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------| +| GET | Read Only, Idempotent | The GET method requests a representation of the specified resource. Requests using GET should **only retrieve data.** | +| HEAD | Read Only, Idempotent | The HEAD method asks for a response identical to a GET request, but **without the response body**. | +| POST | Non-idempotent | The POST method submits an entity to the specified resource, often **causing a change in state** or side effects on the server. | +| PUT | Idempotent | The PUT method **replaces** all current representations of the target resource with the request payload. | +| DELETE | Idempotent | The DELETE method **deletes** the specified resource. | +| PATCH | Non-idempotent | The PATCH method applies **partial modifications** to a resource. | + +### Common Operational Methods +These methods are most commonly handled by the web-frameworks/web-server and are not +typically used when implementing our own endpoints. + +| Method | Description | +|---------|-------------------------------------------------------------------------------------------| +| CONNECT | The CONNECT method establishes a tunnel to the server identified by the target resource. | +| OPTIONS | The OPTIONS method describes the communication options for the target resource. | +| TRACE | The TRACE method performs a message loop-back test along the path to the target resource. 
| + + +## HTTP Response Status Codes +We will list only the most relevant status codes here, but in general, we follow +the [mozilla guidelines](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status). The +following descriptions are taken directly from the mozilla documentation. + + + +### 2xx Success +These codes indicate success. The three most common that we use are listed here, and +have only slight differences. + +| Status Code | Description | Long Description | +|-------------|-------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 200 | OK | The request succeeded. The result meaning of "success" depends on the HTTP method. | +| 201 | Created | The request succeeded, and a new resource was created as a result. This is typically the response sent after POST requests, or some PUT requests. | +| 204 | No Content | There is no content to send for this request, but the headers may be useful. The user agent may update its cached headers for this resource with the new ones. | + +### 4xx Client Error +These codes indicate that the server determined that client did something wrong with the +request. These are the most straightforward to implement from a backend perspective, +and provide the most information to the client about what went wrong. 
+ +| Status Code | Description | Long Description | +|-------------|----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 400 | Bad Request | The server cannot or will not process the request due to something that is perceived to be a client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing). | +| 401 | Unauthorized | Although the HTTP standard specifies "unauthorized", semantically this response means "unauthenticated". That is, the client must authenticate itself to get the requested response. | +| 403 | Forbidden | The client does not have access rights to the content; that is, it is unauthorized, so the server is refusing to give the requested resource. Unlike 401 Unauthorized, the client's identity is known to the server. | +| 404 | Not Found | The server cannot find the requested resource. In the browser, this means the URL is not recognized. In an API, this can also mean that the endpoint is valid but the resource itself does not exist. Servers may also send this response instead of 403 Forbidden to hide the existence of a resource from an unauthorized client. | +| 405 | Method Not Allowed | The request method is known by the server but is not supported by the target resource. For example, an API may not allow calling DELETE to remove a resource. | +| 409 | Conflict | This response is sent when a request conflicts with the current state of the server. | +| 422 | Unprocessable Entity | The request was well-formed but was unable to be followed due to semantic errors. | + +### 5xx Server Error +These codes indicate that the server had a problem processing the request. 
These are +the most difficult to implement from a backend perspective, and provide the least +information to the client about what went wrong. In general, when using one of these +codes, backend APIs should both log the associated error, and provide as much meaningful +information as possible to the client. + +| Status Code | Description | Long Description | +|-------------|------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 500 | Internal Server Error | Internal Server Error | +| 501 | Not Implemented | The request method is not supported by the server and cannot be handled. The only methods that servers are required to support (and therefore that must not return this code) are GET and HEAD. | +| 502 | Bad Gateway | This error response means that the server, while working as a gateway to get a response needed to handle the request, got an invalid response. | +| 504 | Gateway Timeout | This error response is given when the server is acting as a gateway and cannot get a response in time. | + + +[^1]: [Mozilla: HTTP Status](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status) +[^2]: [RFC 9110 HTTP Semantics](https://www.rfc-editor.org/rfc/rfc9110) diff --git a/docs/reference/api-standards/index.md b/docs/reference/api-standards/index.md new file mode 100644 index 0000000..2c6e2d7 --- /dev/null +++ b/docs/reference/api-standards/index.md @@ -0,0 +1,18 @@ +--- +title: API Standards +--- + +# :material-api: API Standards +The API standards group is an open working group that is focused on creating a set of +standards for the development of APIs. 
+ +The group is focused on exploring the following topics: + +* [x] [Standard HTTP Methods & Status Codes](/reference/api-standards/http_standards/) +* [x] [Standard Naming of Endpoints](/reference/api-standards/restful_endpoint_naming/) +* [x] [Standard Naming of Domain Names](/reference/api-standards/dns_naming/) +* [x] Standard structure for response body + * [x] Define Scenarios and Categories of Response Errors +* [x] [Standard for API Versioning and Deprecation](/reference/api-standards/api_versioning/) +* [x] [OpenAPI Schemas](/reference/api-standards/openapi_schemas/) (FKA Swagger) +* [ ] Testing of all items listed above. diff --git a/docs/reference/api-standards/openapi_schemas.md b/docs/reference/api-standards/openapi_schemas.md new file mode 100644 index 0000000..08959a9 --- /dev/null +++ b/docs/reference/api-standards/openapi_schemas.md @@ -0,0 +1,95 @@ +# OpenAPI Schemas + +!!! success "Overview" + As part of our commitment to creating well-defined, reliable, and easy-to-use APIs, + we adopt OpenAPI[^1] as a standard tool for defining, creating, and documenting our + APIs. OpenAPI helps our teams collaborate more efficiently, enhances the developer + experience, and maintains the quality of our APIs. This document lays out standards + and best practices around the usage and publication of OpenAPI schemas. + +## Why OpenAPI? + +OpenAPI[^1] is an industry-standard method for describing RESTful APIs. By requiring an +OpenAPI document[^2] for each API, we aim to: + +- **Increase Transparency**: With an OpenAPI document, every aspect of an API is clearly +described, from endpoints to response formats. This reduces guesswork and the potential +for misunderstandings. + +- **Streamline Collaboration**: Frontend and backend teams can reference the OpenAPI +document to understand what data is available, how to access it, and what kind of +responses to expect. 
+ +- **Simplify Integration**: Other systems or third-party developers can use the OpenAPI +document to understand how to integrate with our APIs. + +- **Enable API Testing and Monitoring**: OpenAPI documents can be used to generate +testing scripts and monitor APIs for any discrepancies in expected behavior. + +- **Improve Developer Experience**: An OpenAPI document forms the basis for generating +interactive documentation, SDKs, and API explorers, enhancing the overall developer +experience. + +## Key Principles + +1. **Up-to-date Schema**: Each RESTful API must always provide an up-to-date +`openapi.json` file, reflecting the latest version of the API. + +2. **Automatic Generation**: The `openapi.json` file should be automatically generated +from the source code and annotations as part of the build pipeline. + +3. **Public Accessibility**: The `openapi.json` file should be easily accessible, +preferably through a dedicated endpoint (e.g., +`https://api.yourdomain.com/openapi.json`). + +4. **Auto-generated API Documentation**: The OpenAPI schema should be used to +automatically generate and update API navigator pages or interactive API documentation +(like Swagger UI). + +## Detailed Standards + +### OpenAPI File Generation + +- Use tools and libraries that support OpenAPI file generation from code annotations +(e.g., Swagger for Java or SpringFox). + +- The OpenAPI file should be generated as part of the build process. + +- Keep your annotations up-to-date as you make changes to your API. + +### OpenAPI File Hosting + +- The `openapi.json` file should be publicly accessible via a dedicated URL. + +- The file should be placed at a consistent location across all APIs for easy discovery + (e.g., `https://api.yourdomain.com/openapi.json`). + +- The server should deliver the `openapi.json` file with the correct MIME type +`application/json`. 
+ +### Versioning + +- The OpenAPI document should reflect the current version of the API and be updated with +each version change. + +- If multiple versions of the API exist, each version should have its own `openapi.json` +file. + +### Auto-generated API Documentation + +- Use a tool like Swagger UI or ReDoc to automatically generate interactive API +documentation from the OpenAPI schema. + +- The documentation should be publicly accessible and updated automatically whenever the +OpenAPI schema is updated. + +- The documentation should provide interactive features such as the ability to send test +requests. + +!!! info + By adopting OpenAPI and following these standards and best practices, we can improve + the developer experience, enhance the discoverability of our APIs, and ensure our + documentation is always up-to-date. + +[^1]: [OpenAPI Initiative](https://www.openapis.org/) +[^2]: [OpenAPI Specification](https://spec.openapis.org/oas/v3.1.0) diff --git a/docs/reference/api-standards/restful_endpoint_naming.md b/docs/reference/api-standards/restful_endpoint_naming.md new file mode 100644 index 0000000..24924d5 --- /dev/null +++ b/docs/reference/api-standards/restful_endpoint_naming.md @@ -0,0 +1,87 @@ +# RESTful API Endpoint Naming Standards + +!!! success "Overview" + The RESTful API Endpoint Naming Standards are a set of guidelines we follow + to maintain **consistency**, **clarity**, and **usability** in our APIs. + +Application Programming Interfaces (APIs) act as the gateways to data and capabilities +of our applications. With the REST architectural style, we can create scalable APIs that +are easy to consume and understand. + +Following a set of established naming conventions helps us create APIs that are +straightforward to use, which in turn accelerates development, minimizes the risk of +errors, and makes our services easier to consume. + +!!! tip + Although these are presented as standards, they should be considered as guidelines. 
+ In specific cases, there might be valid reasons to deviate from these standards. + However, any deviation should be carefully considered and thoroughly discussed + within the team. + +## Guiding Principles + +1. **Resource Identification**: RESTful APIs use nouns (not verbs) to identify resources +or collections of resources. For example, use `/users` not `/getUsers` or `/createUser`. + +2. **Consistency**: Maintain consistent naming conventions across the API. This reduces +ambiguity and increases usability. + +3. **Plural Form Resources**: Resources should be named in plural form, such as `/users` +rather than `/user`. + +4. **Hierarchical Relationships**: Use sub-resources to show relationships between +resources. For example, to get a user's comments, you can use `/users/{id}/comments`. + +5. **Lowercase Letters**: Use lowercase letters for resources and collections. Mixed +case or camelCase can lead to confusion and errors. + +6. **Avoid Underscores (_)**: Underscores can sometimes be interpreted as spaces in +certain contexts and should be avoided. Use hyphens (-) for better readability if +needed[^2]. + +7. **No Trailing Slashes**: Trailing slashes should be avoided. For example, use +`/users` not `/users/`[^1]. + +8. **Non-CRUD Functions**: For routes that don't easily map to CRUD operations (Create, +Read, Update, Delete), consider mapping these to HTTP methods in a sensible way, or +group them under a sub-resource. For example, `/users/{id}/activate`. + +9. **Filters, Sorting, and Pagination**: For large collections, these should be +expressed as query parameters. For example, +`/users?status=active&sort=-registered&page=2`. + +10. **HTTP Status Codes**: Use appropriate HTTP status codes to indicate the status of +the request. For example, '200' for successful GET requests, '201' for successful POST +requests, '400' for bad requests, etc. + + !!! 
success "HTTP Status Codes" + For more information on the correct status codes to use, see the response + status codes section of the + [HTTP Standards page](/reference/api-standards/http_standards/#http-response-status-codes). + +11. **Error Handling**: Always return meaningful error messages and codes, helping the +consumer understand what went wrong and how they might fix it. + +!!! warning + Remember that these are **guidelines**, not hard rules. They serve as a starting + point for the API design, but each API has unique needs and may require certain + exceptions or adaptations. Always prioritize clarity, simplicity, and usability + when designing your API. + +## When Exceptions are Required + +Despite these guiding principles, there may be situations that require exceptions or +deviations. These could include compatibility with legacy systems, specific requirements +of certain clients, or other unique constraints. + +In such cases, exceptions should be carefully considered, thoroughly discussed within +the team, and clearly documented to avoid confusion and ensure everyone understands the +reasons behind the deviation. + +Always consider the potential impact of any exceptions on the overall usability, +clarity, and consistency of the API, and strive to minimize such deviations as much as +possible. 
+ + +[^1]: [RFC 3986 - Uniform Resource Identifier (URI): Generic Syntax](https://www.rfc-editor.org/rfc/rfc3986#section-3.3) +[^2]: [StackOverflow - Hyphens, Underscores, or Camel Case](https://stackoverflow.com/questions/10302179/hyphen-underscore-or-camelcase-as-word-delimiter-in-uris) \ No newline at end of file diff --git a/docs/reference/external-data-sources.md b/docs/reference/external-data-sources.md new file mode 100644 index 0000000..5031bd6 --- /dev/null +++ b/docs/reference/external-data-sources.md @@ -0,0 +1,28 @@ +External Data Resources +----------------------- + +GeneWeaver contains publicly available sets of genes annotated to structured +vocabularies and ontologies that are assigned Tier I, or public resource data. Other +sets of genes, such as MeSH term-to-gene annotations, are derived from the processing +of public sources and attributed to Tier II. In the case of MeSH, we take advantage of +NCBI's gene-to-Pubmed and Pubmed-to-mesh files to produce sets of genes annotated +through their transitive associations. + +| Tier | Resource | Description | Number of Gene Sets (2012) | Number of Gene Sets (2015) | Number of Gene Sets (2018) | +|:----:|:----------------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------:|:--------------------------:|:--------------------------:| +| 1 | Allen Brain Atlas (ABA) | Sets containing upregulated genes found within mouse brain regions and structures. These genes exhibit a >= 2.0 fold change in expression energies compared to all other basic cell groups and brain regions (ABA refers to this area as 'grey' contrast structures). 
These sets are generated using the ABA API and its differential gene search pipeline. | 785 | 740 | 785 | +| 1 | Comparative Toxicogenomics Database (CTD) | Sets of genes associated with CTD chemical-gene interactions are obtained via CTD flat files. | 6266 | 6177 | 21630 | +| 1 | Drug Related Gene Database (DRG) | Drug Related Gene Database, compiled by the Neuroscience Information Framework (NIF), contains gene expression data related to drug abuse research. | 1208 | 253 | 238 | +| 1 | Human and Mouse Gene Ontology (GO) | Sets of genes from human and mouse annotated to the Gene Ontology (GO), obtained from the Gene Ontology Consortium and MGI. | 33668 | 33668 | 85573 | +| 1 | Human Phenotype Ontology Annotations (HP) | Gene sets derived from annotations of genes to HPO. | 6276 | 4011 | 6276 | +| 1 | Kyoto Encyclopedia of Genes and Genomes (KEGG) | Pathways derived from the KEGG API are directly parsed for identifiers that map to GeneWeaver. Pathway data for humans, mice, rats, and rhesus monkeys is currently included. | 0 | 1172 | 1339 | +| 1 | Mammalian Phenotype Annotations (MP) | Gene sets derived from annotations of mutant mice to MP terms in MGI, with transitive closure. | 7966 | 7966 | 7931 | +| 2 | Medical Subject Headings (MeSH) | Genes annotated to MeSH terms were aggregated with gene2publication associations from PubMed. Associations must appear in a minimum of two publications. Genes associated with the closure of each set were obtained. | 0 | 12069 | 12069 | +| 1 | Molecular Signature Database (MSigDB) | Sets of genes annotated to disease for use with Gene Set Enrichment Analysis (GSEA) downloaded from MSigDB v.5.0. Only sets derived from hallmark, C1, C3, C4, C6, and C7 collections are incorporated*. MSigDB genesets that are curated from other resources (e.g. KEGG or GO) are ignored to eliminate data redundancy. 
| 0 | 3738 | 3738 | +| 1 | MouseQTLs from MGI | Sets of positional candidate genes for the confidence interval around all the QTLs within MGD. | 0 | 5050 | 3405 | +| 1 | Online Mendelian Inheritance in Man (OMIM) | Gene-disease phenotype data is retrieved from OMIM's Morbid Map and Phenotype Series list. Unconfirmed and spurious mappings are ignored. | 0 | 738 | 738 | +| 1 | Pathway Commons (PC) | Sets of genes derived from the "top" pathways: those that are neither controlled nor a pathway component of another biological process. KEGG pathways are removed from this data set to prevent duplicate genesets. | 0 | 1036 | 1149 | +| 1 | Rat QTLs from RGD | Sets of positional candidate genes for the confidence interval around all the QTLs within the RGD. | 0 | 2048 | 2064 | +| 1 | Genome Wide Association Studies (GWAS) | Catalog of Published Genome-Wide Association Studies | 0 | 0 | 3389 | + +*Information on the MSigDB file types included in GeneWeaver (H, C1, C3, C4, C6 and C7) diff --git a/docs/reference/license.md b/docs/reference/license.md new file mode 100644 index 0000000..1585c6b --- /dev/null +++ b/docs/reference/license.md @@ -0,0 +1,205 @@ +# License + +**Apache License** + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/docs/reference/policies.md b/docs/reference/policies.md new file mode 100644 index 0000000..a79f709 --- /dev/null +++ b/docs/reference/policies.md @@ -0,0 +1,88 @@ +Usage Policy and Disclaimer +--------------------------- + +Data and web site providers make no guarantees or warranties as to the +accuracy or completeness of results obtained from accessing and using +information from GeneWeaver. We will not be liable to any user or anyone +else for any inaccuracy, error or omission, regardless of cause, in the +data contained in the GeneWeaver databases or any resulting damages. In +addition, the data providers do not warrant that the databases will meet +your requirements, be uninterrupted or error-free. Data providers +expressly exclude and disclaim all expressed and implied warranties of +merchantability and fitness for a particular purpose. Data providers +shall not be responsible for any damage or loss of any kind arising out +of or related to your use of the databases, including without +limitation data loss or corruption, regardless of whether such liability +is based in tort, contract or otherwise. + +To report any errors found in the GeneWeaver database, please notify the +appropriate person listed on our +[Contacts](#contacts) page. + +Data Sharing Policy +------------------- + +Data sharing in GeneWeaver is as broad or restrictive as the +investigator allows. 
When uploading data, it can be made private,
+public or accessible only to selected groups. Access restrictions can
+be changed at any time. All group members are also visible on the account setup page. The only people with access to your data are those who you
+personally allow, or those who your group administrator(s) allow.
+GeneWeaver will make no use of the data outside of normal metrics used
+to optimize algorithm or database efficiency, or in other internal use
+solely for the development of GeneWeaver, see [Privacy
+Policy](#privacy-policy) for more.
+
+In addition, our directives to share data stem from the NIH Data Sharing
+Policy that states:
+
+> Data should be made as widely and freely available as possible while
+> safeguarding the privacy of participants, and protecting confidential
+> and proprietary data.
+
+Privacy Policy
+--------------
+
+- In order to integrate data from many users, while protecting private
+  data, we must store data on our server. User contact information is
+  collected for optional display with your gene sets to foster
+  collaborative research.
+- Entering user information is not mandatory.
+- User information will not be sold or otherwise distributed.
+- GeneWeaver records some information about how the site is used such
+  as the IP address of machines accessing data sets. This information
+  is used to monitor our system performance, to prevent abuse of the
+  system, and to guide further development of GeneWeaver. This
+  information is stored on the server in files that are accessible to
+  members of the development group. Specific information will not
+  be released.
+- When you visit GeneWeaver, your use of the site is recorded in
+  two ways. First, your use is logged by the Web server in standard
+  log files. The IP address of your machine, the date and time, and
+  the name of the page you visit are recorded.
Second, for each + request from the SQL database, the GeneWeaver records your IP + number, the time, and the data set from which you + request information. This information is collected for + statistical purposes. Our system uses a software program (Analog) to + create summary statistics that we find helpful in assessing patterns + of data use, in measuring system performance and in + detecting problems. This information is used to provide you with + better internet service. +- GeneWeaver also may request permission to place a so-called 'cookie' + text file on your system to allow you to retain information on your + set-up preferences. +- For site security purposes and to ensure that this server remains + available to users, this computer system employs programs that + monitor network traffic to identify unauthorized attempts to upload + or change information, and to detect unusually high numbers of + requests from single IP addresses. By accessing this site, you + expressly consent to usage monitoring of this site for unauthorized + or unusual activities. Unauthorized attempts to upload information + and change information are prohibited. +- In some cases, personal identifier information such as name or + e-mail is requested or required. This information may be posted for + public access along with the submitted comments and messages that + it accompanies. In all cases, participation is strictly voluntary + and no other use is made of the information. User data can be + labeled as private, group or public on submission. Permissions can + be changed. Gene sets that are not marked as public will not be + included in global analyses of the database contents. diff --git a/docs/reference/publications.md b/docs/reference/publications.md new file mode 100644 index 0000000..19dba62 --- /dev/null +++ b/docs/reference/publications.md @@ -0,0 +1,326 @@ +**Publications** +================ + +#### How to cite GeneWeaver + + +> Erich J. Baker, Jeremy J. Jay, Jason A. 
Bubier, Michael A. Langston, and Elissa J. Chesler. **[GeneWeaver: a web-based system for integrative functional genomics](http://nar.oxfordjournals.org/content/40/D1/D1067.full)**. **Nucleic Acids Research**; (2012) 40(D1): D1067-D1076 + +#### Publications Describing GeneWeaver + +* Erich J. Baker, Jeremy J. Jay, Jason A. Bubier, Michael A. Langston, +and Elissa J. Chesler. **[GeneWeaver: a web-based system for integrative +functional +genomics](http://nar.oxfordjournals.org/content/40/D1/D1067.full)**. +**Nucleic Acids Research**; (2012) 40(D1): D1067-D1076. + +* Jason A. Bubier, Michael A. Langston, Erich J. Baker, and Elissa J. Chesler. **[Integrative functional genomics for systems genetics in GeneWeaver.org](https://www.ncbi.nlm.nih.gov/pubmed/27933523)**. **Methods Mol Biol.**; (2017). 1488:131-152. + +* Jeremy J. Jay; **[Cross species integration of functional genomics +experiments](http://www.sciencedirect.com/science/article/pii/B978012398323700001X)**. +**International Review of Neurobiology**; (2012) 104:1-24. + +* Jeremy J. Jay, John D. Eblen, Yun Zhang, Mikael Benson, Andy D. +Perkins, Arnold M. Saxton, Brynn H. Voy and Elissa J. Chesler and +Michael A. Langston; **[A systematic comparison of genome-scale +clustering +algorithms](http://www.ncbi.nlm.nih.gov/pmc/articles/PMC3382433/)**. +**BMC Bioinformatics**; (2012) Jun 25;13 Supplement 10:S7. + +* Erich J. Baker, Jeremy J. Jay, Vivek M. Philip, Yun Zhang, Zuopan +Li, Roumyana Kirova, Michael A. Langston and Elissa J. Chesler; +**[Ontological Discovery Environment: a system for integrating +gene-phenotype +associations](http://www.sciencedirect.com/science/article/pii/S0888754309002067)**. +**Genomics**; (2009) Dec;94(6):377-87. + +#### Other Relevant GeneWeaver Citations + +* Ahmadi Adl, A., & Qian, X. (2015). **[Tumor stratification by a +novel graph-regularized bi-clique finding +algorithm](http://www.ncbi.nlm.nih.gov/pubmed/25791318)**. +**Computational Biology and Chemistry**, 57, 3-11. 
+10.1016/j.compbiolchem.2015.02.010 + +* Baker, E.J. (2012). **[Biological databases for behavioral +neurobiology.](http://www.ncbi.nlm.nih.gov/pubmed/23195119)** +**International Review of Neurobiology**, 103:19-38. + +* Baker, E., Culpepper, C., Philips, C., Bubier, J., Langston, M., & +Chesler, E. (2014). **[Identifying common components across biological +network graphs using a bipartite data +model.](http://www.ncbi.nlm.nih.gov/pubmed/25374613)**- **BMC +Proceedings**; Oct 13; 8. + +* Baker E., Bubier J.A., Reynolds T., Langston M.A., & Chesler E.J. (2016). **[GeneWeaver: data driven alignment of cross-species genomics in biology and disease](https://www.ncbi.nlm.nih.gov/pubmed/26656951)**. **Nucleic Acids Res.**, 44(D1):D555-9. + + +* Bettembourg, C., Diot, C., & Dameron, O. (2014). **[Semantic +particularity measure for functional characterization of gene sets using +gene ontology](http://www.ncbi.nlm.nih.gov/pubmed/24489737)**. **PLoS +ONE**, 9:e86525. + +* Bhandari, P., Hill, J.S., Farris ,S.P., Costin, B., Martin, I., +Chan, C-L., Alaimo, J.T., Bettinger, J.C., Davies, A.G., Miles, M.F., & +Grotewiel, M. (2012). **[Chloride intracellular channels modulate acute +ethanol behaviors in Drosophila, Caenorhabditis elegans and +mice](http://onlinelibrary.wiley.com/doi/10.1111/j.1601-183X.2012.00765.x/full)**. +**Genes, Brain, and Behavior**; Jun; 11(4):387-97. + +* Bhattacharyya, T., Gregorova, S., Mihola, O., Anger, M., Sebestova, +J., Denny, P., Simecek, P., & Forejt, J. (2013). **[Mechanistic basis of infertility +of mouse intersubspecific +hybrids](https://www.ncbi.nlm.nih.gov/pubmed/23329330)**. **PNAS**; +2013 110 (6) E468-E477. + +* Bubier, J.A., & Chesler, E.J. (2012). **[Accelerating discovery for +complex neurological and behavioral disorders through systems genetics +and integrative genomics in the laboratory +mouse](http://link.springer.com/article/10.1007%2Fs13311-012-0111-3)**. +**Neurotherapeutics**; Apr; 9(2):338-48. 
+
+* Bubier, J.A., Jay, J.J., Baker, C.L., Bergeson, S.E., Ohno, H.,
+Metten, P., Crabbe, J.C., & Chesler, E.J. (2014). **[Identification of a
+QTL in Mus musculus for Alcohol Preference, Withdrawal, and Ap3m2
+Expression Using Integrative Functional Genomics and Precision
+Genetics](http://www.ncbi.nlm.nih.gov/pubmed/24923803)**. **Genetics**;
+Jun 11;197(4):1377-1393.
+
+* Bubier, J.A., & Chesler, E.J. (2015). **[GeneWeaver: finding
+consilience in heterogeneous cross-species functional genomics
+data.](http://www.ncbi.nlm.nih.gov/pubmed/26092690)**. **Mammalian
+Genome**; Oct 26.
+
+* Cesar, A.S., Regitano, L.C., Mourão, G.B., Tullio, R.R., Lanna,
+D.P., Nassu, R.T., Mudado, M.A., Oliveira, P.S., do Nascimento, M.L.,
+Chaves, A.S., Alencar, M.M., Sonstegard, T.S., Garrick, D.J., Reecy,
+J.M., & Coutinho, L.L. (2014). **[Genome-wide association study for
+intramuscular fat deposition and composition in Nellore
+cattle.](http://www.ncbi.nlm.nih.gov/pubmed/24666668)**. **BMC
+Genetics**; Mar 25; 15:39.
+
+* Chan, R.F. (2013). Genetic analysis of ethanol sensitivity and
+tolerance in drosophila. **[Master's
+Thesis](https://scholarscompass.vcu.edu/etd/3154/?show=full)**.
+
+* Chen, H.C., Zou, W., Lu, T.P., & Chen, J.J. (2014). **[A composite
+model for subgroup identification and prediction via bicluster
+analysis](http://www.ncbi.nlm.nih.gov/pubmed/25347824)**. **PLoS ONE**,
+9 (e111318). 10.1371/journal.pone.0111318
+
+* Chesler, E.J., Plitt, A., Fisher, D., Hurd, B., Lederle, L., Bubier,
+J.A., Kiselycznyk, C., & Holmes, A. (2012). **[Quantitative trait loci
+for sensitivity to ethanol intoxication in a C57BL/6J×129S1/SvImJ inbred
+mouse
+cross](http://link.springer.com/article/10.1007%2Fs00335-012-9394-2)**.
+**Mammalian Genome**; Jun;23(5-6):305-21.
+
+* Chesler, E.J., & Logan, R.W. (2013). 
**[Opportunities for +bioinformatics in the classification of behavior and psychiatric +disorders](http://www.ncbi.nlm.nih.gov/pubmed/?term=23195316)**- +**International Review of Neurobiology**; 104:183-211 + +* Devera, S.M., Costina, B.N., Xub, R., El-Hagea, N., Balinang, J., +Samoshkin, A., ... Hausera, K.F. (2014). **[Differential expression of +the alternatively spliced OPRM1 isoform μ -opioid receptor-1K in +HIV-infected +individuals](http://www.ncbi.nlm.nih.gov/pubmed/24413261)**'. **AIDS**, +28, 19-30. 10.1097/QAD.0000000000000113 + +* Eblen, J.D., Phillips, C.A., Rogers, G.L., & Langston, M.A. (2012). +**[The maximum clique enumeration problem: algorithms, applications, and +implementations.](http://www.ncbi.nlm.nih.gov/pubmed/22759429)**- **BMC +Bioinformatics**; Jun 25;13 Suppl 10:S5. + +* Farris, S.P. (2016).**[Moving toward understanding the proteome +involved in substance +abuse](http://www.biologicalpsychiatryjournal.com/article/S0006-3223(16)00013-5/abstract)**. +**Biological Psychiatry**, 79 , 422-424. 10.1016/j.biopsych.2016.01.003 + +* Groshaus, M., & Montero, L. (2016). **[Tight lower bounds on the +number of bicliques in false-twin-free +graphs](http://www.sciencedirect.com/science/article/pii/S1571065315002048)**. +**Theoretical Computer Science**, 636, 77-84. 10.1016/j.tcs.2016.05.027 + +* Grubb, S.C., Bult, C.J., & Bogue, M.A. (2014). **[Mouse Phenome +Database](http://www.ncbi.nlm.nih.gov/pubmed/24243846)**.**Nucleic Acids +Research**; 42:D825-D834. + +* Ha, T.J., Swanson, D.J., Kirova, R., Yeung, J., Choi, K., Tong, Y., +Chesler, E.J., & Goldowitz, D. (2012). **[Genome-wide microarray +comparison reveals downstream genes of Pax6 in the developing mouse +cerebellum.](http://www.ncbi.nlm.nih.gov/pubmed/22817342)**- **European +Journal of Neuroscience**; Oct 36(7); 2888-98. + +* Haendel, M.A., & Chesler, E.J. (2012). 
**[Lost and found in
+behavioral informatics.](http://www.ncbi.nlm.nih.gov/pubmed/23195118)**.
+**International Review of Neurobiology**; 103:1-18.
+
+* Hait, N.C., Wise, L.E., Allegood, J.C., O'Brien, M., Avni, D.,
+Reeves, T.M., Knapp, P.E., Lu, J., Luo, C., Miles, M.F., Milstien, S.,
+Lichtman, A.H., & Spiegel, S. (2014). **[Active, phosphorylated
+fingolimod inhibits histone deacetylases and facilitates fear extinction
+memory](http://www.ncbi.nlm.nih.gov/pubmed/24859201)**. **Nature
+Neuroscience**.
+
+* Han, B.W., & Chen, Y.Q. (2013). **[Potential pathological and
+functional links between long noncoding RNAs and
+hematopoiesis](http://www.ncbi.nlm.nih.gov/pubmed/23962981)**. **Science
+Signaling**; 6.
+
+* Jay, J.J., & Chesler, E.J. (2014). **[Performing integrative
+functional genomics analysis in
+GeneWeaver.org](http://www.ncbi.nlm.nih.gov/pubmed/24233775)**.
+**Methods in Molecular Biology**; pp 13-29.
+
+* Jegga, A.G. (2014). **[Candidate gene discovery and prioritization
+in rare
+diseases](http://www.ncbi.nlm.nih.gov/pubmed/24870143)**. **Methods in
+Molecular Biology**; 1168:295-312.
+
+* Kang, H., Choi, I., Cho, S., Ryu, D., Lee, S., & Kim, W. (2014).
+**[GsGator: An integrated web platform for cross-species gene set
+analysis](http://www.ncbi.nlm.nih.gov/pubmed/24423189)**. **BMC
+Bioinformatics**, 15 (13). 10.1186/1471-2105-15-13
+
+* Kershenbaum, A., Cutillo, A., Darabos, C., Murray, K., Schiaffino,
+R., & Moore, J.H. (2016). **[Bicliques in graphs with correlated edges:
+From artificial to biological
+networks](http://link.springer.com/chapter/10.1007%2F978-3-319-31204-0_10)**.
+In **Lecture Notes in Computer Science** (including subseries Lecture
+Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)
+(138-155). 10.1007/978-3-319-31204-0\_10
+
+* Logan, R.W., Robledo, R.F., Recla, J.M., Philip, V.M., Bubier, J.A.,
+Jay, J.J., Harwood, C., Wilcox, T., Gatti, D.M., Bult, C.J., Churchill,
+G.A., & Chesler, E.J. (2013). 
**[High-precision genetic mapping of +behavioral traits in the diversity outbred mouse +population](http://www.ncbi.nlm.nih.gov/pubmed/23433259)**- **Genes, +Brain and Behavior**; Jun; 12(4):424-37. + +* Louati, A., Haddad, J.E., & Pinson, S. (2015). **[A multi-agent +approach for trust-based service discovery and selection in social +networks](http://www.scpe.org/index.php/scpe/article/view/1129)**. +**Scalable Computing**, 16, 381-402. doi: 10.12694/scpe.v16i4.1129 + +* McWhite, C.D., Liebeskind, B.J., & Marcotte, E.M. (2015). +**[Applications of comparative evolution to human disease +genetics](http://www.sciencedirect.com/science/article/pii/S0959437X15000878)**. +**Current Opinion in Genetics and Development**, 35, 16-24. doi: +10.1016/j.gde.2015.08.004 + +* Meehan, T.F., Carr, C.J., Jay, J.J., Bult, C.J., Chesler, E.J., & +Blake, J.C. (2011). **[Autism candidate genes via mouse +phenomics](http://www.sciencedirect.com/science/article/pii/S1532046411000438)**. +**Journal of Biomedical Informatics**; Dec; 44 Supplement 1:S5-11. + +* Mooney, M., & McWeeney, S. (2014). **[Data integration and +reproducibility for high-throughput +transcriptomics](http://www.ncbi.nlm.nih.gov/pubmed/25172471)**. In +**International Review of Neurobiology** (55-71). doi: +10.1016/B978-0-12-801105-8.00003-5 + +* Nestor, C.E., Barrenäs, F., Wang, H., Lentini, A., Zhang, H., Bruhn, +S., Jörnsten, R., Langston, M.A., Rogers, G., Gustafsson, M., & Benson, +M. (2014). **[DNA methylation changes separate allergic patients from +healthy controls and may reflect altered CD4+ T-cell population +structure](http://www.ncbi.nlm.nih.gov/pubmed/24391521)**- **PLoS +Genetics**; Jan;10(1):e1004059. + +* O'Brien, M.A., Costin, B.N., & Miles, M.F. (2012). 
**[Using +genome-wide expression profiling to define gene networks relevant to the +study of complex traits: from RNA integrity to network +topology.](http://www.ncbi.nlm.nih.gov/pubmed/23195313)**- +**International Review of Neurobiology**;104:91-133. + +* Oren, Y., Nachshon, A., Frishberg, A., Wilentzik, R., & Gat-Viks, I. +(2015). **[Linking traits based on their shared molecular +mechanisms](http://www.ncbi.nlm.nih.gov/pubmed/25781485)**. **eLife** , +2015 (e04346). 10.7554/eLife.04346 + +* Padula, A.E., Griffin, W.C. 3rd, Lopez, M.F., Nimitvilai, S., +Cannady, R., McGuier, N.S., Chesler, E.J., Miles, M.F., Williams, R.W., +Randall, P.K., Woodward, J.J., Becker, H.C., & Mulholland, P.J. (2015). +**[KCNN Genes that Encode Small-Conductance Ca2+-Activated K+ Channels +Influence Alcohol and Drug +Addiction.](http://www.ncbi.nlm.nih.gov/pubmed/25662840)**- +**Neuropsychopharmacology**; Feb 9. + +* Recla, J.M., Robledo, R.F., Gatti, D.M., Bult, C.J., Churchill, +G.A., & Chesler, E.J. (2014). **[Precise genetic mapping and integrative +bioinformatics in Diversity Outbred mice reveals Hydin as a novel pain +gene](http://www.ncbi.nlm.nih.gov/pubmed/24700285)**.*' Mammalian +Genome*'; 25:211-222. + +* Rojas-Muñoz, A., Maurya, M.R., Lo, F., & Willems, E. (2014). +**[Integrating omics into the cardiac differentiation of human +pluripotent stem +cells](http://onlinelibrary.wiley.com/doi/10.1002/wsbm.1268/abstract)** +**Wiley Interdisciplinary Reviews: Systems Biology and Medicine**, 6, +247-264. doi: 10.1002/wsbm.1268 + +* Roth, A., Kyzar, E.J., Cachet, J., Stewart, A.M., Green, J., +Gaikwad, S., O'Leary, T.P., Tabakoff, B., Brown, R.E., & Kalueff, A.V. +(2013). **[Potential translational targets revealed by linking mouse +grooming behavioral phenotypes to gene expression using public +databases](http://www.ncbi.nlm.nih.gov/pubmed/23123364)**. **Progress in +Neuro-Psychopharmacology & Biological Psychiatry**; 40:313-325. + +* Saccone, S.F. (2012). 
**[Applying in silico integrative genomics to +genetic studies of human +disease](http://www.ncbi.nlm.nih.gov/pubmed/23195124)**. In +**International Review of Neurobiology** (133-156). doi: +10.1016/B978-0-12-388408-4.00007-1 + +* Smith, C.L., & Eppig, J.T. (2012). **[The Mammalian Phenotype +Ontology as a unifying standard for experimental and high-throughput +phenotyping data](http://www.ncbi.nlm.nih.gov/pubmed/22961259)**. +**Mammalian Genome**, 23, 653-668. doi: 10.1007/s00335-012-9421-3 + +* Talishinsky, A., & Rosen, G.D. (2012). **[Systems Genetics of the +Lateral Septal Nucleus in Mouse: Heritability, Genetic Control, and +Covariation with Behavioral and Morphological +Traits](http://www.plosone.org/article/info%3Adoi%2F10.1371%2Fjournal.pone.0044236)**. +**PLoS ONE**; 7(8): e44236. + +* Tsiliki, G., Tsaramirsis, K., & Kossida, S. (2014). **[AmalgamScope: +merging annotations data across the human +genome.](http://www.ncbi.nlm.nih.gov/pubmed/24967409)**- **BioMed +Research International.**; Epub 2014 May 20. + +* Van Steen, K., & Malats, N. (2014). **[Perspectives on data +integration in human complex disease +analysis](https://www.researchgate.net/publication/290959823_Perspectives_on_data_integration_in_human_complex_disease_analysis)**. +In **Big Data Analytics in Bioinformatics and Healthcare** (284-322). +10.4018/978-1-4666-6611-5.ch013 + +* Williams, R.W. (2011). **[Genetic and genomic web resources for +research on alcohol use and +abuse](http://pubs.niaaa.nih.gov/publications/arcr343/378-380.htm)**. +**Alcohol Research and Health**, 34 , 378-380. + +* Williams, R.W., & Mulligan, M.K. (2012). **[Genetic and molecular +network analysis of +behavior](http://www.ncbi.nlm.nih.gov/pubmed/23195314)**. +**International Review of Neurobiology**; 104:135-57. + +* Wiltshire, T., Ervin, R.B., Duan, H., Bogue, M.A., Zamboni, W.C., +Cook, S., Chung, W., Zou, F., & Tarantino, L.M. (2015). 
**[Initial +locomotor sensitivity to cocaine varies widely among inbred mouse +strains.](http://www.ncbi.nlm.nih.gov/pubmed/25727211)** - **Genes, +Brain and Behavior**, 14, 271-280. 10.1111/gbb.12209 + +* Wittkop, T., TerAvest, E., Evani, U.S., Fleisch, K.M., Berman, A.E., +Powell, C., Shah, N.H., & Mooney, S.D. (2013). **[STOP using just GO: A +multi-ontology hypothesis generation tool for high throughput +experimentation](https://www.ncbi.nlm.nih.gov/pubmed/23409969)**. **BMC Bioinformatics**, 14. + +* Zhang, Y., Phillips, C.A., Rogers, G.L., Baker, E.J., Chesler, E.J., +& Langston, M.A. (2014). **[On finding bicliques in bipartite graphs: a +novel algorithm and its application to the integration of diverse +biological data types.](http://www.ncbi.nlm.nih.gov/pubmed/24731198)** +**BMC Bioinformatics**, 15. + +#### Related Resources and Publications + +[GeneNetwork](http://GeneNetwork.org) diff --git a/docs/tutorial/creating-a-new-documentation-site.md b/docs/tutorial/creating-a-new-documentation-site.md deleted file mode 100644 index 320d54a..0000000 --- a/docs/tutorial/creating-a-new-documentation-site.md +++ /dev/null @@ -1,5 +0,0 @@ - -!!! example "Seeking Contributors!" - We know this page should exist, but we haven't found the time to write it yet. - If you would like to contribute to this page, please see the - [git repository](https://github.com/bergsalex/geneweaver-docs). 
\ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index cad7757..b6c8eaa 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -11,8 +11,6 @@ theme: palette: - scheme: geneweaver media: "(prefers-color-scheme: light)" -# primary: green -# accent: blue toggle: icon: material/toggle-switch-off-outline name: Switch to dark mode @@ -38,19 +36,40 @@ theme: - content.tabs.link extra_css: - stylesheets/extra.css -repo_name: bergsalex/geneweaver-docs -repo_url: https://github.com/bergsalex/geneweaver-docs +repo_name: TheJacksonLaboratory/geneweaver-docs +repo_url: https://github.com/TheJacksonLaboratory/geneweaver-docs edit_uri: edit/main/docs/ extra: generator: true + social: + - icon: fontawesome/brands/github + link: https://github.com/orgs/TheJacksonLaboratory/repositories?q=geneweaver + name: GitHub Repositories + - icon: fontawesome/brands/python + link: https://pypi.org/search/?q=geneweaver + name: Python Packages + - icon: material/web + link: https://geneweaver.org + name: Geneweaver Website + - icon: material/api + link: https://geneweaver.jax.org/api/docs + name: Geneweaver API +copyright: These pages are maintained by the GeneWeaver team and the Chesler Lab at The Jackson Laboratory in Bar Harbor, Maine. 
markdown_extensions: - admonition - tables - pymdownx.details - pymdownx.superfences - attr_list + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg - md_in_html - pymdownx.superfences + - def_list + - footnotes + - pymdownx.tasklist: + custom_checkbox: true - pymdownx.tabbed: alternate_style: true - pymdownx.superfences: @@ -99,6 +118,15 @@ nav: - Logging In: reference/command-line/logging-in.md - API Commands: reference/command-line/api-commands.md - ReST API: reference/restful-api.md + - API Standards: + - reference/api-standards/index.md + - HTTP Standards: reference/api-standards/http_standards.md + - DNS Naming: reference/api-standards/dns_naming.md + - ReSTful Endpoint Naming: reference/api-standards/restful_endpoint_naming.md + - OpenAPI Schemas: reference/api-standards/openapi_schemas.md + - API Versioning: reference/api-standards/api_versioning.md + - API Monitoring: reference/api-standards/api_monitoring.md + - Response Errors: reference/api-standards/error_handling.md - Available Packages: reference/available-packages.md - Available Tools: reference/available-tools.md # - Scientific Workflows: reference/scientific-workflows.md @@ -106,4 +134,8 @@ nav: - GeneSet Tiers: reference/geneset-tiers.md - Contributing Guide: reference/contributing-guide.md - Development Guide: reference/development-guide.md + - External Data Sources: reference/external-data-sources.md + - Publications: reference/publications.md + - License: reference/license.md + - Policies: reference/policies.md - Acknowledgements: reference/acknowledgements.md diff --git a/poetry.lock b/poetry.lock index a4c3fb6..f6e1941 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,26 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+ +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] [[package]] name = "appnope" @@ -11,6 +33,82 @@ files = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = 
">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = 
"argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = 
"sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + [[package]] name = "asttokens" version = "2.2.1" @@ -28,6 +126,20 @@ six = "*" [package.extras] test = ["astroid", "pytest"] +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "attrs" version = "23.1.0" @@ -46,6 +158,20 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", 
"pytest-cov"] + [[package]] name = "backcall" version = "0.2.0" @@ -370,6 +496,20 @@ files = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "executing" version = "1.2.0" @@ -398,6 +538,17 @@ files = [ [package.extras] devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + [[package]] name = "ghp-import" version = "2.1.0" @@ -415,6 +566,62 @@ python-dateutil = ">=2.8.1" [package.extras] dev = ["flake8", "markdown", "twine", "wheel"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + 
+[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "idna" version = "3.4" @@ -517,6 +724,20 @@ qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] 
+ +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "jedi" version = "0.19.0" @@ -553,6 +774,31 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "json5" +version = "0.9.14" +description = "A Python implementation of the JSON5 data format." +optional = false +python-versions = "*" +files = [ + {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, + {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, +] + +[package.extras] +dev = ["hypothesis"] + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + [[package]] name = "jsonschema" version = "4.19.0" @@ -566,9 +812,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = 
">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -631,6 +885,134 @@ traitlets = ">=5.3" docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-events" +version = "0.9.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, + {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-lsp" +version = "2.2.2" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, + {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-server" 
+version = "2.12.5" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.9.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = "*" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.2" +description = "A Jupyter Server Extension Providing Terminals." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, + {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.1.1" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.1.1-py3-none-any.whl", hash = "sha256:fa3e8c18b804eac04e51ceebd9dd3dd396e08106816f0d09cc426799d7087632"}, + {file = "jupyterlab-4.1.1.tar.gz", hash = "sha256:8acc9f561729d8f32c14c294c397917cddfeeb13a5d46f811979b71b4911a9fd"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +httpx = ">=0.25.0" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +ipykernel = "*" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.19.0,<3" +notebook-shim = ">=0.2" +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.2.0)", 
"ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] + [[package]] name = "jupyterlab-pygments" version = "0.2.2" @@ -642,6 +1024,32 @@ files = [ {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] +[[package]] +name = "jupyterlab-server" +version = "2.24.0" +description = "A set of server components for JupyterLab and JupyterLab like applications." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_server-2.24.0-py3-none-any.whl", hash = "sha256:5f077e142bb8dc9b843d960f940c513581bceca3793a0d80f9c67d9522c4e876"}, + {file = "jupyterlab_server-2.24.0.tar.gz", hash = "sha256:4e6f99e0a5579bbbc32e449c4dbb039561d4f1a7827d5733273ed56738f21f07"}, +] + +[package.dependencies] +babel = ">=2.10" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.17.3" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.28" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.16.1,<0.17.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-validator (>=0.5.1,<0.7.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + [[package]] name = "jupytext" version = 
"1.15.0" @@ -832,13 +1240,13 @@ files = [ [[package]] name = "mkdocs" -version = "1.4.2" +version = "1.5.3" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.4.2-py3-none-any.whl", hash = "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"}, - {file = "mkdocs-1.4.2.tar.gz", hash = "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5"}, + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, ] [package.dependencies] @@ -847,16 +1255,19 @@ colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" -markdown = ">=3.2.1,<3.4" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] [[package]] name = "mkdocs-jupyter" @@ -882,35 +1293,42 @@ test = 
["coverage[toml]", "pymdown-extensions", "pytest", "pytest-cov"] [[package]] name = "mkdocs-material" -version = "9.0.13" +version = "9.5.15" description = "Documentation that simply works" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.0.13-py3-none-any.whl", hash = "sha256:06e51eba6a090de070a3489890cf1e491d52c04c6ff2b06dd4586c6cdd974a3f"}, - {file = "mkdocs_material-9.0.13.tar.gz", hash = "sha256:a62696610899d01df091b4d5ad23f9811f878a1f34307d7cea677baf4854c84f"}, + {file = "mkdocs_material-9.5.15-py3-none-any.whl", hash = "sha256:e5c96dec3d19491de49ca643fc1dbb92b278e43cdb816c775bc47db77d9b62fb"}, + {file = "mkdocs_material-9.5.15.tar.gz", hash = "sha256:39f03cca45e82bf54eb7456b5a18bd252eabfdd67f237a229471484a0a4d4635"}, ] [package.dependencies] -colorama = ">=0.4" -jinja2 = ">=3.0" -markdown = ">=3.2" -mkdocs = ">=1.4.2" -mkdocs-material-extensions = ">=1.1" -pygments = ">=2.14" -pymdown-extensions = ">=9.9.1" -regex = ">=2022.4.24" -requests = ">=2.26" +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.5.3,<1.6.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] name = "mkdocs-material-extensions" -version = "1.1.1" +version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, - {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] @@ -1005,6 +1423,57 @@ files = [ {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, ] +[[package]] +name = "notebook" +version = "7.1.0" +description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"}, + {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"}, +] + +[package.dependencies] +jupyter-server = ">=2.4.0,<3" +jupyterlab = ">=4.1.1,<4.2" +jupyterlab-server = ">=2.22.1,<3" +notebook-shim = ">=0.2,<0.3" +tornado = ">=6.2.0" + +[package.extras] +dev = ["hatch", "pre-commit"] +docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] + +[[package]] +name = "notebook-shim" +version = "0.2.4" +description = "A shim layer for notebook traits and config" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, + {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" version = "23.0" @@ -1016,6 +1485,16 @@ files = [ {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pandocfilters" version = "1.5.0" @@ -1042,6 +1521,17 @@ files = [ qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["docopt", "pytest (<6.0.0)"] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pexpect" version = "4.8.0" @@ -1082,6 +1572,20 @@ files = [ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +[[package]] +name = "prometheus-client" +version = "0.20.0" +description = "Python client for the Prometheus monitoring system." +optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, +] + +[package.extras] +twisted = ["twisted"] + [[package]] name = "prompt-toolkit" version = "3.0.39" @@ -1160,31 +1664,36 @@ files = [ [[package]] name = "pygments" -version = "2.14.0" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "9.9.2" +version = "10.4" description = "Extension pack for Python Markdown." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-9.9.2-py3-none-any.whl", hash = "sha256:c3d804eb4a42b85bafb5f36436342a5ad38df03878bb24db8855a4aa8b08b765"}, - {file = "pymdown_extensions-9.9.2.tar.gz", hash = "sha256:ebb33069bafcb64d5f5988043331d4ea4929325dc678a6bcf247ddfcf96499f8"}, + {file = "pymdown_extensions-10.4-py3-none-any.whl", hash = "sha256:cfc28d6a09d19448bcbf8eee3ce098c7d17ff99f7bd3069db4819af181212037"}, + {file = "pymdown_extensions-10.4.tar.gz", hash = "sha256:bc46f11749ecd4d6b71cf62396104b4a200bad3498cb0f5dad1b8502fe461a35"}, ] [package.dependencies] markdown = ">=3.2" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.12)"] [[package]] name = "python-dateutil" @@ -1200,6 +1709,17 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = 
"sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + [[package]] name = "pywin32" version = "306" @@ -1223,6 +1743,21 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pywinpty" +version = "2.0.12" +description = "Pseudo terminal support for Windows from Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, +] + [[package]] name = "pyyaml" version = "6.0" @@ -1524,6 +2059,31 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = 
"rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + [[package]] name = "rpds-py" version = "0.9.2" @@ -1630,6 +2190,22 @@ files = [ {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, ] +[[package]] +name = "send2trash" +version = "1.8.2" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, + {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + [[package]] name = "six" version = "1.16.0" @@ -1641,6 +2217,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "soupsieve" version = "2.4.1" @@ -1671,6 +2258,27 @@ pure-eval = "*" [package.extras] tests = 
["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "terminado" +version = "0.18.0" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, + {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + [[package]] name = "tinycss2" version = "1.2.1" @@ -1700,6 +2308,17 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "tornado" version = "6.3.3" @@ -1735,6 +2354,17 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "types-python-dateutil" +version = "2.8.19.20240106" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + 
{file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, +] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -1746,6 +2376,20 @@ files = [ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + [[package]] name = "urllib3" version = "1.26.14" @@ -1813,6 +2457,21 @@ files = [ {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] +[[package]] +name = "webcolors" +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, + {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + [[package]] name = "webencodings" version = "0.5.1" @@ -1824,6 +2483,22 @@ files = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "zipp" version = "3.14.0" @@ -1842,4 +2517,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "e42e0bed9e31894b9d5b199e6375e158298346e71294c0646b48947997540672" +content-hash = "13150baedb89dade88fc626e0153c1c8243fd037921a4b21b1304574ad7b576b" diff --git a/pyproject.toml b/pyproject.toml index e845ef4..8d2e1a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,9 +8,13 @@ packages = [] [tool.poetry.dependencies] python = "^3.9" -mkdocs-material = "^9.0.13" +mkdocs = "^1.2.3" +mkdocs-material = "^9.5.11" mkdocs-jupyter = "^0.24.2" +[tool.poetry.group.dev.dependencies] +notebook = 
"^7.1.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api"