diff --git a/2.3.0/fauna.html b/2.3.0/fauna.html new file mode 100644 index 00000000..4902349c --- /dev/null +++ b/2.3.0/fauna.html @@ -0,0 +1,269 @@ + + + + + + + fauna API documentation + + + + + + + + + +
+
+

+fauna

+ + + + + + +
 1__title__ = "Fauna"
+ 2__version__ = "2.3.0"
+ 3__api_version__ = "10"
+ 4__author__ = "Fauna, Inc"
+ 5__license__ = "MPL 2.0"
+ 6__copyright__ = "2023 Fauna, Inc"
+ 7
+ 8from fauna.query import fql, Document, DocumentReference, NamedDocument, NamedDocumentReference, NullDocument, Module, Page
+ 9
+10global_http_client = None
+
+ + +
+
+
+ global_http_client = +None + + +
+ + + + +
+
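A minimal end-to-end sketch of the exports above, assuming a Product collection and a secret available through the FAUNA_SECRET environment variable (neither is part of this module):

from fauna import fql
from fauna.client import Client

# Client() falls back to the FAUNA_ENDPOINT and FAUNA_SECRET environment variables.
client = Client()

# fql() builds a Query; ${...} placeholders are encoded as typed arguments.
query = fql("Product.all().take(${count})", count=3)
result = client.query(query)
print(result.data)  # decoded result, e.g. a Page of Documents

client.close()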
+ + diff --git a/2.3.0/fauna/client.html b/2.3.0/fauna/client.html new file mode 100644 index 00000000..78d0e773 --- /dev/null +++ b/2.3.0/fauna/client.html @@ -0,0 +1,248 @@ + + + + + + + fauna.client API documentation + + + + + + + + + +
+
+

+fauna.client

+ + + + + + +
1from .client import Client, QueryOptions, StreamOptions, FeedOptions, FeedPage, FeedIterator
+2from .endpoints import Endpoints
+3from .headers import Header
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/client/client.html b/2.3.0/fauna/client/client.html new file mode 100644 index 00000000..a20b39cd --- /dev/null +++ b/2.3.0/fauna/client/client.html @@ -0,0 +1,3513 @@ + + + + + + + fauna.client.client API documentation + + + + + + + + + +
+
+

+fauna.client.client

+ + + + + + +
  1import logging
+  2from dataclasses import dataclass
+  3from datetime import timedelta
+  4from typing import Any, Dict, Iterator, Mapping, Optional, Union, List
+  5
+  6import fauna
+  7from fauna.client.headers import _DriverEnvironment, _Header, _Auth, Header
+  8from fauna.client.retryable import Retryable
+  9from fauna.client.utils import _Environment, LastTxnTs
+ 10from fauna.encoding import FaunaEncoder, FaunaDecoder
+ 11from fauna.encoding import QuerySuccess, QueryTags, QueryStats
+ 12from fauna.errors import FaunaError, ClientError, ProtocolError, \
+ 13  RetryableFaunaException, NetworkError
+ 14from fauna.http.http_client import HTTPClient
+ 15from fauna.query import EventSource, Query, Page, fql
+ 16
+ 17logger = logging.getLogger("fauna")
+ 18
+ 19DefaultHttpConnectTimeout = timedelta(seconds=5)
+ 20DefaultHttpReadTimeout: Optional[timedelta] = None
+ 21DefaultHttpWriteTimeout = timedelta(seconds=5)
+ 22DefaultHttpPoolTimeout = timedelta(seconds=5)
+ 23DefaultIdleConnectionTimeout = timedelta(seconds=5)
+ 24DefaultQueryTimeout = timedelta(seconds=5)
+ 25DefaultClientBufferTimeout = timedelta(seconds=5)
+ 26DefaultMaxConnections = 20
+ 27DefaultMaxIdleConnections = 20
+ 28
+ 29
+ 30@dataclass
+ 31class QueryOptions:
+ 32  """
+ 33    A dataclass representing options available for a query.
+ 34
+ 35    * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+ 36    * max_contention_retries - The max number of times to retry the query if contention is encountered.
+ 37    * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+ 38    * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+ 39    * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
+ 40    * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+ 41    * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+ 42    """
+ 43
+ 44  linearized: Optional[bool] = None
+ 45  max_contention_retries: Optional[int] = None
+ 46  query_timeout: Optional[timedelta] = DefaultQueryTimeout
+ 47  query_tags: Optional[Mapping[str, str]] = None
+ 48  traceparent: Optional[str] = None
+ 49  typecheck: Optional[bool] = None
+ 50  additional_headers: Optional[Dict[str, str]] = None
+ 51
+ 52
+ 53@dataclass
+ 54class StreamOptions:
+ 55  """
+ 56    A dataclass representing options available for a stream.
+ 57
+ 58    * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+ 59    * max_backoff - The maximum backoff in seconds for an individual retry.
+ 60    * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+ 61    the timestamp.
+ 62    * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+ 63    * status_events - Indicates if stream should include status events. Status events are periodic events that
+ 64    update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+ 65    about the cost of maintaining the stream other than the cost of the received events.
+ 66    """
+ 67
+ 68  max_attempts: Optional[int] = None
+ 69  max_backoff: Optional[int] = None
+ 70  start_ts: Optional[int] = None
+ 71  cursor: Optional[str] = None
+ 72  status_events: bool = False
+ 73
+ 74
+ 75@dataclass
+ 76class FeedOptions:
+ 77  """
+ 78    A dataclass representing options available for an Event Feed.
+ 79
+ 80    * max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
+ 81    * max_backoff - The maximum backoff in seconds for an individual retry.
+ 82    * query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
+ 83    * start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after
+ 84    the timestamp.
+ 85    * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+ 86    * page_size - The desired number of events per page.
+ 87    """
+ 88  max_attempts: Optional[int] = None
+ 89  max_backoff: Optional[int] = None
+ 90  query_timeout: Optional[timedelta] = None
+ 91  page_size: Optional[int] = None
+ 92  start_ts: Optional[int] = None
+ 93  cursor: Optional[str] = None
+ 94
+ 95
+ 96class Client:
+ 97
+ 98  def __init__(
+ 99      self,
+100      endpoint: Optional[str] = None,
+101      secret: Optional[str] = None,
+102      http_client: Optional[HTTPClient] = None,
+103      query_tags: Optional[Mapping[str, str]] = None,
+104      linearized: Optional[bool] = None,
+105      max_contention_retries: Optional[int] = None,
+106      typecheck: Optional[bool] = None,
+107      additional_headers: Optional[Dict[str, str]] = None,
+108      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+109      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+110      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+111      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+112      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+113      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+114      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+115      max_attempts: int = 3,
+116      max_backoff: int = 20,
+117  ):
+118    """Initializes a Client.
+119
+120        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+121        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+122        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+123        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+124        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+125        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+126        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+127        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+128        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+129        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+130        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+131        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+132        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+133        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+134        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+135        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+136        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+137        """
+138
+139    self._set_endpoint(endpoint)
+140    self._max_attempts = max_attempts
+141    self._max_backoff = max_backoff
+142
+143    if secret is None:
+144      self._auth = _Auth(_Environment.EnvFaunaSecret())
+145    else:
+146      self._auth = _Auth(secret)
+147
+148    self._last_txn_ts = LastTxnTs()
+149
+150    self._query_tags = {}
+151    if query_tags is not None:
+152      self._query_tags.update(query_tags)
+153
+154    if query_timeout is not None:
+155      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+156    else:
+157      self._query_timeout_ms = None
+158
+159    self._headers: Dict[str, str] = {
+160        _Header.AcceptEncoding: "gzip",
+161        _Header.ContentType: "application/json;charset=utf-8",
+162        _Header.Driver: "python",
+163        _Header.DriverEnv: str(_DriverEnvironment()),
+164    }
+165
+166    if typecheck is not None:
+167      self._headers[Header.Typecheck] = str(typecheck).lower()
+168
+169    if linearized is not None:
+170      self._headers[Header.Linearized] = str(linearized).lower()
+171
+172    if max_contention_retries is not None and max_contention_retries > 0:
+173      self._headers[Header.MaxContentionRetries] = \
+174          f"{max_contention_retries}"
+175
+176    if additional_headers is not None:
+177      self._headers = {
+178          **self._headers,
+179          **additional_headers,
+180      }
+181
+182    self._session: HTTPClient
+183
+184    if http_client is not None:
+185      self._session = http_client
+186    else:
+187      if fauna.global_http_client is None:
+188        timeout_s: Optional[float] = None
+189        if query_timeout is not None and client_buffer_timeout is not None:
+190          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+191        read_timeout_s: Optional[float] = None
+192        if http_read_timeout is not None:
+193          read_timeout_s = http_read_timeout.total_seconds()
+194
+195        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+196        ) if http_write_timeout is not None else None
+197        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+198        ) if http_connect_timeout is not None else None
+199        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+200        ) if http_pool_timeout is not None else None
+201        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+202        ) if http_idle_timeout is not None else None
+203
+204        import httpx
+205        from fauna.http.httpx_client import HTTPXClient
+206        c = HTTPXClient(
+207            httpx.Client(
+208                http1=True,
+209                http2=False,
+210                timeout=httpx.Timeout(
+211                    timeout=timeout_s,
+212                    connect=connect_timeout_s,
+213                    read=read_timeout_s,
+214                    write=write_timeout_s,
+215                    pool=pool_timeout_s,
+216                ),
+217                limits=httpx.Limits(
+218                    max_connections=DefaultMaxConnections,
+219                    max_keepalive_connections=DefaultMaxIdleConnections,
+220                    keepalive_expiry=idle_timeout_s,
+221                ),
+222            ), logger)
+223        fauna.global_http_client = c
+224
+225      self._session = fauna.global_http_client
+226
+227  def close(self):
+228    self._session.close()
+229    if self._session == fauna.global_http_client:
+230      fauna.global_http_client = None
+231
+232  def set_last_txn_ts(self, txn_ts: int):
+233    """
+234        Set the last timestamp seen by this client.
+235        This has no effect if earlier than the stored timestamp.
+236
+237        .. WARNING:: This should be used only when coordinating timestamps across
+238        multiple clients. Moving the timestamp arbitrarily forward into
+239        the future will cause transactions to stall.
+240
+241        :param txn_ts: the new transaction time.
+242        """
+243    self._last_txn_ts.update_txn_time(txn_ts)
+244
+245  def get_last_txn_ts(self) -> Optional[int]:
+246    """
+247        Get the last timestamp seen by this client.
+248        :return:
+249        """
+250    return self._last_txn_ts.time
+251
+252  def get_query_timeout(self) -> Optional[timedelta]:
+253    """
+254        Get the query timeout for all queries.
+255        """
+256    if self._query_timeout_ms is not None:
+257      return timedelta(milliseconds=self._query_timeout_ms)
+258    else:
+259      return None
+260
+261  def paginate(
+262      self,
+263      fql: Query,
+264      opts: Optional[QueryOptions] = None,
+265  ) -> "QueryIterator":
+266    """
+267        Run a query on Fauna and return an iterator of results. If the query
+268        returns a Page, the iterator will fetch additional Pages until the
+269        after token is null. Each call for a page will be retried with exponential
+270        backoff up to the max_attempts set in the client's retry policy in the
+271        event of a 429 or 502.
+272
+273        :param fql: A Query
+274        :param opts: (Optional) Query Options
+275
+276        :return: a :class:`QueryIterator`
+277
+278        :raises NetworkError: HTTP Request failed in transit
+279        :raises ProtocolError: HTTP error not from Fauna
+280        :raises ServiceError: Fauna returned an error
+281        :raises ValueError: Encoding and decoding errors
+282        :raises TypeError: Invalid param types
+283        """
+284
+285    if not isinstance(fql, Query):
+286      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+287                 f"Query by calling fauna.fql()"
+288      raise TypeError(err_msg)
+289
+290    return QueryIterator(self, fql, opts)
+291
+292  def query(
+293      self,
+294      fql: Query,
+295      opts: Optional[QueryOptions] = None,
+296  ) -> QuerySuccess:
+297    """
+298        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+299        up to the max_backoff in the event of a 429.
+300
+301        :param fql: A Query
+302        :param opts: (Optional) Query Options
+303
+304        :return: a :class:`QueryResponse`
+305
+306        :raises NetworkError: HTTP Request failed in transit
+307        :raises ProtocolError: HTTP error not from Fauna
+308        :raises ServiceError: Fauna returned an error
+309        :raises ValueError: Encoding and decoding errors
+310        :raises TypeError: Invalid param types
+311        """
+312
+313    if not isinstance(fql, Query):
+314      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+315                 f"Query by calling fauna.fql()"
+316      raise TypeError(err_msg)
+317
+318    try:
+319      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+320    except Exception as e:
+321      raise ClientError("Failed to encode Query") from e
+322
+323    retryable = Retryable[QuerySuccess](
+324        self._max_attempts,
+325        self._max_backoff,
+326        self._query,
+327        "/query/1",
+328        fql=encoded_query,
+329        opts=opts,
+330    )
+331
+332    r = retryable.run()
+333    r.response.stats.attempts = r.attempts
+334    return r.response
+335
+336  def _query(
+337      self,
+338      path: str,
+339      fql: Mapping[str, Any],
+340      arguments: Optional[Mapping[str, Any]] = None,
+341      opts: Optional[QueryOptions] = None,
+342  ) -> QuerySuccess:
+343
+344    headers = self._headers.copy()
+345    headers[_Header.Format] = "tagged"
+346    headers[_Header.Authorization] = self._auth.bearer()
+347
+348    if self._query_timeout_ms is not None:
+349      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+350
+351    headers.update(self._last_txn_ts.request_header)
+352
+353    query_tags = {}
+354    if self._query_tags is not None:
+355      query_tags.update(self._query_tags)
+356
+357    if opts is not None:
+358      if opts.linearized is not None:
+359        headers[Header.Linearized] = str(opts.linearized).lower()
+360      if opts.max_contention_retries is not None:
+361        headers[Header.MaxContentionRetries] = \
+362            f"{opts.max_contention_retries}"
+363      if opts.traceparent is not None:
+364        headers[Header.Traceparent] = opts.traceparent
+365      if opts.query_timeout is not None:
+366        timeout_ms = f"{int(opts.query_timeout.total_seconds() * 1000)}"
+367        headers[Header.QueryTimeoutMs] = timeout_ms
+368      if opts.query_tags is not None:
+369        query_tags.update(opts.query_tags)
+370      if opts.typecheck is not None:
+371        headers[Header.Typecheck] = str(opts.typecheck).lower()
+372      if opts.additional_headers is not None:
+373        headers.update(opts.additional_headers)
+374
+375    if len(query_tags) > 0:
+376      headers[Header.Tags] = QueryTags.encode(query_tags)
+377
+378    data: dict[str, Any] = {
+379        "query": fql,
+380        "arguments": arguments or {},
+381    }
+382
+383    with self._session.request(
+384        method="POST",
+385        url=self._endpoint + path,
+386        headers=headers,
+387        data=data,
+388    ) as response:
+389      status_code = response.status_code()
+390      response_json = response.json()
+391      headers = response.headers()
+392
+393      self._check_protocol(response_json, status_code)
+394
+395      dec: Any = FaunaDecoder.decode(response_json)
+396
+397      if status_code > 399:
+398        FaunaError.parse_error_and_throw(dec, status_code)
+399
+400      if "txn_ts" in dec:
+401        self.set_last_txn_ts(int(response_json["txn_ts"]))
+402
+403      stats = QueryStats(dec["stats"]) if "stats" in dec else None
+404      summary = dec["summary"] if "summary" in dec else None
+405      query_tags = QueryTags.decode(
+406          dec["query_tags"]) if "query_tags" in dec else None
+407      txn_ts = dec["txn_ts"] if "txn_ts" in dec else None
+408      schema_version = dec["schema_version"] if "schema_version" in dec else None
+409      traceparent = headers.get("traceparent", None)
+410      static_type = dec["static_type"] if "static_type" in dec else None
+411
+412      return QuerySuccess(
+413          data=dec["data"],
+414          query_tags=query_tags,
+415          static_type=static_type,
+416          stats=stats,
+417          summary=summary,
+418          traceparent=traceparent,
+419          txn_ts=txn_ts,
+420          schema_version=schema_version,
+421      )
+422
+423  def stream(
+424      self,
+425      fql: Union[EventSource, Query],
+426      opts: StreamOptions = StreamOptions()
+427  ) -> "StreamIterator":
+428    """
+429        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+430
+431        :param fql: An EventSource or a Query that returns an EventSource.
+432        :param opts: (Optional) Stream Options.
+433
+434        :return: a :class:`StreamIterator`
+435
+436        :raises ClientError: Invalid options provided
+437        :raises NetworkError: HTTP Request failed in transit
+438        :raises ProtocolError: HTTP error not from Fauna
+439        :raises ServiceError: Fauna returned an error
+440        :raises ValueError: Encoding and decoding errors
+441        :raises TypeError: Invalid param types
+442        """
+443
+444    if isinstance(fql, Query):
+445      if opts.cursor is not None:
+446        raise ClientError(
+447            "The 'cursor' configuration can only be used with an event source.")
+448
+449      source = self.query(fql).data
+450    else:
+451      source = fql
+452
+453    if not isinstance(source, EventSource):
+454      err_msg = f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+455      raise TypeError(err_msg)
+456
+457    headers = self._headers.copy()
+458    headers[_Header.Format] = "tagged"
+459    headers[_Header.Authorization] = self._auth.bearer()
+460
+461    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+462                          self._max_attempts, self._max_backoff, opts, source)
+463
+464  def feed(
+465      self,
+466      source: Union[EventSource, Query],
+467      opts: FeedOptions = FeedOptions(),
+468  ) -> "FeedIterator":
+469    """
+470        Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.
+471
+472        :param source: An EventSource or a Query that returns an EventSource.
+473        :param opts: (Optional) Event Feed options.
+474
+475        :return: a :class:`FeedIterator`
+476
+477        :raises ClientError: Invalid options provided
+478        :raises NetworkError: HTTP Request failed in transit
+479        :raises ProtocolError: HTTP error not from Fauna
+480        :raises ServiceError: Fauna returned an error
+481        :raises ValueError: Encoding and decoding errors
+482        :raises TypeError: Invalid param types
+483        """
+484
+485    if isinstance(source, Query):
+486      source = self.query(source).data
+487
+488    if not isinstance(source, EventSource):
+489      err_msg = f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+490      raise TypeError(err_msg)
+491
+492    headers = self._headers.copy()
+493    headers[_Header.Format] = "tagged"
+494    headers[_Header.Authorization] = self._auth.bearer()
+495
+496    if opts.query_timeout is not None:
+497      query_timeout_ms = int(opts.query_timeout.total_seconds() * 1000)
+498      headers[Header.QueryTimeoutMs] = str(query_timeout_ms)
+499    elif self._query_timeout_ms is not None:
+500      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+501
+502    return FeedIterator(self._session, headers, self._endpoint + "/feed/1",
+503                        self._max_attempts, self._max_backoff, opts, source)
+504
+505  def _check_protocol(self, response_json: Any, status_code):
+506    # TODO: Logic to validate wire protocol belongs elsewhere.
+507    should_raise = False
+508
+509    # check for QuerySuccess
+510    if status_code <= 399 and "data" not in response_json:
+511      should_raise = True
+512
+513    # check for QueryFailure
+514    if status_code > 399:
+515      if "error" not in response_json:
+516        should_raise = True
+517      else:
+518        e = response_json["error"]
+519        if "code" not in e or "message" not in e:
+520          should_raise = True
+521
+522    if should_raise:
+523      raise ProtocolError(
+524          status_code,
+525          f"Response is in an unknown format: \n{response_json}",
+526      )
+527
+528  def _set_endpoint(self, endpoint):
+529    if endpoint is None:
+530      endpoint = _Environment.EnvFaunaEndpoint()
+531
+532    if endpoint[-1:] == "/":
+533      endpoint = endpoint[:-1]
+534
+535    self._endpoint = endpoint
+536
+537
+538class StreamIterator:
+539  """A class that mixes a ContextManager and an Iterator so we can detect retryable errors."""
+540
+541  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+542               endpoint: str, max_attempts: int, max_backoff: int,
+543               opts: StreamOptions, source: EventSource):
+544    self._http_client = http_client
+545    self._headers = headers
+546    self._endpoint = endpoint
+547    self._max_attempts = max_attempts
+548    self._max_backoff = max_backoff
+549    self._opts = opts
+550    self._source = source
+551    self._stream = None
+552    self.last_ts = None
+553    self.last_cursor = None
+554    self._ctx = self._create_stream()
+555
+556    if opts.start_ts is not None and opts.cursor is not None:
+557      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the StreamOptions."
+558      raise TypeError(err_msg)
+559
+560  def __enter__(self):
+561    return self
+562
+563  def __exit__(self, exc_type, exc_value, exc_traceback):
+564    if self._stream is not None:
+565      self._stream.close()
+566
+567    self._ctx.__exit__(exc_type, exc_value, exc_traceback)
+568    return False
+569
+570  def __iter__(self):
+571    return self
+572
+573  def __next__(self):
+574    if self._opts.max_attempts is not None:
+575      max_attempts = self._opts.max_attempts
+576    else:
+577      max_attempts = self._max_attempts
+578
+579    if self._opts.max_backoff is not None:
+580      max_backoff = self._opts.max_backoff
+581    else:
+582      max_backoff = self._max_backoff
+583
+584    retryable = Retryable[Any](max_attempts, max_backoff, self._next_element)
+585    return retryable.run().response
+586
+587  def _next_element(self):
+588    try:
+589      if self._stream is None:
+590        try:
+591          self._stream = self._ctx.__enter__()
+592        except Exception:
+593          self._retry_stream()
+594
+595      if self._stream is not None:
+596        event: Any = FaunaDecoder.decode(next(self._stream))
+597
+598        if event["type"] == "error":
+599          FaunaError.parse_error_and_throw(event, 400)
+600
+601        self.last_ts = event["txn_ts"]
+602        self.last_cursor = event.get('cursor')
+603
+604        if event["type"] == "start":
+605          return self._next_element()
+606
+607        if not self._opts.status_events and event["type"] == "status":
+608          return self._next_element()
+609
+610        return event
+611
+612      raise StopIteration
+613    except NetworkError:
+614      self._retry_stream()
+615
+616  def _retry_stream(self):
+617    if self._stream is not None:
+618      self._stream.close()
+619
+620    self._stream = None
+621
+622    try:
+623      self._ctx = self._create_stream()
+624    except Exception:
+625      pass
+626    raise RetryableFaunaException
+627
+628  def _create_stream(self):
+629    data: Dict[str, Any] = {"token": self._source.token}
+630    if self.last_cursor is not None:
+631      data["cursor"] = self.last_cursor
+632    elif self._opts.cursor is not None:
+633      data["cursor"] = self._opts.cursor
+634    elif self._opts.start_ts is not None:
+635      data["start_ts"] = self._opts.start_ts
+636
+637    return self._http_client.stream(
+638        url=self._endpoint, headers=self._headers, data=data)
+639
+640  def close(self):
+641    if self._stream is not None:
+642      self._stream.close()
+643
+644
+645class FeedPage:
+646
+647  def __init__(self, events: List[Any], cursor: str, stats: QueryStats):
+648    self._events = events
+649    self.cursor = cursor
+650    self.stats = stats
+651
+652  def __len__(self):
+653    return len(self._events)
+654
+655  def __iter__(self) -> Iterator[Any]:
+656    for event in self._events:
+657      if event["type"] == "error":
+658        FaunaError.parse_error_and_throw(event, 400)
+659      yield event
+660
+661
+662class FeedIterator:
+663  """A class to provide an iterator on top of Event Feed pages."""
+664
+665  def __init__(self, http: HTTPClient, headers: Dict[str, str], endpoint: str,
+666               max_attempts: int, max_backoff: int, opts: FeedOptions,
+667               source: EventSource):
+668    self._http = http
+669    self._headers = headers
+670    self._endpoint = endpoint
+671    self._max_attempts = opts.max_attempts or max_attempts
+672    self._max_backoff = opts.max_backoff or max_backoff
+673    self._request: Dict[str, Any] = {"token": source.token}
+674    self._is_done = False
+675
+676    if opts.start_ts is not None and opts.cursor is not None:
+677      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the FeedOptions."
+678      raise TypeError(err_msg)
+679
+680    if opts.page_size is not None:
+681      self._request["page_size"] = opts.page_size
+682
+683    if opts.cursor is not None:
+684      self._request["cursor"] = opts.cursor
+685    elif opts.start_ts is not None:
+686      self._request["start_ts"] = opts.start_ts
+687
+688  def __iter__(self) -> Iterator[FeedPage]:
+689    self._is_done = False
+690    return self
+691
+692  def __next__(self) -> FeedPage:
+693    if self._is_done:
+694      raise StopIteration
+695
+696    retryable = Retryable[Any](self._max_attempts, self._max_backoff,
+697                               self._next_page)
+698    return retryable.run().response
+699
+700  def _next_page(self) -> FeedPage:
+701    with self._http.request(
+702        method="POST",
+703        url=self._endpoint,
+704        headers=self._headers,
+705        data=self._request,
+706    ) as response:
+707      status_code = response.status_code()
+708      decoded: Any = FaunaDecoder.decode(response.json())
+709
+710      if status_code > 399:
+711        FaunaError.parse_error_and_throw(decoded, status_code)
+712
+713      self._is_done = not decoded["has_next"]
+714      self._request["cursor"] = decoded["cursor"]
+715
+716      if "start_ts" in self._request:
+717        del self._request["start_ts"]
+718
+719      return FeedPage(decoded["events"], decoded["cursor"],
+720                      QueryStats(decoded["stats"]))
+721
+722  def flatten(self) -> Iterator:
+723    """A generator that yields events instead of pages of events."""
+724    for page in self:
+725      for event in page:
+726        yield event
+727
+728
+729class QueryIterator:
+730  """A class to provide an iterator on top of Fauna queries."""
+731
+732  def __init__(self,
+733               client: Client,
+734               fql: Query,
+735               opts: Optional[QueryOptions] = None):
+736    """Initializes the QueryIterator
+737
+738        :param fql: A Query
+739        :param opts: (Optional) Query Options
+740
+741        :raises TypeError: Invalid param types
+742        """
+743    if not isinstance(client, Client):
+744      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+745                  f"Client by calling fauna.client.Client()"
+746      raise TypeError(err_msg)
+747
+748    if not isinstance(fql, Query):
+749      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+750                 f"Query by calling fauna.fql()"
+751      raise TypeError(err_msg)
+752
+753    self.client = client
+754    self.fql = fql
+755    self.opts = opts
+756
+757  def __iter__(self) -> Iterator:
+758    return self.iter()
+759
+760  def iter(self) -> Iterator:
+761    """
+762        A generator function that immediately fetches and yields the results of
+763        the stored query. Yields additional pages on subsequent iterations if
+764        they exist
+765        """
+766
+767    cursor = None
+768    initial_response = self.client.query(self.fql, self.opts)
+769
+770    if isinstance(initial_response.data, Page):
+771      cursor = initial_response.data.after
+772      yield initial_response.data.data
+773
+774      while cursor is not None:
+775        next_response = self.client.query(
+776            fql("Set.paginate(${after})", after=cursor), self.opts)
+777        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+778        #       so we will get back a plain object that might not have
+779        #       an after property.
+780        cursor = next_response.data.get("after")
+781        yield next_response.data.get("data")
+782
+783    else:
+784      yield [initial_response.data]
+785
+786  def flatten(self) -> Iterator:
+787    """
+788        A generator function that immediately fetches and yields the results of
+789        the stored query. Yields each item individually, rather than a whole
+790        Page at a time. Fetches additional pages as required if they exist.
+791        """
+792
+793    for page in self.iter():
+794      for item in page:
+795        yield item
+
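A sketch of iterating paginated results with Client.paginate; the Product collection name is a placeholder:

from fauna import fql
from fauna.client import Client

client = Client()

# paginate() returns a QueryIterator; each iteration yields one page as a list.
for page in client.paginate(fql("Product.all()")):
    for product in page:
        print(product)

# flatten() yields individual items, fetching further pages as required.
for product in client.paginate(fql("Product.all()")).flatten():
    print(product)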
+ + +
+
+
+ logger = +<Logger fauna (WARNING)> + + +
+ + + + +
+
+
+ DefaultHttpConnectTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultHttpReadTimeout: Optional[datetime.timedelta] = +None + + +
+ + + + +
+
+
+ DefaultHttpWriteTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultHttpPoolTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultIdleConnectionTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultQueryTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultClientBufferTimeout = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ DefaultMaxConnections = +20 + + +
+ + + + +
+
+
+ DefaultMaxIdleConnections = +20 + + +
+ + + + +
+
+ +
+
@dataclass
+ + class + QueryOptions: + + + +
+ +
31@dataclass
+32class QueryOptions:
+33  """
+34    A dataclass representing options available for a query.
+35
+36    * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+37    * max_contention_retries - The max number of times to retry the query if contention is encountered.
+38    * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+39    * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+40    * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
+41    * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+42    * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+43    """
+44
+45  linearized: Optional[bool] = None
+46  max_contention_retries: Optional[int] = None
+47  query_timeout: Optional[timedelta] = DefaultQueryTimeout
+48  query_tags: Optional[Mapping[str, str]] = None
+49  traceparent: Optional[str] = None
+50  typecheck: Optional[bool] = None
+51  additional_headers: Optional[Dict[str, str]] = None
+
+ + +

A dataclass representing options available for a query.

+ +
    +
  • linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • +
  • max_contention_retries - The max number of times to retry the query if contention is encountered.
  • +
  • query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
  • +
  • query_tags - Tags to associate with the query. See logging
  • +
  • traceparent - A traceparent to associate with the query. See logging Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
  • +
  • typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
  • +
  • additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
  • +
+
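For illustration, a per-query override sketch; the collection name and tag values are placeholders, not part of the API:

from datetime import timedelta

from fauna import fql
from fauna.client import Client, QueryOptions

client = Client()

opts = QueryOptions(
    query_timeout=timedelta(seconds=10),  # overrides the client-level timeout for this query
    query_tags={"env": "staging"},        # surfaces in Fauna query logs
    typecheck=True,
)
result = client.query(fql("Product.all().count()"), opts)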
+ + +
+
+ + QueryOptions( linearized: Optional[bool] = None, max_contention_retries: Optional[int] = None, query_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), query_tags: Optional[Mapping[str, str]] = None, traceparent: Optional[str] = None, typecheck: Optional[bool] = None, additional_headers: Optional[Dict[str, str]] = None) + + +
+ + + + +
+
+
+ linearized: Optional[bool] = +None + + +
+ + + + +
+
+
+ max_contention_retries: Optional[int] = +None + + +
+ + + + +
+
+
+ query_timeout: Optional[datetime.timedelta] = +datetime.timedelta(seconds=5) + + +
+ + + + +
+
+
+ query_tags: Optional[Mapping[str, str]] = +None + + +
+ + + + +
+
+
+ traceparent: Optional[str] = +None + + +
+ + + + +
+
+
+ typecheck: Optional[bool] = +None + + +
+ + + + +
+
+
+ additional_headers: Optional[Dict[str, str]] = +None + + +
+ + + + +
+
+
+ +
+
@dataclass
+ + class + StreamOptions: + + + +
+ +
54@dataclass
+55class StreamOptions:
+56  """
+57    A dataclass representing options available for a stream.
+58
+59    * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+60    * max_backoff - The maximum backoff in seconds for an individual retry.
+61    * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+62    the timestamp.
+63    * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+64    * status_events - Indicates if stream should include status events. Status events are periodic events that
+65    update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+66    about the cost of maintaining the stream other than the cost of the received events.
+67    """
+68
+69  max_attempts: Optional[int] = None
+70  max_backoff: Optional[int] = None
+71  start_ts: Optional[int] = None
+72  cursor: Optional[str] = None
+73  status_events: bool = False
+
+ + +

A dataclass representing options available for a stream.

+ +
    +
  • max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
  • +
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • +
  • start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after +the timestamp.
  • +
  • cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  • +
  • status_events - Indicates if stream should include status events. Status events are periodic events that +update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics +about the cost of maintaining the stream other than the cost of the received events.
  • +
+
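A sketch of consuming a stream with these options. The Product collection and the FQL eventSource() call are assumptions; any query that returns an EventSource works the same way:

from fauna import fql
from fauna.client import Client, StreamOptions

client = Client()

opts = StreamOptions(status_events=True, max_attempts=5)
with client.stream(fql("Product.all().eventSource()"), opts) as events:
    for event in events:
        # event["type"] is e.g. "add", "update", "remove", or "status"
        print(event["type"], event.get("data"))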
+ + +
+
+ + StreamOptions( max_attempts: Optional[int] = None, max_backoff: Optional[int] = None, start_ts: Optional[int] = None, cursor: Optional[str] = None, status_events: bool = False) + + +
+ + + + +
+
+
+ max_attempts: Optional[int] = +None + + +
+ + + + +
+
+
+ max_backoff: Optional[int] = +None + + +
+ + + + +
+
+
+ start_ts: Optional[int] = +None + + +
+ + + + +
+
+
+ cursor: Optional[str] = +None + + +
+ + + + +
+
+
+ status_events: bool = +False + + +
+ + + + +
+
+
+ +
+
@dataclass
+ + class + FeedOptions: + + + +
+ +
76@dataclass
+77class FeedOptions:
+78  """
+79    A dataclass representing options available for an Event Feed.
+80
+81    * max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
+82    * max_backoff - The maximum backoff in seconds for an individual retry.
+83    * query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
+84    * start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after
+85    the timestamp.
+86    * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+87    * page_size - The desired number of events per page.
+88    """
+89  max_attempts: Optional[int] = None
+90  max_backoff: Optional[int] = None
+91  query_timeout: Optional[timedelta] = None
+92  page_size: Optional[int] = None
+93  start_ts: Optional[int] = None
+94  cursor: Optional[str] = None
+
+ + +

A dataclass representing options available for an Event Feed.

+ +
    +
  • max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
  • +
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • +
  • query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
  • +
  • start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after +the timestamp.
  • +
  • cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  • +
  • page_size - The desired number of events per page.
  • +
+
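A sketch of reading an Event Feed page by page with these options; the collection name and the eventSource() call are assumptions:

from fauna import fql
from fauna.client import Client, FeedOptions

client = Client()

opts = FeedOptions(page_size=25)
feed = client.feed(fql("Product.all().eventSource()"), opts)

for page in feed:           # each page is a FeedPage
    for event in page:      # iterating a page yields decoded events
        print(event["type"], event.get("data"))
    # page.cursor can be persisted and passed back later via FeedOptions(cursor=...)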
+ + +
+
+ + FeedOptions( max_attempts: Optional[int] = None, max_backoff: Optional[int] = None, query_timeout: Optional[datetime.timedelta] = None, page_size: Optional[int] = None, start_ts: Optional[int] = None, cursor: Optional[str] = None) + + +
+ + + + +
+
+
+ max_attempts: Optional[int] = +None + + +
+ + + + +
+
+
+ max_backoff: Optional[int] = +None + + +
+ + + + +
+
+
+ query_timeout: Optional[datetime.timedelta] = +None + + +
+ + + + +
+
+
+ page_size: Optional[int] = +None + + +
+ + + + +
+
+
+ start_ts: Optional[int] = +None + + +
+ + + + +
+
+
+ cursor: Optional[str] = +None + + +
+ + + + +
+
+
+ +
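Before the full class reference, a construction sketch showing the most common settings; the endpoint and secret values are placeholders:

from datetime import timedelta

from fauna.client import Client

# Omitted arguments fall back to the module defaults above or to the
# FAUNA_ENDPOINT / FAUNA_SECRET environment variables.
client = Client(
    endpoint="https://db.fauna.com",
    secret="YOUR_FAUNA_SECRET",
    query_timeout=timedelta(seconds=10),
    max_attempts=3,
    max_backoff=20,
)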
+ + class + Client: + + + +
+ +
 97class Client:
+ 98
+ 99  def __init__(
+100      self,
+101      endpoint: Optional[str] = None,
+102      secret: Optional[str] = None,
+103      http_client: Optional[HTTPClient] = None,
+104      query_tags: Optional[Mapping[str, str]] = None,
+105      linearized: Optional[bool] = None,
+106      max_contention_retries: Optional[int] = None,
+107      typecheck: Optional[bool] = None,
+108      additional_headers: Optional[Dict[str, str]] = None,
+109      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+110      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+111      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+112      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+113      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+114      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+115      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+116      max_attempts: int = 3,
+117      max_backoff: int = 20,
+118  ):
+119    """Initializes a Client.
+120
+121        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+122        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+123        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+124        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+125        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+126        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+127        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+128        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+129        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+130        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+131        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+132        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+133        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+134        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+135        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+136        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+137        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+138        """
+139
+140    self._set_endpoint(endpoint)
+141    self._max_attempts = max_attempts
+142    self._max_backoff = max_backoff
+143
+144    if secret is None:
+145      self._auth = _Auth(_Environment.EnvFaunaSecret())
+146    else:
+147      self._auth = _Auth(secret)
+148
+149    self._last_txn_ts = LastTxnTs()
+150
+151    self._query_tags = {}
+152    if query_tags is not None:
+153      self._query_tags.update(query_tags)
+154
+155    if query_timeout is not None:
+156      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+157    else:
+158      self._query_timeout_ms = None
+159
+160    self._headers: Dict[str, str] = {
+161        _Header.AcceptEncoding: "gzip",
+162        _Header.ContentType: "application/json;charset=utf-8",
+163        _Header.Driver: "python",
+164        _Header.DriverEnv: str(_DriverEnvironment()),
+165    }
+166
+167    if typecheck is not None:
+168      self._headers[Header.Typecheck] = str(typecheck).lower()
+169
+170    if linearized is not None:
+171      self._headers[Header.Linearized] = str(linearized).lower()
+172
+173    if max_contention_retries is not None and max_contention_retries > 0:
+174      self._headers[Header.MaxContentionRetries] = \
+175          f"{max_contention_retries}"
+176
+177    if additional_headers is not None:
+178      self._headers = {
+179          **self._headers,
+180          **additional_headers,
+181      }
+182
+183    self._session: HTTPClient
+184
+185    if http_client is not None:
+186      self._session = http_client
+187    else:
+188      if fauna.global_http_client is None:
+189        timeout_s: Optional[float] = None
+190        if query_timeout is not None and client_buffer_timeout is not None:
+191          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+192        read_timeout_s: Optional[float] = None
+193        if http_read_timeout is not None:
+194          read_timeout_s = http_read_timeout.total_seconds()
+195
+196        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+197        ) if http_write_timeout is not None else None
+198        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+199        ) if http_connect_timeout is not None else None
+200        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+201        ) if http_pool_timeout is not None else None
+202        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+203        ) if http_idle_timeout is not None else None
+204
+205        import httpx
+206        from fauna.http.httpx_client import HTTPXClient
+207        c = HTTPXClient(
+208            httpx.Client(
+209                http1=True,
+210                http2=False,
+211                timeout=httpx.Timeout(
+212                    timeout=timeout_s,
+213                    connect=connect_timeout_s,
+214                    read=read_timeout_s,
+215                    write=write_timeout_s,
+216                    pool=pool_timeout_s,
+217                ),
+218                limits=httpx.Limits(
+219                    max_connections=DefaultMaxConnections,
+220                    max_keepalive_connections=DefaultMaxIdleConnections,
+221                    keepalive_expiry=idle_timeout_s,
+222                ),
+223            ), logger)
+224        fauna.global_http_client = c
+225
+226      self._session = fauna.global_http_client
+227
+228  def close(self):
+229    self._session.close()
+230    if self._session == fauna.global_http_client:
+231      fauna.global_http_client = None
+232
+233  def set_last_txn_ts(self, txn_ts: int):
+234    """
+235        Set the last timestamp seen by this client.
+236        This has no effect if earlier than the stored timestamp.
+237
+238        .. WARNING:: This should be used only when coordinating timestamps across
+239        multiple clients. Moving the timestamp arbitrarily forward into
+240        the future will cause transactions to stall.
+241
+242        :param txn_ts: the new transaction time.
+243        """
+244    self._last_txn_ts.update_txn_time(txn_ts)
+245
+246  def get_last_txn_ts(self) -> Optional[int]:
+247    """
+248        Get the last timestamp seen by this client.
+249        :return:
+250        """
+251    return self._last_txn_ts.time
+252
+253  def get_query_timeout(self) -> Optional[timedelta]:
+254    """
+255        Get the query timeout for all queries.
+256        """
+257    if self._query_timeout_ms is not None:
+258      return timedelta(milliseconds=self._query_timeout_ms)
+259    else:
+260      return None
+261
+262  def paginate(
+263      self,
+264      fql: Query,
+265      opts: Optional[QueryOptions] = None,
+266  ) -> "QueryIterator":
+267    """
+268        Run a query on Fauna and return an iterator of results. If the query
+269        returns a Page, the iterator will fetch additional Pages until the
+270        after token is null. Each call for a page will be retried with exponential
+271        backoff up to the max_attempts set in the client's retry policy in the
+272        event of a 429 or 502.
+273
+274        :param fql: A Query
+275        :param opts: (Optional) Query Options
+276
+277        :return: a :class:`QueryIterator`
+278
+279        :raises NetworkError: HTTP Request failed in transit
+280        :raises ProtocolError: HTTP error not from Fauna
+281        :raises ServiceError: Fauna returned an error
+282        :raises ValueError: Encoding and decoding errors
+283        :raises TypeError: Invalid param types
+284        """
+285
+286    if not isinstance(fql, Query):
+287      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+288                 f"Query by calling fauna.fql()"
+289      raise TypeError(err_msg)
+290
+291    return QueryIterator(self, fql, opts)
+292
+293  def query(
+294      self,
+295      fql: Query,
+296      opts: Optional[QueryOptions] = None,
+297  ) -> QuerySuccess:
+298    """
+299        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+300        up to the max_backoff in the event of a 429.
+301
+302        :param fql: A Query
+303        :param opts: (Optional) Query Options
+304
+305        :return: a :class:`QueryResponse`
+306
+307        :raises NetworkError: HTTP Request failed in transit
+308        :raises ProtocolError: HTTP error not from Fauna
+309        :raises ServiceError: Fauna returned an error
+310        :raises ValueError: Encoding and decoding errors
+311        :raises TypeError: Invalid param types
+312        """
+313
+314    if not isinstance(fql, Query):
+315      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+316                 f"Query by calling fauna.fql()"
+317      raise TypeError(err_msg)
+318
+319    try:
+320      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+321    except Exception as e:
+322      raise ClientError("Failed to encode Query") from e
+323
+324    retryable = Retryable[QuerySuccess](
+325        self._max_attempts,
+326        self._max_backoff,
+327        self._query,
+328        "/query/1",
+329        fql=encoded_query,
+330        opts=opts,
+331    )
+332
+333    r = retryable.run()
+334    r.response.stats.attempts = r.attempts
+335    return r.response
+336
+337  def _query(
+338      self,
+339      path: str,
+340      fql: Mapping[str, Any],
+341      arguments: Optional[Mapping[str, Any]] = None,
+342      opts: Optional[QueryOptions] = None,
+343  ) -> QuerySuccess:
+344
+345    headers = self._headers.copy()
+346    headers[_Header.Format] = "tagged"
+347    headers[_Header.Authorization] = self._auth.bearer()
+348
+349    if self._query_timeout_ms is not None:
+350      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+351
+352    headers.update(self._last_txn_ts.request_header)
+353
+354    query_tags = {}
+355    if self._query_tags is not None:
+356      query_tags.update(self._query_tags)
+357
+358    if opts is not None:
+359      if opts.linearized is not None:
+360        headers[Header.Linearized] = str(opts.linearized).lower()
+361      if opts.max_contention_retries is not None:
+362        headers[Header.MaxContentionRetries] = \
+363            f"{opts.max_contention_retries}"
+364      if opts.traceparent is not None:
+365        headers[Header.Traceparent] = opts.traceparent
+366      if opts.query_timeout is not None:
+367        timeout_ms = f"{int(opts.query_timeout.total_seconds() * 1000)}"
+368        headers[Header.QueryTimeoutMs] = timeout_ms
+369      if opts.query_tags is not None:
+370        query_tags.update(opts.query_tags)
+371      if opts.typecheck is not None:
+372        headers[Header.Typecheck] = str(opts.typecheck).lower()
+373      if opts.additional_headers is not None:
+374        headers.update(opts.additional_headers)
+375
+376    if len(query_tags) > 0:
+377      headers[Header.Tags] = QueryTags.encode(query_tags)
+378
+379    data: dict[str, Any] = {
+380        "query": fql,
+381        "arguments": arguments or {},
+382    }
+383
+384    with self._session.request(
+385        method="POST",
+386        url=self._endpoint + path,
+387        headers=headers,
+388        data=data,
+389    ) as response:
+390      status_code = response.status_code()
+391      response_json = response.json()
+392      headers = response.headers()
+393
+394      self._check_protocol(response_json, status_code)
+395
+396      dec: Any = FaunaDecoder.decode(response_json)
+397
+398      if status_code > 399:
+399        FaunaError.parse_error_and_throw(dec, status_code)
+400
+401      if "txn_ts" in dec:
+402        self.set_last_txn_ts(int(response_json["txn_ts"]))
+403
+404      stats = QueryStats(dec["stats"]) if "stats" in dec else None
+405      summary = dec["summary"] if "summary" in dec else None
+406      query_tags = QueryTags.decode(
+407          dec["query_tags"]) if "query_tags" in dec else None
+408      txn_ts = dec["txn_ts"] if "txn_ts" in dec else None
+409      schema_version = dec["schema_version"] if "schema_version" in dec else None
+410      traceparent = headers.get("traceparent", None)
+411      static_type = dec["static_type"] if "static_type" in dec else None
+412
+413      return QuerySuccess(
+414          data=dec["data"],
+415          query_tags=query_tags,
+416          static_type=static_type,
+417          stats=stats,
+418          summary=summary,
+419          traceparent=traceparent,
+420          txn_ts=txn_ts,
+421          schema_version=schema_version,
+422      )
+423
+424  def stream(
+425      self,
+426      fql: Union[EventSource, Query],
+427      opts: StreamOptions = StreamOptions()
+428  ) -> "StreamIterator":
+429    """
+430        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+431
+432        :param fql: An EventSource or a Query that returns an EventSource.
+433        :param opts: (Optional) Stream Options.
+434
+435        :return: a :class:`StreamIterator`
+436
+437        :raises ClientError: Invalid options provided
+438        :raises NetworkError: HTTP Request failed in transit
+439        :raises ProtocolError: HTTP error not from Fauna
+440        :raises ServiceError: Fauna returned an error
+441        :raises ValueError: Encoding and decoding errors
+442        :raises TypeError: Invalid param types
+443        """
+444
+445    if isinstance(fql, Query):
+446      if opts.cursor is not None:
+447        raise ClientError(
+448            "The 'cursor' configuration can only be used with an event source.")
+449
+450      source = self.query(fql).data
+451    else:
+452      source = fql
+453
+454    if not isinstance(source, EventSource):
+455      err_msg = f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+456      raise TypeError(err_msg)
+457
+458    headers = self._headers.copy()
+459    headers[_Header.Format] = "tagged"
+460    headers[_Header.Authorization] = self._auth.bearer()
+461
+462    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+463                          self._max_attempts, self._max_backoff, opts, source)
+464
+465  def feed(
+466      self,
+467      source: Union[EventSource, Query],
+468      opts: FeedOptions = FeedOptions(),
+469  ) -> "FeedIterator":
+470    """
+471        Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.
+472
+473        :param source: An EventSource or a Query that returns an EventSource.
+474        :param opts: (Optional) Event Feed options.
+475
+476        :return: a :class:`FeedIterator`
+477
+478        :raises ClientError: Invalid options provided
+479        :raises NetworkError: HTTP Request failed in transit
+480        :raises ProtocolError: HTTP error not from Fauna
+481        :raises ServiceError: Fauna returned an error
+482        :raises ValueError: Encoding and decoding errors
+483        :raises TypeError: Invalid param types
+484        """
+485
+486    if isinstance(source, Query):
+487      source = self.query(source).data
+488
+489    if not isinstance(source, EventSource):
+490      err_msg = f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+491      raise TypeError(err_msg)
+492
+493    headers = self._headers.copy()
+494    headers[_Header.Format] = "tagged"
+495    headers[_Header.Authorization] = self._auth.bearer()
+496
+497    if opts.query_timeout is not None:
+498      query_timeout_ms = int(opts.query_timeout.total_seconds() * 1000)
+499      headers[Header.QueryTimeoutMs] = str(query_timeout_ms)
+500    elif self._query_timeout_ms is not None:
+501      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+502
+503    return FeedIterator(self._session, headers, self._endpoint + "/feed/1",
+504                        self._max_attempts, self._max_backoff, opts, source)
+505
+506  def _check_protocol(self, response_json: Any, status_code):
+507    # TODO: Logic to validate wire protocol belongs elsewhere.
+508    should_raise = False
+509
+510    # check for QuerySuccess
+511    if status_code <= 399 and "data" not in response_json:
+512      should_raise = True
+513
+514    # check for QueryFailure
+515    if status_code > 399:
+516      if "error" not in response_json:
+517        should_raise = True
+518      else:
+519        e = response_json["error"]
+520        if "code" not in e or "message" not in e:
+521          should_raise = True
+522
+523    if should_raise:
+524      raise ProtocolError(
+525          status_code,
+526          f"Response is in an unknown format: \n{response_json}",
+527      )
+528
+529  def _set_endpoint(self, endpoint):
+530    if endpoint is None:
+531      endpoint = _Environment.EnvFaunaEndpoint()
+532
+533    if endpoint[-1:] == "/":
+534      endpoint = endpoint[:-1]
+535
+536    self._endpoint = endpoint
+
+ + + + +
+ +
+ + Client( endpoint: Optional[str] = None, secret: Optional[str] = None, http_client: Optional[fauna.http.http_client.HTTPClient] = None, query_tags: Optional[Mapping[str, str]] = None, linearized: Optional[bool] = None, max_contention_retries: Optional[int] = None, typecheck: Optional[bool] = None, additional_headers: Optional[Dict[str, str]] = None, query_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), client_buffer_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_read_timeout: Optional[datetime.timedelta] = None, http_write_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_connect_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_pool_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_idle_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), max_attempts: int = 3, max_backoff: int = 20) + + + +
+ +
 99  def __init__(
+100      self,
+101      endpoint: Optional[str] = None,
+102      secret: Optional[str] = None,
+103      http_client: Optional[HTTPClient] = None,
+104      query_tags: Optional[Mapping[str, str]] = None,
+105      linearized: Optional[bool] = None,
+106      max_contention_retries: Optional[int] = None,
+107      typecheck: Optional[bool] = None,
+108      additional_headers: Optional[Dict[str, str]] = None,
+109      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+110      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+111      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+112      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+113      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+114      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+115      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+116      max_attempts: int = 3,
+117      max_backoff: int = 20,
+118  ):
+119    """Initializes a Client.
+120
+121        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+122        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+123        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+124        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+125        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+126        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+127        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+128        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+129        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+130        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+131        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+132        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+133        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+134        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+135        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+136        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+137        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+138        """
+139
+140    self._set_endpoint(endpoint)
+141    self._max_attempts = max_attempts
+142    self._max_backoff = max_backoff
+143
+144    if secret is None:
+145      self._auth = _Auth(_Environment.EnvFaunaSecret())
+146    else:
+147      self._auth = _Auth(secret)
+148
+149    self._last_txn_ts = LastTxnTs()
+150
+151    self._query_tags = {}
+152    if query_tags is not None:
+153      self._query_tags.update(query_tags)
+154
+155    if query_timeout is not None:
+156      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+157    else:
+158      self._query_timeout_ms = None
+159
+160    self._headers: Dict[str, str] = {
+161        _Header.AcceptEncoding: "gzip",
+162        _Header.ContentType: "application/json;charset=utf-8",
+163        _Header.Driver: "python",
+164        _Header.DriverEnv: str(_DriverEnvironment()),
+165    }
+166
+167    if typecheck is not None:
+168      self._headers[Header.Typecheck] = str(typecheck).lower()
+169
+170    if linearized is not None:
+171      self._headers[Header.Linearized] = str(linearized).lower()
+172
+173    if max_contention_retries is not None and max_contention_retries > 0:
+174      self._headers[Header.MaxContentionRetries] = \
+175          f"{max_contention_retries}"
+176
+177    if additional_headers is not None:
+178      self._headers = {
+179          **self._headers,
+180          **additional_headers,
+181      }
+182
+183    self._session: HTTPClient
+184
+185    if http_client is not None:
+186      self._session = http_client
+187    else:
+188      if fauna.global_http_client is None:
+189        timeout_s: Optional[float] = None
+190        if query_timeout is not None and client_buffer_timeout is not None:
+191          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+192        read_timeout_s: Optional[float] = None
+193        if http_read_timeout is not None:
+194          read_timeout_s = http_read_timeout.total_seconds()
+195
+196        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+197        ) if http_write_timeout is not None else None
+198        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+199        ) if http_connect_timeout is not None else None
+200        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+201        ) if http_pool_timeout is not None else None
+202        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+203        ) if http_idle_timeout is not None else None
+204
+205        import httpx
+206        from fauna.http.httpx_client import HTTPXClient
+207        c = HTTPXClient(
+208            httpx.Client(
+209                http1=True,
+210                http2=False,
+211                timeout=httpx.Timeout(
+212                    timeout=timeout_s,
+213                    connect=connect_timeout_s,
+214                    read=read_timeout_s,
+215                    write=write_timeout_s,
+216                    pool=pool_timeout_s,
+217                ),
+218                limits=httpx.Limits(
+219                    max_connections=DefaultMaxConnections,
+220                    max_keepalive_connections=DefaultMaxIdleConnections,
+221                    keepalive_expiry=idle_timeout_s,
+222                ),
+223            ), logger)
+224        fauna.global_http_client = c
+225
+226      self._session = fauna.global_http_client
+
+ + +

Initializes a Client.

+ +
Parameters
+ +
    +
  • endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the FAUNA_ENDPOINT env variable.
  • +
  • secret: The Fauna Secret to use. Defaults to empty, or the FAUNA_SECRET env variable.
  • +
  • http_client: An HTTPClient implementation. Defaults to a global HTTPXClient.
  • +
  • query_tags: Tags to associate with the query. See logging
  • +
  • linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • +
  • max_contention_retries: The max number of times to retry the query if contention is encountered.
  • +
  • typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
  • +
  • additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
  • +
  • query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is DefaultQueryTimeout.
  • +
  • client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is DefaultClientBufferTimeout, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
  • +
  • http_read_timeout: Set HTTP Read timeout, default is DefaultHttpReadTimeout.
  • +
  • http_write_timeout: Set HTTP Write timeout, default is DefaultHttpWriteTimeout.
  • +
  • http_connect_timeout: Set HTTP Connect timeout, default is DefaultHttpConnectTimeout.
  • +
  • http_pool_timeout: Set HTTP Pool timeout, default is DefaultHttpPoolTimeout.
  • +
  • http_idle_timeout: Set HTTP Idle timeout, default is DefaultIdleConnectionTimeout.
  • +
  • max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
  • +
  • max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
  • +
+
+ + +
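A minimal construction sketch (not taken from the source; the secret value and timeout below are placeholders). When endpoint and secret are omitted, the client falls back to the FAUNA_ENDPOINT and FAUNA_SECRET environment variables:

from datetime import timedelta
from fauna.client import Client

# Uses FAUNA_ENDPOINT if set, otherwise https://db.fauna.com
client = Client(
    secret="your-secret",                 # placeholder secret
    query_timeout=timedelta(seconds=10),  # overrides DefaultQueryTimeout
    max_attempts=3,
    max_backoff=20,
)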
+
+ +
+ + def + close(self): + + + +
+ +
228  def close(self):
+229    self._session.close()
+230    if self._session == fauna.global_http_client:
+231      fauna.global_http_client = None
+
+ + + + +
+
+ +
+ + def + set_last_txn_ts(self, txn_ts: int): + + + +
+ +
233  def set_last_txn_ts(self, txn_ts: int):
+234    """
+235        Set the last timestamp seen by this client.
+236        This has no effect if earlier than stored timestamp.
+237
+238        .. WARNING:: This should be used only when coordinating timestamps across
+239        multiple clients. Moving the timestamp arbitrarily forward into
+240        the future will cause transactions to stall.
+241
+242        :param txn_ts: the new transaction time.
+243        """
+244    self._last_txn_ts.update_txn_time(txn_ts)
+
+ + +

Set the last timestamp seen by this client. This has no effect if earlier than stored timestamp.

+ +

WARNING: This should be used only when coordinating timestamps across multiple clients. Moving the timestamp arbitrarily forward into the future will cause transactions to stall.

+ +
Parameters
+ +
    +
  • txn_ts: the new transaction time.
  • +
+
+ + +
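A hedged sketch of coordinating timestamps across two clients; client_a and client_b are illustrative names, not part of the API:

from fauna.client import Client

client_a = Client()  # e.g. in one worker process
client_b = Client()  # e.g. in another

# Hand the last-seen transaction time from one client to the other so the
# second client does not read state older than what the first has observed.
ts = client_a.get_last_txn_ts()
if ts is not None:
    client_b.set_last_txn_ts(ts)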
+
+ +
+ + def + get_last_txn_ts(self) -> Optional[int]: + + + +
+ +
246  def get_last_txn_ts(self) -> Optional[int]:
+247    """
+248        Get the last timestamp seen by this client.
+249        :return:
+250        """
+251    return self._last_txn_ts.time
+
+ + +

Get the last timestamp seen by this client.

+ +
Returns
+
+ + +
+
+ +
+ + def + get_query_timeout(self) -> Optional[datetime.timedelta]: + + + +
+ +
253  def get_query_timeout(self) -> Optional[timedelta]:
+254    """
+255        Get the query timeout for all queries.
+256        """
+257    if self._query_timeout_ms is not None:
+258      return timedelta(milliseconds=self._query_timeout_ms)
+259    else:
+260      return None
+
+ + +

Get the query timeout for all queries.

+
+ + +
+
+ +
+ + def + paginate( self, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) -> QueryIterator: + + + +
+ +
262  def paginate(
+263      self,
+264      fql: Query,
+265      opts: Optional[QueryOptions] = None,
+266  ) -> "QueryIterator":
+267    """
+268        Run a query on Fauna and return an iterator of results. If the query
+269        returns a Page, the iterator will fetch additional Pages until the
+270        after token is null. Each call for a page will be retried with exponential
+271        backoff up to the max_attempts set in the client's retry policy in the
+272        event of a 429 or 502.
+273
+274        :param fql: A Query
+275        :param opts: (Optional) Query Options
+276
+277        :return: a :class:`QueryIterator`
+278
+279        :raises NetworkError: HTTP Request failed in transit
+280        :raises ProtocolError: HTTP error not from Fauna
+281        :raises ServiceError: Fauna returned an error
+282        :raises ValueError: Encoding and decoding errors
+283        :raises TypeError: Invalid param types
+284        """
+285
+286    if not isinstance(fql, Query):
+287      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+288                 f"Query by calling fauna.fql()"
+289      raise TypeError(err_msg)
+290
+291    return QueryIterator(self, fql, opts)
+
+ + +

Run a query on Fauna and return an iterator of results. If the query returns a Page, the iterator will fetch additional Pages until the after token is null. Each call for a page will be retried with exponential backoff up to the max_attempts set in the client's retry policy in the event of a 429 or 502.

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Returns
+ +
+

a QueryIterator

+
+ +
Raises
+ +
    +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
+ + +
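A short usage sketch; the Product collection is illustrative and not part of the driver:

from fauna import fql
from fauna.client import Client

client = Client()
# Each iteration yields one page (a list of results); additional pages are
# fetched automatically until the 'after' cursor is exhausted.
for page in client.paginate(fql("Product.all()")):
    for product in page:
        print(product)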
+
+ +
+ + def + query( self, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) -> fauna.encoding.wire_protocol.QuerySuccess: + + + +
+ +
293  def query(
+294      self,
+295      fql: Query,
+296      opts: Optional[QueryOptions] = None,
+297  ) -> QuerySuccess:
+298    """
+299        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+300        up to the max_backoff in the event of a 429.
+301
+302        :param fql: A Query
+303        :param opts: (Optional) Query Options
+304
+305        :return: a :class:`QuerySuccess`
+306
+307        :raises NetworkError: HTTP Request failed in transit
+308        :raises ProtocolError: HTTP error not from Fauna
+309        :raises ServiceError: Fauna returned an error
+310        :raises ValueError: Encoding and decoding errors
+311        :raises TypeError: Invalid param types
+312        """
+313
+314    if not isinstance(fql, Query):
+315      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+316                 f"Query by calling fauna.fql()"
+317      raise TypeError(err_msg)
+318
+319    try:
+320      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+321    except Exception as e:
+322      raise ClientError("Failed to encode Query") from e
+323
+324    retryable = Retryable[QuerySuccess](
+325        self._max_attempts,
+326        self._max_backoff,
+327        self._query,
+328        "/query/1",
+329        fql=encoded_query,
+330        opts=opts,
+331    )
+332
+333    r = retryable.run()
+334    r.response.stats.attempts = r.attempts
+335    return r.response
+
+ + +

Run a query on Fauna. A query will be retried max_attempts times with exponential backoff up to the max_backoff in the event of a 429.

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Returns
+ +
+

a QuerySuccess

+
+ +
Raises
+ +
    +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
+ + +
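A minimal sketch of running a query and reading the response; the FQL expression and options are arbitrary:

from fauna import fql
from fauna.client import Client, QueryOptions

client = Client()
success = client.query(fql("1 + 1"), QueryOptions(typecheck=True))
print(success.data)            # 2
print(success.stats.attempts)  # attempts recorded by the retry loop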
+
+ +
+ + def + stream( self, fql: Union[fauna.query.models.EventSource, fauna.query.query_builder.Query], opts: StreamOptions = StreamOptions(max_attempts=None, max_backoff=None, start_ts=None, cursor=None, status_events=False)) -> StreamIterator: + + + +
+ +
424  def stream(
+425      self,
+426      fql: Union[EventSource, Query],
+427      opts: StreamOptions = StreamOptions()
+428  ) -> "StreamIterator":
+429    """
+430        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+431
+432        :param fql: An EventSource or a Query that returns an EventSource.
+433        :param opts: (Optional) Stream Options.
+434
+435        :return: a :class:`StreamIterator`
+436
+437        :raises ClientError: Invalid options provided
+438        :raises NetworkError: HTTP Request failed in transit
+439        :raises ProtocolError: HTTP error not from Fauna
+440        :raises ServiceError: Fauna returned an error
+441        :raises ValueError: Encoding and decoding errors
+442        :raises TypeError: Invalid param types
+443        """
+444
+445    if isinstance(fql, Query):
+446      if opts.cursor is not None:
+447        raise ClientError(
+448            "The 'cursor' configuration can only be used with an event source.")
+449
+450      source = self.query(fql).data
+451    else:
+452      source = fql
+453
+454    if not isinstance(source, EventSource):
+455      err_msg = f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+456      raise TypeError(err_msg)
+457
+458    headers = self._headers.copy()
+459    headers[_Header.Format] = "tagged"
+460    headers[_Header.Authorization] = self._auth.bearer()
+461
+462    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+463                          self._max_attempts, self._max_backoff, opts, source)
+
+ + +

Opens a Stream in Fauna and returns an iterator that consumes Fauna events.

+ +
Parameters
+ +
    +
  • fql: An EventSource or a Query that returns an EventSource.
  • +
  • opts: (Optional) Stream Options.
  • +
+ +
Returns
+ +
+

a StreamIterator

+
+ +
Raises
+ +
    +
  • ClientError: Invalid options provided
  • +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
+ + +
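A hedged usage sketch; the Product collection and the eventSource() call on its set are illustrative of how an EventSource is typically obtained:

from fauna import fql
from fauna.client import Client

client = Client()
# The returned StreamIterator is both a context manager and an iterator.
with client.stream(fql("Product.all().eventSource()")) as events:
    for event in events:
        # add/update/remove events carry the changed document under 'data'
        print(event["type"], event.get("data"))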
+
+ +
+ + def + feed( self, source: Union[fauna.query.models.EventSource, fauna.query.query_builder.Query], opts: FeedOptions = FeedOptions(max_attempts=None, max_backoff=None, query_timeout=None, page_size=None, start_ts=None, cursor=None)) -> FeedIterator: + + + +
+ +
465  def feed(
+466      self,
+467      source: Union[EventSource, Query],
+468      opts: FeedOptions = FeedOptions(),
+469  ) -> "FeedIterator":
+470    """
+471        Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.
+472
+473        :param source: An EventSource or a Query that returns an EventSource.
+474        :param opts: (Optional) Event Feed options.
+475
+476        :return: a :class:`FeedIterator`
+477
+478        :raises ClientError: Invalid options provided
+479        :raises NetworkError: HTTP Request failed in transit
+480        :raises ProtocolError: HTTP error not from Fauna
+481        :raises ServiceError: Fauna returned an error
+482        :raises ValueError: Encoding and decoding errors
+483        :raises TypeError: Invalid param types
+484        """
+485
+486    if isinstance(source, Query):
+487      source = self.query(source).data
+488
+489    if not isinstance(source, EventSource):
+490      err_msg = f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+491      raise TypeError(err_msg)
+492
+493    headers = self._headers.copy()
+494    headers[_Header.Format] = "tagged"
+495    headers[_Header.Authorization] = self._auth.bearer()
+496
+497    if opts.query_timeout is not None:
+498      query_timeout_ms = int(opts.query_timeout.total_seconds() * 1000)
+499      headers[Header.QueryTimeoutMs] = str(query_timeout_ms)
+500    elif self._query_timeout_ms is not None:
+501      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+502
+503    return FeedIterator(self._session, headers, self._endpoint + "/feed/1",
+504                        self._max_attempts, self._max_backoff, opts, source)
+
+ + +

Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.

+ +
Parameters
+ +
    +
  • source: An EventSource or a Query that returns an EventSource.
  • +
  • opts: (Optional) Event Feed options.
  • +
+ +
Returns
+ +
+

a FeedIterator

+
+ +
Raises
+ +
    +
  • ClientError: Invalid options provided
  • +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
+ + +
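A hedged sketch of polling an Event Feed page by page; the collection name and page size are illustrative:

from fauna import fql
from fauna.client import Client, FeedOptions

client = Client()
feed = client.feed(fql("Product.all().eventSource()"), FeedOptions(page_size=10))
for page in feed:
    # page is a FeedPage; page.cursor can be persisted to resume later
    for event in page:
        print(event["type"])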
+
+
+ +
+ + class + StreamIterator: + + + +
+ +
539class StreamIterator:
+540  """A class that mixes a ContextManager and an Iterator so we can detect retryable errors."""
+541
+542  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+543               endpoint: str, max_attempts: int, max_backoff: int,
+544               opts: StreamOptions, source: EventSource):
+545    self._http_client = http_client
+546    self._headers = headers
+547    self._endpoint = endpoint
+548    self._max_attempts = max_attempts
+549    self._max_backoff = max_backoff
+550    self._opts = opts
+551    self._source = source
+552    self._stream = None
+553    self.last_ts = None
+554    self.last_cursor = None
+555    self._ctx = self._create_stream()
+556
+557    if opts.start_ts is not None and opts.cursor is not None:
+558      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the StreamOptions."
+559      raise TypeError(err_msg)
+560
+561  def __enter__(self):
+562    return self
+563
+564  def __exit__(self, exc_type, exc_value, exc_traceback):
+565    if self._stream is not None:
+566      self._stream.close()
+567
+568    self._ctx.__exit__(exc_type, exc_value, exc_traceback)
+569    return False
+570
+571  def __iter__(self):
+572    return self
+573
+574  def __next__(self):
+575    if self._opts.max_attempts is not None:
+576      max_attempts = self._opts.max_attempts
+577    else:
+578      max_attempts = self._max_attempts
+579
+580    if self._opts.max_backoff is not None:
+581      max_backoff = self._opts.max_backoff
+582    else:
+583      max_backoff = self._max_backoff
+584
+585    retryable = Retryable[Any](max_attempts, max_backoff, self._next_element)
+586    return retryable.run().response
+587
+588  def _next_element(self):
+589    try:
+590      if self._stream is None:
+591        try:
+592          self._stream = self._ctx.__enter__()
+593        except Exception:
+594          self._retry_stream()
+595
+596      if self._stream is not None:
+597        event: Any = FaunaDecoder.decode(next(self._stream))
+598
+599        if event["type"] == "error":
+600          FaunaError.parse_error_and_throw(event, 400)
+601
+602        self.last_ts = event["txn_ts"]
+603        self.last_cursor = event.get('cursor')
+604
+605        if event["type"] == "start":
+606          return self._next_element()
+607
+608        if not self._opts.status_events and event["type"] == "status":
+609          return self._next_element()
+610
+611        return event
+612
+613      raise StopIteration
+614    except NetworkError:
+615      self._retry_stream()
+616
+617  def _retry_stream(self):
+618    if self._stream is not None:
+619      self._stream.close()
+620
+621    self._stream = None
+622
+623    try:
+624      self._ctx = self._create_stream()
+625    except Exception:
+626      pass
+627    raise RetryableFaunaException
+628
+629  def _create_stream(self):
+630    data: Dict[str, Any] = {"token": self._source.token}
+631    if self.last_cursor is not None:
+632      data["cursor"] = self.last_cursor
+633    elif self._opts.cursor is not None:
+634      data["cursor"] = self._opts.cursor
+635    elif self._opts.start_ts is not None:
+636      data["start_ts"] = self._opts.start_ts
+637
+638    return self._http_client.stream(
+639        url=self._endpoint, headers=self._headers, data=data)
+640
+641  def close(self):
+642    if self._stream is not None:
+643      self._stream.close()
+
+ + +

A class that mixes a ContextManager and an Iterator so we can detect retryable errors.

+
+ + +
+ +
+ + StreamIterator( http_client: fauna.http.http_client.HTTPClient, headers: Dict[str, str], endpoint: str, max_attempts: int, max_backoff: int, opts: StreamOptions, source: fauna.query.models.EventSource) + + + +
+ +
542  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+543               endpoint: str, max_attempts: int, max_backoff: int,
+544               opts: StreamOptions, source: EventSource):
+545    self._http_client = http_client
+546    self._headers = headers
+547    self._endpoint = endpoint
+548    self._max_attempts = max_attempts
+549    self._max_backoff = max_backoff
+550    self._opts = opts
+551    self._source = source
+552    self._stream = None
+553    self.last_ts = None
+554    self.last_cursor = None
+555    self._ctx = self._create_stream()
+556
+557    if opts.start_ts is not None and opts.cursor is not None:
+558      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the StreamOptions."
+559      raise TypeError(err_msg)
+
+ + + + +
+
+
+ last_ts + + +
+ + + + +
+
+
+ last_cursor + + +
+ + + + +
+
+ +
+ + def + close(self): + + + +
+ +
641  def close(self):
+642    if self._stream is not None:
+643      self._stream.close()
+
+ + + + +
+
+
+ +
+ + class + FeedPage: + + + +
+ +
646class FeedPage:
+647
+648  def __init__(self, events: List[Any], cursor: str, stats: QueryStats):
+649    self._events = events
+650    self.cursor = cursor
+651    self.stats = stats
+652
+653  def __len__(self):
+654    return len(self._events)
+655
+656  def __iter__(self) -> Iterator[Any]:
+657    for event in self._events:
+658      if event["type"] == "error":
+659        FaunaError.parse_error_and_throw(event, 400)
+660      yield event
+
+ + + + +
+ +
+ + FeedPage( events: List[Any], cursor: str, stats: fauna.encoding.wire_protocol.QueryStats) + + + +
+ +
648  def __init__(self, events: List[Any], cursor: str, stats: QueryStats):
+649    self._events = events
+650    self.cursor = cursor
+651    self.stats = stats
+
+ + + + +
+
+
+ cursor + + +
+ + + + +
+
+
+ stats + + +
+ + + + +
+
+
+ +
+ + class + FeedIterator: + + + +
+ +
663class FeedIterator:
+664  """A class to provide an iterator on top of Event Feed pages."""
+665
+666  def __init__(self, http: HTTPClient, headers: Dict[str, str], endpoint: str,
+667               max_attempts: int, max_backoff: int, opts: FeedOptions,
+668               source: EventSource):
+669    self._http = http
+670    self._headers = headers
+671    self._endpoint = endpoint
+672    self._max_attempts = opts.max_attempts or max_attempts
+673    self._max_backoff = opts.max_backoff or max_backoff
+674    self._request: Dict[str, Any] = {"token": source.token}
+675    self._is_done = False
+676
+677    if opts.start_ts is not None and opts.cursor is not None:
+678      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the FeedOptions."
+679      raise TypeError(err_msg)
+680
+681    if opts.page_size is not None:
+682      self._request["page_size"] = opts.page_size
+683
+684    if opts.cursor is not None:
+685      self._request["cursor"] = opts.cursor
+686    elif opts.start_ts is not None:
+687      self._request["start_ts"] = opts.start_ts
+688
+689  def __iter__(self) -> Iterator[FeedPage]:
+690    self._is_done = False
+691    return self
+692
+693  def __next__(self) -> FeedPage:
+694    if self._is_done:
+695      raise StopIteration
+696
+697    retryable = Retryable[Any](self._max_attempts, self._max_backoff,
+698                               self._next_page)
+699    return retryable.run().response
+700
+701  def _next_page(self) -> FeedPage:
+702    with self._http.request(
+703        method="POST",
+704        url=self._endpoint,
+705        headers=self._headers,
+706        data=self._request,
+707    ) as response:
+708      status_code = response.status_code()
+709      decoded: Any = FaunaDecoder.decode(response.json())
+710
+711      if status_code > 399:
+712        FaunaError.parse_error_and_throw(decoded, status_code)
+713
+714      self._is_done = not decoded["has_next"]
+715      self._request["cursor"] = decoded["cursor"]
+716
+717      if "start_ts" in self._request:
+718        del self._request["start_ts"]
+719
+720      return FeedPage(decoded["events"], decoded["cursor"],
+721                      QueryStats(decoded["stats"]))
+722
+723  def flatten(self) -> Iterator:
+724    """A generator that yields events instead of pages of events."""
+725    for page in self:
+726      for event in page:
+727        yield event
+
+ + +

A class to provide an iterator on top of Event Feed pages.

+
+ + +
+ +
+ + FeedIterator( http: fauna.http.http_client.HTTPClient, headers: Dict[str, str], endpoint: str, max_attempts: int, max_backoff: int, opts: FeedOptions, source: fauna.query.models.EventSource) + + + +
+ +
666  def __init__(self, http: HTTPClient, headers: Dict[str, str], endpoint: str,
+667               max_attempts: int, max_backoff: int, opts: FeedOptions,
+668               source: EventSource):
+669    self._http = http
+670    self._headers = headers
+671    self._endpoint = endpoint
+672    self._max_attempts = opts.max_attempts or max_attempts
+673    self._max_backoff = opts.max_backoff or max_backoff
+674    self._request: Dict[str, Any] = {"token": source.token}
+675    self._is_done = False
+676
+677    if opts.start_ts is not None and opts.cursor is not None:
+678      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the FeedOptions."
+679      raise TypeError(err_msg)
+680
+681    if opts.page_size is not None:
+682      self._request["page_size"] = opts.page_size
+683
+684    if opts.cursor is not None:
+685      self._request["cursor"] = opts.cursor
+686    elif opts.start_ts is not None:
+687      self._request["start_ts"] = opts.start_ts
+
+ + + + +
+
+ +
+ + def + flatten(self) -> Iterator: + + + +
+ +
723  def flatten(self) -> Iterator:
+724    """A generator that yields events instead of pages of events."""
+725    for page in self:
+726      for event in page:
+727        yield event
+
+ + +

A generator that yields events instead of pages of events.

+
+ + +
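A brief sketch contrasting flatten() with page-wise iteration; the collection is illustrative, and the EventSource is obtained from a query as shown in the feed() documentation above:

from fauna import fql
from fauna.client import Client

client = Client()
source = client.query(fql("Product.all().eventSource()")).data  # illustrative

# Page-wise iteration yields FeedPage objects:
for page in client.feed(source):
    for event in page:
        print(event["type"])

# flatten() yields the same events without the page wrapper:
for event in client.feed(source).flatten():
    print(event["type"])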
+
+
+ +
+ + class + QueryIterator: + + + +
+ +
730class QueryIterator:
+731  """A class to provide an iterator on top of Fauna queries."""
+732
+733  def __init__(self,
+734               client: Client,
+735               fql: Query,
+736               opts: Optional[QueryOptions] = None):
+737    """Initializes the QueryIterator
+738
+739        :param fql: A Query
+740        :param opts: (Optional) Query Options
+741
+742        :raises TypeError: Invalid param types
+743        """
+744    if not isinstance(client, Client):
+745      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+746                  f"Client by calling fauna.client.Client()"
+747      raise TypeError(err_msg)
+748
+749    if not isinstance(fql, Query):
+750      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+751                 f"Query by calling fauna.fql()"
+752      raise TypeError(err_msg)
+753
+754    self.client = client
+755    self.fql = fql
+756    self.opts = opts
+757
+758  def __iter__(self) -> Iterator:
+759    return self.iter()
+760
+761  def iter(self) -> Iterator:
+762    """
+763        A generator function that immediately fetches and yields the results of
+764        the stored query. Yields additional pages on subsequent iterations if
+765        they exist
+766        """
+767
+768    cursor = None
+769    initial_response = self.client.query(self.fql, self.opts)
+770
+771    if isinstance(initial_response.data, Page):
+772      cursor = initial_response.data.after
+773      yield initial_response.data.data
+774
+775      while cursor is not None:
+776        next_response = self.client.query(
+777            fql("Set.paginate(${after})", after=cursor), self.opts)
+778        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+779        #       so we will get back a plain object that might not have
+780        #       an after property.
+781        cursor = next_response.data.get("after")
+782        yield next_response.data.get("data")
+783
+784    else:
+785      yield [initial_response.data]
+786
+787  def flatten(self) -> Iterator:
+788    """
+789        A generator function that immediately fetches and yields the results of
+790        the stored query. Yields each item individually, rather than a whole
+791        Page at a time. Fetches additional pages as required if they exist.
+792        """
+793
+794    for page in self.iter():
+795      for item in page:
+796        yield item
+
+ + +

A class to provide an iterator on top of Fauna queries.

+
+ + +
+ +
+ + QueryIterator( client: Client, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) + + + +
+ +
733  def __init__(self,
+734               client: Client,
+735               fql: Query,
+736               opts: Optional[QueryOptions] = None):
+737    """Initializes the QueryIterator
+738
+739        :param fql: A Query
+740        :param opts: (Optional) Query Options
+741
+742        :raises TypeError: Invalid param types
+743        """
+744    if not isinstance(client, Client):
+745      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+746                  f"Client by calling fauna.client.Client()"
+747      raise TypeError(err_msg)
+748
+749    if not isinstance(fql, Query):
+750      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+751                 f"Query by calling fauna.fql()"
+752      raise TypeError(err_msg)
+753
+754    self.client = client
+755    self.fql = fql
+756    self.opts = opts
+
+ + +

Initializes the QueryIterator

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Raises
+ +
    +
  • TypeError: Invalid param types
  • +
+
+ + +
+
+
+ client + + +
+ + + + +
+
+
+ fql + + +
+ + + + +
+
+
+ opts + + +
+ + + + +
+
+ +
+ + def + iter(self) -> Iterator: + + + +
+ +
761  def iter(self) -> Iterator:
+762    """
+763        A generator function that immediately fetches and yields the results of
+764        the stored query. Yields additional pages on subsequent iterations if
+765        they exist
+766        """
+767
+768    cursor = None
+769    initial_response = self.client.query(self.fql, self.opts)
+770
+771    if isinstance(initial_response.data, Page):
+772      cursor = initial_response.data.after
+773      yield initial_response.data.data
+774
+775      while cursor is not None:
+776        next_response = self.client.query(
+777            fql("Set.paginate(${after})", after=cursor), self.opts)
+778        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+779        #       so we will get back a plain object that might not have
+780        #       an after property.
+781        cursor = next_response.data.get("after")
+782        yield next_response.data.get("data")
+783
+784    else:
+785      yield [initial_response.data]
+
+ + +

A generator function that immediately fetches and yields the results of the stored query. Yields additional pages on subsequent iterations if they exist

+
+ + +
+
+ +
+ + def + flatten(self) -> Iterator: + + + +
+ +
787  def flatten(self) -> Iterator:
+788    """
+789        A generator function that immediately fetches and yields the results of
+790        the stored query. Yields each item individually, rather than a whole
+791        Page at a time. Fetches additional pages as required if they exist.
+792        """
+793
+794    for page in self.iter():
+795      for item in page:
+796        yield item
+
+ + +

A generator function that immediately fetches and yields the results of the stored query. Yields each item individually, rather than a whole Page at a time. Fetches additional pages as required if they exist.

+
+ + +
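A short sketch showing the two iteration styles offered by QueryIterator; the collection name is illustrative:

from fauna import fql
from fauna.client import Client

client = Client()

# Page-wise: each iteration yields one list of results.
for page in client.paginate(fql("Product.all()")):
    print(len(page))

# flatten(): individual items across all pages.
for item in client.paginate(fql("Product.all()")).flatten():
    print(item)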
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/client/endpoints.html b/2.3.0/fauna/client/endpoints.html new file mode 100644 index 00000000..9012d33e --- /dev/null +++ b/2.3.0/fauna/client/endpoints.html @@ -0,0 +1,300 @@ + + + + + + + fauna.client.endpoints API documentation + + + + + + + + + +
+
+

+fauna.client.endpoints

+ + + + + + +
1class Endpoints:
+2  Default = "https://db.fauna.com"
+3  Local = "http://localhost:8443"
+
+ + +
+
+ +
+ + class + Endpoints: + + + +
+ +
2class Endpoints:
+3  Default = "https://db.fauna.com"
+4  Local = "http://localhost:8443"
+
+ + + + +
+
+ Default = +'https://db.fauna.com' + + +
+ + + + +
+
+
+ Local = +'http://localhost:8443' + + +
+ + + + +
+
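A small sketch of pointing the client at a local Fauna container instead of the default endpoint; the secret is a placeholder:

from fauna.client import Client, Endpoints

client = Client(endpoint=Endpoints.Local, secret="secret")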
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/client/headers.html b/2.3.0/fauna/client/headers.html new file mode 100644 index 00000000..85c500ba --- /dev/null +++ b/2.3.0/fauna/client/headers.html @@ -0,0 +1,487 @@ + + + + + + + fauna.client.headers API documentation + + + + + + + + + +
+
+

+fauna.client.headers

+ + + + + + +
  1import os
+  2import platform
+  3import sys
+  4from dataclasses import dataclass
+  5from typing import Callable
+  6
+  7from fauna import __version__
+  8
+  9
+ 10class Header:
+ 11  LastTxnTs = "X-Last-Txn-Ts"
+ 12  Linearized = "X-Linearized"
+ 13  MaxContentionRetries = "X-Max-Contention-Retries"
+ 14  QueryTimeoutMs = "X-Query-Timeout-Ms"
+ 15  Typecheck = "X-Typecheck"
+ 16  Tags = "X-Query-Tags"
+ 17  Traceparent = "Traceparent"
+ 18
+ 19
+ 20class _Header:
+ 21  AcceptEncoding = "Accept-Encoding"
+ 22  Authorization = "Authorization"
+ 23  ContentType = "Content-Type"
+ 24  Driver = "X-Driver"
+ 25  DriverEnv = "X-Driver-Env"
+ 26  Format = "X-Format"
+ 27
+ 28
+ 29class _Auth:
+ 30  """Creates an auth helper object"""
+ 31
+ 32  def bearer(self):
+ 33    return "Bearer {}".format(self.secret)
+ 34
+ 35  def __init__(self, secret):
+ 36    self.secret = secret
+ 37
+ 38  def __eq__(self, other):
+ 39    return self.secret == getattr(other, 'secret', None)
+ 40
+ 41  def __ne__(self, other):
+ 42    return not self == other
+ 43
+ 44
+ 45class _DriverEnvironment:
+ 46
+ 47  def __init__(self):
+ 48    self.pythonVersion = "{0}.{1}.{2}-{3}".format(*sys.version_info)
+ 49    self.driverVersion = __version__
+ 50    self.env = self._get_runtime_env()
+ 51    self.os = "{0}-{1}".format(platform.system(), platform.release())
+ 52
+ 53  @staticmethod
+ 54  def _get_runtime_env():
+ 55
+ 56    @dataclass
+ 57    class EnvChecker:
+ 58      name: str
+ 59      check: Callable[[], bool]
+ 60
+ 61    env: list[EnvChecker] = [
+ 62        EnvChecker(
+ 63            name="Netlify",
+ 64            check=lambda: "NETLIFY_IMAGES_CDN_DOMAIN" in os.environ,
+ 65        ),
+ 66        EnvChecker(
+ 67            name="Vercel",
+ 68            check=lambda: "VERCEL" in os.environ,
+ 69        ),
+ 70        EnvChecker(
+ 71            name="Heroku",
+ 72            check=lambda: "PATH" in \
+ 73                os.environ and ".heroku" in os.environ["PATH"],
+ 74        ),
+ 75        EnvChecker(
+ 76            name="AWS Lambda",
+ 77            check=lambda: "AWS_LAMBDA_FUNCTION_VERSION" in os.environ,
+ 78        ),
+ 79        EnvChecker(
+ 80            name="GCP Cloud Functions",
+ 81            check=lambda: "_" in \
+ 82                os.environ and "google" in os.environ["_"],
+ 83        ),
+ 84        EnvChecker(
+ 85            name="GCP Compute Instances",
+ 86            check=lambda: "GOOGLE_CLOUD_PROJECT" in os.environ,
+ 87        ),
+ 88        EnvChecker(
+ 89            name="Azure Cloud Functions",
+ 90            check=lambda: "WEBSITE_FUNCTIONS_AZUREMONITOR_CATEGORIES" in \
+ 91                os.environ,
+ 92        ),
+ 93        EnvChecker(
+ 94            name="Azure Compute",
+ 95            check=lambda: "ORYX_ENV_TYPE" in os.environ and \
+ 96                "WEBSITE_INSTANCE_ID" in os.environ and \
+ 97                os.environ["ORYX_ENV_TYPE"] == "AppService",
+ 98        ),
+ 99    ]
+100
+101    try:
+102      recognized = next(e for e in env if e.check())
+103      if recognized is not None:
+104        return recognized.name
+105    except:
+106      return "Unknown"
+107
+108  def __str__(self):
+109    return "driver=python-{0}; runtime=python-{1} env={2}; os={3}".format(
+110        self.driverVersion, self.pythonVersion, self.env, self.os).lower()
+
+ + +
+ +
+ + \ No newline at end of file diff --git a/2.3.0/fauna/client/retryable.html b/2.3.0/fauna/client/retryable.html new file mode 100644 index 00000000..8f59f924 --- /dev/null +++ b/2.3.0/fauna/client/retryable.html @@ -0,0 +1,666 @@ + + + + + + + fauna.client.retryable API documentation + + + + + + + + + +
+
+

+fauna.client.retryable

+ + + + + + +
 1import abc
+ 2from dataclasses import dataclass
+ 3from random import random
+ 4from time import sleep
+ 5from typing import Callable, Optional, TypeVar, Generic
+ 6
+ 7from fauna.errors import RetryableFaunaException
+ 8
+ 9
+10class RetryStrategy:
+11
+12  @abc.abstractmethod
+13  def wait(self) -> float:
+14    pass
+15
+16
+17class ExponentialBackoffStrategy(RetryStrategy):
+18
+19  def __init__(self, max_backoff: int):
+20    self._max_backoff = float(max_backoff)
+21    self._i = 0.0
+22
+23  def wait(self) -> float:
+24    """Returns the number of seconds to wait for the next call."""
+25    backoff = random() * (2.0**self._i)
+26    self._i += 1.0
+27    return min(backoff, self._max_backoff)
+28
+29
+30T = TypeVar('T')
+31
+32
+33@dataclass
+34class RetryableResponse(Generic[T]):
+35  attempts: int
+36  response: T
+37
+38
+39class Retryable(Generic[T]):
+40  """
+41    Retryable is a wrapper class that acts on a Callable that returns a T type.
+42    """
+43  _strategy: RetryStrategy
+44  _error: Optional[Exception]
+45
+46  def __init__(
+47      self,
+48      max_attempts: int,
+49      max_backoff: int,
+50      func: Callable[..., T],
+51      *args,
+52      **kwargs,
+53  ):
+54    self._max_attempts = max_attempts
+55    self._strategy = ExponentialBackoffStrategy(max_backoff)
+56    self._func = func
+57    self._args = args
+58    self._kwargs = kwargs
+59    self._error = None
+60
+61  def run(self) -> RetryableResponse[T]:
+62    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+63        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+64
+65        Returns the number of attempts and the response
+66        """
+67    attempt = 0
+68    while True:
+69      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+70      sleep(sleep_time)
+71
+72      try:
+73        attempt += 1
+74        qs = self._func(*self._args, **self._kwargs)
+75        return RetryableResponse[T](attempt, qs)
+76      except RetryableFaunaException as e:
+77        if attempt >= self._max_attempts:
+78          raise e
+
+ + +
+
+ +
+ + class + RetryStrategy: + + + +
+ +
11class RetryStrategy:
+12
+13  @abc.abstractmethod
+14  def wait(self) -> float:
+15    pass
+
+ + + + +
+ +
+
@abc.abstractmethod
+ + def + wait(self) -> float: + + + +
+ +
13  @abc.abstractmethod
+14  def wait(self) -> float:
+15    pass
+
+ + + + +
+
+
+ +
+ + class + ExponentialBackoffStrategy(RetryStrategy): + + + +
+ +
18class ExponentialBackoffStrategy(RetryStrategy):
+19
+20  def __init__(self, max_backoff: int):
+21    self._max_backoff = float(max_backoff)
+22    self._i = 0.0
+23
+24  def wait(self) -> float:
+25    """Returns the number of seconds to wait for the next call."""
+26    backoff = random() * (2.0**self._i)
+27    self._i += 1.0
+28    return min(backoff, self._max_backoff)
+
+ + + + +
+ +
+ + ExponentialBackoffStrategy(max_backoff: int) + + + +
+ +
20  def __init__(self, max_backoff: int):
+21    self._max_backoff = float(max_backoff)
+22    self._i = 0.0
+
+ + + + +
+
+ +
+ + def + wait(self) -> float: + + + +
+ +
24  def wait(self) -> float:
+25    """Returns the number of seconds to wait for the next call."""
+26    backoff = random() * (2.0**self._i)
+27    self._i += 1.0
+28    return min(backoff, self._max_backoff)
+
+ + +

Returns the number of seconds to wait for the next call.

+
+ + +
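An illustration of the jittered backoff when the strategy is used standalone (in the driver it is driven by Retryable); each call draws a random delay and caps it at max_backoff:

from fauna.client.retryable import ExponentialBackoffStrategy

strategy = ExponentialBackoffStrategy(max_backoff=20)
# Successive draws are uniform in [0, 2**i) seconds and capped at 20.
delays = [strategy.wait() for _ in range(6)]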
+
+
+ +
+
@dataclass
+ + class + RetryableResponse(typing.Generic[~T]): + + + +
+ +
34@dataclass
+35class RetryableResponse(Generic[T]):
+36  attempts: int
+37  response: T
+
+ + + + +
+
+ + RetryableResponse(attempts: int, response: ~T) + + +
+ + + + +
+
+
+ attempts: int + + +
+ + + + +
+
+
+ response: ~T + + +
+ + + + +
+
+
+ +
+ + class + Retryable(typing.Generic[~T]): + + + +
+ +
40class Retryable(Generic[T]):
+41  """
+42    Retryable is a wrapper class that acts on a Callable that returns a T type.
+43    """
+44  _strategy: RetryStrategy
+45  _error: Optional[Exception]
+46
+47  def __init__(
+48      self,
+49      max_attempts: int,
+50      max_backoff: int,
+51      func: Callable[..., T],
+52      *args,
+53      **kwargs,
+54  ):
+55    self._max_attempts = max_attempts
+56    self._strategy = ExponentialBackoffStrategy(max_backoff)
+57    self._func = func
+58    self._args = args
+59    self._kwargs = kwargs
+60    self._error = None
+61
+62  def run(self) -> RetryableResponse[T]:
+63    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+64        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+65
+66        Returns the number of attempts and the response
+67        """
+68    attempt = 0
+69    while True:
+70      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+71      sleep(sleep_time)
+72
+73      try:
+74        attempt += 1
+75        qs = self._func(*self._args, **self._kwargs)
+76        return RetryableResponse[T](attempt, qs)
+77      except RetryableFaunaException as e:
+78        if attempt >= self._max_attempts:
+79          raise e
+
+ + +

Retryable is a wrapper class that acts on a Callable that returns a T type.

+
+ + +
+ +
+ + Retryable( max_attempts: int, max_backoff: int, func: Callable[..., ~T], *args, **kwargs) + + + +
+ +
47  def __init__(
+48      self,
+49      max_attempts: int,
+50      max_backoff: int,
+51      func: Callable[..., T],
+52      *args,
+53      **kwargs,
+54  ):
+55    self._max_attempts = max_attempts
+56    self._strategy = ExponentialBackoffStrategy(max_backoff)
+57    self._func = func
+58    self._args = args
+59    self._kwargs = kwargs
+60    self._error = None
+
+ + + + +
+
+ +
+ + def + run(self) -> RetryableResponse[~T]: + + + +
+ +
62  def run(self) -> RetryableResponse[T]:
+63    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+64        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+65
+66        Returns the number of attempts and the response
+67        """
+68    attempt = 0
+69    while True:
+70      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+71      sleep(sleep_time)
+72
+73      try:
+74        attempt += 1
+75        qs = self._func(*self._args, **self._kwargs)
+76        return RetryableResponse[T](attempt, qs)
+77      except RetryableFaunaException as e:
+78        if attempt >= self._max_attempts:
+79          raise e
+
+ + +

Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates the thrown exception if max_attempts is reached or if a non-retryable is thrown.

+ +

Returns the number of attempts and the response

+
+ + +
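A hedged sketch of wrapping an arbitrary callable; fetch_page is a placeholder function that may raise RetryableFaunaException:

from fauna.client.retryable import Retryable

def fetch_page(cursor):
    # placeholder callable; raise RetryableFaunaException to trigger a retry
    return {"cursor": cursor, "events": []}

retryable = Retryable[dict](3, 20, fetch_page, cursor="abc")
result = retryable.run()
print(result.attempts, result.response)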
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/client/utils.html b/2.3.0/fauna/client/utils.html new file mode 100644 index 00000000..7c2c921e --- /dev/null +++ b/2.3.0/fauna/client/utils.html @@ -0,0 +1,488 @@ + + + + + + + fauna.client.utils API documentation + + + + + + + + + +
+
+

+fauna.client.utils

+ + + + + + +
 1import os
+ 2import threading
+ 3from typing import Generic, Callable, TypeVar, Optional
+ 4
+ 5from fauna.client.endpoints import Endpoints
+ 6from fauna.client.headers import Header
+ 7
+ 8
+ 9def _fancy_bool_from_str(val: str) -> bool:
+10  return val.lower() in ["1", "true", "yes", "y"]
+11
+12
+13class LastTxnTs(object):
+14  """Wraps tracking the last transaction time supplied from the database."""
+15
+16  def __init__(
+17      self,
+18      time: Optional[int] = None,
+19  ):
+20    self._lock: threading.Lock = threading.Lock()
+21    self._time: Optional[int] = time
+22
+23  @property
+24  def time(self):
+25    """Produces the last transaction time, or, None if not yet updated."""
+26    with self._lock:
+27      return self._time
+28
+29  @property
+30  def request_header(self):
+31    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+32        if one has not yet been set, the empty header dictionary."""
+33    t = self._time
+34    if t is None:
+35      return {}
+36    return {Header.LastTxnTs: str(t)}
+37
+38  def update_txn_time(self, new_txn_time: int):
+39    """Updates the internal transaction time.
+40        In order to maintain a monotonically-increasing value, `newTxnTime`
+41        is discarded if it is behind the current timestamp."""
+42    with self._lock:
+43      self._time = max(self._time or 0, new_txn_time)
+44
+45
+46T = TypeVar('T')
+47
+48
+49class _SettingFromEnviron(Generic[T]):
+50
+51  def __init__(
+52      self,
+53      var_name: str,
+54      default_value: str,
+55      adapt_from_str: Callable[[str], T],
+56  ):
+57    self.__var_name = var_name
+58    self.__default_value = default_value
+59    self.__adapt_from_str = adapt_from_str
+60
+61  def __call__(self) -> T:
+62    return self.__adapt_from_str(
+63        os.environ.get(
+64            self.__var_name,
+65            default=self.__default_value,
+66        ))
+67
+68
+69class _Environment:
+70  EnvFaunaEndpoint = _SettingFromEnviron(
+71      "FAUNA_ENDPOINT",
+72      Endpoints.Default,
+73      str,
+74  )
+75  """environment variable for Fauna Client HTTP endpoint"""
+76
+77  EnvFaunaSecret = _SettingFromEnviron(
+78      "FAUNA_SECRET",
+79      "",
+80      str,
+81  )
+82  """environment variable for Fauna Client authentication"""
+
+ + +
+
+ +
+ + class + LastTxnTs: + + + +
+ +
14class LastTxnTs(object):
+15  """Wraps tracking the last transaction time supplied from the database."""
+16
+17  def __init__(
+18      self,
+19      time: Optional[int] = None,
+20  ):
+21    self._lock: threading.Lock = threading.Lock()
+22    self._time: Optional[int] = time
+23
+24  @property
+25  def time(self):
+26    """Produces the last transaction time, or, None if not yet updated."""
+27    with self._lock:
+28      return self._time
+29
+30  @property
+31  def request_header(self):
+32    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+33        if one has not yet been set, the empty header dictionary."""
+34    t = self._time
+35    if t is None:
+36      return {}
+37    return {Header.LastTxnTs: str(t)}
+38
+39  def update_txn_time(self, new_txn_time: int):
+40    """Updates the internal transaction time.
+41        In order to maintain a monotonically-increasing value, `new_txn_time`
+42        is discarded if it is behind the current timestamp."""
+43    with self._lock:
+44      self._time = max(self._time or 0, new_txn_time)
+
+ + +

Wraps tracking the last transaction time supplied from the database.

+
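A minimal usage sketch (timestamp values are illustrative): update_txn_time keeps the tracked value monotonically increasing, and request_header only produces the X-Last-Seen-Txn header once a transaction time has been seen.

from fauna.client.utils import LastTxnTs

ts = LastTxnTs()
print(ts.request_header)              # {} until a transaction time is seen
ts.update_txn_time(1700000000000000)
ts.update_txn_time(1699999999999999)  # older value is discarded
print(ts.time)                        # 1700000000000000
print(ts.request_header)              # {'X-Last-Seen-Txn': '1700000000000000'}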
+ + +
+ +
+ + LastTxnTs(time: Optional[int] = None) + + + +
+ +
17  def __init__(
+18      self,
+19      time: Optional[int] = None,
+20  ):
+21    self._lock: threading.Lock = threading.Lock()
+22    self._time: Optional[int] = time
+
+ + + + +
+
+ +
+ time + + + +
+ +
24  @property
+25  def time(self):
+26    """Produces the last transaction time, or, None if not yet updated."""
+27    with self._lock:
+28      return self._time
+
+ + +

Produces the last transaction time, or, None if not yet updated.

+
+ + +
+
+ +
+ request_header + + + +
+ +
30  @property
+31  def request_header(self):
+32    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+33        if one has not yet been set, the empty header dictionary."""
+34    t = self._time
+35    if t is None:
+36      return {}
+37    return {Header.LastTxnTs: str(t)}
+
+ + +

Produces a dictionary with a non-zero X-Last-Seen-Txn header; or, +if one has not yet been set, the empty header dictionary.

+
+ + +
+
+ +
+ + def + update_txn_time(self, new_txn_time: int): + + + +
+ +
39  def update_txn_time(self, new_txn_time: int):
+40    """Updates the internal transaction time.
+41        In order to maintain a monotonically-increasing value, `new_txn_time`
+42        is discarded if it is behind the current timestamp."""
+43    with self._lock:
+44      self._time = max(self._time or 0, new_txn_time)
+
+ + +

Updates the internal transaction time. +In order to maintain a monotonically-increasing value, new_txn_time +is discarded if it is behind the current timestamp.

+
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/encoding.html b/2.3.0/fauna/encoding.html new file mode 100644 index 00000000..22e7c62a --- /dev/null +++ b/2.3.0/fauna/encoding.html @@ -0,0 +1,246 @@ + + + + + + + fauna.encoding API documentation + + + + + + + + + +
+
+

+fauna.encoding

+ + + + + + +
1from .decoder import FaunaDecoder
+2from .encoder import FaunaEncoder
+3from .wire_protocol import ConstraintFailure, QueryTags, QueryInfo, QueryStats, QuerySuccess
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/encoding/decoder.html b/2.3.0/fauna/encoding/decoder.html new file mode 100644 index 00000000..887f7570 --- /dev/null +++ b/2.3.0/fauna/encoding/decoder.html @@ -0,0 +1,719 @@ + + + + + + + fauna.encoding.decoder API documentation + + + + + + + + + +
+
+

+fauna.encoding.decoder

+ + + + + + +
  1import base64
+  2from typing import Any, List, Union
+  3
+  4from iso8601 import parse_date
+  5
+  6from fauna.query.models import Module, DocumentReference, Document, NamedDocument, NamedDocumentReference, Page, \
+  7  NullDocument, EventSource
+  8
+  9
+ 10class FaunaDecoder:
+ 11  """Supports the following types:
+ 12
+ 13     +--------------------+---------------+
+ 14     | Python             | Fauna         |
+ 15     +====================+===============+
+ 16     | dict               | object        |
+ 17     +--------------------+---------------+
+ 18     | list, tuple        | array         |
+ 19     +--------------------+---------------+
+ 20     | str                | string        |
+ 21     +--------------------+---------------+
+ 22     | int                | @int          |
+ 23     +--------------------+---------------+
+ 24     | int                | @long         |
+ 25     +--------------------+---------------+
+ 26     | float              | @double       |
+ 27     +--------------------+---------------+
+ 28     | datetime.datetime  | @time         |
+ 29     +--------------------+---------------+
+ 30     | datetime.date      | @date         |
+ 31     +--------------------+---------------+
+ 32     | True               | true          |
+ 33     +--------------------+---------------+
+ 34     | False              | false         |
+ 35     +--------------------+---------------+
+ 36     | None               | null          |
+ 37     +--------------------+---------------+
+ 38     | bytearray          | @bytes        |
+ 39     +--------------------+---------------+
+ 40     | *DocumentReference | @ref          |
+ 41     +--------------------+---------------+
+ 42     | *Document          | @doc          |
+ 43     +--------------------+---------------+
+ 44     | Module             | @mod          |
+ 45     +--------------------+---------------+
+ 46     | Page               | @set          |
+ 47     +--------------------+---------------+
+ 48     | EventSource        | @stream       |
+ 49     +--------------------+---------------+
+ 50
+ 51     """
+ 52
+ 53  @staticmethod
+ 54  def decode(obj: Any):
+ 55    """Decodes supported objects from the tagged typed into untagged.
+ 56
+ 57        Examples:
+ 58            - { "@int": "100" } decodes to 100 of type int
+ 59            - { "@double": "100" } decodes to 100.0 of type float
+ 60            - { "@long": "100" } decodes to 100 of type int
+ 61            - { "@time": "..." } decodes to a datetime
+ 62            - { "@date": "..." } decodes to a date
+ 63            - { "@doc": ... } decodes to a Document or NamedDocument
+ 64            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+ 65            - { "@mod": ... } decodes to a Module
+ 66            - { "@set": ... } decodes to a Page
+ 67            - { "@stream": ... } decodes to an EventSource
+ 68            - { "@bytes": ... } decodes to a bytearray
+ 69
+ 70        :param obj: the object to decode
+ 71        """
+ 72    return FaunaDecoder._decode(obj)
+ 73
+ 74  @staticmethod
+ 75  def _decode(o: Any, escaped: bool = False):
+ 76    if isinstance(o, (str, bool, int, float)):
+ 77      return o
+ 78    elif isinstance(o, list):
+ 79      return FaunaDecoder._decode_list(o)
+ 80    elif isinstance(o, dict):
+ 81      return FaunaDecoder._decode_dict(o, escaped)
+ 82
+ 83  @staticmethod
+ 84  def _decode_list(lst: List):
+ 85    return [FaunaDecoder._decode(i) for i in lst]
+ 86
+ 87  @staticmethod
+ 88  def _decode_dict(dct: dict, escaped: bool):
+ 89    keys = dct.keys()
+ 90
+ 91    # If escaped, everything is user-specified
+ 92    if escaped:
+ 93      return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+ 94
+ 95    if len(keys) == 1:
+ 96      if "@int" in keys:
+ 97        return int(dct["@int"])
+ 98      if "@long" in keys:
+ 99        return int(dct["@long"])
+100      if "@double" in dct:
+101        return float(dct["@double"])
+102      if "@object" in dct:
+103        return FaunaDecoder._decode(dct["@object"], True)
+104      if "@mod" in dct:
+105        return Module(dct["@mod"])
+106      if "@time" in dct:
+107        return parse_date(dct["@time"])
+108      if "@date" in dct:
+109        return parse_date(dct["@date"]).date()
+110      if "@bytes" in dct:
+111        bts = base64.b64decode(dct["@bytes"])
+112        return bytearray(bts)
+113      if "@doc" in dct:
+114        value = dct["@doc"]
+115        if isinstance(value, str):
+116          # Not distinguishing between DocumentReference and NamedDocumentReference because this shouldn't
+117          # be an issue much longer
+118          return DocumentReference.from_string(value)
+119
+120        contents = FaunaDecoder._decode(value)
+121
+122        if "id" in contents and "coll" in contents and "ts" in contents:
+123          doc_id = contents.pop("id")
+124          doc_coll = contents.pop("coll")
+125          doc_ts = contents.pop("ts")
+126
+127          return Document(
+128              id=doc_id,
+129              coll=doc_coll,
+130              ts=doc_ts,
+131              data=contents,
+132          )
+133        elif "name" in contents and "coll" in contents and "ts" in contents:
+134          doc_name = contents.pop("name")
+135          doc_coll = contents.pop("coll")
+136          doc_ts = contents.pop("ts")
+137
+138          return NamedDocument(
+139              name=doc_name,
+140              coll=doc_coll,
+141              ts=doc_ts,
+142              data=contents,
+143          )
+144        else:
+145          # Unsupported document reference. Return the unwrapped value to futureproof.
+146          return contents
+147
+148      if "@ref" in dct:
+149        value = dct["@ref"]
+150        if "id" not in value and "name" not in value:
+151          # Unsupported document reference. Return the unwrapped value to futureproof.
+152          return value
+153
+154        col = FaunaDecoder._decode(value["coll"])
+155        doc_ref: Union[DocumentReference, NamedDocumentReference]
+156
+157        if "id" in value:
+158          doc_ref = DocumentReference(col, value["id"])
+159        else:
+160          doc_ref = NamedDocumentReference(col, value["name"])
+161
+162        if "exists" in value and not value["exists"]:
+163          cause = value["cause"] if "cause" in value else None
+164          return NullDocument(doc_ref, cause)
+165
+166        return doc_ref
+167
+168      if "@set" in dct:
+169        value = dct["@set"]
+170        if isinstance(value, str):
+171          return Page(after=value)
+172
+173        after = value["after"] if "after" in value else None
+174        data = FaunaDecoder._decode(value["data"]) if "data" in value else None
+175
+176        return Page(data=data, after=after)
+177
+178      if "@stream" in dct:
+179        return EventSource(dct["@stream"])
+180
+181    return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+
+ + +
+
+ +
+ + class + FaunaDecoder: + + + +
+ +
 11class FaunaDecoder:
+ 12  """Supports the following types:
+ 13
+ 14     +--------------------+---------------+
+ 15     | Python             | Fauna         |
+ 16     +====================+===============+
+ 17     | dict               | object        |
+ 18     +--------------------+---------------+
+ 19     | list, tuple        | array         |
+ 20     +--------------------+---------------+
+ 21     | str                | string        |
+ 22     +--------------------+---------------+
+ 23     | int                | @int          |
+ 24     +--------------------+---------------+
+ 25     | int                | @long         |
+ 26     +--------------------+---------------+
+ 27     | float              | @double       |
+ 28     +--------------------+---------------+
+ 29     | datetime.datetime  | @time         |
+ 30     +--------------------+---------------+
+ 31     | datetime.date      | @date         |
+ 32     +--------------------+---------------+
+ 33     | True               | true          |
+ 34     +--------------------+---------------+
+ 35     | False              | false         |
+ 36     +--------------------+---------------+
+ 37     | None               | null          |
+ 38     +--------------------+---------------+
+ 39     | bytearray          | @bytes        |
+ 40     +--------------------+---------------+
+ 41     | *DocumentReference | @ref          |
+ 42     +--------------------+---------------+
+ 43     | *Document          | @doc          |
+ 44     +--------------------+---------------+
+ 45     | Module             | @mod          |
+ 46     +--------------------+---------------+
+ 47     | Page               | @set          |
+ 48     +--------------------+---------------+
+ 49     | EventSource        | @stream       |
+ 50     +--------------------+---------------+
+ 51
+ 52     """
+ 53
+ 54  @staticmethod
+ 55  def decode(obj: Any):
+ 56    """Decodes supported objects from the tagged typed into untagged.
+ 57
+ 58        Examples:
+ 59            - { "@int": "100" } decodes to 100 of type int
+ 60            - { "@double": "100" } decodes to 100.0 of type float
+ 61            - { "@long": "100" } decodes to 100 of type int
+ 62            - { "@time": "..." } decodes to a datetime
+ 63            - { "@date": "..." } decodes to a date
+ 64            - { "@doc": ... } decodes to a Document or NamedDocument
+ 65            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+ 66            - { "@mod": ... } decodes to a Module
+ 67            - { "@set": ... } decodes to a Page
+ 68            - { "@stream": ... } decodes to an EventSource
+ 69            - { "@bytes": ... } decodes to a bytearray
+ 70
+ 71        :param obj: the object to decode
+ 72        """
+ 73    return FaunaDecoder._decode(obj)
+ 74
+ 75  @staticmethod
+ 76  def _decode(o: Any, escaped: bool = False):
+ 77    if isinstance(o, (str, bool, int, float)):
+ 78      return o
+ 79    elif isinstance(o, list):
+ 80      return FaunaDecoder._decode_list(o)
+ 81    elif isinstance(o, dict):
+ 82      return FaunaDecoder._decode_dict(o, escaped)
+ 83
+ 84  @staticmethod
+ 85  def _decode_list(lst: List):
+ 86    return [FaunaDecoder._decode(i) for i in lst]
+ 87
+ 88  @staticmethod
+ 89  def _decode_dict(dct: dict, escaped: bool):
+ 90    keys = dct.keys()
+ 91
+ 92    # If escaped, everything is user-specified
+ 93    if escaped:
+ 94      return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+ 95
+ 96    if len(keys) == 1:
+ 97      if "@int" in keys:
+ 98        return int(dct["@int"])
+ 99      if "@long" in keys:
+100        return int(dct["@long"])
+101      if "@double" in dct:
+102        return float(dct["@double"])
+103      if "@object" in dct:
+104        return FaunaDecoder._decode(dct["@object"], True)
+105      if "@mod" in dct:
+106        return Module(dct["@mod"])
+107      if "@time" in dct:
+108        return parse_date(dct["@time"])
+109      if "@date" in dct:
+110        return parse_date(dct["@date"]).date()
+111      if "@bytes" in dct:
+112        bts = base64.b64decode(dct["@bytes"])
+113        return bytearray(bts)
+114      if "@doc" in dct:
+115        value = dct["@doc"]
+116        if isinstance(value, str):
+117          # Not distinguishing between DocumentReference and NamedDocumentReference because this shouldn't
+118          # be an issue much longer
+119          return DocumentReference.from_string(value)
+120
+121        contents = FaunaDecoder._decode(value)
+122
+123        if "id" in contents and "coll" in contents and "ts" in contents:
+124          doc_id = contents.pop("id")
+125          doc_coll = contents.pop("coll")
+126          doc_ts = contents.pop("ts")
+127
+128          return Document(
+129              id=doc_id,
+130              coll=doc_coll,
+131              ts=doc_ts,
+132              data=contents,
+133          )
+134        elif "name" in contents and "coll" in contents and "ts" in contents:
+135          doc_name = contents.pop("name")
+136          doc_coll = contents.pop("coll")
+137          doc_ts = contents.pop("ts")
+138
+139          return NamedDocument(
+140              name=doc_name,
+141              coll=doc_coll,
+142              ts=doc_ts,
+143              data=contents,
+144          )
+145        else:
+146          # Unsupported document reference. Return the unwrapped value to futureproof.
+147          return contents
+148
+149      if "@ref" in dct:
+150        value = dct["@ref"]
+151        if "id" not in value and "name" not in value:
+152          # Unsupported document reference. Return the unwrapped value to futureproof.
+153          return value
+154
+155        col = FaunaDecoder._decode(value["coll"])
+156        doc_ref: Union[DocumentReference, NamedDocumentReference]
+157
+158        if "id" in value:
+159          doc_ref = DocumentReference(col, value["id"])
+160        else:
+161          doc_ref = NamedDocumentReference(col, value["name"])
+162
+163        if "exists" in value and not value["exists"]:
+164          cause = value["cause"] if "cause" in value else None
+165          return NullDocument(doc_ref, cause)
+166
+167        return doc_ref
+168
+169      if "@set" in dct:
+170        value = dct["@set"]
+171        if isinstance(value, str):
+172          return Page(after=value)
+173
+174        after = value["after"] if "after" in value else None
+175        data = FaunaDecoder._decode(value["data"]) if "data" in value else None
+176
+177        return Page(data=data, after=after)
+178
+179      if "@stream" in dct:
+180        return EventSource(dct["@stream"])
+181
+182    return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+
+ + +

Supports the following types:

+ +

+--------------------+---------------+ +| Python | Fauna | ++====================+===============+ +| dict | object | ++--------------------+---------------+ +| list, tuple | array | ++--------------------+---------------+ +| str | string | ++--------------------+---------------+ +| int | @int | ++--------------------+---------------+ +| int | @long | ++--------------------+---------------+ +| float | @double | ++--------------------+---------------+ +| datetime.datetime | @time | ++--------------------+---------------+ +| datetime.date | @date | ++--------------------+---------------+ +| True | true | ++--------------------+---------------+ +| False | false | ++--------------------+---------------+ +| None | null | ++--------------------+---------------+ +| bytearray | @bytes | ++--------------------+---------------+ +| *DocumentReference | @ref | ++--------------------+---------------+ +| *Document | @doc | ++--------------------+---------------+ +| Module | @mod | ++--------------------+---------------+ +| Page | @set | ++--------------------+---------------+ +| EventSource | @stream | ++--------------------+---------------+

+
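A short sketch of decoding a tagged payload (illustrative values) into plain Python values:

from fauna.encoding import FaunaDecoder

tagged = {
    "count": {"@int": "3"},
    "weight": {"@double": "1.5"},
    "created": {"@time": "2023-01-02T03:04:05Z"},
}
plain = FaunaDecoder.decode(tagged)
# {'count': 3, 'weight': 1.5,
#  'created': datetime.datetime(2023, 1, 2, 3, 4, 5, tzinfo=...)}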
+ + +
+ +
+
@staticmethod
+ + def + decode(obj: Any): + + + +
+ +
54  @staticmethod
+55  def decode(obj: Any):
+56    """Decodes supported objects from the tagged typed into untagged.
+57
+58        Examples:
+59            - { "@int": "100" } decodes to 100 of type int
+60            - { "@double": "100" } decodes to 100.0 of type float
+61            - { "@long": "100" } decodes to 100 of type int
+62            - { "@time": "..." } decodes to a datetime
+63            - { "@date": "..." } decodes to a date
+64            - { "@doc": ... } decodes to a Document or NamedDocument
+65            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+66            - { "@mod": ... } decodes to a Module
+67            - { "@set": ... } decodes to a Page
+68            - { "@stream": ... } decodes to an EventSource
+69            - { "@bytes": ... } decodes to a bytearray
+70
+71        :param obj: the object to decode
+72        """
+73    return FaunaDecoder._decode(obj)
+
+ + +

Decodes supported objects from the tagged format into untagged values.

+ +

Examples: + - { "@int": "100" } decodes to 100 of type int + - { "@double": "100" } decodes to 100.0 of type float + - { "@long": "100" } decodes to 100 of type int + - { "@time": "..." } decodes to a datetime + - { "@date": "..." } decodes to a date + - { "@doc": ... } decodes to a Document or NamedDocument + - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference + - { "@mod": ... } decodes to a Module + - { "@set": ... } decodes to a Page + - { "@stream": ... } decodes to an EventSource + - { "@bytes": ... } decodes to a bytearray

+ +
Parameters
+ +
    +
  • obj: the object to decode
  • +
+
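A small sketch of the escaped-object case (illustrative payload): values delivered under "@object" are treated as user data, so reserved-looking keys inside them are not reinterpreted as type tags.

from fauna.encoding import FaunaDecoder

escaped = {"@object": {"@int": "not a number"}}
print(FaunaDecoder.decode(escaped))   # {'@int': 'not a number'}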
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/encoding/encoder.html b/2.3.0/fauna/encoding/encoder.html new file mode 100644 index 00000000..0ae97f8c --- /dev/null +++ b/2.3.0/fauna/encoding/encoder.html @@ -0,0 +1,1222 @@ + + + + + + + fauna.encoding.encoder API documentation + + + + + + + + + +
+
+

+fauna.encoding.encoder

+ + + + + + +
  1import base64
+  2from datetime import datetime, date
+  3from typing import Any, Optional, List, Union
+  4
+  5from fauna.query.models import DocumentReference, Module, Document, NamedDocument, NamedDocumentReference, NullDocument, \
+  6  EventSource
+  7from fauna.query.query_builder import Query, Fragment, LiteralFragment, ValueFragment
+  8
+  9_RESERVED_TAGS = [
+ 10    "@date",
+ 11    "@doc",
+ 12    "@double",
+ 13    "@int",
+ 14    "@long",
+ 15    "@mod",
+ 16    "@object",
+ 17    "@ref",
+ 18    "@set",
+ 19    "@time",
+ 20]
+ 21
+ 22
+ 23class FaunaEncoder:
+ 24  """Supports the following types:
+ 25
+ 26    +-------------------------------+---------------+
+ 27    | Python                        | Fauna Tags    |
+ 28    +===============================+===============+
+ 29    | dict                          | @object       |
+ 30    +-------------------------------+---------------+
+ 31    | list, tuple                   | array         |
+ 32    +-------------------------------+---------------+
+ 33    | str                           | string        |
+ 34    +-------------------------------+---------------+
+ 35    | int 32-bit signed             | @int          |
+ 36    +-------------------------------+---------------+
+ 37    | int 64-bit signed             | @long         |
+ 38    +-------------------------------+---------------+
+ 39    | float                         | @double       |
+ 40    +-------------------------------+---------------+
+ 41    | datetime.datetime             | @time         |
+ 42    +-------------------------------+---------------+
+ 43    | datetime.date                 | @date         |
+ 44    +-------------------------------+---------------+
+ 45    | True                          | True          |
+ 46    +-------------------------------+---------------+
+ 47    | False                         | False         |
+ 48    +-------------------------------+---------------+
+ 49    | None                          | None          |
+ 50    +-------------------------------+---------------+
+ 51    | bytes / bytearray             | @bytes        |
+ 52    +-------------------------------+---------------+
+ 53    | *Document                     | @ref          |
+ 54    +-------------------------------+---------------+
+ 55    | *DocumentReference            | @ref          |
+ 56    +-------------------------------+---------------+
+ 57    | Module                        | @mod          |
+ 58    +-------------------------------+---------------+
+ 59    | Query                         | fql           |
+ 60    +-------------------------------+---------------+
+ 61    | ValueFragment                 | value         |
+ 62    +-------------------------------+---------------+
+ 63    | TemplateFragment              | string        |
+ 64    +-------------------------------+---------------+
+ 65    | EventSource                   | string        |
+ 66    +-------------------------------+---------------+
+ 67
+ 68    """
+ 69
+ 70  @staticmethod
+ 71  def encode(obj: Any) -> Any:
+ 72    """Encodes supported objects into the tagged format.
+ 73
+ 74        Examples:
+ 75            - Up to 32-bit ints encode to { "@int": "..." }
+ 76            - Up to 64-bit ints encode to { "@long": "..." }
+ 77            - Floats encode to { "@double": "..." }
+ 78            - datetime encodes to { "@time": "..." }
+ 79            - date encodes to { "@date": "..." }
+ 80            - DocumentReference encodes to { "@ref": "..." }
+ 81            - Module encodes to { "@mod": "..." }
+ 82            - Query encodes to { "fql": [...] }
+ 83            - ValueFragment encodes to { "value": <encoded_val> }
+ 84            - LiteralFragment encodes to a string
+ 85            - EventSource encodes to a string
+ 86
+ 87        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+ 88        :param obj: the object to encode
+ 89        """
+ 90    return FaunaEncoder._encode(obj)
+ 91
+ 92  @staticmethod
+ 93  def from_int(obj: int):
+ 94    if -2**31 <= obj <= 2**31 - 1:
+ 95      return {"@int": repr(obj)}
+ 96    elif -2**63 <= obj <= 2**63 - 1:
+ 97      return {"@long": repr(obj)}
+ 98    else:
+ 99      raise ValueError("Precision loss when converting int to Fauna type")
+100
+101  @staticmethod
+102  def from_bool(obj: bool):
+103    return obj
+104
+105  @staticmethod
+106  def from_float(obj: float):
+107    return {"@double": repr(obj)}
+108
+109  @staticmethod
+110  def from_str(obj: str):
+111    return obj
+112
+113  @staticmethod
+114  def from_datetime(obj: datetime):
+115    if obj.utcoffset() is None:
+116      raise ValueError("datetimes must be timezone-aware")
+117
+118    return {"@time": obj.isoformat(sep="T")}
+119
+120  @staticmethod
+121  def from_date(obj: date):
+122    return {"@date": obj.isoformat()}
+123
+124  @staticmethod
+125  def from_bytes(obj: Union[bytearray, bytes]):
+126    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+127
+128  @staticmethod
+129  def from_doc_ref(obj: DocumentReference):
+130    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+131
+132  @staticmethod
+133  def from_named_doc_ref(obj: NamedDocumentReference):
+134    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+135
+136  @staticmethod
+137  def from_mod(obj: Module):
+138    return {"@mod": obj.name}
+139
+140  @staticmethod
+141  def from_dict(obj: Any):
+142    return {"@object": obj}
+143
+144  @staticmethod
+145  def from_none():
+146    return None
+147
+148  @staticmethod
+149  def from_fragment(obj: Fragment):
+150    if isinstance(obj, LiteralFragment):
+151      return obj.get()
+152    elif isinstance(obj, ValueFragment):
+153      v = obj.get()
+154      if isinstance(v, Query):
+155        return FaunaEncoder.from_query_interpolation_builder(v)
+156      else:
+157        return {"value": FaunaEncoder.encode(v)}
+158    else:
+159      raise ValueError(f"Unknown fragment type: {type(obj)}")
+160
+161  @staticmethod
+162  def from_query_interpolation_builder(obj: Query):
+163    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+164
+165  @staticmethod
+166  def from_streamtoken(obj: EventSource):
+167    return {"@stream": obj.token}
+168
+169  @staticmethod
+170  def _encode(o: Any, _markers: Optional[List] = None):
+171    if _markers is None:
+172      _markers = []
+173
+174    if isinstance(o, str):
+175      return FaunaEncoder.from_str(o)
+176    elif o is None:
+177      return FaunaEncoder.from_none()
+178    elif o is True:
+179      return FaunaEncoder.from_bool(o)
+180    elif o is False:
+181      return FaunaEncoder.from_bool(o)
+182    elif isinstance(o, int):
+183      return FaunaEncoder.from_int(o)
+184    elif isinstance(o, float):
+185      return FaunaEncoder.from_float(o)
+186    elif isinstance(o, Module):
+187      return FaunaEncoder.from_mod(o)
+188    elif isinstance(o, DocumentReference):
+189      return FaunaEncoder.from_doc_ref(o)
+190    elif isinstance(o, NamedDocumentReference):
+191      return FaunaEncoder.from_named_doc_ref(o)
+192    elif isinstance(o, datetime):
+193      return FaunaEncoder.from_datetime(o)
+194    elif isinstance(o, date):
+195      return FaunaEncoder.from_date(o)
+196    elif isinstance(o, bytearray) or isinstance(o, bytes):
+197      return FaunaEncoder.from_bytes(o)
+198    elif isinstance(o, Document):
+199      return FaunaEncoder.from_doc_ref(DocumentReference(o.coll, o.id))
+200    elif isinstance(o, NamedDocument):
+201      return FaunaEncoder.from_named_doc_ref(
+202          NamedDocumentReference(o.coll, o.name))
+203    elif isinstance(o, NullDocument):
+204      return FaunaEncoder.encode(o.ref)
+205    elif isinstance(o, (list, tuple)):
+206      return FaunaEncoder._encode_list(o, _markers)
+207    elif isinstance(o, dict):
+208      return FaunaEncoder._encode_dict(o, _markers)
+209    elif isinstance(o, Query):
+210      return FaunaEncoder.from_query_interpolation_builder(o)
+211    elif isinstance(o, EventSource):
+212      return FaunaEncoder.from_streamtoken(o)
+213    else:
+214      raise ValueError(f"Object {o} of type {type(o)} cannot be encoded")
+215
+216  @staticmethod
+217  def _encode_list(lst, markers):
+218    _id = id(lst)
+219    if _id in markers:
+220      raise ValueError("Circular reference detected")
+221
+222    markers.append(id(lst))
+223    res = [FaunaEncoder._encode(elem, markers) for elem in lst]
+224    markers.pop()
+225    return res
+226
+227  @staticmethod
+228  def _encode_dict(dct, markers):
+229    _id = id(dct)
+230    if _id in markers:
+231      raise ValueError("Circular reference detected")
+232
+233    markers.append(id(dct))
+234    if any(i in _RESERVED_TAGS for i in dct.keys()):
+235      res = {
+236          "@object": {
+237              k: FaunaEncoder._encode(v, markers) for k, v in dct.items()
+238          }
+239      }
+240      markers.pop()
+241      return res
+242    else:
+243      res = {k: FaunaEncoder._encode(v, markers) for k, v in dct.items()}
+244      markers.pop()
+245      return res
+
+ + +
+
+ +
+ + class + FaunaEncoder: + + + +
+ +
 24class FaunaEncoder:
+ 25  """Supports the following types:
+ 26
+ 27    +-------------------------------+---------------+
+ 28    | Python                        | Fauna Tags    |
+ 29    +===============================+===============+
+ 30    | dict                          | @object       |
+ 31    +-------------------------------+---------------+
+ 32    | list, tuple                   | array         |
+ 33    +-------------------------------+---------------+
+ 34    | str                           | string        |
+ 35    +-------------------------------+---------------+
+ 36    | int 32-bit signed             | @int          |
+ 37    +-------------------------------+---------------+
+ 38    | int 64-bit signed             | @long         |
+ 39    +-------------------------------+---------------+
+ 40    | float                         | @double       |
+ 41    +-------------------------------+---------------+
+ 42    | datetime.datetime             | @time         |
+ 43    +-------------------------------+---------------+
+ 44    | datetime.date                 | @date         |
+ 45    +-------------------------------+---------------+
+ 46    | True                          | True          |
+ 47    +-------------------------------+---------------+
+ 48    | False                         | False         |
+ 49    +-------------------------------+---------------+
+ 50    | None                          | None          |
+ 51    +-------------------------------+---------------+
+ 52    | bytes / bytearray             | @bytes        |
+ 53    +-------------------------------+---------------+
+ 54    | *Document                     | @ref          |
+ 55    +-------------------------------+---------------+
+ 56    | *DocumentReference            | @ref          |
+ 57    +-------------------------------+---------------+
+ 58    | Module                        | @mod          |
+ 59    +-------------------------------+---------------+
+ 60    | Query                         | fql           |
+ 61    +-------------------------------+---------------+
+ 62    | ValueFragment                 | value         |
+ 63    +-------------------------------+---------------+
+ 64    | TemplateFragment              | string        |
+ 65    +-------------------------------+---------------+
+ 66    | EventSource                   | string        |
+ 67    +-------------------------------+---------------+
+ 68
+ 69    """
+ 70
+ 71  @staticmethod
+ 72  def encode(obj: Any) -> Any:
+ 73    """Encodes supported objects into the tagged format.
+ 74
+ 75        Examples:
+ 76            - Up to 32-bit ints encode to { "@int": "..." }
+ 77            - Up to 64-bit ints encode to { "@long": "..." }
+ 78            - Floats encode to { "@double": "..." }
+ 79            - datetime encodes to { "@time": "..." }
+ 80            - date encodes to { "@date": "..." }
+ 81            - DocumentReference encodes to { "@ref": "..." }
+ 82            - Module encodes to { "@mod": "..." }
+ 83            - Query encodes to { "fql": [...] }
+ 84            - ValueFragment encodes to { "value": <encoded_val> }
+ 85            - LiteralFragment encodes to a string
+ 86            - EventSource encodes to a string
+ 87
+ 88        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+ 89        :param obj: the object to encode
+ 90        """
+ 91    return FaunaEncoder._encode(obj)
+ 92
+ 93  @staticmethod
+ 94  def from_int(obj: int):
+ 95    if -2**31 <= obj <= 2**31 - 1:
+ 96      return {"@int": repr(obj)}
+ 97    elif -2**63 <= obj <= 2**63 - 1:
+ 98      return {"@long": repr(obj)}
+ 99    else:
+100      raise ValueError("Precision loss when converting int to Fauna type")
+101
+102  @staticmethod
+103  def from_bool(obj: bool):
+104    return obj
+105
+106  @staticmethod
+107  def from_float(obj: float):
+108    return {"@double": repr(obj)}
+109
+110  @staticmethod
+111  def from_str(obj: str):
+112    return obj
+113
+114  @staticmethod
+115  def from_datetime(obj: datetime):
+116    if obj.utcoffset() is None:
+117      raise ValueError("datetimes must be timezone-aware")
+118
+119    return {"@time": obj.isoformat(sep="T")}
+120
+121  @staticmethod
+122  def from_date(obj: date):
+123    return {"@date": obj.isoformat()}
+124
+125  @staticmethod
+126  def from_bytes(obj: Union[bytearray, bytes]):
+127    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+128
+129  @staticmethod
+130  def from_doc_ref(obj: DocumentReference):
+131    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+132
+133  @staticmethod
+134  def from_named_doc_ref(obj: NamedDocumentReference):
+135    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+136
+137  @staticmethod
+138  def from_mod(obj: Module):
+139    return {"@mod": obj.name}
+140
+141  @staticmethod
+142  def from_dict(obj: Any):
+143    return {"@object": obj}
+144
+145  @staticmethod
+146  def from_none():
+147    return None
+148
+149  @staticmethod
+150  def from_fragment(obj: Fragment):
+151    if isinstance(obj, LiteralFragment):
+152      return obj.get()
+153    elif isinstance(obj, ValueFragment):
+154      v = obj.get()
+155      if isinstance(v, Query):
+156        return FaunaEncoder.from_query_interpolation_builder(v)
+157      else:
+158        return {"value": FaunaEncoder.encode(v)}
+159    else:
+160      raise ValueError(f"Unknown fragment type: {type(obj)}")
+161
+162  @staticmethod
+163  def from_query_interpolation_builder(obj: Query):
+164    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+165
+166  @staticmethod
+167  def from_streamtoken(obj: EventSource):
+168    return {"@stream": obj.token}
+169
+170  @staticmethod
+171  def _encode(o: Any, _markers: Optional[List] = None):
+172    if _markers is None:
+173      _markers = []
+174
+175    if isinstance(o, str):
+176      return FaunaEncoder.from_str(o)
+177    elif o is None:
+178      return FaunaEncoder.from_none()
+179    elif o is True:
+180      return FaunaEncoder.from_bool(o)
+181    elif o is False:
+182      return FaunaEncoder.from_bool(o)
+183    elif isinstance(o, int):
+184      return FaunaEncoder.from_int(o)
+185    elif isinstance(o, float):
+186      return FaunaEncoder.from_float(o)
+187    elif isinstance(o, Module):
+188      return FaunaEncoder.from_mod(o)
+189    elif isinstance(o, DocumentReference):
+190      return FaunaEncoder.from_doc_ref(o)
+191    elif isinstance(o, NamedDocumentReference):
+192      return FaunaEncoder.from_named_doc_ref(o)
+193    elif isinstance(o, datetime):
+194      return FaunaEncoder.from_datetime(o)
+195    elif isinstance(o, date):
+196      return FaunaEncoder.from_date(o)
+197    elif isinstance(o, bytearray) or isinstance(o, bytes):
+198      return FaunaEncoder.from_bytes(o)
+199    elif isinstance(o, Document):
+200      return FaunaEncoder.from_doc_ref(DocumentReference(o.coll, o.id))
+201    elif isinstance(o, NamedDocument):
+202      return FaunaEncoder.from_named_doc_ref(
+203          NamedDocumentReference(o.coll, o.name))
+204    elif isinstance(o, NullDocument):
+205      return FaunaEncoder.encode(o.ref)
+206    elif isinstance(o, (list, tuple)):
+207      return FaunaEncoder._encode_list(o, _markers)
+208    elif isinstance(o, dict):
+209      return FaunaEncoder._encode_dict(o, _markers)
+210    elif isinstance(o, Query):
+211      return FaunaEncoder.from_query_interpolation_builder(o)
+212    elif isinstance(o, EventSource):
+213      return FaunaEncoder.from_streamtoken(o)
+214    else:
+215      raise ValueError(f"Object {o} of type {type(o)} cannot be encoded")
+216
+217  @staticmethod
+218  def _encode_list(lst, markers):
+219    _id = id(lst)
+220    if _id in markers:
+221      raise ValueError("Circular reference detected")
+222
+223    markers.append(id(lst))
+224    res = [FaunaEncoder._encode(elem, markers) for elem in lst]
+225    markers.pop()
+226    return res
+227
+228  @staticmethod
+229  def _encode_dict(dct, markers):
+230    _id = id(dct)
+231    if _id in markers:
+232      raise ValueError("Circular reference detected")
+233
+234    markers.append(id(dct))
+235    if any(i in _RESERVED_TAGS for i in dct.keys()):
+236      res = {
+237          "@object": {
+238              k: FaunaEncoder._encode(v, markers) for k, v in dct.items()
+239          }
+240      }
+241      markers.pop()
+242      return res
+243    else:
+244      res = {k: FaunaEncoder._encode(v, markers) for k, v in dct.items()}
+245      markers.pop()
+246      return res
+
+ + +

Supports the following types:

+ +

+-------------------------------+---------------+ +| Python | Fauna Tags | ++===============================+===============+ +| dict | @object | ++-------------------------------+---------------+ +| list, tuple | array | ++-------------------------------+---------------+ +| str | string | ++-------------------------------+---------------+ +| int 32-bit signed | @int | ++-------------------------------+---------------+ +| int 64-bit signed | @long | ++-------------------------------+---------------+ +| float | @double | ++-------------------------------+---------------+ +| datetime.datetime | @time | ++-------------------------------+---------------+ +| datetime.date | @date | ++-------------------------------+---------------+ +| True | True | ++-------------------------------+---------------+ +| False | False | ++-------------------------------+---------------+ +| None | None | ++-------------------------------+---------------+ +| bytes / bytearray | @bytes | ++-------------------------------+---------------+ +| *Document | @ref | ++-------------------------------+---------------+ +| *DocumentReference | @ref | ++-------------------------------+---------------+ +| Module | @mod | ++-------------------------------+---------------+ +| Query | fql | ++-------------------------------+---------------+ +| ValueFragment | value | ++-------------------------------+---------------+ +| TemplateFragment | string | ++-------------------------------+---------------+ +| EventSource | string | ++-------------------------------+---------------+

+
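A short sketch of encoding plain Python values (illustrative document) into the tagged format:

from datetime import datetime, timezone
from fauna.encoding import FaunaEncoder

doc = {
    "count": 3,
    "big": 2**40,
    "weight": 1.5,
    "created": datetime(2023, 1, 2, tzinfo=timezone.utc),  # must be timezone-aware
}
print(FaunaEncoder.encode(doc))
# {'count': {'@int': '3'}, 'big': {'@long': '1099511627776'},
#  'weight': {'@double': '1.5'}, 'created': {'@time': '2023-01-02T00:00:00+00:00'}}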
+ + +
+ +
+
@staticmethod
+ + def + encode(obj: Any) -> Any: + + + +
+ +
71  @staticmethod
+72  def encode(obj: Any) -> Any:
+73    """Encodes supported objects into the tagged format.
+74
+75        Examples:
+76            - Up to 32-bit ints encode to { "@int": "..." }
+77            - Up to 64-bit ints encode to { "@long": "..." }
+78            - Floats encode to { "@double": "..." }
+79            - datetime encodes to { "@time": "..." }
+80            - date encodes to { "@date": "..." }
+81            - DocumentReference encodes to { "@ref": "..." }
+82            - Module encodes to { "@mod": "..." }
+83            - Query encodes to { "fql": [...] }
+84            - ValueFragment encodes to { "value": <encoded_val> }
+85            - LiteralFragment encodes to a string
+86            - EventSource encodes to a string
+87
+88        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+89        :param obj: the object to encode
+90        """
+91    return FaunaEncoder._encode(obj)
+
+ + +

Encodes supported objects into the tagged format.

+ +

Examples: + - Up to 32-bit ints encode to { "@int": "..." } + - Up to 64-bit ints encode to { "@long": "..." } + - Floats encode to { "@double": "..." } + - datetime encodes to { "@time": "..." } + - date encodes to { "@date": "..." } + - DocumentReference encodes to { "@ref": "..." } + - Module encodes to { "@mod": "..." } + - Query encodes to { "fql": [...] } + - ValueFragment encodes to { "value": <encoded_val> } + - LiteralFragment encodes to a string + - EventSource encodes to a string

+ +
Raises
+ +
    +
  • ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
  • +
+ +
Parameters
+ +
    +
  • obj: the object to encode
  • +
+
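A small sketch of two edge cases, based on the listing above: dict keys that collide with reserved tags are wrapped in "@object" so the server does not treat them as type tags, and unsupported values raise ValueError.

from fauna.encoding import FaunaEncoder

print(FaunaEncoder.encode({"@int": "oops"}))   # {'@object': {'@int': 'oops'}}

try:
    FaunaEncoder.encode({1, 2, 3})             # a Python set is not a supported type
except ValueError as e:
    print(e)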
+ + +
+
+ +
+
@staticmethod
+ + def + from_int(obj: int): + + + +
+ +
 93  @staticmethod
+ 94  def from_int(obj: int):
+ 95    if -2**31 <= obj <= 2**31 - 1:
+ 96      return {"@int": repr(obj)}
+ 97    elif -2**63 <= obj <= 2**63 - 1:
+ 98      return {"@long": repr(obj)}
+ 99    else:
+100      raise ValueError("Precision loss when converting int to Fauna type")
+
+ + + + +
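The 32/64-bit boundary, as a quick sketch of the listing above:

from fauna.encoding import FaunaEncoder

print(FaunaEncoder.from_int(2**31 - 1))   # {'@int': '2147483647'}
print(FaunaEncoder.from_int(2**31))       # {'@long': '2147483648'}
# FaunaEncoder.from_int(2**63) raises ValueError (precision loss)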
+
+ +
+
@staticmethod
+ + def + from_bool(obj: bool): + + + +
+ +
102  @staticmethod
+103  def from_bool(obj: bool):
+104    return obj
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_float(obj: float): + + + +
+ +
106  @staticmethod
+107  def from_float(obj: float):
+108    return {"@double": repr(obj)}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_str(obj: str): + + + +
+ +
110  @staticmethod
+111  def from_str(obj: str):
+112    return obj
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_datetime(obj: datetime.datetime): + + + +
+ +
114  @staticmethod
+115  def from_datetime(obj: datetime):
+116    if obj.utcoffset() is None:
+117      raise ValueError("datetimes must be timezone-aware")
+118
+119    return {"@time": obj.isoformat(sep="T")}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_date(obj: datetime.date): + + + +
+ +
121  @staticmethod
+122  def from_date(obj: date):
+123    return {"@date": obj.isoformat()}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_bytes(obj: Union[bytearray, bytes]): + + + +
+ +
125  @staticmethod
+126  def from_bytes(obj: Union[bytearray, bytes]):
+127    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_doc_ref(obj: fauna.query.models.DocumentReference): + + + +
+ +
129  @staticmethod
+130  def from_doc_ref(obj: DocumentReference):
+131    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_named_doc_ref(obj: fauna.query.models.NamedDocumentReference): + + + +
+ +
133  @staticmethod
+134  def from_named_doc_ref(obj: NamedDocumentReference):
+135    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_mod(obj: fauna.query.models.Module): + + + +
+ +
137  @staticmethod
+138  def from_mod(obj: Module):
+139    return {"@mod": obj.name}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_dict(obj: Any): + + + +
+ +
141  @staticmethod
+142  def from_dict(obj: Any):
+143    return {"@object": obj}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_none(): + + + +
+ +
145  @staticmethod
+146  def from_none():
+147    return None
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_fragment(obj: fauna.query.query_builder.Fragment): + + + +
+ +
149  @staticmethod
+150  def from_fragment(obj: Fragment):
+151    if isinstance(obj, LiteralFragment):
+152      return obj.get()
+153    elif isinstance(obj, ValueFragment):
+154      v = obj.get()
+155      if isinstance(v, Query):
+156        return FaunaEncoder.from_query_interpolation_builder(v)
+157      else:
+158        return {"value": FaunaEncoder.encode(v)}
+159    else:
+160      raise ValueError(f"Unknown fragment type: {type(obj)}")
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_query_interpolation_builder(obj: fauna.query.query_builder.Query): + + + +
+ +
162  @staticmethod
+163  def from_query_interpolation_builder(obj: Query):
+164    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_streamtoken(obj: fauna.query.models.EventSource): + + + +
+ +
166  @staticmethod
+167  def from_streamtoken(obj: EventSource):
+168    return {"@stream": obj.token}
+
+ + + + +
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/encoding/wire_protocol.html b/2.3.0/fauna/encoding/wire_protocol.html new file mode 100644 index 00000000..4e306ded --- /dev/null +++ b/2.3.0/fauna/encoding/wire_protocol.html @@ -0,0 +1,1387 @@ + + + + + + + fauna.encoding.wire_protocol API documentation + + + + + + + + + +
+
+

+fauna.encoding.wire_protocol

+ + + + + + +
  1from dataclasses import dataclass
+  2from typing import Optional, Mapping, Any, List
+  3
+  4
+  5class QueryStats:
+  6  """Query stats"""
+  7
+  8  @property
+  9  def compute_ops(self) -> int:
+ 10    """The amount of Transactional Compute Ops consumed by the query."""
+ 11    return self._compute_ops
+ 12
+ 13  @property
+ 14  def read_ops(self) -> int:
+ 15    """The amount of Transactional Read Ops consumed by the query."""
+ 16    return self._read_ops
+ 17
+ 18  @property
+ 19  def write_ops(self) -> int:
+ 20    """The amount of Transactional Write Ops consumed by the query."""
+ 21    return self._write_ops
+ 22
+ 23  @property
+ 24  def query_time_ms(self) -> int:
+ 25    """The query run time in milliseconds."""
+ 26    return self._query_time_ms
+ 27
+ 28  @property
+ 29  def storage_bytes_read(self) -> int:
+ 30    """The amount of data read from storage, in bytes."""
+ 31    return self._storage_bytes_read
+ 32
+ 33  @property
+ 34  def storage_bytes_write(self) -> int:
+ 35    """The amount of data written to storage, in bytes."""
+ 36    return self._storage_bytes_write
+ 37
+ 38  @property
+ 39  def contention_retries(self) -> int:
+ 40    """The number of times the transaction was retried due to write contention."""
+ 41    return self._contention_retries
+ 42
+ 43  @property
+ 44  def attempts(self) -> int:
+ 45    """The number of attempts made by the client to run the query."""
+ 46    return self._attempts
+ 47
+ 48  @attempts.setter
+ 49  def attempts(self, value):
+ 50    self._attempts = value
+ 51
+ 52  def __init__(self, stats: Mapping[str, Any]):
+ 53    self._compute_ops = stats.get("compute_ops", 0)
+ 54    self._read_ops = stats.get("read_ops", 0)
+ 55    self._write_ops = stats.get("write_ops", 0)
+ 56    self._query_time_ms = stats.get("query_time_ms", 0)
+ 57    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+ 58    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+ 59    self._contention_retries = stats.get("contention_retries", 0)
+ 60    self._attempts = 0
+ 61
+ 62  def __repr__(self):
+ 63    stats = {
+ 64        "compute_ops": self._compute_ops,
+ 65        "read_ops": self._read_ops,
+ 66        "write_ops": self._write_ops,
+ 67        "query_time_ms": self._query_time_ms,
+ 68        "storage_bytes_read": self._storage_bytes_read,
+ 69        "storage_bytes_write": self._storage_bytes_write,
+ 70        "contention_retries": self._contention_retries,
+ 71        "attempts": self._attempts,
+ 72    }
+ 73
+ 74    return f"{self.__class__.__name__}(stats={repr(stats)})"
+ 75
+ 76  def __eq__(self, other):
+ 77    return type(self) == type(other) \
+ 78        and self.compute_ops == other.compute_ops \
+ 79        and self.read_ops == other.read_ops \
+ 80        and self.write_ops == other.write_ops \
+ 81        and self.query_time_ms == other.query_time_ms \
+ 82        and self.storage_bytes_read == other.storage_bytes_read \
+ 83        and self.storage_bytes_write == other.storage_bytes_write \
+ 84        and self.contention_retries == other.contention_retries \
+ 85        and self.attempts == other.attempts
+ 86
+ 87  def __ne__(self, other):
+ 88    return not self.__eq__(other)
+ 89
+ 90
+ 91class QueryInfo:
+ 92
+ 93  @property
+ 94  def query_tags(self) -> Mapping[str, Any]:
+ 95    """The tags associated with the query."""
+ 96    return self._query_tags
+ 97
+ 98  @property
+ 99  def summary(self) -> str:
+100    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+101    return self._summary
+102
+103  @property
+104  def stats(self) -> QueryStats:
+105    """Query stats associated with the query."""
+106    return self._stats
+107
+108  @property
+109  def txn_ts(self) -> int:
+110    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+111    return self._txn_ts
+112
+113  @property
+114  def schema_version(self) -> int:
+115    """The schema version that was used for the query execution."""
+116    return self._schema_version
+117
+118  def __init__(
+119      self,
+120      query_tags: Optional[Mapping[str, str]] = None,
+121      stats: Optional[QueryStats] = None,
+122      summary: Optional[str] = None,
+123      txn_ts: Optional[int] = None,
+124      schema_version: Optional[int] = None,
+125  ):
+126    self._query_tags = query_tags or {}
+127    self._stats = stats or QueryStats({})
+128    self._summary = summary or ""
+129    self._txn_ts = txn_ts or 0
+130    self._schema_version = schema_version or 0
+131
+132  def __repr__(self):
+133    return f"{self.__class__.__name__}(" \
+134           f"query_tags={repr(self.query_tags)}," \
+135           f"stats={repr(self.stats)}," \
+136           f"summary={repr(self.summary)}," \
+137           f"txn_ts={repr(self.txn_ts)}," \
+138           f"schema_version={repr(self.schema_version)})"
+139
+140
+141class QuerySuccess(QueryInfo):
+142  """The result of the query."""
+143
+144  @property
+145  def data(self) -> Any:
+146    """The data returned by the query. This is the result of the FQL query."""
+147    return self._data
+148
+149  @property
+150  def static_type(self) -> Optional[str]:
+151    """The query's inferred static result type, if the query was typechecked."""
+152    return self._static_type
+153
+154  @property
+155  def traceparent(self) -> Optional[str]:
+156    """The traceparent for the query."""
+157    return self._traceparent
+158
+159  def __init__(
+160      self,
+161      data: Any,
+162      query_tags: Optional[Mapping[str, str]],
+163      static_type: Optional[str],
+164      stats: Optional[QueryStats],
+165      summary: Optional[str],
+166      traceparent: Optional[str],
+167      txn_ts: Optional[int],
+168      schema_version: Optional[int],
+169  ):
+170
+171    super().__init__(
+172        query_tags=query_tags,
+173        stats=stats,
+174        summary=summary,
+175        txn_ts=txn_ts,
+176        schema_version=schema_version,
+177    )
+178
+179    self._traceparent = traceparent
+180    self._static_type = static_type
+181    self._data = data
+182
+183  def __repr__(self):
+184    return f"{self.__class__.__name__}(" \
+185           f"query_tags={repr(self.query_tags)}," \
+186           f"static_type={repr(self.static_type)}," \
+187           f"stats={repr(self.stats)}," \
+188           f"summary={repr(self.summary)}," \
+189           f"traceparent={repr(self.traceparent)}," \
+190           f"txn_ts={repr(self.txn_ts)}," \
+191           f"schema_version={repr(self.schema_version)}," \
+192           f"data={repr(self.data)})"
+193
+194
+195@dataclass
+196class ConstraintFailure:
+197  message: str
+198  name: Optional[str] = None
+199  paths: Optional[List[Any]] = None
+200
+201
+202class QueryTags:
+203
+204  @staticmethod
+205  def encode(tags: Mapping[str, str]) -> str:
+206    return ",".join([f"{k}={v}" for k, v in tags.items()])
+207
+208  @staticmethod
+209  def decode(tag_str: str) -> Mapping[str, str]:
+210    res: dict[str, str] = {}
+211    for pair in tag_str.split(","):
+212      kv = pair.split("=")
+213      res[kv[0]] = kv[1]
+214    return res
+
+ + +
+
+ +
+ + class + QueryStats: + + + +
+ +
 6class QueryStats:
+ 7  """Query stats"""
+ 8
+ 9  @property
+10  def compute_ops(self) -> int:
+11    """The amount of Transactional Compute Ops consumed by the query."""
+12    return self._compute_ops
+13
+14  @property
+15  def read_ops(self) -> int:
+16    """The amount of Transactional Read Ops consumed by the query."""
+17    return self._read_ops
+18
+19  @property
+20  def write_ops(self) -> int:
+21    """The amount of Transactional Write Ops consumed by the query."""
+22    return self._write_ops
+23
+24  @property
+25  def query_time_ms(self) -> int:
+26    """The query run time in milliseconds."""
+27    return self._query_time_ms
+28
+29  @property
+30  def storage_bytes_read(self) -> int:
+31    """The amount of data read from storage, in bytes."""
+32    return self._storage_bytes_read
+33
+34  @property
+35  def storage_bytes_write(self) -> int:
+36    """The amount of data written to storage, in bytes."""
+37    return self._storage_bytes_write
+38
+39  @property
+40  def contention_retries(self) -> int:
+41    """The number of times the transaction was retried due to write contention."""
+42    return self._contention_retries
+43
+44  @property
+45  def attempts(self) -> int:
+46    """The number of attempts made by the client to run the query."""
+47    return self._attempts
+48
+49  @attempts.setter
+50  def attempts(self, value):
+51    self._attempts = value
+52
+53  def __init__(self, stats: Mapping[str, Any]):
+54    self._compute_ops = stats.get("compute_ops", 0)
+55    self._read_ops = stats.get("read_ops", 0)
+56    self._write_ops = stats.get("write_ops", 0)
+57    self._query_time_ms = stats.get("query_time_ms", 0)
+58    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+59    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+60    self._contention_retries = stats.get("contention_retries", 0)
+61    self._attempts = 0
+62
+63  def __repr__(self):
+64    stats = {
+65        "compute_ops": self._compute_ops,
+66        "read_ops": self._read_ops,
+67        "write_ops": self._write_ops,
+68        "query_time_ms": self._query_time_ms,
+69        "storage_bytes_read": self._storage_bytes_read,
+70        "storage_bytes_write": self._storage_bytes_write,
+71        "contention_retries": self._contention_retries,
+72        "attempts": self._attempts,
+73    }
+74
+75    return f"{self.__class__.__name__}(stats={repr(stats)})"
+76
+77  def __eq__(self, other):
+78    return type(self) == type(other) \
+79        and self.compute_ops == other.compute_ops \
+80        and self.read_ops == other.read_ops \
+81        and self.write_ops == other.write_ops \
+82        and self.query_time_ms == other.query_time_ms \
+83        and self.storage_bytes_read == other.storage_bytes_read \
+84        and self.storage_bytes_write == other.storage_bytes_write \
+85        and self.contention_retries == other.contention_retries \
+86        and self.attempts == other.attempts
+87
+88  def __ne__(self, other):
+89    return not self.__eq__(other)
+
+ + +

Query stats

+
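A minimal sketch (illustrative numbers): stats are read from a response mapping, and missing fields default to 0.

from fauna.encoding import QueryStats

stats = QueryStats({"read_ops": 8, "compute_ops": 1, "query_time_ms": 15})
print(stats.read_ops, stats.compute_ops, stats.query_time_ms)   # 8 1 15
print(stats.write_ops)                                          # 0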
+ + +
+ +
+ + QueryStats(stats: Mapping[str, Any]) + + + +
+ +
53  def __init__(self, stats: Mapping[str, Any]):
+54    self._compute_ops = stats.get("compute_ops", 0)
+55    self._read_ops = stats.get("read_ops", 0)
+56    self._write_ops = stats.get("write_ops", 0)
+57    self._query_time_ms = stats.get("query_time_ms", 0)
+58    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+59    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+60    self._contention_retries = stats.get("contention_retries", 0)
+61    self._attempts = 0
+
+ + + + +
+
+ +
+ compute_ops: int + + + +
+ +
 9  @property
+10  def compute_ops(self) -> int:
+11    """The amount of Transactional Compute Ops consumed by the query."""
+12    return self._compute_ops
+
+ + +

The amount of Transactional Compute Ops consumed by the query.

+
+ + +
+
+ +
+ read_ops: int + + + +
+ +
14  @property
+15  def read_ops(self) -> int:
+16    """The amount of Transactional Read Ops consumed by the query."""
+17    return self._read_ops
+
+ + +

The amount of Transactional Read Ops consumed by the query.

+
+ + +
+
+ +
+ write_ops: int + + + +
+ +
19  @property
+20  def write_ops(self) -> int:
+21    """The amount of Transactional Write Ops consumed by the query."""
+22    return self._write_ops
+
+ + +

The amount of Transactional Write Ops consumed by the query.

+
+ + +
+
+ +
+ query_time_ms: int + + + +
+ +
24  @property
+25  def query_time_ms(self) -> int:
+26    """The query run time in milliseconds."""
+27    return self._query_time_ms
+
+ + +

The query run time in milliseconds.

+
+ + +
+
+ +
+ storage_bytes_read: int + + + +
+ +
29  @property
+30  def storage_bytes_read(self) -> int:
+31    """The amount of data read from storage, in bytes."""
+32    return self._storage_bytes_read
+
+ + +

The amount of data read from storage, in bytes.

+
+ + +
+
+ +
+ storage_bytes_write: int + + + +
+ +
34  @property
+35  def storage_bytes_write(self) -> int:
+36    """The amount of data written to storage, in bytes."""
+37    return self._storage_bytes_write
+
+ + +

The amount of data written to storage, in bytes.

+
+ + +
+
+ +
+ contention_retries: int + + + +
+ +
39  @property
+40  def contention_retries(self) -> int:
+41    """The number of times the transaction was retried due to write contention."""
+42    return self._contention_retries
+
+ + +

The number of times the transaction was retried due to write contention.

+
+ + +
+
+ +
+ attempts: int + + + +
+ +
44  @property
+45  def attempts(self) -> int:
+46    """The number of attempts made by the client to run the query."""
+47    return self._attempts
+
+ + +

The number of attempts made by the client to run the query.

+
+ + +
+
+
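For illustration, a minimal sketch of reading these stats off a response (assuming a Client configured from environment variables such as FAUNA_SECRET; the query text is arbitrary):

from fauna import fql
from fauna.client import Client

client = Client()  # assumes FAUNA_SECRET (and optionally FAUNA_ENDPOINT) are set
res = client.query(fql("1 + 1"))

stats = res.stats
print(stats.read_ops, stats.compute_ops, stats.query_time_ms)
print(stats.attempts)  # set by the client; counts attempts, including retries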
+ +
+ + class + QueryInfo: + + + +
+ +
 92class QueryInfo:
+ 93
+ 94  @property
+ 95  def query_tags(self) -> Mapping[str, Any]:
+ 96    """The tags associated with the query."""
+ 97    return self._query_tags
+ 98
+ 99  @property
+100  def summary(self) -> str:
+101    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+102    return self._summary
+103
+104  @property
+105  def stats(self) -> QueryStats:
+106    """Query stats associated with the query."""
+107    return self._stats
+108
+109  @property
+110  def txn_ts(self) -> int:
+111    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+112    return self._txn_ts
+113
+114  @property
+115  def schema_version(self) -> int:
+116    """The schema version that was used for the query execution."""
+117    return self._schema_version
+118
+119  def __init__(
+120      self,
+121      query_tags: Optional[Mapping[str, str]] = None,
+122      stats: Optional[QueryStats] = None,
+123      summary: Optional[str] = None,
+124      txn_ts: Optional[int] = None,
+125      schema_version: Optional[int] = None,
+126  ):
+127    self._query_tags = query_tags or {}
+128    self._stats = stats or QueryStats({})
+129    self._summary = summary or ""
+130    self._txn_ts = txn_ts or 0
+131    self._schema_version = schema_version or 0
+132
+133  def __repr__(self):
+134    return f"{self.__class__.__name__}(" \
+135           f"query_tags={repr(self.query_tags)}," \
+136           f"stats={repr(self.stats)}," \
+137           f"summary={repr(self.summary)}," \
+138           f"txn_ts={repr(self.txn_ts)}," \
+139           f"schema_version={repr(self.schema_version)})"
+
+ + + + +
+ +
+ + QueryInfo( query_tags: Optional[Mapping[str, str]] = None, stats: Optional[QueryStats] = None, summary: Optional[str] = None, txn_ts: Optional[int] = None, schema_version: Optional[int] = None) + + + +
+ +
119  def __init__(
+120      self,
+121      query_tags: Optional[Mapping[str, str]] = None,
+122      stats: Optional[QueryStats] = None,
+123      summary: Optional[str] = None,
+124      txn_ts: Optional[int] = None,
+125      schema_version: Optional[int] = None,
+126  ):
+127    self._query_tags = query_tags or {}
+128    self._stats = stats or QueryStats({})
+129    self._summary = summary or ""
+130    self._txn_ts = txn_ts or 0
+131    self._schema_version = schema_version or 0
+
+ + + + +
+
+ +
+ query_tags: Mapping[str, Any] + + + +
+ +
94  @property
+95  def query_tags(self) -> Mapping[str, Any]:
+96    """The tags associated with the query."""
+97    return self._query_tags
+
+ + +

The tags associated with the query.

+
+ + +
+
+ +
+ summary: str + + + +
+ +
 99  @property
+100  def summary(self) -> str:
+101    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+102    return self._summary
+
+ + +

A comprehensive, human-readable summary of any errors, warnings and/or logs returned from the query.

+
+ + +
+
+ +
+ stats: QueryStats + + + +
+ +
104  @property
+105  def stats(self) -> QueryStats:
+106    """Query stats associated with the query."""
+107    return self._stats
+
+ + +

Query stats associated with the query.

+
+ + +
+
+ +
+ txn_ts: int + + + +
+ +
109  @property
+110  def txn_ts(self) -> int:
+111    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+112    return self._txn_ts
+
+ + +

The last transaction timestamp of the query. A Unix epoch in microseconds.

+
+ + +
+
+ +
+ schema_version: int + + + +
+ +
114  @property
+115  def schema_version(self) -> int:
+116    """The schema version that was used for the query execution."""
+117    return self._schema_version
+
+ + +

The schema version that was used for the query execution.

+
+ + +
+
+
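An illustrative sketch of the QueryInfo fields carried by every successful response (the query text is arbitrary and the Client setup is assumed):

from fauna import fql
from fauna.client import Client

client = Client()  # secret taken from the environment
res = client.query(fql('"hello"'))

# QuerySuccess extends QueryInfo, so these fields are always available.
print(res.summary or "<no summary>")  # warnings/logs, if Fauna returned any
print(res.txn_ts)                     # microseconds since the Unix epoch
print(res.schema_version)
print(dict(res.query_tags))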
+ +
+ + class + QuerySuccess(QueryInfo): + + + +
+ +
142class QuerySuccess(QueryInfo):
+143  """The result of the query."""
+144
+145  @property
+146  def data(self) -> Any:
+147    """The data returned by the query. This is the result of the FQL query."""
+148    return self._data
+149
+150  @property
+151  def static_type(self) -> Optional[str]:
+152    """If typechecked, the query's inferred static result type, if the query was typechecked."""
+153    return self._static_type
+154
+155  @property
+156  def traceparent(self) -> Optional[str]:
+157    """The traceparent for the query."""
+158    return self._traceparent
+159
+160  def __init__(
+161      self,
+162      data: Any,
+163      query_tags: Optional[Mapping[str, str]],
+164      static_type: Optional[str],
+165      stats: Optional[QueryStats],
+166      summary: Optional[str],
+167      traceparent: Optional[str],
+168      txn_ts: Optional[int],
+169      schema_version: Optional[int],
+170  ):
+171
+172    super().__init__(
+173        query_tags=query_tags,
+174        stats=stats,
+175        summary=summary,
+176        txn_ts=txn_ts,
+177        schema_version=schema_version,
+178    )
+179
+180    self._traceparent = traceparent
+181    self._static_type = static_type
+182    self._data = data
+183
+184  def __repr__(self):
+185    return f"{self.__class__.__name__}(" \
+186           f"query_tags={repr(self.query_tags)}," \
+187           f"static_type={repr(self.static_type)}," \
+188           f"stats={repr(self.stats)}," \
+189           f"summary={repr(self.summary)}," \
+190           f"traceparent={repr(self.traceparent)}," \
+191           f"txn_ts={repr(self.txn_ts)}," \
+192           f"schema_version={repr(self.schema_version)}," \
+193           f"data={repr(self.data)})"
+
+ + +

The result of the query.

+
+ + +
+ +
+ + QuerySuccess( data: Any, query_tags: Optional[Mapping[str, str]], static_type: Optional[str], stats: Optional[QueryStats], summary: Optional[str], traceparent: Optional[str], txn_ts: Optional[int], schema_version: Optional[int]) + + + +
+ +
160  def __init__(
+161      self,
+162      data: Any,
+163      query_tags: Optional[Mapping[str, str]],
+164      static_type: Optional[str],
+165      stats: Optional[QueryStats],
+166      summary: Optional[str],
+167      traceparent: Optional[str],
+168      txn_ts: Optional[int],
+169      schema_version: Optional[int],
+170  ):
+171
+172    super().__init__(
+173        query_tags=query_tags,
+174        stats=stats,
+175        summary=summary,
+176        txn_ts=txn_ts,
+177        schema_version=schema_version,
+178    )
+179
+180    self._traceparent = traceparent
+181    self._static_type = static_type
+182    self._data = data
+
+ + + + +
+
+ +
+ data: Any + + + +
+ +
145  @property
+146  def data(self) -> Any:
+147    """The data returned by the query. This is the result of the FQL query."""
+148    return self._data
+
+ + +

The data returned by the query. This is the result of the FQL query.

+
+ + +
+
+ +
+ static_type: Optional[str] + + + +
+ +
150  @property
+151  def static_type(self) -> Optional[str]:
+152    """If typechecked, the query's inferred static result type, if the query was typechecked."""
+153    return self._static_type
+
+ + +

The query's inferred static result type, if the query was typechecked.

+
+ + +
+
+ +
+ traceparent: Optional[str] + + + +
+ +
155  @property
+156  def traceparent(self) -> Optional[str]:
+157    """The traceparent for the query."""
+158    return self._traceparent
+
+ + +

The traceparent for the query.

+
+ + +
+
+
Inherited Members
+
+
QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
+
+
+
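A sketch of consuming a QuerySuccess (the FQL expression and Client setup are illustrative only):

from fauna import fql
from fauna.client import Client

client = Client()
res = client.query(fql("[1, 2, 3].map(x => x * 2)"))

print(res.data)         # the decoded FQL result, e.g. [2, 4, 6]
print(res.static_type)  # populated only when typechecking is enabled
print(res.stats)        # QueryStats(...)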
+ +
+
@dataclass
+ + class + ConstraintFailure: + + + +
+ +
196@dataclass
+197class ConstraintFailure:
+198  message: str
+199  name: Optional[str] = None
+200  paths: Optional[List[Any]] = None
+
+ + + + +
+
+ + ConstraintFailure( message: str, name: Optional[str] = None, paths: Optional[List[Any]] = None) + + +
+ + + + +
+
+
+ message: str + + +
+ + + + +
+
+
+ name: Optional[str] = +None + + +
+ + + + +
+
+
+ paths: Optional[List[Any]] = +None + + +
+ + + + +
+
+
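An illustrative construction of a ConstraintFailure; in practice instances arrive on ServiceError.constraint_failures rather than being built by hand, and the field values below are made up:

from fauna.encoding import ConstraintFailure

cf = ConstraintFailure(
    message="document is not unique",  # hypothetical message
    name="unique_email",               # hypothetical constraint name
    paths=[["email"]],
)
print(cf.message, cf.name, cf.paths)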
+ +
+ + class + QueryTags: + + + +
+ +
203class QueryTags:
+204
+205  @staticmethod
+206  def encode(tags: Mapping[str, str]) -> str:
+207    return ",".join([f"{k}={v}" for k, v in tags.items()])
+208
+209  @staticmethod
+210  def decode(tag_str: str) -> Mapping[str, str]:
+211    res: dict[str, str] = {}
+212    for pair in tag_str.split(","):
+213      kv = pair.split("=")
+214      res[kv[0]] = kv[1]
+215    return res
+
+ + + + +
+ +
+
@staticmethod
+ + def + encode(tags: Mapping[str, str]) -> str: + + + +
+ +
205  @staticmethod
+206  def encode(tags: Mapping[str, str]) -> str:
+207    return ",".join([f"{k}={v}" for k, v in tags.items()])
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + decode(tag_str: str) -> Mapping[str, str]: + + + +
+ +
209  @staticmethod
+210  def decode(tag_str: str) -> Mapping[str, str]:
+211    res: dict[str, str] = {}
+212    for pair in tag_str.split(","):
+213      kv = pair.split("=")
+214      res[kv[0]] = kv[1]
+215    return res
+
+ + + + +
+
+
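A round-trip sketch of the tag encoding shown above. Note the simple ","/"=" wire format: tag keys and values must not themselves contain those characters.

from fauna.encoding import QueryTags

encoded = QueryTags.encode({"env": "prod", "service": "checkout"})
print(encoded)                    # env=prod,service=checkout
print(QueryTags.decode(encoded))  # {'env': 'prod', 'service': 'checkout'}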
+ + \ No newline at end of file diff --git a/2.3.0/fauna/errors.html b/2.3.0/fauna/errors.html new file mode 100644 index 00000000..3e5fc0da --- /dev/null +++ b/2.3.0/fauna/errors.html @@ -0,0 +1,247 @@ + + + + + + + fauna.errors API documentation + + + + + + + + + +
+
+

+fauna.errors

+ + + + + + +
1from .errors import AuthenticationError, AuthorizationError, QueryCheckError, QueryRuntimeError, \
+2  QueryTimeoutError, ServiceInternalError, ServiceTimeoutError, ThrottlingError, ContendedTransactionError, \
+3  InvalidRequestError, AbortError, RetryableFaunaException
+4from .errors import ClientError, FaunaError, NetworkError
+5from .errors import FaunaException
+6from .errors import ProtocolError, ServiceError
+
+ + +
+
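A hedged sketch of layering handlers around a query using these exports; the FQL text and the handling bodies are illustrative:

from fauna import fql
from fauna.client import Client
from fauna.errors import ClientError, FaunaException, NetworkError, ServiceError

client = Client()
try:
  client.query(fql("Missing.all()"))  # illustrative query that fails
except ServiceError as e:
  print(e.status_code, e.code, e.message)  # Fauna evaluated and rejected the request
except NetworkError:
  print("Fauna was never reached")
except ClientError:
  print("the driver failed before sending the request")
except FaunaException as e:
  print("other driver exception:", e)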
+ + \ No newline at end of file diff --git a/2.3.0/fauna/errors/errors.html b/2.3.0/fauna/errors/errors.html new file mode 100644 index 00000000..f31e4996 --- /dev/null +++ b/2.3.0/fauna/errors/errors.html @@ -0,0 +1,2359 @@ + + + + + + + fauna.errors.errors API documentation + + + + + + + + + +
+
+

+fauna.errors.errors

+ + + + + + +
  1from typing import Optional, List, Any, Mapping
+  2
+  3from fauna.encoding import ConstraintFailure, QueryStats, QueryInfo, QueryTags
+  4
+  5
+  6class FaunaException(Exception):
+  7  """Base class Fauna Exceptions"""
+  8  pass
+  9
+ 10
+ 11class RetryableFaunaException(FaunaException):
+ 12  pass
+ 13
+ 14
+ 15class ClientError(FaunaException):
+ 16  """An error representing a failure internal to the client, itself.
+ 17    This indicates Fauna was never called - the client failed internally
+ 18    prior to sending the request."""
+ 19  pass
+ 20
+ 21
+ 22class NetworkError(FaunaException):
+ 23  """An error representing a failure due to the network.
+ 24    This indicates Fauna was never reached."""
+ 25  pass
+ 26
+ 27
+ 28class ProtocolError(FaunaException):
+ 29  """An error representing a HTTP failure - but one not directly emitted by Fauna."""
+ 30
+ 31  @property
+ 32  def status_code(self) -> int:
+ 33    return self._status_code
+ 34
+ 35  @property
+ 36  def message(self) -> str:
+ 37    return self._message
+ 38
+ 39  def __init__(self, status_code: int, message: str):
+ 40    self._status_code = status_code
+ 41    self._message = message
+ 42
+ 43  def __str__(self):
+ 44    return f"{self.status_code}: {self.message}"
+ 45
+ 46
+ 47class FaunaError(FaunaException):
+ 48  """Base class Fauna Errors"""
+ 49
+ 50  @property
+ 51  def status_code(self) -> int:
+ 52    return self._status_code
+ 53
+ 54  @property
+ 55  def code(self) -> str:
+ 56    return self._code
+ 57
+ 58  @property
+ 59  def message(self) -> str:
+ 60    return self._message
+ 61
+ 62  @property
+ 63  def abort(self) -> Optional[Any]:
+ 64    return self._abort
+ 65
+ 66  @property
+ 67  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+ 68    return self._constraint_failures
+ 69
+ 70  def __init__(
+ 71      self,
+ 72      status_code: int,
+ 73      code: str,
+ 74      message: str,
+ 75      abort: Optional[Any] = None,
+ 76      constraint_failures: Optional[List['ConstraintFailure']] = None,
+ 77  ):
+ 78    self._status_code = status_code
+ 79    self._code = code
+ 80    self._message = message
+ 81    self._abort = abort
+ 82    self._constraint_failures = constraint_failures
+ 83
+ 84  def __str__(self):
+ 85    return f"{self.status_code}: {self.code}\n{self.message}"
+ 86
+ 87  @staticmethod
+ 88  def parse_error_and_throw(body: Any, status_code: int):
+ 89    err = body["error"]
+ 90    code = err["code"]
+ 91    message = err["message"]
+ 92
+ 93    query_tags = QueryTags.decode(
+ 94        body["query_tags"]) if "query_tags" in body else None
+ 95    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 96    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 97    schema_version = body["schema_version"] if "schema_version" in body else None
+ 98    summary = body["summary"] if "summary" in body else None
+ 99
+100    constraint_failures: Optional[List[ConstraintFailure]] = None
+101    if "constraint_failures" in err:
+102      constraint_failures = [
+103          ConstraintFailure(
+104              message=cf["message"],
+105              name=cf["name"] if "name" in cf else None,
+106              paths=cf["paths"] if "paths" in cf else None,
+107          ) for cf in err["constraint_failures"]
+108      ]
+109
+110    if status_code >= 400 and status_code < 500:
+111      if code == "invalid_query":
+112        raise QueryCheckError(
+113            status_code=400,
+114            code=code,
+115            message=message,
+116            summary=summary,
+117            constraint_failures=constraint_failures,
+118            query_tags=query_tags,
+119            stats=stats,
+120            txn_ts=txn_ts,
+121            schema_version=schema_version,
+122        )
+123      elif code == "invalid_request":
+124        raise InvalidRequestError(
+125            status_code=400,
+126            code=code,
+127            message=message,
+128            summary=summary,
+129            constraint_failures=constraint_failures,
+130            query_tags=query_tags,
+131            stats=stats,
+132            txn_ts=txn_ts,
+133            schema_version=schema_version,
+134        )
+135      elif code == "abort":
+136        abort = err["abort"] if "abort" in err else None
+137        raise AbortError(
+138            status_code=400,
+139            code=code,
+140            message=message,
+141            summary=summary,
+142            abort=abort,
+143            constraint_failures=constraint_failures,
+144            query_tags=query_tags,
+145            stats=stats,
+146            txn_ts=txn_ts,
+147            schema_version=schema_version,
+148        )
+149      elif code == "unauthorized":
+150        raise AuthenticationError(
+151            status_code=401,
+152            code=code,
+153            message=message,
+154            summary=summary,
+155            constraint_failures=constraint_failures,
+156            query_tags=query_tags,
+157            stats=stats,
+158            txn_ts=txn_ts,
+159            schema_version=schema_version,
+160        )
+161      elif code == "forbidden" and status_code == 403:
+162        raise AuthorizationError(
+163            status_code=403,
+164            code=code,
+165            message=message,
+166            summary=summary,
+167            constraint_failures=constraint_failures,
+168            query_tags=query_tags,
+169            stats=stats,
+170            txn_ts=txn_ts,
+171            schema_version=schema_version,
+172        )
+173      elif code == "method_not_allowed":
+174        raise QueryRuntimeError(
+175            status_code=405,
+176            code=code,
+177            message=message,
+178            summary=summary,
+179            constraint_failures=constraint_failures,
+180            query_tags=query_tags,
+181            stats=stats,
+182            txn_ts=txn_ts,
+183            schema_version=schema_version,
+184        )
+185      elif code == "conflict":
+186        raise ContendedTransactionError(
+187            status_code=409,
+188            code=code,
+189            message=message,
+190            summary=summary,
+191            constraint_failures=constraint_failures,
+192            query_tags=query_tags,
+193            stats=stats,
+194            txn_ts=txn_ts,
+195            schema_version=schema_version,
+196        )
+197      elif code == "request_size_exceeded":
+198        raise QueryRuntimeError(
+199            status_code=413,
+200            code=code,
+201            message=message,
+202            summary=summary,
+203            constraint_failures=constraint_failures,
+204            query_tags=query_tags,
+205            stats=stats,
+206            txn_ts=txn_ts,
+207            schema_version=schema_version,
+208        )
+209      elif code == "limit_exceeded":
+210        raise ThrottlingError(
+211            status_code=429,
+212            code=code,
+213            message=message,
+214            summary=summary,
+215            constraint_failures=constraint_failures,
+216            query_tags=query_tags,
+217            stats=stats,
+218            txn_ts=txn_ts,
+219            schema_version=schema_version,
+220        )
+221      elif code == "time_out":
+222        raise QueryTimeoutError(
+223            status_code=440,
+224            code=code,
+225            message=message,
+226            summary=summary,
+227            constraint_failures=constraint_failures,
+228            query_tags=query_tags,
+229            stats=stats,
+230            txn_ts=txn_ts,
+231            schema_version=schema_version,
+232        )
+233      else:
+234        raise QueryRuntimeError(
+235            status_code=status_code,
+236            code=code,
+237            message=message,
+238            summary=summary,
+239            constraint_failures=constraint_failures,
+240            query_tags=query_tags,
+241            stats=stats,
+242            txn_ts=txn_ts,
+243            schema_version=schema_version,
+244        )
+245    elif status_code == 500:
+246      raise ServiceInternalError(
+247          status_code=status_code,
+248          code=code,
+249          message=message,
+250          summary=summary,
+251          constraint_failures=constraint_failures,
+252          query_tags=query_tags,
+253          stats=stats,
+254          txn_ts=txn_ts,
+255          schema_version=schema_version,
+256      )
+257    elif status_code == 503:
+258      raise ServiceTimeoutError(
+259          status_code=status_code,
+260          code=code,
+261          message=message,
+262          summary=summary,
+263          constraint_failures=constraint_failures,
+264          query_tags=query_tags,
+265          stats=stats,
+266          txn_ts=txn_ts,
+267          schema_version=schema_version,
+268      )
+269    else:
+270      raise ServiceError(
+271          status_code=status_code,
+272          code=code,
+273          message=message,
+274          summary=summary,
+275          constraint_failures=constraint_failures,
+276          query_tags=query_tags,
+277          stats=stats,
+278          txn_ts=txn_ts,
+279          schema_version=schema_version,
+280      )
+281
+282
+283class ServiceError(FaunaError, QueryInfo):
+284  """An error representing a query failure returned by Fauna."""
+285
+286  def __init__(
+287      self,
+288      status_code: int,
+289      code: str,
+290      message: str,
+291      summary: Optional[str] = None,
+292      abort: Optional[Any] = None,
+293      constraint_failures: Optional[List['ConstraintFailure']] = None,
+294      query_tags: Optional[Mapping[str, str]] = None,
+295      stats: Optional[QueryStats] = None,
+296      txn_ts: Optional[int] = None,
+297      schema_version: Optional[int] = None,
+298  ):
+299    QueryInfo.__init__(
+300        self,
+301        query_tags=query_tags,
+302        stats=stats,
+303        summary=summary,
+304        txn_ts=txn_ts,
+305        schema_version=schema_version,
+306    )
+307
+308    FaunaError.__init__(
+309        self,
+310        status_code=status_code,
+311        code=code,
+312        message=message,
+313        abort=abort,
+314        constraint_failures=constraint_failures,
+315    )
+316
+317  def __str__(self):
+318    constraint_str = "---"
+319    if self._constraint_failures:
+320      constraint_str = f"---\nconstraint failures: {self._constraint_failures}\n---"
+321
+322    return f"{self._status_code}: {self.code}\n{self.message}\n{constraint_str}\n{self.summary or ''}"
+323
+324
+325class AbortError(ServiceError):
+326  pass
+327
+328
+329class InvalidRequestError(ServiceError):
+330  pass
+331
+332
+333class QueryCheckError(ServiceError):
+334  """An error due to a "compile-time" check of the query failing."""
+335  pass
+336
+337
+338class ContendedTransactionError(ServiceError):
+339  """Transaction is aborted due to concurrent modification."""
+340  pass
+341
+342
+343class QueryRuntimeError(ServiceError):
+344  """An error response that is the result of the query failing during execution.
+345    QueryRuntimeErrors occur when a bug in your query causes an invalid execution
+346    to be requested.
+347    The 'code' field will vary based on the specific error cause."""
+348  pass
+349
+350
+351class AuthenticationError(ServiceError):
+352  """AuthenticationError indicates invalid credentials were used."""
+353  pass
+354
+355
+356class AuthorizationError(ServiceError):
+357  """AuthorizationError indicates the credentials used do not have
+358    permission to perform the requested action."""
+359  pass
+360
+361
+362class ThrottlingError(ServiceError, RetryableFaunaException):
+363  """ThrottlingError indicates some capacity limit was exceeded
+364    and thus the request could not be served."""
+365  pass
+366
+367
+368class QueryTimeoutError(ServiceError):
+369  """A failure due to the timeout being exceeded, but the timeout
+370    was set lower than the query's expected processing time.
+371    This response is distinguished from a ServiceTimeoutError
+372    in that a QueryTimeoutError shows Fauna behaving in an expected manner."""
+373  pass
+374
+375
+376class ServiceInternalError(ServiceError):
+377  """ServiceInternalError indicates Fauna failed unexpectedly."""
+378  pass
+379
+380
+381class ServiceTimeoutError(ServiceError):
+382  """ServiceTimeoutError indicates Fauna was not available to service
+383    the request before the timeout was reached."""
+384  pass
+
+ + +
+
+ +
+ + class + FaunaException(builtins.Exception): + + + +
+ +
7class FaunaException(Exception):
+8  """Base class Fauna Exceptions"""
+9  pass
+
+ + +

Base class for Fauna exceptions

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + RetryableFaunaException(FaunaException): + + + +
+ +
12class RetryableFaunaException(FaunaException):
+13  pass
+
+ + +

Base class for Fauna exceptions

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ClientError(FaunaException): + + + +
+ +
16class ClientError(FaunaException):
+17  """An error representing a failure internal to the client, itself.
+18    This indicates Fauna was never called - the client failed internally
+19    prior to sending the request."""
+20  pass
+
+ + +

An error representing a failure internal to the client, itself. +This indicates Fauna was never called - the client failed internally +prior to sending the request.

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + NetworkError(FaunaException): + + + +
+ +
23class NetworkError(FaunaException):
+24  """An error representing a failure due to the network.
+25    This indicates Fauna was never reached."""
+26  pass
+
+ + +

An error representing a failure due to the network. +This indicates Fauna was never reached.

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ProtocolError(FaunaException): + + + +
+ +
29class ProtocolError(FaunaException):
+30  """An error representing a HTTP failure - but one not directly emitted by Fauna."""
+31
+32  @property
+33  def status_code(self) -> int:
+34    return self._status_code
+35
+36  @property
+37  def message(self) -> str:
+38    return self._message
+39
+40  def __init__(self, status_code: int, message: str):
+41    self._status_code = status_code
+42    self._message = message
+43
+44  def __str__(self):
+45    return f"{self.status_code}: {self.message}"
+
+ + +

An error representing an HTTP failure - but one not directly emitted by Fauna.

+
+ + +
+ +
+ + ProtocolError(status_code: int, message: str) + + + +
+ +
40  def __init__(self, status_code: int, message: str):
+41    self._status_code = status_code
+42    self._message = message
+
+ + + + +
+
+ +
+ status_code: int + + + +
+ +
32  @property
+33  def status_code(self) -> int:
+34    return self._status_code
+
+ + + + +
+
+ +
+ message: str + + + +
+ +
36  @property
+37  def message(self) -> str:
+38    return self._message
+
+ + + + +
+
+
Inherited Members
+
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + FaunaError(FaunaException): + + + +
+ +
 48class FaunaError(FaunaException):
+ 49  """Base class Fauna Errors"""
+ 50
+ 51  @property
+ 52  def status_code(self) -> int:
+ 53    return self._status_code
+ 54
+ 55  @property
+ 56  def code(self) -> str:
+ 57    return self._code
+ 58
+ 59  @property
+ 60  def message(self) -> str:
+ 61    return self._message
+ 62
+ 63  @property
+ 64  def abort(self) -> Optional[Any]:
+ 65    return self._abort
+ 66
+ 67  @property
+ 68  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+ 69    return self._constraint_failures
+ 70
+ 71  def __init__(
+ 72      self,
+ 73      status_code: int,
+ 74      code: str,
+ 75      message: str,
+ 76      abort: Optional[Any] = None,
+ 77      constraint_failures: Optional[List['ConstraintFailure']] = None,
+ 78  ):
+ 79    self._status_code = status_code
+ 80    self._code = code
+ 81    self._message = message
+ 82    self._abort = abort
+ 83    self._constraint_failures = constraint_failures
+ 84
+ 85  def __str__(self):
+ 86    return f"{self.status_code}: {self.code}\n{self.message}"
+ 87
+ 88  @staticmethod
+ 89  def parse_error_and_throw(body: Any, status_code: int):
+ 90    err = body["error"]
+ 91    code = err["code"]
+ 92    message = err["message"]
+ 93
+ 94    query_tags = QueryTags.decode(
+ 95        body["query_tags"]) if "query_tags" in body else None
+ 96    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 97    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 98    schema_version = body["schema_version"] if "schema_version" in body else None
+ 99    summary = body["summary"] if "summary" in body else None
+100
+101    constraint_failures: Optional[List[ConstraintFailure]] = None
+102    if "constraint_failures" in err:
+103      constraint_failures = [
+104          ConstraintFailure(
+105              message=cf["message"],
+106              name=cf["name"] if "name" in cf else None,
+107              paths=cf["paths"] if "paths" in cf else None,
+108          ) for cf in err["constraint_failures"]
+109      ]
+110
+111    if status_code >= 400 and status_code < 500:
+112      if code == "invalid_query":
+113        raise QueryCheckError(
+114            status_code=400,
+115            code=code,
+116            message=message,
+117            summary=summary,
+118            constraint_failures=constraint_failures,
+119            query_tags=query_tags,
+120            stats=stats,
+121            txn_ts=txn_ts,
+122            schema_version=schema_version,
+123        )
+124      elif code == "invalid_request":
+125        raise InvalidRequestError(
+126            status_code=400,
+127            code=code,
+128            message=message,
+129            summary=summary,
+130            constraint_failures=constraint_failures,
+131            query_tags=query_tags,
+132            stats=stats,
+133            txn_ts=txn_ts,
+134            schema_version=schema_version,
+135        )
+136      elif code == "abort":
+137        abort = err["abort"] if "abort" in err else None
+138        raise AbortError(
+139            status_code=400,
+140            code=code,
+141            message=message,
+142            summary=summary,
+143            abort=abort,
+144            constraint_failures=constraint_failures,
+145            query_tags=query_tags,
+146            stats=stats,
+147            txn_ts=txn_ts,
+148            schema_version=schema_version,
+149        )
+150      elif code == "unauthorized":
+151        raise AuthenticationError(
+152            status_code=401,
+153            code=code,
+154            message=message,
+155            summary=summary,
+156            constraint_failures=constraint_failures,
+157            query_tags=query_tags,
+158            stats=stats,
+159            txn_ts=txn_ts,
+160            schema_version=schema_version,
+161        )
+162      elif code == "forbidden" and status_code == 403:
+163        raise AuthorizationError(
+164            status_code=403,
+165            code=code,
+166            message=message,
+167            summary=summary,
+168            constraint_failures=constraint_failures,
+169            query_tags=query_tags,
+170            stats=stats,
+171            txn_ts=txn_ts,
+172            schema_version=schema_version,
+173        )
+174      elif code == "method_not_allowed":
+175        raise QueryRuntimeError(
+176            status_code=405,
+177            code=code,
+178            message=message,
+179            summary=summary,
+180            constraint_failures=constraint_failures,
+181            query_tags=query_tags,
+182            stats=stats,
+183            txn_ts=txn_ts,
+184            schema_version=schema_version,
+185        )
+186      elif code == "conflict":
+187        raise ContendedTransactionError(
+188            status_code=409,
+189            code=code,
+190            message=message,
+191            summary=summary,
+192            constraint_failures=constraint_failures,
+193            query_tags=query_tags,
+194            stats=stats,
+195            txn_ts=txn_ts,
+196            schema_version=schema_version,
+197        )
+198      elif code == "request_size_exceeded":
+199        raise QueryRuntimeError(
+200            status_code=413,
+201            code=code,
+202            message=message,
+203            summary=summary,
+204            constraint_failures=constraint_failures,
+205            query_tags=query_tags,
+206            stats=stats,
+207            txn_ts=txn_ts,
+208            schema_version=schema_version,
+209        )
+210      elif code == "limit_exceeded":
+211        raise ThrottlingError(
+212            status_code=429,
+213            code=code,
+214            message=message,
+215            summary=summary,
+216            constraint_failures=constraint_failures,
+217            query_tags=query_tags,
+218            stats=stats,
+219            txn_ts=txn_ts,
+220            schema_version=schema_version,
+221        )
+222      elif code == "time_out":
+223        raise QueryTimeoutError(
+224            status_code=440,
+225            code=code,
+226            message=message,
+227            summary=summary,
+228            constraint_failures=constraint_failures,
+229            query_tags=query_tags,
+230            stats=stats,
+231            txn_ts=txn_ts,
+232            schema_version=schema_version,
+233        )
+234      else:
+235        raise QueryRuntimeError(
+236            status_code=status_code,
+237            code=code,
+238            message=message,
+239            summary=summary,
+240            constraint_failures=constraint_failures,
+241            query_tags=query_tags,
+242            stats=stats,
+243            txn_ts=txn_ts,
+244            schema_version=schema_version,
+245        )
+246    elif status_code == 500:
+247      raise ServiceInternalError(
+248          status_code=status_code,
+249          code=code,
+250          message=message,
+251          summary=summary,
+252          constraint_failures=constraint_failures,
+253          query_tags=query_tags,
+254          stats=stats,
+255          txn_ts=txn_ts,
+256          schema_version=schema_version,
+257      )
+258    elif status_code == 503:
+259      raise ServiceTimeoutError(
+260          status_code=status_code,
+261          code=code,
+262          message=message,
+263          summary=summary,
+264          constraint_failures=constraint_failures,
+265          query_tags=query_tags,
+266          stats=stats,
+267          txn_ts=txn_ts,
+268          schema_version=schema_version,
+269      )
+270    else:
+271      raise ServiceError(
+272          status_code=status_code,
+273          code=code,
+274          message=message,
+275          summary=summary,
+276          constraint_failures=constraint_failures,
+277          query_tags=query_tags,
+278          stats=stats,
+279          txn_ts=txn_ts,
+280          schema_version=schema_version,
+281      )
+
+ + +

Base class for Fauna errors

+
+ + +
+ +
+ + FaunaError( status_code: int, code: str, message: str, abort: Optional[Any] = None, constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None) + + + +
+ +
71  def __init__(
+72      self,
+73      status_code: int,
+74      code: str,
+75      message: str,
+76      abort: Optional[Any] = None,
+77      constraint_failures: Optional[List['ConstraintFailure']] = None,
+78  ):
+79    self._status_code = status_code
+80    self._code = code
+81    self._message = message
+82    self._abort = abort
+83    self._constraint_failures = constraint_failures
+
+ + + + +
+
+ +
+ status_code: int + + + +
+ +
51  @property
+52  def status_code(self) -> int:
+53    return self._status_code
+
+ + + + +
+
+ +
+ code: str + + + +
+ +
55  @property
+56  def code(self) -> str:
+57    return self._code
+
+ + + + +
+
+ +
+ message: str + + + +
+ +
59  @property
+60  def message(self) -> str:
+61    return self._message
+
+ + + + +
+
+ +
+ abort: Optional[Any] + + + +
+ +
63  @property
+64  def abort(self) -> Optional[Any]:
+65    return self._abort
+
+ + + + +
+
+ +
+ constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] + + + +
+ +
67  @property
+68  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+69    return self._constraint_failures
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + parse_error_and_throw(body: Any, status_code: int): + + + +
+ +
 88  @staticmethod
+ 89  def parse_error_and_throw(body: Any, status_code: int):
+ 90    err = body["error"]
+ 91    code = err["code"]
+ 92    message = err["message"]
+ 93
+ 94    query_tags = QueryTags.decode(
+ 95        body["query_tags"]) if "query_tags" in body else None
+ 96    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 97    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 98    schema_version = body["schema_version"] if "schema_version" in body else None
+ 99    summary = body["summary"] if "summary" in body else None
+100
+101    constraint_failures: Optional[List[ConstraintFailure]] = None
+102    if "constraint_failures" in err:
+103      constraint_failures = [
+104          ConstraintFailure(
+105              message=cf["message"],
+106              name=cf["name"] if "name" in cf else None,
+107              paths=cf["paths"] if "paths" in cf else None,
+108          ) for cf in err["constraint_failures"]
+109      ]
+110
+111    if status_code >= 400 and status_code < 500:
+112      if code == "invalid_query":
+113        raise QueryCheckError(
+114            status_code=400,
+115            code=code,
+116            message=message,
+117            summary=summary,
+118            constraint_failures=constraint_failures,
+119            query_tags=query_tags,
+120            stats=stats,
+121            txn_ts=txn_ts,
+122            schema_version=schema_version,
+123        )
+124      elif code == "invalid_request":
+125        raise InvalidRequestError(
+126            status_code=400,
+127            code=code,
+128            message=message,
+129            summary=summary,
+130            constraint_failures=constraint_failures,
+131            query_tags=query_tags,
+132            stats=stats,
+133            txn_ts=txn_ts,
+134            schema_version=schema_version,
+135        )
+136      elif code == "abort":
+137        abort = err["abort"] if "abort" in err else None
+138        raise AbortError(
+139            status_code=400,
+140            code=code,
+141            message=message,
+142            summary=summary,
+143            abort=abort,
+144            constraint_failures=constraint_failures,
+145            query_tags=query_tags,
+146            stats=stats,
+147            txn_ts=txn_ts,
+148            schema_version=schema_version,
+149        )
+150      elif code == "unauthorized":
+151        raise AuthenticationError(
+152            status_code=401,
+153            code=code,
+154            message=message,
+155            summary=summary,
+156            constraint_failures=constraint_failures,
+157            query_tags=query_tags,
+158            stats=stats,
+159            txn_ts=txn_ts,
+160            schema_version=schema_version,
+161        )
+162      elif code == "forbidden" and status_code == 403:
+163        raise AuthorizationError(
+164            status_code=403,
+165            code=code,
+166            message=message,
+167            summary=summary,
+168            constraint_failures=constraint_failures,
+169            query_tags=query_tags,
+170            stats=stats,
+171            txn_ts=txn_ts,
+172            schema_version=schema_version,
+173        )
+174      elif code == "method_not_allowed":
+175        raise QueryRuntimeError(
+176            status_code=405,
+177            code=code,
+178            message=message,
+179            summary=summary,
+180            constraint_failures=constraint_failures,
+181            query_tags=query_tags,
+182            stats=stats,
+183            txn_ts=txn_ts,
+184            schema_version=schema_version,
+185        )
+186      elif code == "conflict":
+187        raise ContendedTransactionError(
+188            status_code=409,
+189            code=code,
+190            message=message,
+191            summary=summary,
+192            constraint_failures=constraint_failures,
+193            query_tags=query_tags,
+194            stats=stats,
+195            txn_ts=txn_ts,
+196            schema_version=schema_version,
+197        )
+198      elif code == "request_size_exceeded":
+199        raise QueryRuntimeError(
+200            status_code=413,
+201            code=code,
+202            message=message,
+203            summary=summary,
+204            constraint_failures=constraint_failures,
+205            query_tags=query_tags,
+206            stats=stats,
+207            txn_ts=txn_ts,
+208            schema_version=schema_version,
+209        )
+210      elif code == "limit_exceeded":
+211        raise ThrottlingError(
+212            status_code=429,
+213            code=code,
+214            message=message,
+215            summary=summary,
+216            constraint_failures=constraint_failures,
+217            query_tags=query_tags,
+218            stats=stats,
+219            txn_ts=txn_ts,
+220            schema_version=schema_version,
+221        )
+222      elif code == "time_out":
+223        raise QueryTimeoutError(
+224            status_code=440,
+225            code=code,
+226            message=message,
+227            summary=summary,
+228            constraint_failures=constraint_failures,
+229            query_tags=query_tags,
+230            stats=stats,
+231            txn_ts=txn_ts,
+232            schema_version=schema_version,
+233        )
+234      else:
+235        raise QueryRuntimeError(
+236            status_code=status_code,
+237            code=code,
+238            message=message,
+239            summary=summary,
+240            constraint_failures=constraint_failures,
+241            query_tags=query_tags,
+242            stats=stats,
+243            txn_ts=txn_ts,
+244            schema_version=schema_version,
+245        )
+246    elif status_code == 500:
+247      raise ServiceInternalError(
+248          status_code=status_code,
+249          code=code,
+250          message=message,
+251          summary=summary,
+252          constraint_failures=constraint_failures,
+253          query_tags=query_tags,
+254          stats=stats,
+255          txn_ts=txn_ts,
+256          schema_version=schema_version,
+257      )
+258    elif status_code == 503:
+259      raise ServiceTimeoutError(
+260          status_code=status_code,
+261          code=code,
+262          message=message,
+263          summary=summary,
+264          constraint_failures=constraint_failures,
+265          query_tags=query_tags,
+266          stats=stats,
+267          txn_ts=txn_ts,
+268          schema_version=schema_version,
+269      )
+270    else:
+271      raise ServiceError(
+272          status_code=status_code,
+273          code=code,
+274          message=message,
+275          summary=summary,
+276          constraint_failures=constraint_failures,
+277          query_tags=query_tags,
+278          stats=stats,
+279          txn_ts=txn_ts,
+280          schema_version=schema_version,
+281      )
+
+ + + + +
+
+
Inherited Members
+
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
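A sketch of how parse_error_and_throw maps a decoded error body onto an exception type; the body below is hand-built for illustration:

from fauna.errors import AbortError, FaunaError

body = {
    "error": {
        "code": "abort",
        "message": "aborted by user",  # hypothetical message
        "abort": {"reason": "out of stock"},
    }
}

try:
  FaunaError.parse_error_and_throw(body, 400)
except AbortError as e:
  print(e.status_code, e.code)  # 400 abort
  print(e.abort)                # the raw abort payload from the body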
+ +
+ + class + ServiceError(FaunaError, fauna.encoding.wire_protocol.QueryInfo): + + + +
+ +
284class ServiceError(FaunaError, QueryInfo):
+285  """An error representing a query failure returned by Fauna."""
+286
+287  def __init__(
+288      self,
+289      status_code: int,
+290      code: str,
+291      message: str,
+292      summary: Optional[str] = None,
+293      abort: Optional[Any] = None,
+294      constraint_failures: Optional[List['ConstraintFailure']] = None,
+295      query_tags: Optional[Mapping[str, str]] = None,
+296      stats: Optional[QueryStats] = None,
+297      txn_ts: Optional[int] = None,
+298      schema_version: Optional[int] = None,
+299  ):
+300    QueryInfo.__init__(
+301        self,
+302        query_tags=query_tags,
+303        stats=stats,
+304        summary=summary,
+305        txn_ts=txn_ts,
+306        schema_version=schema_version,
+307    )
+308
+309    FaunaError.__init__(
+310        self,
+311        status_code=status_code,
+312        code=code,
+313        message=message,
+314        abort=abort,
+315        constraint_failures=constraint_failures,
+316    )
+317
+318  def __str__(self):
+319    constraint_str = "---"
+320    if self._constraint_failures:
+321      constraint_str = f"---\nconstraint failures: {self._constraint_failures}\n---"
+322
+323    return f"{self._status_code}: {self.code}\n{self.message}\n{constraint_str}\n{self.summary or ''}"
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+ +
+ + ServiceError( status_code: int, code: str, message: str, summary: Optional[str] = None, abort: Optional[Any] = None, constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None, query_tags: Optional[Mapping[str, str]] = None, stats: Optional[fauna.encoding.wire_protocol.QueryStats] = None, txn_ts: Optional[int] = None, schema_version: Optional[int] = None) + + + +
+ +
287  def __init__(
+288      self,
+289      status_code: int,
+290      code: str,
+291      message: str,
+292      summary: Optional[str] = None,
+293      abort: Optional[Any] = None,
+294      constraint_failures: Optional[List['ConstraintFailure']] = None,
+295      query_tags: Optional[Mapping[str, str]] = None,
+296      stats: Optional[QueryStats] = None,
+297      txn_ts: Optional[int] = None,
+298      schema_version: Optional[int] = None,
+299  ):
+300    QueryInfo.__init__(
+301        self,
+302        query_tags=query_tags,
+303        stats=stats,
+304        summary=summary,
+305        txn_ts=txn_ts,
+306        schema_version=schema_version,
+307    )
+308
+309    FaunaError.__init__(
+310        self,
+311        status_code=status_code,
+312        code=code,
+313        message=message,
+314        abort=abort,
+315        constraint_failures=constraint_failures,
+316    )
+
+ + + + +
+
+
Inherited Members
+
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
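A sketch of inspecting a ServiceError raised by an aborted query; the FQL text and Client setup are illustrative:

from fauna import fql
from fauna.client import Client
from fauna.errors import ServiceError

client = Client()
try:
  client.query(fql('abort("nope")'))
except ServiceError as e:
  print(e)                      # status code, error code, message, summary
  print(e.summary)              # human-readable diagnostics from Fauna
  print(e.stats.compute_ops)    # QueryInfo fields are populated on errors too
  print(e.constraint_failures)  # None unless a constraint was violated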
+ +
+ + class + AbortError(ServiceError): + + + +
+ +
326class AbortError(ServiceError):
+327  pass
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + InvalidRequestError(ServiceError): + + + +
+ +
330class InvalidRequestError(ServiceError):
+331  pass
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + QueryCheckError(ServiceError): + + + +
+ +
334class QueryCheckError(ServiceError):
+335  """An error due to a "compile-time" check of the query failing."""
+336  pass
+
+ + +

An error due to a "compile-time" check of the query failing.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ContendedTransactionError(ServiceError): + + + +
+ +
339class ContendedTransactionError(ServiceError):
+340  """Transaction is aborted due to concurrent modification."""
+341  pass
+
+ + +

Transaction is aborted due to concurrent modification.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + QueryRuntimeError(ServiceError): + + + +
+ +
344class QueryRuntimeError(ServiceError):
+345  """An error response that is the result of the query failing during execution.
+346    QueryRuntimeErrors occur when a bug in your query causes an invalid execution
+347    to be requested.
+348    The 'code' field will vary based on the specific error cause."""
+349  pass
+
+ + +

An error response that is the result of the query failing during execution. +QueryRuntimeErrors occur when a bug in your query causes an invalid execution +to be requested. +The 'code' field will vary based on the specific error cause.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + AuthenticationError(ServiceError): + + + +
+ +
352class AuthenticationError(ServiceError):
+353  """AuthenticationError indicates invalid credentials were used."""
+354  pass
+
+ + +

AuthenticationError indicates invalid credentials were used.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + AuthorizationError(ServiceError): + + + +
+ +
357class AuthorizationError(ServiceError):
+358  """AuthorizationError indicates the credentials used do not have
+359    permission to perform the requested action."""
+360  pass
+
+ + +

AuthorizationError indicates the credentials used do not have +permission to perform the requested action.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ThrottlingError(ServiceError, RetryableFaunaException): + + + +
+ +
363class ThrottlingError(ServiceError, RetryableFaunaException):
+364  """ThrottlingError indicates some capacity limit was exceeded
+365    and thus the request could not be served."""
+366  pass
+
+ + +

ThrottlingError indicates some capacity limit was exceeded +and thus the request could not be served.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
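A hedged retry sketch around RetryableFaunaException (which ThrottlingError extends). The driver already retries throttled requests internally, so this only illustrates the exception hierarchy; the backoff policy and query text are arbitrary:

import time

from fauna import fql
from fauna.client import Client
from fauna.errors import RetryableFaunaException

client = Client()

def query_with_backoff(q, attempts=3):
  for attempt in range(attempts - 1):
    try:
      return client.query(q)
    except RetryableFaunaException:
      time.sleep(2**attempt)  # simple exponential backoff
  return client.query(q)  # last attempt; let any error propagate

query_with_backoff(fql("1 + 1"))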
+ +
+ + class + QueryTimeoutError(ServiceError): + + + +
+ +
369class QueryTimeoutError(ServiceError):
+370  """A failure due to the timeout being exceeded, but the timeout
+371    was set lower than the query's expected processing time.
+372    This response is distinguished from a ServiceTimeoutError
+373    in that a QueryTimeoutError shows Fauna behaving in an expected manner."""
+374  pass
+
+ + +

A failure due to the timeout being exceeded, but the timeout +was set lower than the query's expected processing time. +This response is distinguished from a ServiceTimeoutError +in that a QueryTimeoutError shows Fauna behaving in an expected manner.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ServiceInternalError(ServiceError): + + + +
+ +
377class ServiceInternalError(ServiceError):
+378  """ServiceInternalError indicates Fauna failed unexpectedly."""
+379  pass
+
+ + +

ServiceInternalError indicates Fauna failed unexpectedly.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ +
+ + class + ServiceTimeoutError(ServiceError): + + + +
+ +
382class ServiceTimeoutError(ServiceError):
+383  """ServiceTimeoutError indicates Fauna was not available to service
+384    the request before the timeout was reached."""
+385  pass
+
+ + +

ServiceTimeoutError indicates Fauna was not available to service +the request before the timeout was reached.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
add_note
+
args
+ +
+
+
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/http.html b/2.3.0/fauna/http.html new file mode 100644 index 00000000..cea65a29 --- /dev/null +++ b/2.3.0/fauna/http.html @@ -0,0 +1,244 @@ + + + + + + + fauna.http API documentation + + + + + + + + + +
+
+

+fauna.http

+ + + + + + +
1from .http_client import HTTPClient, HTTPResponse
+2from .httpx_client import HTTPXClient
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/http/http_client.html b/2.3.0/fauna/http/http_client.html new file mode 100644 index 00000000..de01deac --- /dev/null +++ b/2.3.0/fauna/http/http_client.html @@ -0,0 +1,783 @@ + + + + + + + fauna.http.http_client API documentation + + + + + + + + + +
+
+

+fauna.http.http_client

+ + + + + + +
 1import abc
+ 2import contextlib
+ 3from dataclasses import dataclass
+ 4from typing import Iterator, Mapping, Any
+ 5
+ 6
+ 7@dataclass(frozen=True)
+ 8class ErrorResponse:
+ 9  status_code: int
+10  error_code: str
+11  error_message: str
+12  summary: str
+13
+14
+15class HTTPResponse(abc.ABC):
+16
+17  @abc.abstractmethod
+18  def headers(self) -> Mapping[str, str]:
+19    pass
+20
+21  @abc.abstractmethod
+22  def status_code(self) -> int:
+23    pass
+24
+25  @abc.abstractmethod
+26  def json(self) -> Any:
+27    pass
+28
+29  @abc.abstractmethod
+30  def text(self) -> str:
+31    pass
+32
+33  @abc.abstractmethod
+34  def read(self) -> bytes:
+35    pass
+36
+37  @abc.abstractmethod
+38  def iter_bytes(self) -> Iterator[bytes]:
+39    pass
+40
+41  @abc.abstractmethod
+42  def close(self):
+43    pass
+44
+45  def __enter__(self):
+46    return self
+47
+48  def __exit__(self, exc_type, exc_val, exc_tb):
+49    self.close()
+50
+51
+52class HTTPClient(abc.ABC):
+53
+54  @abc.abstractmethod
+55  def request(
+56      self,
+57      method: str,
+58      url: str,
+59      headers: Mapping[str, str],
+60      data: Mapping[str, Any],
+61  ) -> HTTPResponse:
+62    pass
+63
+64  @abc.abstractmethod
+65  @contextlib.contextmanager
+66  def stream(
+67      self,
+68      url: str,
+69      headers: Mapping[str, str],
+70      data: Mapping[str, Any],
+71  ) -> Iterator[Any]:
+72    pass
+73
+74  @abc.abstractmethod
+75  def close(self):
+76    pass
+
+ + +
+
+ +
+
@dataclass(frozen=True)
+ + class + ErrorResponse: + + + +
+ +
 8@dataclass(frozen=True)
+ 9class ErrorResponse:
+10  status_code: int
+11  error_code: str
+12  error_message: str
+13  summary: str
+
+ + + + +
+
+ + ErrorResponse(status_code: int, error_code: str, error_message: str, summary: str) + + +
+ + + + +
+
+
+ status_code: int + + +
+ + + + +
+
+
+ error_code: str + + +
+ + + + +
+
+
+ error_message: str + + +
+ + + + +
+
+
+ summary: str + + +
+ + + + +
+
+
+ +
+ + class + HTTPResponse(abc.ABC): + + + +
+ +
16class HTTPResponse(abc.ABC):
+17
+18  @abc.abstractmethod
+19  def headers(self) -> Mapping[str, str]:
+20    pass
+21
+22  @abc.abstractmethod
+23  def status_code(self) -> int:
+24    pass
+25
+26  @abc.abstractmethod
+27  def json(self) -> Any:
+28    pass
+29
+30  @abc.abstractmethod
+31  def text(self) -> str:
+32    pass
+33
+34  @abc.abstractmethod
+35  def read(self) -> bytes:
+36    pass
+37
+38  @abc.abstractmethod
+39  def iter_bytes(self) -> Iterator[bytes]:
+40    pass
+41
+42  @abc.abstractmethod
+43  def close(self):
+44    pass
+45
+46  def __enter__(self):
+47    return self
+48
+49  def __exit__(self, exc_type, exc_val, exc_tb):
+50    self.close()
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + headers(self) -> Mapping[str, str]: + + + +
+ +
18  @abc.abstractmethod
+19  def headers(self) -> Mapping[str, str]:
+20    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + status_code(self) -> int: + + + +
+ +
22  @abc.abstractmethod
+23  def status_code(self) -> int:
+24    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + json(self) -> Any: + + + +
+ +
26  @abc.abstractmethod
+27  def json(self) -> Any:
+28    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + text(self) -> str: + + + +
+ +
30  @abc.abstractmethod
+31  def text(self) -> str:
+32    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + read(self) -> bytes: + + + +
+ +
34  @abc.abstractmethod
+35  def read(self) -> bytes:
+36    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + iter_bytes(self) -> Iterator[bytes]: + + + +
+ +
38  @abc.abstractmethod
+39  def iter_bytes(self) -> Iterator[bytes]:
+40    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + close(self): + + + +
+ +
42  @abc.abstractmethod
+43  def close(self):
+44    pass
+
+ + + + +
+
+
+ +
+ + class + HTTPClient(abc.ABC): + + + +
+ +
53class HTTPClient(abc.ABC):
+54
+55  @abc.abstractmethod
+56  def request(
+57      self,
+58      method: str,
+59      url: str,
+60      headers: Mapping[str, str],
+61      data: Mapping[str, Any],
+62  ) -> HTTPResponse:
+63    pass
+64
+65  @abc.abstractmethod
+66  @contextlib.contextmanager
+67  def stream(
+68      self,
+69      url: str,
+70      headers: Mapping[str, str],
+71      data: Mapping[str, Any],
+72  ) -> Iterator[Any]:
+73    pass
+74
+75  @abc.abstractmethod
+76  def close(self):
+77    pass
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + request( self, method: str, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> HTTPResponse: + + + +
+ +
55  @abc.abstractmethod
+56  def request(
+57      self,
+58      method: str,
+59      url: str,
+60      headers: Mapping[str, str],
+61      data: Mapping[str, Any],
+62  ) -> HTTPResponse:
+63    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+
@contextlib.contextmanager
+ + def + stream( self, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> Iterator[Any]: + + + +
+ +
65  @abc.abstractmethod
+66  @contextlib.contextmanager
+67  def stream(
+68      self,
+69      url: str,
+70      headers: Mapping[str, str],
+71      data: Mapping[str, Any],
+72  ) -> Iterator[Any]:
+73    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + close(self): + + + +
+ +
75  @abc.abstractmethod
+76  def close(self):
+77    pass
+
+ + + + +
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/http/httpx_client.html b/2.3.0/fauna/http/httpx_client.html new file mode 100644 index 00000000..f6e2e19c --- /dev/null +++ b/2.3.0/fauna/http/httpx_client.html @@ -0,0 +1,916 @@ + + + + + + + fauna.http.httpx_client API documentation + + + + + + + + + +
+
+

+fauna.http.httpx_client

+ + + + + + +
  1import json
+  2import logging
+  3from contextlib import contextmanager
+  4from json import JSONDecodeError
+  5from typing import Mapping, Any, Optional, Iterator
+  6
+  7import httpx
+  8
+  9from fauna.errors import ClientError, NetworkError
+ 10from fauna.http.http_client import HTTPResponse, HTTPClient
+ 11
+ 12
+ 13class HTTPXResponse(HTTPResponse):
+ 14
+ 15  def __init__(self, response: httpx.Response):
+ 16    self._r = response
+ 17
+ 18  def headers(self) -> Mapping[str, str]:
+ 19    h = {}
+ 20    for (k, v) in self._r.headers.items():
+ 21      h[k] = v
+ 22    return h
+ 23
+ 24  def json(self) -> Any:
+ 25    try:
+ 26      decoded = self._r.read().decode("utf-8")
+ 27      return json.loads(decoded)
+ 28    except (JSONDecodeError, UnicodeDecodeError) as e:
+ 29      raise ClientError(
+ 30          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+ 31      ) from e
+ 32
+ 33  def text(self) -> str:
+ 34    return str(self.read(), encoding='utf-8')
+ 35
+ 36  def status_code(self) -> int:
+ 37    return self._r.status_code
+ 38
+ 39  def read(self) -> bytes:
+ 40    return self._r.read()
+ 41
+ 42  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+ 43    return self._r.iter_bytes(size)
+ 44
+ 45  def close(self) -> None:
+ 46    try:
+ 47      self._r.close()
+ 48    except Exception as e:
+ 49      raise ClientError("Error closing response") from e
+ 50
+ 51
+ 52class HTTPXClient(HTTPClient):
+ 53
+ 54  def __init__(self,
+ 55               client: httpx.Client,
+ 56               logger: logging.Logger = logging.getLogger("fauna")):
+ 57    super(HTTPXClient, self).__init__()
+ 58    self._c = client
+ 59    self._logger = logger
+ 60
+ 61  def request(
+ 62      self,
+ 63      method: str,
+ 64      url: str,
+ 65      headers: Mapping[str, str],
+ 66      data: Mapping[str, Any],
+ 67  ) -> HTTPResponse:
+ 68
+ 69    try:
+ 70      request = self._c.build_request(
+ 71          method,
+ 72          url,
+ 73          json=data,
+ 74          headers=headers,
+ 75      )
+ 76
+ 77      if self._logger.isEnabledFor(logging.DEBUG):
+ 78        headers_to_log = request.headers.copy()
+ 79        headers_to_log.pop("Authorization")
+ 80        self._logger.debug(
+ 81            f"query.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+ 82        )
+ 83
+ 84    except httpx.InvalidURL as e:
+ 85      raise ClientError("Invalid URL Format") from e
+ 86
+ 87    try:
+ 88      response = self._c.send(
+ 89          request,
+ 90          stream=False,
+ 91      )
+ 92
+ 93      if self._logger.isEnabledFor(logging.DEBUG):
+ 94        self._logger.debug(
+ 95            f"query.response status_code={response.status_code} headers={response.headers} data={response.text}"
+ 96        )
+ 97
+ 98      return HTTPXResponse(response)
+ 99    except (httpx.HTTPError, httpx.InvalidURL) as e:
+100      raise NetworkError("Exception re-raised from HTTP request") from e
+101
+102  @contextmanager
+103  def stream(
+104      self,
+105      url: str,
+106      headers: Mapping[str, str],
+107      data: Mapping[str, Any],
+108  ) -> Iterator[Any]:
+109    request = self._c.build_request(
+110        method="POST",
+111        url=url,
+112        headers=headers,
+113        json=data,
+114    )
+115
+116    if self._logger.isEnabledFor(logging.DEBUG):
+117      headers_to_log = request.headers.copy()
+118      headers_to_log.pop("Authorization")
+119      self._logger.debug(
+120          f"stream.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+121      )
+122
+123    response = self._c.send(
+124        request=request,
+125        stream=True,
+126    )
+127
+128    try:
+129      yield self._transform(response)
+130    finally:
+131      response.close()
+132
+133  def _transform(self, response):
+134    try:
+135      for line in response.iter_lines():
+136        loaded = json.loads(line)
+137        if self._logger.isEnabledFor(logging.DEBUG):
+138          self._logger.debug(f"stream.data data={loaded}")
+139        yield loaded
+140    except httpx.ReadTimeout as e:
+141      raise NetworkError("Stream timeout") from e
+142    except (httpx.HTTPError, httpx.InvalidURL) as e:
+143      raise NetworkError("Exception re-raised from HTTP request") from e
+144
+145  def close(self):
+146    self._c.close()
+
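A hedged example of driving HTTPXClient directly (the endpoint URL, secret, and request body are placeholders, not values defined by this module):

import httpx

from fauna.http.httpx_client import HTTPXClient

http_client = HTTPXClient(httpx.Client())

# Placeholders: substitute a real endpoint and secret before running.
response = http_client.request(
    method="POST",
    url="https://db.fauna.com/query/1",
    headers={"Authorization": "Bearer <YOUR_SECRET>"},
    data={"query": "1 + 1"},
)

print(response.status_code(), response.json())
http_client.close()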
+ + +
+
+ +
+ + class + HTTPXResponse(fauna.http.http_client.HTTPResponse): + + + +
+ +
14class HTTPXResponse(HTTPResponse):
+15
+16  def __init__(self, response: httpx.Response):
+17    self._r = response
+18
+19  def headers(self) -> Mapping[str, str]:
+20    h = {}
+21    for (k, v) in self._r.headers.items():
+22      h[k] = v
+23    return h
+24
+25  def json(self) -> Any:
+26    try:
+27      decoded = self._r.read().decode("utf-8")
+28      return json.loads(decoded)
+29    except (JSONDecodeError, UnicodeDecodeError) as e:
+30      raise ClientError(
+31          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+32      ) from e
+33
+34  def text(self) -> str:
+35    return str(self.read(), encoding='utf-8')
+36
+37  def status_code(self) -> int:
+38    return self._r.status_code
+39
+40  def read(self) -> bytes:
+41    return self._r.read()
+42
+43  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+44    return self._r.iter_bytes(size)
+45
+46  def close(self) -> None:
+47    try:
+48      self._r.close()
+49    except Exception as e:
+50      raise ClientError("Error closing response") from e
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+ + HTTPXResponse(response: httpx.Response) + + + +
+ +
16  def __init__(self, response: httpx.Response):
+17    self._r = response
+
+ + + + +
+
+ +
+ + def + headers(self) -> Mapping[str, str]: + + + +
+ +
19  def headers(self) -> Mapping[str, str]:
+20    h = {}
+21    for (k, v) in self._r.headers.items():
+22      h[k] = v
+23    return h
+
+ + + + +
+
+ +
+ + def + json(self) -> Any: + + + +
+ +
25  def json(self) -> Any:
+26    try:
+27      decoded = self._r.read().decode("utf-8")
+28      return json.loads(decoded)
+29    except (JSONDecodeError, UnicodeDecodeError) as e:
+30      raise ClientError(
+31          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+32      ) from e
+
+ + + + +
+
+ +
+ + def + text(self) -> str: + + + +
+ +
34  def text(self) -> str:
+35    return str(self.read(), encoding='utf-8')
+
+ + + + +
+
+ +
+ + def + status_code(self) -> int: + + + +
+ +
37  def status_code(self) -> int:
+38    return self._r.status_code
+
+ + + + +
+
+ +
+ + def + read(self) -> bytes: + + + +
+ +
40  def read(self) -> bytes:
+41    return self._r.read()
+
+ + + + +
+
+ +
+ + def + iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]: + + + +
+ +
43  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+44    return self._r.iter_bytes(size)
+
+ + + + +
+
+ +
+ + def + close(self) -> None: + + + +
+ +
46  def close(self) -> None:
+47    try:
+48      self._r.close()
+49    except Exception as e:
+50      raise ClientError("Error closing response") from e
+
+ + + + +
+
+
+ +
+ + class + HTTPXClient(fauna.http.http_client.HTTPClient): + + + +
+ +
 53class HTTPXClient(HTTPClient):
+ 54
+ 55  def __init__(self,
+ 56               client: httpx.Client,
+ 57               logger: logging.Logger = logging.getLogger("fauna")):
+ 58    super(HTTPXClient, self).__init__()
+ 59    self._c = client
+ 60    self._logger = logger
+ 61
+ 62  def request(
+ 63      self,
+ 64      method: str,
+ 65      url: str,
+ 66      headers: Mapping[str, str],
+ 67      data: Mapping[str, Any],
+ 68  ) -> HTTPResponse:
+ 69
+ 70    try:
+ 71      request = self._c.build_request(
+ 72          method,
+ 73          url,
+ 74          json=data,
+ 75          headers=headers,
+ 76      )
+ 77
+ 78      if self._logger.isEnabledFor(logging.DEBUG):
+ 79        headers_to_log = request.headers.copy()
+ 80        headers_to_log.pop("Authorization")
+ 81        self._logger.debug(
+ 82            f"query.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+ 83        )
+ 84
+ 85    except httpx.InvalidURL as e:
+ 86      raise ClientError("Invalid URL Format") from e
+ 87
+ 88    try:
+ 89      response = self._c.send(
+ 90          request,
+ 91          stream=False,
+ 92      )
+ 93
+ 94      if self._logger.isEnabledFor(logging.DEBUG):
+ 95        self._logger.debug(
+ 96            f"query.response status_code={response.status_code} headers={response.headers} data={response.text}"
+ 97        )
+ 98
+ 99      return HTTPXResponse(response)
+100    except (httpx.HTTPError, httpx.InvalidURL) as e:
+101      raise NetworkError("Exception re-raised from HTTP request") from e
+102
+103  @contextmanager
+104  def stream(
+105      self,
+106      url: str,
+107      headers: Mapping[str, str],
+108      data: Mapping[str, Any],
+109  ) -> Iterator[Any]:
+110    request = self._c.build_request(
+111        method="POST",
+112        url=url,
+113        headers=headers,
+114        json=data,
+115    )
+116
+117    if self._logger.isEnabledFor(logging.DEBUG):
+118      headers_to_log = request.headers.copy()
+119      headers_to_log.pop("Authorization")
+120      self._logger.debug(
+121          f"stream.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+122      )
+123
+124    response = self._c.send(
+125        request=request,
+126        stream=True,
+127    )
+128
+129    try:
+130      yield self._transform(response)
+131    finally:
+132      response.close()
+133
+134  def _transform(self, response):
+135    try:
+136      for line in response.iter_lines():
+137        loaded = json.loads(line)
+138        if self._logger.isEnabledFor(logging.DEBUG):
+139          self._logger.debug(f"stream.data data={loaded}")
+140        yield loaded
+141    except httpx.ReadTimeout as e:
+142      raise NetworkError("Stream timeout") from e
+143    except (httpx.HTTPError, httpx.InvalidURL) as e:
+144      raise NetworkError("Exception re-raised from HTTP request") from e
+145
+146  def close(self):
+147    self._c.close()
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+ + HTTPXClient( client: httpx.Client, logger: logging.Logger = <Logger fauna (WARNING)>) + + + +
+ +
55  def __init__(self,
+56               client: httpx.Client,
+57               logger: logging.Logger = logging.getLogger("fauna")):
+58    super(HTTPXClient, self).__init__()
+59    self._c = client
+60    self._logger = logger
+
+ + + + +
+
+ +
+ + def + request( self, method: str, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse: + + + +
+ +
 62  def request(
+ 63      self,
+ 64      method: str,
+ 65      url: str,
+ 66      headers: Mapping[str, str],
+ 67      data: Mapping[str, Any],
+ 68  ) -> HTTPResponse:
+ 69
+ 70    try:
+ 71      request = self._c.build_request(
+ 72          method,
+ 73          url,
+ 74          json=data,
+ 75          headers=headers,
+ 76      )
+ 77
+ 78      if self._logger.isEnabledFor(logging.DEBUG):
+ 79        headers_to_log = request.headers.copy()
+ 80        headers_to_log.pop("Authorization")
+ 81        self._logger.debug(
+ 82            f"query.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+ 83        )
+ 84
+ 85    except httpx.InvalidURL as e:
+ 86      raise ClientError("Invalid URL Format") from e
+ 87
+ 88    try:
+ 89      response = self._c.send(
+ 90          request,
+ 91          stream=False,
+ 92      )
+ 93
+ 94      if self._logger.isEnabledFor(logging.DEBUG):
+ 95        self._logger.debug(
+ 96            f"query.response status_code={response.status_code} headers={response.headers} data={response.text}"
+ 97        )
+ 98
+ 99      return HTTPXResponse(response)
+100    except (httpx.HTTPError, httpx.InvalidURL) as e:
+101      raise NetworkError("Exception re-raised from HTTP request") from e
+
+ + + + +
+
+ +
+
@contextmanager
+ + def + stream( self, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> Iterator[Any]: + + + +
+ +
103  @contextmanager
+104  def stream(
+105      self,
+106      url: str,
+107      headers: Mapping[str, str],
+108      data: Mapping[str, Any],
+109  ) -> Iterator[Any]:
+110    request = self._c.build_request(
+111        method="POST",
+112        url=url,
+113        headers=headers,
+114        json=data,
+115    )
+116
+117    if self._logger.isEnabledFor(logging.DEBUG):
+118      headers_to_log = request.headers.copy()
+119      headers_to_log.pop("Authorization")
+120      self._logger.debug(
+121          f"stream.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
+122      )
+123
+124    response = self._c.send(
+125        request=request,
+126        stream=True,
+127    )
+128
+129    try:
+130      yield self._transform(response)
+131    finally:
+132      response.close()
+
+ + + + +
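Because stream is a context manager, a hedged consumption pattern looks like this (the URL path, secret, and event source token are placeholders, not values defined by this module):

import httpx

from fauna.http.httpx_client import HTTPXClient

http_client = HTTPXClient(httpx.Client(timeout=None))  # no read timeout for a long-lived stream

with http_client.stream(
    url="https://db.fauna.com/stream/1",                 # placeholder endpoint
    headers={"Authorization": "Bearer <YOUR_SECRET>"},   # placeholder secret
    data={"token": "<EVENT_SOURCE_TOKEN>"},              # placeholder event source token
) as events:
  for event in events:  # each yielded item is one decoded JSON event
    print(event)

http_client.close()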
+
+ +
+ + def + close(self): + + + +
+ +
146  def close(self):
+147    self._c.close()
+
+ + + + +
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/query.html b/2.3.0/fauna/query.html new file mode 100644 index 00000000..337e1c10 --- /dev/null +++ b/2.3.0/fauna/query.html @@ -0,0 +1,245 @@ + + + + + + + fauna.query API documentation + + + + + + + + + +
+
+

+fauna.query

+ + + + + + +
1from .models import Document, DocumentReference, EventSource, NamedDocument, NamedDocumentReference, NullDocument, Module, Page
+2from .query_builder import fql, Query
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/query/models.html b/2.3.0/fauna/query/models.html new file mode 100644 index 00000000..87a6d254 --- /dev/null +++ b/2.3.0/fauna/query/models.html @@ -0,0 +1,1843 @@ + + + + + + + fauna.query.models API documentation + + + + + + + + + +
+
+

+fauna.query.models

+ + + + + + +
  1import warnings
+  2from collections.abc import Mapping
+  3from datetime import datetime
+  4from typing import Union, Iterator, Any, Optional, List
+  5
+  6
+  7# NB. Override __getattr__ and __dir__ to deprecate StreamToken usages. Based
+  8# on: https://peps.python.org/pep-0562/
+  9def __getattr__(name):
+ 10    if name == "StreamToken":
+ 11        warnings.warn(
+ 12            "StreamToken is deprecated. Prefer fauna.query.EventSource instead.",
+ 13            DeprecationWarning,
+ 14            stacklevel=2
+ 15        )
+ 16        return EventSource
+ 17    raise AttributeError(f"module {__name__} has no attribute {name}")
+ 18
+ 19
+ 20def __dir__():
+ 21    return globals().keys() | {"StreamToken"}  # Include 'StreamToken' in the module attributes
+ 22
+ 23
+ 24class Page:
+ 25  """A class representing a Set in Fauna."""
+ 26
+ 27  def __init__(self,
+ 28               data: Optional[List[Any]] = None,
+ 29               after: Optional[str] = None):
+ 30    self.data = data
+ 31    self.after = after
+ 32
+ 33  def __repr__(self):
+ 34    args = []
+ 35    if self.data is not None:
+ 36      args.append(f"data={repr(self.data)}")
+ 37
+ 38    if self.after is not None:
+ 39      args.append(f"after={repr(self.after)}")
+ 40
+ 41    return f"{self.__class__.__name__}({','.join(args)})"
+ 42
+ 43  def __iter__(self) -> Iterator[Any]:
+ 44    return iter(self.data or [])
+ 45
+ 46  def __eq__(self, other):
+ 47    return isinstance(
+ 48        other, Page) and self.data == other.data and self.after == other.after
+ 49
+ 50  def __hash__(self):
+ 51    return hash((type(self), self.data, self.after))
+ 52
+ 53  def __ne__(self, other):
+ 54    return not self.__eq__(other)
+ 55
+ 56
+ 57class EventSource:
+ 58  """A class represeting an EventSource in Fauna."""
+ 59
+ 60  def __init__(self, token: str):
+ 61    self.token = token
+ 62
+ 63  def __eq__(self, other):
+ 64    return isinstance(other, EventSource) and self.token == other.token
+ 65
+ 66  def __hash__(self):
+ 67    return hash(self.token)
+ 68
+ 69
+ 70class Module:
+ 71  """A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined
+ 72    collection, among others.
+ 73
+ 74    Usage:
+ 75
+ 76       dogs = Module("Dogs")
+ 77       query = fql("${col}.all", col=dogs)
+ 78    """
+ 79
+ 80  def __init__(self, name: str):
+ 81    self.name = name
+ 82
+ 83  def __repr__(self):
+ 84    return f"{self.__class__.__name__}(name={repr(self.name)})"
+ 85
+ 86  def __eq__(self, other):
+ 87    return isinstance(other, Module) and str(self) == str(other)
+ 88
+ 89  def __hash__(self):
+ 90    return hash(self.name)
+ 91
+ 92
+ 93class BaseReference:
+ 94  _collection: Module
+ 95
+ 96  @property
+ 97  def coll(self) -> Module:
+ 98    return self._collection
+ 99
+100  def __init__(self, coll: Union[str, Module]):
+101    if isinstance(coll, Module):
+102      self._collection = coll
+103    elif isinstance(coll, str):
+104      self._collection = Module(coll)
+105    else:
+106      raise TypeError(
+107          f"'coll' should be of type Module or str, but was {type(coll)}")
+108
+109  def __repr__(self):
+110    return f"{self.__class__.__name__}(coll={repr(self._collection)})"
+111
+112  def __eq__(self, other):
+113    return isinstance(other, type(self)) and str(self) == str(other)
+114
+115
+116class DocumentReference(BaseReference):
+117  """A class representing a reference to a :class:`Document` stored in Fauna.
+118    """
+119
+120  @property
+121  def id(self) -> str:
+122    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+123
+124        :rtype: str
+125        """
+126    return self._id
+127
+128  def __init__(self, coll: Union[str, Module], id: str):
+129    super().__init__(coll)
+130
+131    if not isinstance(id, str):
+132      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+133    self._id = id
+134
+135  def __hash__(self):
+136    return hash((type(self), self._collection, self._id))
+137
+138  def __repr__(self):
+139    return f"{self.__class__.__name__}(id={repr(self._id)},coll={repr(self._collection)})"
+140
+141  @staticmethod
+142  def from_string(ref: str):
+143    rs = ref.split(":")
+144    if len(rs) != 2:
+145      raise ValueError("Expects string of format <CollectionName>:<ID>")
+146    return DocumentReference(rs[0], rs[1])
+147
+148
+149class NamedDocumentReference(BaseReference):
+150  """A class representing a reference to a :class:`NamedDocument` stored in Fauna.
+151    """
+152
+153  @property
+154  def name(self) -> str:
+155    """The name of the :class:`NamedDocument`.
+156
+157        :rtype: str
+158        """
+159    return self._name
+160
+161  def __init__(self, coll: Union[str, Module], name: str):
+162    super().__init__(coll)
+163
+164    if not isinstance(name, str):
+165      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+166
+167    self._name = name
+168
+169  def __hash__(self):
+170    return hash((type(self), self._collection, self._name))
+171
+172  def __repr__(self):
+173    return f"{self.__class__.__name__}(name={repr(self._name)},coll={repr(self._collection)})"
+174
+175
+176class NullDocument:
+177
+178  @property
+179  def cause(self) -> Optional[str]:
+180    return self._cause
+181
+182  @property
+183  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+184    return self._ref
+185
+186  def __init__(
+187      self,
+188      ref: Union[DocumentReference, NamedDocumentReference],
+189      cause: Optional[str] = None,
+190  ):
+191    self._cause = cause
+192    self._ref = ref
+193
+194  def __repr__(self):
+195    return f"{self.__class__.__name__}(ref={repr(self.ref)},cause={repr(self._cause)})"
+196
+197  def __eq__(self, other):
+198    if not isinstance(other, type(self)):
+199      return False
+200
+201    return self.ref == other.ref and self.cause == other.cause
+202
+203  def __ne__(self, other):
+204    return not self == other
+205
+206
+207class BaseDocument(Mapping):
+208  """A base document class implementing an immutable mapping.
+209    """
+210
+211  def __init__(self, *args, **kwargs):
+212    self._store = dict(*args, **kwargs)
+213
+214  def __getitem__(self, __k: str) -> Any:
+215    return self._store[__k]
+216
+217  def __len__(self) -> int:
+218    return len(self._store)
+219
+220  def __iter__(self) -> Iterator[Any]:
+221    return iter(self._store)
+222
+223  def __eq__(self, other):
+224    if not isinstance(other, type(self)):
+225      return False
+226
+227    if len(self) != len(other):
+228      return False
+229
+230    for k, v in self.items():
+231      if k not in other:
+232        return False
+233      if self[k] != other[k]:
+234        return False
+235
+236    return True
+237
+238  def __ne__(self, other):
+239    return not self.__eq__(other)
+240
+241
+242class Document(BaseDocument):
+243  """A class representing a user document stored in Fauna.
+244
+245    User data should be stored directly on the map, while id, ts, and coll should only be stored on the related
+246    properties. When working with a :class:`Document` in code, it should be considered immutable.
+247    """
+248
+249  @property
+250  def id(self) -> str:
+251    return self._id
+252
+253  @property
+254  def ts(self) -> datetime:
+255    return self._ts
+256
+257  @property
+258  def coll(self) -> Module:
+259    return self._coll
+260
+261  def __init__(self,
+262               id: str,
+263               ts: datetime,
+264               coll: Union[str, Module],
+265               data: Optional[Mapping] = None):
+266    if not isinstance(id, str):
+267      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+268
+269    if not isinstance(ts, datetime):
+270      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+271
+272    if not (isinstance(coll, str) or isinstance(coll, Module)):
+273      raise TypeError(
+274          f"'coll' should be of type Module or str, but was {type(coll)}")
+275
+276    if isinstance(coll, str):
+277      coll = Module(coll)
+278
+279    self._id = id
+280    self._ts = ts
+281    self._coll = coll
+282
+283    super().__init__(data or {})
+284
+285  def __eq__(self, other):
+286    return type(self) == type(other) \
+287        and self.id == other.id \
+288        and self.coll == other.coll \
+289        and self.ts == other.ts \
+290        and super().__eq__(other)
+291
+292  def __ne__(self, other):
+293    return not self.__eq__(other)
+294
+295  def __repr__(self):
+296    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+297
+298    return f"{self.__class__.__name__}(" \
+299           f"id={repr(self.id)}," \
+300           f"coll={repr(self.coll)}," \
+301           f"ts={repr(self.ts)}," \
+302           f"data={{{kvs}}})"
+303
+304
+305class NamedDocument(BaseDocument):
+306  """A class representing a named document stored in Fauna. Examples of named documents include Collection
+307    definitions, Index definitions, and Roles, among others.
+308
+309    When working with a :class:`NamedDocument` in code, it should be considered immutable.
+310    """
+311
+312  @property
+313  def name(self) -> str:
+314    return self._name
+315
+316  @property
+317  def ts(self) -> datetime:
+318    return self._ts
+319
+320  @property
+321  def coll(self) -> Module:
+322    return self._coll
+323
+324  def __init__(self,
+325               name: str,
+326               ts: datetime,
+327               coll: Union[Module, str],
+328               data: Optional[Mapping] = None):
+329    if not isinstance(name, str):
+330      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+331
+332    if not isinstance(ts, datetime):
+333      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+334
+335    if not (isinstance(coll, str) or isinstance(coll, Module)):
+336      raise TypeError(
+337          f"'coll' should be of type Module or str, but was {type(coll)}")
+338
+339    if isinstance(coll, str):
+340      coll = Module(coll)
+341
+342    self._name = name
+343    self._ts = ts
+344    self._coll = coll
+345
+346    super().__init__(data or {})
+347
+348  def __eq__(self, other):
+349    return type(self) == type(other) \
+350        and self.name == other.name \
+351        and self.coll == other.coll \
+352        and self.ts == other.ts \
+353        and super().__eq__(other)
+354
+355  def __ne__(self, other):
+356    return not self.__eq__(other)
+357
+358  def __repr__(self):
+359    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+360
+361    return f"{self.__class__.__name__}(" \
+362           f"name={repr(self.name)}," \
+363           f"coll={repr(self.coll)}," \
+364           f"ts={repr(self.ts)}," \
+365           f"data={{{kvs}}})"
+
+ + +
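The module-level __getattr__ above implements a PEP 562 deprecation shim: accessing the old StreamToken name still resolves, but it emits a DeprecationWarning and hands back EventSource. A small demonstration:

import warnings

import fauna.query.models as models

with warnings.catch_warnings(record=True) as caught:
  warnings.simplefilter("always")
  token_cls = models.StreamToken  # resolved via the module __getattr__ hook

assert token_cls is models.EventSource
assert any(issubclass(w.category, DeprecationWarning) for w in caught)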
+
+ +
+ + class + Page: + + + +
+ +
25class Page:
+26  """A class representing a Set in Fauna."""
+27
+28  def __init__(self,
+29               data: Optional[List[Any]] = None,
+30               after: Optional[str] = None):
+31    self.data = data
+32    self.after = after
+33
+34  def __repr__(self):
+35    args = []
+36    if self.data is not None:
+37      args.append(f"data={repr(self.data)}")
+38
+39    if self.after is not None:
+40      args.append(f"after={repr(self.after)}")
+41
+42    return f"{self.__class__.__name__}({','.join(args)})"
+43
+44  def __iter__(self) -> Iterator[Any]:
+45    return iter(self.data or [])
+46
+47  def __eq__(self, other):
+48    return isinstance(
+49        other, Page) and self.data == other.data and self.after == other.after
+50
+51  def __hash__(self):
+52    return hash((type(self), self.data, self.after))
+53
+54  def __ne__(self, other):
+55    return not self.__eq__(other)
+
+ + +

A class representing a Set in Fauna.

+
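A short sketch of working with a Page (the data and cursor values are made up; in practice pages come back from query results):

from fauna import Page

page = Page(data=[{"name": "Fido"}, {"name": "Rex"}], after="cursor123")

for item in page:  # __iter__ walks page.data
  print(item["name"])

if page.after is not None:
  print(f"more results are available after cursor {page.after}")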
+ + +
+ +
+ + Page(data: Optional[List[Any]] = None, after: Optional[str] = None) + + + +
+ +
28  def __init__(self,
+29               data: Optional[List[Any]] = None,
+30               after: Optional[str] = None):
+31    self.data = data
+32    self.after = after
+
+ + + + +
+
+
+ data + + +
+ + + + +
+
+
+ after + + +
+ + + + +
+
+
+ +
+ + class + EventSource: + + + +
+ +
58class EventSource:
+59  """A class represeting an EventSource in Fauna."""
+60
+61  def __init__(self, token: str):
+62    self.token = token
+63
+64  def __eq__(self, other):
+65    return isinstance(other, EventSource) and self.token == other.token
+66
+67  def __hash__(self):
+68    return hash(self.token)
+
+ + +

A class representing an EventSource in Fauna.

+
+ + +
+ +
+ + EventSource(token: str) + + + +
+ +
61  def __init__(self, token: str):
+62    self.token = token
+
+ + + + +
+
+
+ token + + +
+ + + + +
+
+
+ +
+ + class + Module: + + + +
+ +
71class Module:
+72  """A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined
+73    collection, among others.
+74
+75    Usage:
+76
+77       dogs = Module("Dogs")
+78       query = fql("${col}.all", col=dogs)
+79    """
+80
+81  def __init__(self, name: str):
+82    self.name = name
+83
+84  def __repr__(self):
+85    return f"{self.__class__.__name__}(name={repr(self.name)})"
+86
+87  def __eq__(self, other):
+88    return isinstance(other, Module) and str(self) == str(other)
+89
+90  def __hash__(self):
+91    return hash(self.name)
+
+ + +

A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined +collection, among others.

+ +

Usage:

+ +

dogs = Module("Dogs") + query = fql("${col}.all", col=dogs)

+
+ + +
+ +
+ + Module(name: str) + + + +
+ +
81  def __init__(self, name: str):
+82    self.name = name
+
+ + + + +
+
+
+ name + + +
+ + + + +
+
+
+ +
+ + class + BaseReference: + + + +
+ +
 94class BaseReference:
+ 95  _collection: Module
+ 96
+ 97  @property
+ 98  def coll(self) -> Module:
+ 99    return self._collection
+100
+101  def __init__(self, coll: Union[str, Module]):
+102    if isinstance(coll, Module):
+103      self._collection = coll
+104    elif isinstance(coll, str):
+105      self._collection = Module(coll)
+106    else:
+107      raise TypeError(
+108          f"'coll' should be of type Module or str, but was {type(coll)}")
+109
+110  def __repr__(self):
+111    return f"{self.__class__.__name__}(coll={repr(self._collection)})"
+112
+113  def __eq__(self, other):
+114    return isinstance(other, type(self)) and str(self) == str(other)
+
+ + + + +
+ +
+ + BaseReference(coll: Union[str, Module]) + + + +
+ +
101  def __init__(self, coll: Union[str, Module]):
+102    if isinstance(coll, Module):
+103      self._collection = coll
+104    elif isinstance(coll, str):
+105      self._collection = Module(coll)
+106    else:
+107      raise TypeError(
+108          f"'coll' should be of type Module or str, but was {type(coll)}")
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
97  @property
+98  def coll(self) -> Module:
+99    return self._collection
+
+ + + + +
+
+
+ +
+ + class + DocumentReference(BaseReference): + + + +
+ +
117class DocumentReference(BaseReference):
+118  """A class representing a reference to a :class:`Document` stored in Fauna.
+119    """
+120
+121  @property
+122  def id(self) -> str:
+123    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+124
+125        :rtype: str
+126        """
+127    return self._id
+128
+129  def __init__(self, coll: Union[str, Module], id: str):
+130    super().__init__(coll)
+131
+132    if not isinstance(id, str):
+133      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+134    self._id = id
+135
+136  def __hash__(self):
+137    return hash((type(self), self._collection, self._id))
+138
+139  def __repr__(self):
+140    return f"{self.__class__.__name__}(id={repr(self._id)},coll={repr(self._collection)})"
+141
+142  @staticmethod
+143  def from_string(ref: str):
+144    rs = ref.split(":")
+145    if len(rs) != 2:
+146      raise ValueError("Expects string of format <CollectionName>:<ID>")
+147    return DocumentReference(rs[0], rs[1])
+
+ + +

A class representing a reference to a Document stored in Fauna.

+
+ + +
+ +
+ + DocumentReference(coll: Union[str, Module], id: str) + + + +
+ +
129  def __init__(self, coll: Union[str, Module], id: str):
+130    super().__init__(coll)
+131
+132    if not isinstance(id, str):
+133      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+134    self._id = id
+
+ + + + +
+
+ +
+ id: str + + + +
+ +
121  @property
+122  def id(self) -> str:
+123    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+124
+125        :rtype: str
+126        """
+127    return self._id
+
+ + +

The ID for the Document. Valid IDs are 64-bit integers, stored as strings.

+
+ + +
+
+ +
+
@staticmethod
+ + def + from_string(ref: str): + + + +
+ +
142  @staticmethod
+143  def from_string(ref: str):
+144    rs = ref.split(":")
+145    if len(rs) != 2:
+146      raise ValueError("Expects string of format <CollectionName>:<ID>")
+147    return DocumentReference(rs[0], rs[1])
+
+ + + + +
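For example (the collection name and ID are illustrative):

from fauna import DocumentReference, Module

ref = DocumentReference.from_string("Dogs:388093002342138368")
assert ref.coll == Module("Dogs")
assert ref.id == "388093002342138368"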
+
+
Inherited Members
+
+
BaseReference
+
coll
+ +
+
+
+
+
+ +
+ + class + NamedDocumentReference(BaseReference): + + + +
+ +
150class NamedDocumentReference(BaseReference):
+151  """A class representing a reference to a :class:`NamedDocument` stored in Fauna.
+152    """
+153
+154  @property
+155  def name(self) -> str:
+156    """The name of the :class:`NamedDocument`.
+157
+158        :rtype: str
+159        """
+160    return self._name
+161
+162  def __init__(self, coll: Union[str, Module], name: str):
+163    super().__init__(coll)
+164
+165    if not isinstance(name, str):
+166      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+167
+168    self._name = name
+169
+170  def __hash__(self):
+171    return hash((type(self), self._collection, self._name))
+172
+173  def __repr__(self):
+174    return f"{self.__class__.__name__}(name={repr(self._name)},coll={repr(self._collection)})"
+
+ + +

A class representing a reference to a NamedDocument stored in Fauna.

+
+ + +
+ +
+ + NamedDocumentReference(coll: Union[str, Module], name: str) + + + +
+ +
162  def __init__(self, coll: Union[str, Module], name: str):
+163    super().__init__(coll)
+164
+165    if not isinstance(name, str):
+166      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+167
+168    self._name = name
+
+ + + + +
+
+ +
+ name: str + + + +
+ +
154  @property
+155  def name(self) -> str:
+156    """The name of the :class:`NamedDocument`.
+157
+158        :rtype: str
+159        """
+160    return self._name
+
+ + +

The name of the NamedDocument.

+
+ + +
+
+
Inherited Members
+
+
BaseReference
+
coll
+ +
+
+
+
+
+ +
+ + class + NullDocument: + + + +
+ +
177class NullDocument:
+178
+179  @property
+180  def cause(self) -> Optional[str]:
+181    return self._cause
+182
+183  @property
+184  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+185    return self._ref
+186
+187  def __init__(
+188      self,
+189      ref: Union[DocumentReference, NamedDocumentReference],
+190      cause: Optional[str] = None,
+191  ):
+192    self._cause = cause
+193    self._ref = ref
+194
+195  def __repr__(self):
+196    return f"{self.__class__.__name__}(ref={repr(self.ref)},cause={repr(self._cause)})"
+197
+198  def __eq__(self, other):
+199    if not isinstance(other, type(self)):
+200      return False
+201
+202    return self.ref == other.ref and self.cause == other.cause
+203
+204  def __ne__(self, other):
+205    return not self == other
+
+ + + + +
+ +
+ + NullDocument( ref: Union[DocumentReference, NamedDocumentReference], cause: Optional[str] = None) + + + +
+ +
187  def __init__(
+188      self,
+189      ref: Union[DocumentReference, NamedDocumentReference],
+190      cause: Optional[str] = None,
+191  ):
+192    self._cause = cause
+193    self._ref = ref
+
+ + + + +
+
+ +
+ cause: Optional[str] + + + +
+ +
179  @property
+180  def cause(self) -> Optional[str]:
+181    return self._cause
+
+ + + + +
+
+ +
+ ref: Union[DocumentReference, NamedDocumentReference] + + + +
+ +
183  @property
+184  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+185    return self._ref
+
+ + + + +
+
+
+ +
+ + class + BaseDocument(collections.abc.Mapping): + + + +
+ +
208class BaseDocument(Mapping):
+209  """A base document class implementing an immutable mapping.
+210    """
+211
+212  def __init__(self, *args, **kwargs):
+213    self._store = dict(*args, **kwargs)
+214
+215  def __getitem__(self, __k: str) -> Any:
+216    return self._store[__k]
+217
+218  def __len__(self) -> int:
+219    return len(self._store)
+220
+221  def __iter__(self) -> Iterator[Any]:
+222    return iter(self._store)
+223
+224  def __eq__(self, other):
+225    if not isinstance(other, type(self)):
+226      return False
+227
+228    if len(self) != len(other):
+229      return False
+230
+231    for k, v in self.items():
+232      if k not in other:
+233        return False
+234      if self[k] != other[k]:
+235        return False
+236
+237    return True
+238
+239  def __ne__(self, other):
+240    return not self.__eq__(other)
+
+ + +

A base document class implementing an immutable mapping.

+
+ + +
+ +
+ + BaseDocument(*args, **kwargs) + + + +
+ +
212  def __init__(self, *args, **kwargs):
+213    self._store = dict(*args, **kwargs)
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ +
+ + class + Document(BaseDocument): + + + +
+ +
243class Document(BaseDocument):
+244  """A class representing a user document stored in Fauna.
+245
+246    User data should be stored directly on the map, while id, ts, and coll should only be stored on the related
+247    properties. When working with a :class:`Document` in code, it should be considered immutable.
+248    """
+249
+250  @property
+251  def id(self) -> str:
+252    return self._id
+253
+254  @property
+255  def ts(self) -> datetime:
+256    return self._ts
+257
+258  @property
+259  def coll(self) -> Module:
+260    return self._coll
+261
+262  def __init__(self,
+263               id: str,
+264               ts: datetime,
+265               coll: Union[str, Module],
+266               data: Optional[Mapping] = None):
+267    if not isinstance(id, str):
+268      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+269
+270    if not isinstance(ts, datetime):
+271      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+272
+273    if not (isinstance(coll, str) or isinstance(coll, Module)):
+274      raise TypeError(
+275          f"'coll' should be of type Module or str, but was {type(coll)}")
+276
+277    if isinstance(coll, str):
+278      coll = Module(coll)
+279
+280    self._id = id
+281    self._ts = ts
+282    self._coll = coll
+283
+284    super().__init__(data or {})
+285
+286  def __eq__(self, other):
+287    return type(self) == type(other) \
+288        and self.id == other.id \
+289        and self.coll == other.coll \
+290        and self.ts == other.ts \
+291        and super().__eq__(other)
+292
+293  def __ne__(self, other):
+294    return not self.__eq__(other)
+295
+296  def __repr__(self):
+297    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+298
+299    return f"{self.__class__.__name__}(" \
+300           f"id={repr(self.id)}," \
+301           f"coll={repr(self.coll)}," \
+302           f"ts={repr(self.ts)}," \
+303           f"data={{{kvs}}})"
+
+ + +

A class representing a user document stored in Fauna.

+ +

User data should be stored directly on the map, while id, ts, and coll should only be stored on the related +properties. When working with a Document in code, it should be considered immutable.

+
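A hedged construction example (the id, timestamp, and fields are illustrative; real documents are typically returned by queries rather than built by hand):

from datetime import datetime, timezone

from fauna import Document

doc = Document(
    id="388093002342138368",
    ts=datetime(2024, 1, 1, tzinfo=timezone.utc),
    coll="Dogs",  # a str is converted to Module("Dogs")
    data={"name": "Fido"},
)

print(doc.id, doc.coll, doc["name"])  # mapping access comes from BaseDocument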
+ + +
+ +
+ + Document( id: str, ts: datetime.datetime, coll: Union[str, Module], data: Optional[Mapping] = None) + + + +
+ +
262  def __init__(self,
+263               id: str,
+264               ts: datetime,
+265               coll: Union[str, Module],
+266               data: Optional[Mapping] = None):
+267    if not isinstance(id, str):
+268      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+269
+270    if not isinstance(ts, datetime):
+271      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+272
+273    if not (isinstance(coll, str) or isinstance(coll, Module)):
+274      raise TypeError(
+275          f"'coll' should be of type Module or str, but was {type(coll)}")
+276
+277    if isinstance(coll, str):
+278      coll = Module(coll)
+279
+280    self._id = id
+281    self._ts = ts
+282    self._coll = coll
+283
+284    super().__init__(data or {})
+
+ + + + +
+
+ +
+ id: str + + + +
+ +
250  @property
+251  def id(self) -> str:
+252    return self._id
+
+ + + + +
+
+ +
+ ts: datetime.datetime + + + +
+ +
254  @property
+255  def ts(self) -> datetime:
+256    return self._ts
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
258  @property
+259  def coll(self) -> Module:
+260    return self._coll
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ +
+ + class + NamedDocument(BaseDocument): + + + +
+ +
306class NamedDocument(BaseDocument):
+307  """A class representing a named document stored in Fauna. Examples of named documents include Collection
+308    definitions, Index definitions, and Roles, among others.
+309
+310    When working with a :class:`NamedDocument` in code, it should be considered immutable.
+311    """
+312
+313  @property
+314  def name(self) -> str:
+315    return self._name
+316
+317  @property
+318  def ts(self) -> datetime:
+319    return self._ts
+320
+321  @property
+322  def coll(self) -> Module:
+323    return self._coll
+324
+325  def __init__(self,
+326               name: str,
+327               ts: datetime,
+328               coll: Union[Module, str],
+329               data: Optional[Mapping] = None):
+330    if not isinstance(name, str):
+331      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+332
+333    if not isinstance(ts, datetime):
+334      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+335
+336    if not (isinstance(coll, str) or isinstance(coll, Module)):
+337      raise TypeError(
+338          f"'coll' should be of type Module or str, but was {type(coll)}")
+339
+340    if isinstance(coll, str):
+341      coll = Module(coll)
+342
+343    self._name = name
+344    self._ts = ts
+345    self._coll = coll
+346
+347    super().__init__(data or {})
+348
+349  def __eq__(self, other):
+350    return type(self) == type(other) \
+351        and self.name == other.name \
+352        and self.coll == other.coll \
+353        and self.ts == other.ts \
+354        and super().__eq__(other)
+355
+356  def __ne__(self, other):
+357    return not self.__eq__(other)
+358
+359  def __repr__(self):
+360    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+361
+362    return f"{self.__class__.__name__}(" \
+363           f"name={repr(self.name)}," \
+364           f"coll={repr(self.coll)}," \
+365           f"ts={repr(self.ts)}," \
+366           f"data={{{kvs}}})"
+
+ + +

A class representing a named document stored in Fauna. Examples of named documents include Collection +definitions, Index definitions, and Roles, among others.

+ +

When working with a NamedDocument in code, it should be considered immutable.

+
+ + +
+ +
+ + NamedDocument( name: str, ts: datetime.datetime, coll: Union[Module, str], data: Optional[Mapping] = None) + + + +
+ +
325  def __init__(self,
+326               name: str,
+327               ts: datetime,
+328               coll: Union[Module, str],
+329               data: Optional[Mapping] = None):
+330    if not isinstance(name, str):
+331      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+332
+333    if not isinstance(ts, datetime):
+334      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+335
+336    if not (isinstance(coll, str) or isinstance(coll, Module)):
+337      raise TypeError(
+338          f"'coll' should be of type Module or str, but was {type(coll)}")
+339
+340    if isinstance(coll, str):
+341      coll = Module(coll)
+342
+343    self._name = name
+344    self._ts = ts
+345    self._coll = coll
+346
+347    super().__init__(data or {})
+
+ + + + +
+
+ +
+ name: str + + + +
+ +
313  @property
+314  def name(self) -> str:
+315    return self._name
+
+ + + + +
+
+ +
+ ts: datetime.datetime + + + +
+ +
317  @property
+318  def ts(self) -> datetime:
+319    return self._ts
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
321  @property
+322  def coll(self) -> Module:
+323    return self._coll
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ + \ No newline at end of file diff --git a/2.3.0/fauna/query/query_builder.html b/2.3.0/fauna/query/query_builder.html new file mode 100644 index 00000000..44ec7677 --- /dev/null +++ b/2.3.0/fauna/query/query_builder.html @@ -0,0 +1,831 @@ + + + + + + + fauna.query.query_builder API documentation + + + + + + + + + +
+
+

+fauna.query.query_builder

+ + + + + + +
  1import abc
+  2from typing import Any, Optional, List
+  3
+  4from .template import FaunaTemplate
+  5
+  6
+  7class Fragment(abc.ABC):
+  8  """An abstract class representing a Fragment of a query.
+  9    """
+ 10
+ 11  @abc.abstractmethod
+ 12  def get(self) -> Any:
+ 13    """An abstract method for returning a stored value.
+ 14        """
+ 15    pass
+ 16
+ 17
+ 18class ValueFragment(Fragment):
+ 19  """A concrete :class:`Fragment` representing a part of a query that can represent a template variable.
+ 20    For example, if a template contains a variable ``${foo}``, and an object ``{ "prop": 1 }`` is provided for foo,
+ 21    then ``{ "prop": 1 }`` should be wrapped as a :class:`ValueFragment`.
+ 22
+ 23    :param Any val: The value to be used as a fragment.
+ 24    """
+ 25
+ 26  def __init__(self, val: Any):
+ 27    self._val = val
+ 28
+ 29  def get(self) -> Any:
+ 30    """Gets the stored value.
+ 31
+ 32        :returns: The stored value.
+ 33        """
+ 34    return self._val
+ 35
+ 36
+ 37class LiteralFragment(Fragment):
+ 38  """A concrete :class:`Fragment` representing a query literal For example, in the template ```let x = ${foo}```,
+ 39    the portion ```let x = ``` is a query literal and should be wrapped as a :class:`LiteralFragment`.
+ 40
+ 41    :param str val: The query literal to be used as a fragment.
+ 42    """
+ 43
+ 44  def __init__(self, val: str):
+ 45    self._val = val
+ 46
+ 47  def get(self) -> str:
+ 48    """Returns the stored value.
+ 49
+ 50        :returns: The stored value.
+ 51        """
+ 52    return self._val
+ 53
+ 54
+ 55class Query:
+ 56  """A class for representing a query.
+ 57
+ 58       e.g. { "fql": [...] }
+ 59    """
+ 60  _fragments: List[Fragment]
+ 61
+ 62  def __init__(self, fragments: Optional[List[Fragment]] = None):
+ 63    self._fragments = fragments or []
+ 64
+ 65  @property
+ 66  def fragments(self) -> List[Fragment]:
+ 67    """The list of stored Fragments"""
+ 68    return self._fragments
+ 69
+ 70  def __str__(self) -> str:
+ 71    res = ""
+ 72    for f in self._fragments:
+ 73      res += str(f.get())
+ 74
+ 75    return res
+ 76
+ 77
+ 78def fql(query: str, **kwargs: Any) -> Query:
+ 79  """Creates a Query - capable of performing query composition and simple querying. It can accept a
+ 80    simple string query, or can perform composition using ``${}`` sigil string template with ``**kwargs`` as
+ 81    substitutions.
+ 82
+ 83    The ``**kwargs`` can be Fauna data types - such as strings, document references, or modules - and embedded
+ 84    Query - allowing you to compose arbitrarily complex queries.
+ 85
+ 86    When providing ``**kwargs``, the following types are accepted:
+ 87        - :class:`str`, :class:`int`, :class:`float`, :class:`bool`, :class:`datetime.datetime`, :class:`datetime.date`,
+ 88          :class:`dict`, :class:`list`, :class:`Query`, :class:`DocumentReference`, :class:`Module`
+ 89
+ 90    :raises ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
+ 91    :returns: A :class:`Query` that can be passed to the client for evaluation against Fauna.
+ 92
+ 93    Examples:
+ 94
+ 95    .. code-block:: python
+ 96        :name: Simple-FQL-Example
+ 97        :caption: Simple query declaration using this function.
+ 98
+ 99        fql('Dogs.byName("Fido")')
+100
+101    .. code-block:: python
+102        :name: Composition-FQL-Example
+103        :caption: Query composition using this function.
+104
+105        def get_dog(id):
+106            return fql('Dogs.byId(${id})', id=id)
+107
+108        def get_vet_phone(id):
+109            return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+110
+111        get_vet_phone('d123')
+112
+113    """
+114
+115  fragments: List[Any] = []
+116  template = FaunaTemplate(query)
+117  for text, field_name in template.iter():
+118    if text is not None and len(text) > 0:
+119      fragments.append(LiteralFragment(text))
+120
+121    if field_name is not None:
+122      if field_name not in kwargs:
+123        raise ValueError(
+124            f"template variable `{field_name}` not found in provided kwargs")
+125
+126      # TODO: Reject if it's already a fragment, or accept *Fragment? Decide on API here
+127      fragments.append(ValueFragment(kwargs[field_name]))
+128  return Query(fragments)
+
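To illustrate how fql() splits a template into fragments (the query text and ID are illustrative):

from fauna.query.query_builder import fql, LiteralFragment, ValueFragment

q = fql("Dogs.byId(${id})", id="388093002342138368")

# Expected decomposition: LiteralFragment("Dogs.byId("), ValueFragment("388..."), LiteralFragment(")")
for fragment in q.fragments:
  print(type(fragment).__name__, repr(fragment.get()))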
+ + +
+
+ +
+ + class + Fragment(abc.ABC): + + + +
+ +
 8class Fragment(abc.ABC):
+ 9  """An abstract class representing a Fragment of a query.
+10    """
+11
+12  @abc.abstractmethod
+13  def get(self) -> Any:
+14    """An abstract method for returning a stored value.
+15        """
+16    pass
+
+ + +

An abstract class representing a Fragment of a query.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + get(self) -> Any: + + + +
+ +
12  @abc.abstractmethod
+13  def get(self) -> Any:
+14    """An abstract method for returning a stored value.
+15        """
+16    pass
+
+ + +

An abstract method for returning a stored value.

+
+ + +
+
+
+ +
+ + class + ValueFragment(Fragment): + + + +
+ +
19class ValueFragment(Fragment):
+20  """A concrete :class:`Fragment` representing a part of a query that can represent a template variable.
+21    For example, if a template contains a variable ``${foo}``, and an object ``{ "prop": 1 }`` is provided for foo,
+22    then ``{ "prop": 1 }`` should be wrapped as a :class:`ValueFragment`.
+23
+24    :param Any val: The value to be used as a fragment.
+25    """
+26
+27  def __init__(self, val: Any):
+28    self._val = val
+29
+30  def get(self) -> Any:
+31    """Gets the stored value.
+32
+33        :returns: The stored value.
+34        """
+35    return self._val
+
+ + +

A concrete Fragment representing a part of a query that can represent a template variable. +For example, if a template contains a variable ${foo}, and an object { "prop": 1 } is provided for foo, +then { "prop": 1 } should be wrapped as a ValueFragment.

+ +
Parameters
+ +
    +
  • Any val: The value to be used as a fragment.
  • +
+
+ + +
+ +
+ + ValueFragment(val: Any) + + + +
+ +
27  def __init__(self, val: Any):
+28    self._val = val
+
+ + + + +
+
+ +
+ + def + get(self) -> Any: + + + +
+ +
30  def get(self) -> Any:
+31    """Gets the stored value.
+32
+33        :returns: The stored value.
+34        """
+35    return self._val
+
+ + +

Gets the stored value.

+ +

:returns: The stored value.

+
+ + +
+
+
+ +
+ + class + LiteralFragment(Fragment): + + + +
+ +
38class LiteralFragment(Fragment):
+39  """A concrete :class:`Fragment` representing a query literal For example, in the template ```let x = ${foo}```,
+40    the portion ```let x = ``` is a query literal and should be wrapped as a :class:`LiteralFragment`.
+41
+42    :param str val: The query literal to be used as a fragment.
+43    """
+44
+45  def __init__(self, val: str):
+46    self._val = val
+47
+48  def get(self) -> str:
+49    """Returns the stored value.
+50
+51        :returns: The stored value.
+52        """
+53    return self._val
+
+ + +

A concrete Fragment representing a query literal. For example, in the template let x = ${foo}, +the portion let x =  is a query literal and should be wrapped as a LiteralFragment.

+ +
Parameters
+ +
    +
  • str val: The query literal to be used as a fragment.
  • +
+
+ + +
+ +
+ + LiteralFragment(val: str) + + + +
+ +
45  def __init__(self, val: str):
+46    self._val = val
+
+ + + + +
+
+ +
+ + def + get(self) -> str: + + + +
+ +
48  def get(self) -> str:
+49    """Returns the stored value.
+50
+51        :returns: The stored value.
+52        """
+53    return self._val
+
+ + +

Returns the stored value.

+ +

:returns: The stored value.

+
+ + +
+
+
+ +
+ + class + Query: + + + +
+ +
56class Query:
+57  """A class for representing a query.
+58
+59       e.g. { "fql": [...] }
+60    """
+61  _fragments: List[Fragment]
+62
+63  def __init__(self, fragments: Optional[List[Fragment]] = None):
+64    self._fragments = fragments or []
+65
+66  @property
+67  def fragments(self) -> List[Fragment]:
+68    """The list of stored Fragments"""
+69    return self._fragments
+70
+71  def __str__(self) -> str:
+72    res = ""
+73    for f in self._fragments:
+74      res += str(f.get())
+75
+76    return res
+
+ + +

A class for representing a query.

+ +

e.g. { "fql": [...] }

+
+ + +
+ +
+ + Query(fragments: Optional[List[Fragment]] = None) + + + +
+ +
63  def __init__(self, fragments: Optional[List[Fragment]] = None):
+64    self._fragments = fragments or []
+
+ + + + +
+
+ +
+ fragments: List[Fragment] + + + +
+ +
66  @property
+67  def fragments(self) -> List[Fragment]:
+68    """The list of stored Fragments"""
+69    return self._fragments
+
+ + +

The list of stored Fragments

+
+ + +
+
+
+ +
+ + def + fql(query: str, **kwargs: Any) -> Query: + + + +
+ +
 79def fql(query: str, **kwargs: Any) -> Query:
+ 80  """Creates a Query - capable of performing query composition and simple querying. It can accept a
+ 81    simple string query, or can perform composition using ``${}`` sigil string template with ``**kwargs`` as
+ 82    substitutions.
+ 83
+ 84    The ``**kwargs`` can be Fauna data types - such as strings, document references, or modules - and embedded
+ 85    Query - allowing you to compose arbitrarily complex queries.
+ 86
+ 87    When providing ``**kwargs``, the following types are accepted:
+ 88        - :class:`str`, :class:`int`, :class:`float`, :class:`bool`, :class:`datetime.datetime`, :class:`datetime.date`,
+ 89          :class:`dict`, :class:`list`, :class:`Query`, :class:`DocumentReference`, :class:`Module`
+ 90
+ 91    :raises ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
+ 92    :returns: A :class:`Query` that can be passed to the client for evaluation against Fauna.
+ 93
+ 94    Examples:
+ 95
+ 96    .. code-block:: python
+ 97        :name: Simple-FQL-Example
+ 98        :caption: Simple query declaration using this function.
+ 99
+100        fql('Dogs.byName("Fido")')
+101
+102    .. code-block:: python
+103        :name: Composition-FQL-Example
+104        :caption: Query composition using this function.
+105
+106        def get_dog(id):
+107            return fql('Dogs.byId(${id})', id=id)
+108
+109        def get_vet_phone(id):
+110            return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+111
+112        get_vet_phone('d123')
+113
+114    """
+115
+116  fragments: List[Any] = []
+117  template = FaunaTemplate(query)
+118  for text, field_name in template.iter():
+119    if text is not None and len(text) > 0:
+120      fragments.append(LiteralFragment(text))
+121
+122    if field_name is not None:
+123      if field_name not in kwargs:
+124        raise ValueError(
+125            f"template variable `{field_name}` not found in provided kwargs")
+126
+127      # TODO: Reject if it's already a fragment, or accept *Fragment? Decide on API here
+128      fragments.append(ValueFragment(kwargs[field_name]))
+129  return Query(fragments)
+
+ + +

Creates a Query - capable of performing query composition and simple querying. It can accept a simple string query, or can perform composition using ${} sigil string template with **kwargs as substitutions.

+ +

The **kwargs can be Fauna data types - such as strings, document references, or modules - and embedded Query - allowing you to compose arbitrarily complex queries.

+ +

When providing **kwargs, the following types are accepted: str, int, float, bool, datetime.datetime, datetime.date, dict, list, Query, DocumentReference, Module

+ +
Raises
+ +
    +
  • ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
  • +

Returns
A Query that can be passed to the client for evaluation against Fauna.
+ +

Examples:

+ +
+
fql('Dogs.byName("Fido")')
+
+
+ +
+
def get_dog(id):
+    return fql('Dogs.byId(${id})', id=id)
+
+def get_vet_phone(id):
+    return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+
+get_vet_phone('d123')
+
+
+
+ + +
+
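As a usage sketch, a composed query like the one above is typically handed to a Client for evaluation. The import of Client from fauna.client matches the module listing later in this file set; reading credentials from the FAUNA_SECRET / FAUNA_ENDPOINT environment variables and the data attribute on the returned QuerySuccess are assumptions here:

from fauna import fql
from fauna.client import Client

client = Client()  # assumes FAUNA_SECRET / FAUNA_ENDPOINT are set in the environment

def get_dog(id):
    return fql('Dogs.byId(${id})', id=id)

res = client.query(fql('${dog} { .vet_phone_number }', dog=get_dog('d123')))
print(res.data)    # the decoded query result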
+ + \ No newline at end of file diff --git a/2.3.0/fauna/query/template.html b/2.3.0/fauna/query/template.html new file mode 100644 index 00000000..90266fb1 --- /dev/null +++ b/2.3.0/fauna/query/template.html @@ -0,0 +1,533 @@ + + + + + + + fauna.query.template API documentation + + + + + + + + + +
+
+

+fauna.query.template

+ + + + + + +
 1import re as _re
+ 2from typing import Optional, Tuple, Iterator, Match
+ 3
+ 4
+ 5class FaunaTemplate:
+ 6  """A template class that supports variables marked with a ${}-sigil. Its primary purpose
+ 7    is to expose an iterator for the template parts that support composition of FQL queries.
+ 8
+ 9    Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py
+10
+11    :param template: A string template e.g. "${my_var} { name }"
+12    :type template: str
+13    """
+14
+15  _delimiter = '$'
+16  _idpattern = r'[_a-zA-Z][_a-zA-Z0-9]*'
+17  _flags = _re.VERBOSE
+18
+19  def __init__(self, template: str):
+20    """The initializer"""
+21    delim = _re.escape(self._delimiter)
+22    pattern = fr"""
+23        {delim}(?:
+24          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+25          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+26          (?P<invalid>)             # Other ill-formed delimiter exprs
+27        ) 
+28        """
+29    self._pattern = _re.compile(pattern, self._flags)
+30    self._template = template
+31
+32  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+33    """A method that returns an iterator over tuples representing template parts. The
+34        first value of the tuple, if not None, is a template literal. The second value of
+35        the tuple, if not None, is a template variable. If both are not None, then the
+36        template literal comes *before* the variable.
+37
+38        :raises ValueError: If there is an invalid template placeholder
+39
+40        :return: An iterator of template parts
+41        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+42        """
+43    match_objects = self._pattern.finditer(self._template)
+44    cur_pos = 0
+45    for mo in match_objects:
+46      if mo.group("invalid") is not None:
+47        self._handle_invalid(mo)
+48
+49      span_start_pos = mo.span()[0]
+50      span_end_pos = mo.span()[1]
+51      escaped_part = mo.group("escaped") or ""
+52      variable_part = mo.group("braced")
+53      literal_part: Optional[str] = None
+54
+55      if cur_pos != span_start_pos:
+56        literal_part = \
+57            self._template[cur_pos:span_start_pos] \
+58                + escaped_part
+59
+60      cur_pos = span_end_pos
+61
+62      yield literal_part, variable_part
+63
+64    if cur_pos != len(self._template):
+65      yield self._template[cur_pos:], None
+66
+67  def _handle_invalid(self, mo: Match) -> None:
+68    i = mo.start("invalid")
+69    lines = self._template[:i].splitlines(keepends=True)
+70
+71    if not lines:
+72      colno = 1
+73      lineno = 1
+74    else:
+75      colno = i - len(''.join(lines[:-1]))
+76      lineno = len(lines)
+77
+78    raise ValueError(
+79        f"Invalid placeholder in template: line {lineno}, col {colno}")
+
+ + +
+
+ +
+ + class + FaunaTemplate: + + + +
+ +
 6class FaunaTemplate:
+ 7  """A template class that supports variables marked with a ${}-sigil. Its primary purpose
+ 8    is to expose an iterator for the template parts that support composition of FQL queries.
+ 9
+10    Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py
+11
+12    :param template: A string template e.g. "${my_var} { name }"
+13    :type template: str
+14    """
+15
+16  _delimiter = '$'
+17  _idpattern = r'[_a-zA-Z][_a-zA-Z0-9]*'
+18  _flags = _re.VERBOSE
+19
+20  def __init__(self, template: str):
+21    """The initializer"""
+22    delim = _re.escape(self._delimiter)
+23    pattern = fr"""
+24        {delim}(?:
+25          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+26          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+27          (?P<invalid>)             # Other ill-formed delimiter exprs
+28        ) 
+29        """
+30    self._pattern = _re.compile(pattern, self._flags)
+31    self._template = template
+32
+33  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+34    """A method that returns an iterator over tuples representing template parts. The
+35        first value of the tuple, if not None, is a template literal. The second value of
+36        the tuple, if not None, is a template variable. If both are not None, then the
+37        template literal comes *before* the variable.
+38
+39        :raises ValueError: If there is an invalid template placeholder
+40
+41        :return: An iterator of template parts
+42        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+43        """
+44    match_objects = self._pattern.finditer(self._template)
+45    cur_pos = 0
+46    for mo in match_objects:
+47      if mo.group("invalid") is not None:
+48        self._handle_invalid(mo)
+49
+50      span_start_pos = mo.span()[0]
+51      span_end_pos = mo.span()[1]
+52      escaped_part = mo.group("escaped") or ""
+53      variable_part = mo.group("braced")
+54      literal_part: Optional[str] = None
+55
+56      if cur_pos != span_start_pos:
+57        literal_part = \
+58            self._template[cur_pos:span_start_pos] \
+59                + escaped_part
+60
+61      cur_pos = span_end_pos
+62
+63      yield literal_part, variable_part
+64
+65    if cur_pos != len(self._template):
+66      yield self._template[cur_pos:], None
+67
+68  def _handle_invalid(self, mo: Match) -> None:
+69    i = mo.start("invalid")
+70    lines = self._template[:i].splitlines(keepends=True)
+71
+72    if not lines:
+73      colno = 1
+74      lineno = 1
+75    else:
+76      colno = i - len(''.join(lines[:-1]))
+77      lineno = len(lines)
+78
+79    raise ValueError(
+80        f"Invalid placeholder in template: line {lineno}, col {colno}")
+
+ + +

A template class that supports variables marked with a ${}-sigil. Its primary purpose is to expose an iterator for the template parts that support composition of FQL queries.

+ +

Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py

+ +
Parameters
+ +
    +
  • template: A string template e.g. "${my_var} { name }"
  • +
+
+ + +
+ +
+ + FaunaTemplate(template: str) + + + +
+ +
20  def __init__(self, template: str):
+21    """The initializer"""
+22    delim = _re.escape(self._delimiter)
+23    pattern = fr"""
+24        {delim}(?:
+25          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+26          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+27          (?P<invalid>)             # Other ill-formed delimiter exprs
+28        ) 
+29        """
+30    self._pattern = _re.compile(pattern, self._flags)
+31    self._template = template
+
+ + +

The initializer

+
+ + +
+
+ +
+ + def + iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]: + + + +
+ +
33  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+34    """A method that returns an iterator over tuples representing template parts. The
+35        first value of the tuple, if not None, is a template literal. The second value of
+36        the tuple, if not None, is a template variable. If both are not None, then the
+37        template literal comes *before* the variable.
+38
+39        :raises ValueError: If there is an invalid template placeholder
+40
+41        :return: An iterator of template parts
+42        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+43        """
+44    match_objects = self._pattern.finditer(self._template)
+45    cur_pos = 0
+46    for mo in match_objects:
+47      if mo.group("invalid") is not None:
+48        self._handle_invalid(mo)
+49
+50      span_start_pos = mo.span()[0]
+51      span_end_pos = mo.span()[1]
+52      escaped_part = mo.group("escaped") or ""
+53      variable_part = mo.group("braced")
+54      literal_part: Optional[str] = None
+55
+56      if cur_pos != span_start_pos:
+57        literal_part = \
+58            self._template[cur_pos:span_start_pos] \
+59                + escaped_part
+60
+61      cur_pos = span_end_pos
+62
+63      yield literal_part, variable_part
+64
+65    if cur_pos != len(self._template):
+66      yield self._template[cur_pos:], None
+
+ + +

A method that returns an iterator over tuples representing template parts. The first value of the tuple, if not None, is a template literal. The second value of the tuple, if not None, is a template variable. If both are not None, then the template literal comes before the variable.

+ +
Raises
+ +
    +
  • ValueError: If there is an invalid template placeholder
  • +
+ +
Returns
+ +
+

An iterator of template parts

+
+
+ + +
+
+
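A small sketch of iterating a template, based on the source listing above (the printed tuples are an expectation, not captured output):

from fauna.query.template import FaunaTemplate

template = FaunaTemplate('let x = ${foo} { name }')
for literal, variable in template.iter():
    # literal is the text preceding the variable (or None); variable is the
    # placeholder name (or None for a trailing literal-only part).
    print(repr(literal), repr(variable))
# expected: 'let x = ' 'foo', then ' { name }' None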
+ + \ No newline at end of file diff --git a/2.3.0/index.html b/2.3.0/index.html new file mode 100644 index 00000000..cd7f994c --- /dev/null +++ b/2.3.0/index.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/2.3.0/search.js b/2.3.0/search.js new file mode 100644 index 00000000..4e699e36 --- /dev/null +++ b/2.3.0/search.js @@ -0,0 +1,46 @@ +window.pdocSearch = (function(){ +/** elasticlunr - http://weixsong.github.io * Copyright (C) 2017 Oliver Nightingale * Copyright (C) 2017 Wei Song * MIT Licensed */!function(){function e(e){if(null===e||"object"!=typeof e)return e;var t=e.constructor();for(var n in e)e.hasOwnProperty(n)&&(t[n]=e[n]);return t}var t=function(e){var n=new t.Index;return n.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),e&&e.call(n,n),n};t.version="0.9.5",lunr=t,t.utils={},t.utils.warn=function(e){return function(t){e.console&&console.warn&&console.warn(t)}}(this),t.utils.toString=function(e){return void 0===e||null===e?"":e.toString()},t.EventEmitter=function(){this.events={}},t.EventEmitter.prototype.addListener=function(){var e=Array.prototype.slice.call(arguments),t=e.pop(),n=e;if("function"!=typeof t)throw new TypeError("last argument must be a function");n.forEach(function(e){this.hasHandler(e)||(this.events[e]=[]),this.events[e].push(t)},this)},t.EventEmitter.prototype.removeListener=function(e,t){if(this.hasHandler(e)){var n=this.events[e].indexOf(t);-1!==n&&(this.events[e].splice(n,1),0==this.events[e].length&&delete this.events[e])}},t.EventEmitter.prototype.emit=function(e){if(this.hasHandler(e)){var t=Array.prototype.slice.call(arguments,1);this.events[e].forEach(function(e){e.apply(void 0,t)},this)}},t.EventEmitter.prototype.hasHandler=function(e){return e in this.events},t.tokenizer=function(e){if(!arguments.length||null===e||void 0===e)return[];if(Array.isArray(e)){var n=e.filter(function(e){return null===e||void 0===e?!1:!0});n=n.map(function(e){return t.utils.toString(e).toLowerCase()});var i=[];return n.forEach(function(e){var n=e.split(t.tokenizer.seperator);i=i.concat(n)},this),i}return e.toString().trim().toLowerCase().split(t.tokenizer.seperator)},t.tokenizer.defaultSeperator=/[\s\-]+/,t.tokenizer.seperator=t.tokenizer.defaultSeperator,t.tokenizer.setSeperator=function(e){null!==e&&void 0!==e&&"object"==typeof e&&(t.tokenizer.seperator=e)},t.tokenizer.resetSeperator=function(){t.tokenizer.seperator=t.tokenizer.defaultSeperator},t.tokenizer.getSeperator=function(){return t.tokenizer.seperator},t.Pipeline=function(){this._queue=[]},t.Pipeline.registeredFunctions={},t.Pipeline.registerFunction=function(e,n){n in t.Pipeline.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+n),e.label=n,t.Pipeline.registeredFunctions[n]=e},t.Pipeline.getRegisteredFunction=function(e){return e in t.Pipeline.registeredFunctions!=!0?null:t.Pipeline.registeredFunctions[e]},t.Pipeline.warnIfFunctionNotRegistered=function(e){var n=e.label&&e.label in this.registeredFunctions;n||t.utils.warn("Function is not registered with pipeline. 
This may cause problems when serialising the index.\n",e)},t.Pipeline.load=function(e){var n=new t.Pipeline;return e.forEach(function(e){var i=t.Pipeline.getRegisteredFunction(e);if(!i)throw new Error("Cannot load un-registered function: "+e);n.add(i)}),n},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(e){t.Pipeline.warnIfFunctionNotRegistered(e),this._queue.push(e)},this)},t.Pipeline.prototype.after=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var i=this._queue.indexOf(e);if(-1===i)throw new Error("Cannot find existingFn");this._queue.splice(i+1,0,n)},t.Pipeline.prototype.before=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var i=this._queue.indexOf(e);if(-1===i)throw new Error("Cannot find existingFn");this._queue.splice(i,0,n)},t.Pipeline.prototype.remove=function(e){var t=this._queue.indexOf(e);-1!==t&&this._queue.splice(t,1)},t.Pipeline.prototype.run=function(e){for(var t=[],n=e.length,i=this._queue.length,o=0;n>o;o++){for(var r=e[o],s=0;i>s&&(r=this._queue[s](r,o,e),void 0!==r&&null!==r);s++);void 0!==r&&null!==r&&t.push(r)}return t},t.Pipeline.prototype.reset=function(){this._queue=[]},t.Pipeline.prototype.get=function(){return this._queue},t.Pipeline.prototype.toJSON=function(){return this._queue.map(function(e){return t.Pipeline.warnIfFunctionNotRegistered(e),e.label})},t.Index=function(){this._fields=[],this._ref="id",this.pipeline=new t.Pipeline,this.documentStore=new t.DocumentStore,this.index={},this.eventEmitter=new t.EventEmitter,this._idfCache={},this.on("add","remove","update",function(){this._idfCache={}}.bind(this))},t.Index.prototype.on=function(){var e=Array.prototype.slice.call(arguments);return this.eventEmitter.addListener.apply(this.eventEmitter,e)},t.Index.prototype.off=function(e,t){return this.eventEmitter.removeListener(e,t)},t.Index.load=function(e){e.version!==t.version&&t.utils.warn("version mismatch: current "+t.version+" importing "+e.version);var n=new this;n._fields=e.fields,n._ref=e.ref,n.documentStore=t.DocumentStore.load(e.documentStore),n.pipeline=t.Pipeline.load(e.pipeline),n.index={};for(var i in e.index)n.index[i]=t.InvertedIndex.load(e.index[i]);return n},t.Index.prototype.addField=function(e){return this._fields.push(e),this.index[e]=new t.InvertedIndex,this},t.Index.prototype.setRef=function(e){return this._ref=e,this},t.Index.prototype.saveDocument=function(e){return this.documentStore=new t.DocumentStore(e),this},t.Index.prototype.addDoc=function(e,n){if(e){var n=void 0===n?!0:n,i=e[this._ref];this.documentStore.addDoc(i,e),this._fields.forEach(function(n){var o=this.pipeline.run(t.tokenizer(e[n]));this.documentStore.addFieldLength(i,n,o.length);var r={};o.forEach(function(e){e in r?r[e]+=1:r[e]=1},this);for(var s in r){var u=r[s];u=Math.sqrt(u),this.index[n].addToken(s,{ref:i,tf:u})}},this),n&&this.eventEmitter.emit("add",e,this)}},t.Index.prototype.removeDocByRef=function(e){if(e&&this.documentStore.isDocStored()!==!1&&this.documentStore.hasDoc(e)){var t=this.documentStore.getDoc(e);this.removeDoc(t,!1)}},t.Index.prototype.removeDoc=function(e,n){if(e){var n=void 0===n?!0:n,i=e[this._ref];this.documentStore.hasDoc(i)&&(this.documentStore.removeDoc(i),this._fields.forEach(function(n){var o=this.pipeline.run(t.tokenizer(e[n]));o.forEach(function(e){this.index[n].removeToken(e,i)},this)},this),n&&this.eventEmitter.emit("remove",e,this))}},t.Index.prototype.updateDoc=function(e,t){var t=void 
0===t?!0:t;this.removeDocByRef(e[this._ref],!1),this.addDoc(e,!1),t&&this.eventEmitter.emit("update",e,this)},t.Index.prototype.idf=function(e,t){var n="@"+t+"/"+e;if(Object.prototype.hasOwnProperty.call(this._idfCache,n))return this._idfCache[n];var i=this.index[t].getDocFreq(e),o=1+Math.log(this.documentStore.length/(i+1));return this._idfCache[n]=o,o},t.Index.prototype.getFields=function(){return this._fields.slice()},t.Index.prototype.search=function(e,n){if(!e)return[];e="string"==typeof e?{any:e}:JSON.parse(JSON.stringify(e));var i=null;null!=n&&(i=JSON.stringify(n));for(var o=new t.Configuration(i,this.getFields()).get(),r={},s=Object.keys(e),u=0;u0&&t.push(e);for(var i in n)"docs"!==i&&"df"!==i&&this.expandToken(e+i,t,n[i]);return t},t.InvertedIndex.prototype.toJSON=function(){return{root:this.root}},t.Configuration=function(e,n){var e=e||"";if(void 0==n||null==n)throw new Error("fields should not be null");this.config={};var i;try{i=JSON.parse(e),this.buildUserConfig(i,n)}catch(o){t.utils.warn("user configuration parse failed, will use default configuration"),this.buildDefaultConfig(n)}},t.Configuration.prototype.buildDefaultConfig=function(e){this.reset(),e.forEach(function(e){this.config[e]={boost:1,bool:"OR",expand:!1}},this)},t.Configuration.prototype.buildUserConfig=function(e,n){var i="OR",o=!1;if(this.reset(),"bool"in e&&(i=e.bool||i),"expand"in e&&(o=e.expand||o),"fields"in e)for(var r in e.fields)if(n.indexOf(r)>-1){var s=e.fields[r],u=o;void 0!=s.expand&&(u=s.expand),this.config[r]={boost:s.boost||0===s.boost?s.boost:1,bool:s.bool||i,expand:u}}else t.utils.warn("field name in user configuration not found in index instance fields");else this.addAllFields2UserConfig(i,o,n)},t.Configuration.prototype.addAllFields2UserConfig=function(e,t,n){n.forEach(function(n){this.config[n]={boost:1,bool:e,expand:t}},this)},t.Configuration.prototype.get=function(){return this.config},t.Configuration.prototype.reset=function(){this.config={}},lunr.SortedSet=function(){this.length=0,this.elements=[]},lunr.SortedSet.load=function(e){var t=new this;return t.elements=e,t.length=e.length,t},lunr.SortedSet.prototype.add=function(){var e,t;for(e=0;e1;){if(r===e)return o;e>r&&(t=o),r>e&&(n=o),i=n-t,o=t+Math.floor(i/2),r=this.elements[o]}return r===e?o:-1},lunr.SortedSet.prototype.locationFor=function(e){for(var t=0,n=this.elements.length,i=n-t,o=t+Math.floor(i/2),r=this.elements[o];i>1;)e>r&&(t=o),r>e&&(n=o),i=n-t,o=t+Math.floor(i/2),r=this.elements[o];return r>e?o:e>r?o+1:void 0},lunr.SortedSet.prototype.intersect=function(e){for(var t=new lunr.SortedSet,n=0,i=0,o=this.length,r=e.length,s=this.elements,u=e.elements;;){if(n>o-1||i>r-1)break;s[n]!==u[i]?s[n]u[i]&&i++:(t.add(s[n]),n++,i++)}return t},lunr.SortedSet.prototype.clone=function(){var e=new lunr.SortedSet;return e.elements=this.toArray(),e.length=e.elements.length,e},lunr.SortedSet.prototype.union=function(e){var t,n,i;this.length>=e.length?(t=this,n=e):(t=e,n=this),i=t.clone();for(var o=0,r=n.toArray();o

\n"}, {"fullname": "fauna.global_http_client", "modulename": "fauna", "qualname": "global_http_client", "kind": "variable", "doc": "

\n", "default_value": "None"}, {"fullname": "fauna.client", "modulename": "fauna.client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.client", "modulename": "fauna.client.client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.client.logger", "modulename": "fauna.client.client", "qualname": "logger", "kind": "variable", "doc": "

\n", "default_value": "<Logger fauna (WARNING)>"}, {"fullname": "fauna.client.client.DefaultHttpConnectTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpConnectTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultHttpReadTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpReadTimeout", "kind": "variable", "doc": "

\n", "annotation": ": Optional[datetime.timedelta]", "default_value": "None"}, {"fullname": "fauna.client.client.DefaultHttpWriteTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpWriteTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultHttpPoolTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpPoolTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultIdleConnectionTimeout", "modulename": "fauna.client.client", "qualname": "DefaultIdleConnectionTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultQueryTimeout", "modulename": "fauna.client.client", "qualname": "DefaultQueryTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultClientBufferTimeout", "modulename": "fauna.client.client", "qualname": "DefaultClientBufferTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultMaxConnections", "modulename": "fauna.client.client", "qualname": "DefaultMaxConnections", "kind": "variable", "doc": "

\n", "default_value": "20"}, {"fullname": "fauna.client.client.DefaultMaxIdleConnections", "modulename": "fauna.client.client", "qualname": "DefaultMaxIdleConnections", "kind": "variable", "doc": "

\n", "default_value": "20"}, {"fullname": "fauna.client.client.QueryOptions", "modulename": "fauna.client.client", "qualname": "QueryOptions", "kind": "class", "doc": "

A dataclass representing options available for a query.

\n\n
    \n
  • linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • \n
  • max_contention_retries - The max number of times to retry the query if contention is encountered.
  • \n
  • query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
  • \n
  • query_tags - Tags to associate with the query. See logging
  • \n
  • traceparent - A traceparent to associate with the query. See logging Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
  • \n
  • typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the \"typechecked\" flag on the database configuration.
  • \n
  • additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
  • \n
\n"}, {"fullname": "fauna.client.client.QueryOptions.__init__", "modulename": "fauna.client.client", "qualname": "QueryOptions.__init__", "kind": "function", "doc": "

\n", "signature": "(\tlinearized: Optional[bool] = None,\tmax_contention_retries: Optional[int] = None,\tquery_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tquery_tags: Optional[Mapping[str, str]] = None,\ttraceparent: Optional[str] = None,\ttypecheck: Optional[bool] = None,\tadditional_headers: Optional[Dict[str, str]] = None)"}, {"fullname": "fauna.client.client.QueryOptions.linearized", "modulename": "fauna.client.client", "qualname": "QueryOptions.linearized", "kind": "variable", "doc": "

\n", "annotation": ": Optional[bool]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.max_contention_retries", "modulename": "fauna.client.client", "qualname": "QueryOptions.max_contention_retries", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.query_timeout", "modulename": "fauna.client.client", "qualname": "QueryOptions.query_timeout", "kind": "variable", "doc": "

\n", "annotation": ": Optional[datetime.timedelta]", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.QueryOptions.query_tags", "modulename": "fauna.client.client", "qualname": "QueryOptions.query_tags", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Mapping[str, str]]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.traceparent", "modulename": "fauna.client.client", "qualname": "QueryOptions.traceparent", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.typecheck", "modulename": "fauna.client.client", "qualname": "QueryOptions.typecheck", "kind": "variable", "doc": "

\n", "annotation": ": Optional[bool]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.additional_headers", "modulename": "fauna.client.client", "qualname": "QueryOptions.additional_headers", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Dict[str, str]]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions", "modulename": "fauna.client.client", "qualname": "StreamOptions", "kind": "class", "doc": "

A dataclass representing options available for a stream.

\n\n
    \n
  • max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
  • \n
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • \n
  • start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after\nthe timestamp.
  • \n
  • cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  • \n
  • status_events - Indicates if stream should include status events. Status events are periodic events that\nupdate the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics\nabout the cost of maintaining the stream other than the cost of the received events.
  • \n
\n"}, {"fullname": "fauna.client.client.StreamOptions.__init__", "modulename": "fauna.client.client", "qualname": "StreamOptions.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmax_attempts: Optional[int] = None,\tmax_backoff: Optional[int] = None,\tstart_ts: Optional[int] = None,\tcursor: Optional[str] = None,\tstatus_events: bool = False)"}, {"fullname": "fauna.client.client.StreamOptions.max_attempts", "modulename": "fauna.client.client", "qualname": "StreamOptions.max_attempts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.max_backoff", "modulename": "fauna.client.client", "qualname": "StreamOptions.max_backoff", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.start_ts", "modulename": "fauna.client.client", "qualname": "StreamOptions.start_ts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.cursor", "modulename": "fauna.client.client", "qualname": "StreamOptions.cursor", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.status_events", "modulename": "fauna.client.client", "qualname": "StreamOptions.status_events", "kind": "variable", "doc": "

\n", "annotation": ": bool", "default_value": "False"}, {"fullname": "fauna.client.client.FeedOptions", "modulename": "fauna.client.client", "qualname": "FeedOptions", "kind": "class", "doc": "

A dataclass representing options available for an Event Feed.

\n\n
    \n
  • max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
  • \n
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • \n
  • query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
  • \n
  • start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after\nthe timestamp.
  • \n
  • cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  • \n
  • page_size - The desired number of events per page.
  • \n
\n"}, {"fullname": "fauna.client.client.FeedOptions.__init__", "modulename": "fauna.client.client", "qualname": "FeedOptions.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmax_attempts: Optional[int] = None,\tmax_backoff: Optional[int] = None,\tquery_timeout: Optional[datetime.timedelta] = None,\tpage_size: Optional[int] = None,\tstart_ts: Optional[int] = None,\tcursor: Optional[str] = None)"}, {"fullname": "fauna.client.client.FeedOptions.max_attempts", "modulename": "fauna.client.client", "qualname": "FeedOptions.max_attempts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.FeedOptions.max_backoff", "modulename": "fauna.client.client", "qualname": "FeedOptions.max_backoff", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.FeedOptions.query_timeout", "modulename": "fauna.client.client", "qualname": "FeedOptions.query_timeout", "kind": "variable", "doc": "

\n", "annotation": ": Optional[datetime.timedelta]", "default_value": "None"}, {"fullname": "fauna.client.client.FeedOptions.page_size", "modulename": "fauna.client.client", "qualname": "FeedOptions.page_size", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.FeedOptions.start_ts", "modulename": "fauna.client.client", "qualname": "FeedOptions.start_ts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.FeedOptions.cursor", "modulename": "fauna.client.client", "qualname": "FeedOptions.cursor", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.client.client.Client", "modulename": "fauna.client.client", "qualname": "Client", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.client.Client.__init__", "modulename": "fauna.client.client", "qualname": "Client.__init__", "kind": "function", "doc": "

Initializes a Client.

\n\n
Parameters
\n\n
    \n
  • endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the FAUNA_ENDPOINT env variable.
  • \n
  • secret: The Fauna Secret to use. Defaults to empty, or the FAUNA_SECRET env variable.
  • \n
  • http_client: An HTTPClient implementation. Defaults to a global HTTPXClient.
  • \n
  • **query_tags: Tags to associate with the query. See logging
  • \n
  • linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • \n
  • max_contention_retries: The max number of times to retry the query if contention is encountered.
  • \n
  • typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the \"typechecked\" flag on the database configuration.
  • \n
  • additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
  • \n
  • query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is DefaultQueryTimeout.
  • \n
  • client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is DefaultClientBufferTimeout, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
  • \n
  • http_read_timeout: Set HTTP Read timeout, default is DefaultHttpReadTimeout.
  • \n
  • http_write_timeout: Set HTTP Write timeout, default is DefaultHttpWriteTimeout.
  • \n
  • http_connect_timeout: Set HTTP Connect timeout, default is DefaultHttpConnectTimeout.
  • \n
  • http_pool_timeout: Set HTTP Pool timeout, default is DefaultHttpPoolTimeout.
  • \n
  • http_idle_timeout: Set HTTP Idle timeout, default is DefaultIdleConnectionTimeout.
  • \n
  • max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
  • \n
  • max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
  • \n
\n", "signature": "(\tendpoint: Optional[str] = None,\tsecret: Optional[str] = None,\thttp_client: Optional[fauna.http.http_client.HTTPClient] = None,\tquery_tags: Optional[Mapping[str, str]] = None,\tlinearized: Optional[bool] = None,\tmax_contention_retries: Optional[int] = None,\ttypecheck: Optional[bool] = None,\tadditional_headers: Optional[Dict[str, str]] = None,\tquery_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tclient_buffer_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_read_timeout: Optional[datetime.timedelta] = None,\thttp_write_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_connect_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_pool_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_idle_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tmax_attempts: int = 3,\tmax_backoff: int = 20)"}, {"fullname": "fauna.client.client.Client.close", "modulename": "fauna.client.client", "qualname": "Client.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.set_last_txn_ts", "modulename": "fauna.client.client", "qualname": "Client.set_last_txn_ts", "kind": "function", "doc": "

Set the last timestamp seen by this client.\nThis has no effect if it is earlier than the stored timestamp.

\n\n

WARNING: This should be used only when coordinating timestamps across\nmultiple clients. Moving the timestamp arbitrarily forward into\nthe future will cause transactions to stall.

\n\n
Parameters
\n\n
    \n
  • txn_ts: the new transaction time.
  • \n
\n", "signature": "(self, txn_ts: int):", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.get_last_txn_ts", "modulename": "fauna.client.client", "qualname": "Client.get_last_txn_ts", "kind": "function", "doc": "

Get the last timestamp seen by this client.

\n\n
Returns
\n", "signature": "(self) -> Optional[int]:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.get_query_timeout", "modulename": "fauna.client.client", "qualname": "Client.get_query_timeout", "kind": "function", "doc": "

Get the query timeout for all queries.

\n", "signature": "(self) -> Optional[datetime.timedelta]:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.paginate", "modulename": "fauna.client.client", "qualname": "Client.paginate", "kind": "function", "doc": "

Run a query on Fauna and return an iterator of results. If the query\nreturns a Page, the iterator will fetch additional Pages until the\nafter token is null. Each call for a page will be retried with exponential\nbackoff up to the max_attempts set in the client's retry policy in the\nevent of a 429 or 502.

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Returns
\n\n
\n

a QueryResponse

\n
\n\n
Raises
\n\n
    \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None) -> fauna.client.client.QueryIterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.query", "modulename": "fauna.client.client", "qualname": "Client.query", "kind": "function", "doc": "

Run a query on Fauna. A query will be retried max_attempts times with exponential backoff\nup to the max_backoff in the event of a 429.

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Returns
\n\n
\n

a QueryResponse

\n
\n\n
Raises
\n\n
    \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None) -> fauna.encoding.wire_protocol.QuerySuccess:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.stream", "modulename": "fauna.client.client", "qualname": "Client.stream", "kind": "function", "doc": "

Opens a Stream in Fauna and returns an iterator that consumes Fauna events.

\n\n
Parameters
\n\n
    \n
  • fql: An EventSource or a Query that returns an EventSource.
  • \n
  • opts: (Optional) Stream Options.
  • \n
\n\n
Returns
\n\n
\n

a StreamIterator

\n
\n\n
Raises
\n\n
    \n
  • ClientError: Invalid options provided
  • \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: Union[fauna.query.models.EventSource, fauna.query.query_builder.Query],\topts: fauna.client.client.StreamOptions = StreamOptions(max_attempts=None, max_backoff=None, start_ts=None, cursor=None, status_events=False)) -> fauna.client.client.StreamIterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.feed", "modulename": "fauna.client.client", "qualname": "Client.feed", "kind": "function", "doc": "

Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.

\n\n
Parameters
\n\n
    \n
  • source: An EventSource or a Query that returns an EventSource.
  • \n
  • opts: (Optional) Event Feed options.
  • \n
\n\n
Returns
\n\n
\n

a FeedIterator

\n
\n\n
Raises
\n\n
    \n
  • ClientError: Invalid options provided
  • \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tsource: Union[fauna.query.models.EventSource, fauna.query.query_builder.Query],\topts: fauna.client.client.FeedOptions = FeedOptions(max_attempts=None, max_backoff=None, query_timeout=None, page_size=None, start_ts=None, cursor=None)) -> fauna.client.client.FeedIterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.StreamIterator", "modulename": "fauna.client.client", "qualname": "StreamIterator", "kind": "class", "doc": "

A class that mixes a ContextManager and an Iterator so we can detect retryable errors.

\n"}, {"fullname": "fauna.client.client.StreamIterator.__init__", "modulename": "fauna.client.client", "qualname": "StreamIterator.__init__", "kind": "function", "doc": "

\n", "signature": "(\thttp_client: fauna.http.http_client.HTTPClient,\theaders: Dict[str, str],\tendpoint: str,\tmax_attempts: int,\tmax_backoff: int,\topts: fauna.client.client.StreamOptions,\tsource: fauna.query.models.EventSource)"}, {"fullname": "fauna.client.client.StreamIterator.last_ts", "modulename": "fauna.client.client", "qualname": "StreamIterator.last_ts", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.StreamIterator.last_cursor", "modulename": "fauna.client.client", "qualname": "StreamIterator.last_cursor", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.StreamIterator.close", "modulename": "fauna.client.client", "qualname": "StreamIterator.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.client.client.FeedPage", "modulename": "fauna.client.client", "qualname": "FeedPage", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.client.FeedPage.__init__", "modulename": "fauna.client.client", "qualname": "FeedPage.__init__", "kind": "function", "doc": "

\n", "signature": "(\tevents: List[Any],\tcursor: str,\tstats: fauna.encoding.wire_protocol.QueryStats)"}, {"fullname": "fauna.client.client.FeedPage.cursor", "modulename": "fauna.client.client", "qualname": "FeedPage.cursor", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.FeedPage.stats", "modulename": "fauna.client.client", "qualname": "FeedPage.stats", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.FeedIterator", "modulename": "fauna.client.client", "qualname": "FeedIterator", "kind": "class", "doc": "

A class to provide an iterator on top of Event Feed pages.

\n"}, {"fullname": "fauna.client.client.FeedIterator.__init__", "modulename": "fauna.client.client", "qualname": "FeedIterator.__init__", "kind": "function", "doc": "

\n", "signature": "(\thttp: fauna.http.http_client.HTTPClient,\theaders: Dict[str, str],\tendpoint: str,\tmax_attempts: int,\tmax_backoff: int,\topts: fauna.client.client.FeedOptions,\tsource: fauna.query.models.EventSource)"}, {"fullname": "fauna.client.client.FeedIterator.flatten", "modulename": "fauna.client.client", "qualname": "FeedIterator.flatten", "kind": "function", "doc": "

A generator that yields events instead of pages of events.

\n", "signature": "(self) -> Iterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.QueryIterator", "modulename": "fauna.client.client", "qualname": "QueryIterator", "kind": "class", "doc": "

A class to provide an iterator on top of Fauna queries.

\n"}, {"fullname": "fauna.client.client.QueryIterator.__init__", "modulename": "fauna.client.client", "qualname": "QueryIterator.__init__", "kind": "function", "doc": "

Initializes the QueryIterator

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Raises
\n\n
    \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tclient: fauna.client.client.Client,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None)"}, {"fullname": "fauna.client.client.QueryIterator.client", "modulename": "fauna.client.client", "qualname": "QueryIterator.client", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.fql", "modulename": "fauna.client.client", "qualname": "QueryIterator.fql", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.opts", "modulename": "fauna.client.client", "qualname": "QueryIterator.opts", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.iter", "modulename": "fauna.client.client", "qualname": "QueryIterator.iter", "kind": "function", "doc": "

A generator function that immediately fetches and yields the results of\nthe stored query. Yields additional pages on subsequent iterations if\nthey exist.

\n", "signature": "(self) -> Iterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.QueryIterator.flatten", "modulename": "fauna.client.client", "qualname": "QueryIterator.flatten", "kind": "function", "doc": "

A generator function that immediately fetches and yields the results of\nthe stored query. Yields each item individually, rather than a whole\nPage at a time. Fetches additional pages as required if they exist.

\n", "signature": "(self) -> Iterator:", "funcdef": "def"}, {"fullname": "fauna.client.endpoints", "modulename": "fauna.client.endpoints", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.endpoints.Endpoints", "modulename": "fauna.client.endpoints", "qualname": "Endpoints", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.endpoints.Endpoints.Default", "modulename": "fauna.client.endpoints", "qualname": "Endpoints.Default", "kind": "variable", "doc": "

\n", "default_value": "'https://db.fauna.com'"}, {"fullname": "fauna.client.endpoints.Endpoints.Local", "modulename": "fauna.client.endpoints", "qualname": "Endpoints.Local", "kind": "variable", "doc": "

\n", "default_value": "'http://localhost:8443'"}, {"fullname": "fauna.client.headers", "modulename": "fauna.client.headers", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.headers.Header", "modulename": "fauna.client.headers", "qualname": "Header", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.headers.Header.LastTxnTs", "modulename": "fauna.client.headers", "qualname": "Header.LastTxnTs", "kind": "variable", "doc": "

\n", "default_value": "'X-Last-Txn-Ts'"}, {"fullname": "fauna.client.headers.Header.Linearized", "modulename": "fauna.client.headers", "qualname": "Header.Linearized", "kind": "variable", "doc": "

\n", "default_value": "'X-Linearized'"}, {"fullname": "fauna.client.headers.Header.MaxContentionRetries", "modulename": "fauna.client.headers", "qualname": "Header.MaxContentionRetries", "kind": "variable", "doc": "

\n", "default_value": "'X-Max-Contention-Retries'"}, {"fullname": "fauna.client.headers.Header.QueryTimeoutMs", "modulename": "fauna.client.headers", "qualname": "Header.QueryTimeoutMs", "kind": "variable", "doc": "

\n", "default_value": "'X-Query-Timeout-Ms'"}, {"fullname": "fauna.client.headers.Header.Typecheck", "modulename": "fauna.client.headers", "qualname": "Header.Typecheck", "kind": "variable", "doc": "

\n", "default_value": "'X-Typecheck'"}, {"fullname": "fauna.client.headers.Header.Tags", "modulename": "fauna.client.headers", "qualname": "Header.Tags", "kind": "variable", "doc": "

\n", "default_value": "'X-Query-Tags'"}, {"fullname": "fauna.client.headers.Header.Traceparent", "modulename": "fauna.client.headers", "qualname": "Header.Traceparent", "kind": "variable", "doc": "

\n", "default_value": "'Traceparent'"}, {"fullname": "fauna.client.retryable", "modulename": "fauna.client.retryable", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.retryable.RetryStrategy", "modulename": "fauna.client.retryable", "qualname": "RetryStrategy", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.retryable.RetryStrategy.wait", "modulename": "fauna.client.retryable", "qualname": "RetryStrategy.wait", "kind": "function", "doc": "

\n", "signature": "(self) -> float:", "funcdef": "def"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy", "kind": "class", "doc": "

\n", "bases": "RetryStrategy"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy.__init__", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy.__init__", "kind": "function", "doc": "

\n", "signature": "(max_backoff: int)"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy.wait", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy.wait", "kind": "function", "doc": "

Returns the number of seconds to wait for the next call.

\n", "signature": "(self) -> float:", "funcdef": "def"}, {"fullname": "fauna.client.retryable.RetryableResponse", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse", "kind": "class", "doc": "

\n", "bases": "typing.Generic[~T]"}, {"fullname": "fauna.client.retryable.RetryableResponse.__init__", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(attempts: int, response: ~T)"}, {"fullname": "fauna.client.retryable.RetryableResponse.attempts", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.attempts", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.client.retryable.RetryableResponse.response", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.response", "kind": "variable", "doc": "

\n", "annotation": ": ~T"}, {"fullname": "fauna.client.retryable.Retryable", "modulename": "fauna.client.retryable", "qualname": "Retryable", "kind": "class", "doc": "

Retryable is a wrapper class that acts on a Callable that returns a T type.

\n", "bases": "typing.Generic[~T]"}, {"fullname": "fauna.client.retryable.Retryable.__init__", "modulename": "fauna.client.retryable", "qualname": "Retryable.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmax_attempts: int,\tmax_backoff: int,\tfunc: Callable[..., ~T],\t*args,\t**kwargs)"}, {"fullname": "fauna.client.retryable.Retryable.run", "modulename": "fauna.client.retryable", "qualname": "Retryable.run", "kind": "function", "doc": "

Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates\nthe thrown exception if max_attempts is reached or if a non-retryable exception is thrown.

\n\n

Returns the number of attempts and the response

\n", "signature": "(self) -> fauna.client.retryable.RetryableResponse[~T]:", "funcdef": "def"}, {"fullname": "fauna.client.utils", "modulename": "fauna.client.utils", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.utils.LastTxnTs", "modulename": "fauna.client.utils", "qualname": "LastTxnTs", "kind": "class", "doc": "

Wraps tracking the last transaction time supplied from the database.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.__init__", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.__init__", "kind": "function", "doc": "

\n", "signature": "(time: Optional[int] = None)"}, {"fullname": "fauna.client.utils.LastTxnTs.time", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.time", "kind": "variable", "doc": "

Produces the last transaction time, or, None if not yet updated.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.request_header", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.request_header", "kind": "variable", "doc": "

Produces a dictionary with a non-zero X-Last-Seen-Txn header; or,\nif one has not yet been set, the empty header dictionary.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.update_txn_time", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.update_txn_time", "kind": "function", "doc": "

Updates the internal transaction time.\nIn order to maintain a monotonically-increasing value, newTxnTime\nis discarded if it is behind the current timestamp.

\n", "signature": "(self, new_txn_time: int):", "funcdef": "def"}, {"fullname": "fauna.encoding", "modulename": "fauna.encoding", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.decoder", "modulename": "fauna.encoding.decoder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.decoder.FaunaDecoder", "modulename": "fauna.encoding.decoder", "qualname": "FaunaDecoder", "kind": "class", "doc": "

Supports the following types:

\n\n

+--------------------+---------------+\n| Python | Fauna |\n+====================+===============+\n| dict | object |\n+--------------------+---------------+\n| list, tuple | array |\n+--------------------+---------------+\n| str | string |\n+--------------------+---------------+\n| int | @int |\n+--------------------+---------------+\n| int | @long |\n+--------------------+---------------+\n| float | @double |\n+--------------------+---------------+\n| datetime.datetime | @time |\n+--------------------+---------------+\n| datetime.date | @date |\n+--------------------+---------------+\n| True | true |\n+--------------------+---------------+\n| False | false |\n+--------------------+---------------+\n| None | null |\n+--------------------+---------------+\n| bytearray | @bytes |\n+--------------------+---------------+\n| *DocumentReference | @ref |\n+--------------------+---------------+\n| *Document | @doc |\n+--------------------+---------------+\n| Module | @mod |\n+--------------------+---------------+\n| Page | @set |\n+--------------------+---------------+\n| EventSource | @stream |\n+--------------------+---------------+

\n"}, {"fullname": "fauna.encoding.decoder.FaunaDecoder.decode", "modulename": "fauna.encoding.decoder", "qualname": "FaunaDecoder.decode", "kind": "function", "doc": "

Decodes supported objects from the tagged typed into untagged.

\n\n

Examples:\n - { \"@int\": \"100\" } decodes to 100 of type int\n - { \"@double\": \"100\" } decodes to 100.0 of type float\n - { \"@long\": \"100\" } decodes to 100 of type int\n - { \"@time\": \"...\" } decodes to a datetime\n - { \"@date\": \"...\" } decodes to a date\n - { \"@doc\": ... } decodes to a Document or NamedDocument\n - { \"@ref\": ... } decodes to a DocumentReference or NamedDocumentReference\n - { \"@mod\": ... } decodes to a Module\n - { \"@set\": ... } decodes to a Page\n - { \"@stream\": ... } decodes to an EventSource\n - { \"@bytes\": ... } decodes to a bytearray

\n\n
Parameters
\n\n
    \n
  • obj: the object to decode
  • \n
\n", "signature": "(obj: Any):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder", "modulename": "fauna.encoding.encoder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder", "kind": "class", "doc": "

Supports the following types:

\n\n

+-------------------------------+---------------+\n| Python | Fauna Tags |\n+===============================+===============+\n| dict | @object |\n+-------------------------------+---------------+\n| list, tuple | array |\n+-------------------------------+---------------+\n| str | string |\n+-------------------------------+---------------+\n| int 32-bit signed | @int |\n+-------------------------------+---------------+\n| int 64-bit signed | @long |\n+-------------------------------+---------------+\n| float | @double |\n+-------------------------------+---------------+\n| datetime.datetime | @time |\n+-------------------------------+---------------+\n| datetime.date | @date |\n+-------------------------------+---------------+\n| True | True |\n+-------------------------------+---------------+\n| False | False |\n+-------------------------------+---------------+\n| None | None |\n+-------------------------------+---------------+\n| bytes / bytearray | @bytes |\n+-------------------------------+---------------+\n| *Document | @ref |\n+-------------------------------+---------------+\n| *DocumentReference | @ref |\n+-------------------------------+---------------+\n| Module | @mod |\n+-------------------------------+---------------+\n| Query | fql |\n+-------------------------------+---------------+\n| ValueFragment | value |\n+-------------------------------+---------------+\n| TemplateFragment | string |\n+-------------------------------+---------------+\n| EventSource | string |\n+-------------------------------+---------------+

\n"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.encode", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.encode", "kind": "function", "doc": "

Encodes supported objects into the tagged format.

\n\n

Examples:\n - Up to 32-bit ints encode to { \"@int\": \"...\" }\n - Up to 64-bit ints encode to { \"@long\": \"...\" }\n - Floats encode to { \"@double\": \"...\" }\n - datetime encodes to { \"@time\": \"...\" }\n - date encodes to { \"@date\": \"...\" }\n - DocumentReference encodes to { \"@ref\": \"...\" }\n - Module encodes to { \"@mod\": \"...\" }\n - Query encodes to { \"fql\": [...] }\n - ValueFragment encodes to { \"value\": ... }\n - LiteralFragment encodes to a string\n - EventSource encodes to a string

\n\n
Raises
\n\n
    \n
  • ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
  • \n
\n\n
Parameters
\n\n
    \n
  • obj: the object to encode
  • \n
\n", "signature": "(obj: Any) -> Any:", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_int", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_int", "kind": "function", "doc": "

\n", "signature": "(obj: int):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_bool", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_bool", "kind": "function", "doc": "

\n", "signature": "(obj: bool):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_float", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_float", "kind": "function", "doc": "

\n", "signature": "(obj: float):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_str", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_str", "kind": "function", "doc": "

\n", "signature": "(obj: str):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_datetime", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_datetime", "kind": "function", "doc": "

\n", "signature": "(obj: datetime.datetime):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_date", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_date", "kind": "function", "doc": "

\n", "signature": "(obj: datetime.date):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_bytes", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_bytes", "kind": "function", "doc": "

\n", "signature": "(obj: Union[bytearray, bytes]):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_doc_ref", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_doc_ref", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.DocumentReference):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_named_doc_ref", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_named_doc_ref", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.NamedDocumentReference):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_mod", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_mod", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.Module):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_dict", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_dict", "kind": "function", "doc": "

\n", "signature": "(obj: Any):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_none", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_none", "kind": "function", "doc": "

\n", "signature": "():", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_fragment", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_fragment", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.query_builder.Fragment):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_query_interpolation_builder", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_query_interpolation_builder", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.query_builder.Query):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_streamtoken", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_streamtoken", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.EventSource):", "funcdef": "def"}, {"fullname": "fauna.encoding.wire_protocol", "modulename": "fauna.encoding.wire_protocol", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats", "kind": "class", "doc": "

Query stats

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.__init__", "kind": "function", "doc": "

\n", "signature": "(stats: Mapping[str, Any])"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.compute_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.compute_ops", "kind": "variable", "doc": "

The amount of Transactional Compute Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.read_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.read_ops", "kind": "variable", "doc": "

The amount of Transactional Read Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.write_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.write_ops", "kind": "variable", "doc": "

The amount of Transactional Write Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.query_time_ms", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.query_time_ms", "kind": "variable", "doc": "

The query run time in milliseconds.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.storage_bytes_read", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.storage_bytes_read", "kind": "variable", "doc": "

The amount of data read from storage, in bytes.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.storage_bytes_write", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.storage_bytes_write", "kind": "variable", "doc": "

The amount of data written to storage, in bytes.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.contention_retries", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.contention_retries", "kind": "variable", "doc": "

The number of times the transaction was retried due to write contention.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.attempts", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.attempts", "kind": "variable", "doc": "

The number of attempts made by the client to run the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.__init__", "kind": "function", "doc": "

\n", "signature": "(\tquery_tags: Optional[Mapping[str, str]] = None,\tstats: Optional[fauna.encoding.wire_protocol.QueryStats] = None,\tsummary: Optional[str] = None,\ttxn_ts: Optional[int] = None,\tschema_version: Optional[int] = None)"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.query_tags", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.query_tags", "kind": "variable", "doc": "

The tags associated with the query.

\n", "annotation": ": Mapping[str, Any]"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.summary", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.summary", "kind": "variable", "doc": "

A comprehensive, human-readable summary of any errors, warnings, and/or logs returned from the query.

\n", "annotation": ": str"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.stats", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.stats", "kind": "variable", "doc": "

Query stats associated with the query.

\n", "annotation": ": fauna.encoding.wire_protocol.QueryStats"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.txn_ts", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.txn_ts", "kind": "variable", "doc": "

The last transaction timestamp of the query, as microseconds since the Unix epoch.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.schema_version", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.schema_version", "kind": "variable", "doc": "

The schema version that was used for the query execution.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess", "kind": "class", "doc": "

The result of the query.

\n", "bases": "QueryInfo"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.__init__", "kind": "function", "doc": "

\n", "signature": "(\tdata: Any,\tquery_tags: Optional[Mapping[str, str]],\tstatic_type: Optional[str],\tstats: Optional[fauna.encoding.wire_protocol.QueryStats],\tsummary: Optional[str],\ttraceparent: Optional[str],\ttxn_ts: Optional[int],\tschema_version: Optional[int])"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.data", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.data", "kind": "variable", "doc": "

The data returned by the query. This is the result of the FQL query.

\n", "annotation": ": Any"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.static_type", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.static_type", "kind": "variable", "doc": "

The query's inferred static result type, if the query was typechecked.

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.traceparent", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.traceparent", "kind": "variable", "doc": "

The traceparent for the query.

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmessage: str,\tname: Optional[str] = None,\tpaths: Optional[List[Any]] = None)"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.message", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.name", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.name", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.paths", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.paths", "kind": "variable", "doc": "

\n", "annotation": ": Optional[List[Any]]", "default_value": "None"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags.encode", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags.encode", "kind": "function", "doc": "

\n", "signature": "(tags: Mapping[str, str]) -> str:", "funcdef": "def"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags.decode", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags.decode", "kind": "function", "doc": "

\n", "signature": "(tag_str: str) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.errors", "modulename": "fauna.errors", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.errors.errors", "modulename": "fauna.errors.errors", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.errors.errors.FaunaException", "modulename": "fauna.errors.errors", "qualname": "FaunaException", "kind": "class", "doc": "

Base class for Fauna exceptions.

\n", "bases": "builtins.Exception"}, {"fullname": "fauna.errors.errors.RetryableFaunaException", "modulename": "fauna.errors.errors", "qualname": "RetryableFaunaException", "kind": "class", "doc": "

Base class for retryable Fauna exceptions.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ClientError", "modulename": "fauna.errors.errors", "qualname": "ClientError", "kind": "class", "doc": "

An error representing a failure internal to the client itself.\nThis indicates Fauna was never called - the client failed internally\nprior to sending the request.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.NetworkError", "modulename": "fauna.errors.errors", "qualname": "NetworkError", "kind": "class", "doc": "

An error representing a failure due to the network.\nThis indicates Fauna was never reached.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ProtocolError", "modulename": "fauna.errors.errors", "qualname": "ProtocolError", "kind": "class", "doc": "

An error representing an HTTP failure - but one not directly emitted by Fauna.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ProtocolError.__init__", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.__init__", "kind": "function", "doc": "

\n", "signature": "(status_code: int, message: str)"}, {"fullname": "fauna.errors.errors.ProtocolError.status_code", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.errors.errors.ProtocolError.message", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError", "modulename": "fauna.errors.errors", "qualname": "FaunaError", "kind": "class", "doc": "

Base class for Fauna errors.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.FaunaError.__init__", "modulename": "fauna.errors.errors", "qualname": "FaunaError.__init__", "kind": "function", "doc": "

\n", "signature": "(\tstatus_code: int,\tcode: str,\tmessage: str,\tabort: Optional[Any] = None,\tconstraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None)"}, {"fullname": "fauna.errors.errors.FaunaError.status_code", "modulename": "fauna.errors.errors", "qualname": "FaunaError.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.errors.errors.FaunaError.code", "modulename": "fauna.errors.errors", "qualname": "FaunaError.code", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError.message", "modulename": "fauna.errors.errors", "qualname": "FaunaError.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError.abort", "modulename": "fauna.errors.errors", "qualname": "FaunaError.abort", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Any]"}, {"fullname": "fauna.errors.errors.FaunaError.constraint_failures", "modulename": "fauna.errors.errors", "qualname": "FaunaError.constraint_failures", "kind": "variable", "doc": "

\n", "annotation": ": Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]]"}, {"fullname": "fauna.errors.errors.FaunaError.parse_error_and_throw", "modulename": "fauna.errors.errors", "qualname": "FaunaError.parse_error_and_throw", "kind": "function", "doc": "

\n", "signature": "(body: Any, status_code: int):", "funcdef": "def"}, {"fullname": "fauna.errors.errors.ServiceError", "modulename": "fauna.errors.errors", "qualname": "ServiceError", "kind": "class", "doc": "

An error representing a query failure returned by Fauna.

\n", "bases": "FaunaError, fauna.encoding.wire_protocol.QueryInfo"}, {"fullname": "fauna.errors.errors.ServiceError.__init__", "modulename": "fauna.errors.errors", "qualname": "ServiceError.__init__", "kind": "function", "doc": "

\n", "signature": "(\tstatus_code: int,\tcode: str,\tmessage: str,\tsummary: Optional[str] = None,\tabort: Optional[Any] = None,\tconstraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None,\tquery_tags: Optional[Mapping[str, str]] = None,\tstats: Optional[fauna.encoding.wire_protocol.QueryStats] = None,\ttxn_ts: Optional[int] = None,\tschema_version: Optional[int] = None)"}, {"fullname": "fauna.errors.errors.AbortError", "modulename": "fauna.errors.errors", "qualname": "AbortError", "kind": "class", "doc": "

An error representing a query failure returned by Fauna.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.InvalidRequestError", "modulename": "fauna.errors.errors", "qualname": "InvalidRequestError", "kind": "class", "doc": "

An error representing a query failure returned by Fauna.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.QueryCheckError", "modulename": "fauna.errors.errors", "qualname": "QueryCheckError", "kind": "class", "doc": "

An error due to a \"compile-time\" check of the query failing.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ContendedTransactionError", "modulename": "fauna.errors.errors", "qualname": "ContendedTransactionError", "kind": "class", "doc": "

Transaction is aborted due to concurrent modification.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.QueryRuntimeError", "modulename": "fauna.errors.errors", "qualname": "QueryRuntimeError", "kind": "class", "doc": "

An error response that is the result of the query failing during execution.\nQueryRuntimeErrors occur when a bug in your query causes an invalid execution\nto be requested.\nThe 'code' field will vary based on the specific error cause.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.AuthenticationError", "modulename": "fauna.errors.errors", "qualname": "AuthenticationError", "kind": "class", "doc": "

AuthenticationError indicates invalid credentials were used.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.AuthorizationError", "modulename": "fauna.errors.errors", "qualname": "AuthorizationError", "kind": "class", "doc": "

AuthorizationError indicates the credentials used do not have\npermission to perform the requested action.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ThrottlingError", "modulename": "fauna.errors.errors", "qualname": "ThrottlingError", "kind": "class", "doc": "

ThrottlingError indicates some capacity limit was exceeded\nand thus the request could not be served.

\n", "bases": "ServiceError, RetryableFaunaException"}, {"fullname": "fauna.errors.errors.QueryTimeoutError", "modulename": "fauna.errors.errors", "qualname": "QueryTimeoutError", "kind": "class", "doc": "

A failure due to the query timeout being exceeded because the timeout\nwas set lower than the query's expected processing time.\nThis response is distinguished from a ServiceTimeoutError\nin that a QueryTimeoutError shows Fauna behaving in an expected manner.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ServiceInternalError", "modulename": "fauna.errors.errors", "qualname": "ServiceInternalError", "kind": "class", "doc": "

ServiceInternalError indicates Fauna failed unexpectedly.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ServiceTimeoutError", "modulename": "fauna.errors.errors", "qualname": "ServiceTimeoutError", "kind": "class", "doc": "

ServiceTimeoutError indicates Fauna was not available to service\nthe request before the timeout was reached.

\n", "bases": "ServiceError"}, {"fullname": "fauna.http", "modulename": "fauna.http", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.http_client", "modulename": "fauna.http.http_client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.http_client.ErrorResponse", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.http.http_client.ErrorResponse.__init__", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(status_code: int, error_code: str, error_message: str, summary: str)"}, {"fullname": "fauna.http.http_client.ErrorResponse.status_code", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.http.http_client.ErrorResponse.error_code", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.error_code", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.ErrorResponse.error_message", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.error_message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.ErrorResponse.summary", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.summary", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.HTTPResponse", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.http.http_client.HTTPResponse.headers", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.headers", "kind": "function", "doc": "

\n", "signature": "(self) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.status_code", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.status_code", "kind": "function", "doc": "

\n", "signature": "(self) -> int:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.json", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.json", "kind": "function", "doc": "

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.text", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.text", "kind": "function", "doc": "

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.read", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.read", "kind": "function", "doc": "

\n", "signature": "(self) -> bytes:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.iter_bytes", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.iter_bytes", "kind": "function", "doc": "

\n", "signature": "(self) -> Iterator[bytes]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.close", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient", "modulename": "fauna.http.http_client", "qualname": "HTTPClient", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.http.http_client.HTTPClient.request", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.request", "kind": "function", "doc": "

\n", "signature": "(\tself,\tmethod: str,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient.stream", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.stream", "kind": "function", "doc": "

\n", "signature": "(\tself,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> Iterator[Any]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient.close", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client", "modulename": "fauna.http.httpx_client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "fauna.http.http_client.HTTPResponse"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.__init__", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(response: httpx.Response)"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.headers", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.headers", "kind": "function", "doc": "

\n", "signature": "(self) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.json", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.json", "kind": "function", "doc": "

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.text", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.text", "kind": "function", "doc": "

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.status_code", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.status_code", "kind": "function", "doc": "

\n", "signature": "(self) -> int:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.read", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.read", "kind": "function", "doc": "

\n", "signature": "(self) -> bytes:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.iter_bytes", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.iter_bytes", "kind": "function", "doc": "

\n", "signature": "(self, size: Optional[int] = None) -> Iterator[bytes]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.close", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.close", "kind": "function", "doc": "

\n", "signature": "(self) -> None:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "fauna.http.http_client.HTTPClient"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.__init__", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.__init__", "kind": "function", "doc": "

\n", "signature": "(\tclient: httpx.Client,\tlogger: logging.Logger = <Logger fauna (WARNING)>)"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.request", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.request", "kind": "function", "doc": "

\n", "signature": "(\tself,\tmethod: str,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.stream", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.stream", "kind": "function", "doc": "

\n", "signature": "(\tself,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> Iterator[Any]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.close", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.query", "modulename": "fauna.query", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.models", "modulename": "fauna.query.models", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.models.Page", "modulename": "fauna.query.models", "qualname": "Page", "kind": "class", "doc": "

A class representing a Set in Fauna.

\n"}, {"fullname": "fauna.query.models.Page.__init__", "modulename": "fauna.query.models", "qualname": "Page.__init__", "kind": "function", "doc": "

\n", "signature": "(data: Optional[List[Any]] = None, after: Optional[str] = None)"}, {"fullname": "fauna.query.models.Page.data", "modulename": "fauna.query.models", "qualname": "Page.data", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.Page.after", "modulename": "fauna.query.models", "qualname": "Page.after", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.EventSource", "modulename": "fauna.query.models", "qualname": "EventSource", "kind": "class", "doc": "

A class representing an EventSource in Fauna.

\n"}, {"fullname": "fauna.query.models.EventSource.__init__", "modulename": "fauna.query.models", "qualname": "EventSource.__init__", "kind": "function", "doc": "

\n", "signature": "(token: str)"}, {"fullname": "fauna.query.models.EventSource.token", "modulename": "fauna.query.models", "qualname": "EventSource.token", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.Module", "modulename": "fauna.query.models", "qualname": "Module", "kind": "class", "doc": "

A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined\ncollection, among others.

\n\n

Usage:

\n\n

dogs = Module(\"Dogs\")\n query = fql(\"${col}.all\", col=dogs)

\n"}, {"fullname": "fauna.query.models.Module.__init__", "modulename": "fauna.query.models", "qualname": "Module.__init__", "kind": "function", "doc": "

\n", "signature": "(name: str)"}, {"fullname": "fauna.query.models.Module.name", "modulename": "fauna.query.models", "qualname": "Module.name", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.BaseReference", "modulename": "fauna.query.models", "qualname": "BaseReference", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.query.models.BaseReference.__init__", "modulename": "fauna.query.models", "qualname": "BaseReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module])"}, {"fullname": "fauna.query.models.BaseReference.coll", "modulename": "fauna.query.models", "qualname": "BaseReference.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.models.DocumentReference", "modulename": "fauna.query.models", "qualname": "DocumentReference", "kind": "class", "doc": "

A class representing a reference to a Document stored in Fauna.

\n", "bases": "BaseReference"}, {"fullname": "fauna.query.models.DocumentReference.__init__", "modulename": "fauna.query.models", "qualname": "DocumentReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module], id: str)"}, {"fullname": "fauna.query.models.DocumentReference.id", "modulename": "fauna.query.models", "qualname": "DocumentReference.id", "kind": "variable", "doc": "

The ID for the Document. Valid IDs are 64-bit integers, stored as strings.

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.DocumentReference.from_string", "modulename": "fauna.query.models", "qualname": "DocumentReference.from_string", "kind": "function", "doc": "

\n", "signature": "(ref: str):", "funcdef": "def"}, {"fullname": "fauna.query.models.NamedDocumentReference", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference", "kind": "class", "doc": "

A class representing a reference to a NamedDocument stored in Fauna.

\n", "bases": "BaseReference"}, {"fullname": "fauna.query.models.NamedDocumentReference.__init__", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module], name: str)"}, {"fullname": "fauna.query.models.NamedDocumentReference.name", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference.name", "kind": "variable", "doc": "

The name of the NamedDocument.

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.NullDocument", "modulename": "fauna.query.models", "qualname": "NullDocument", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.query.models.NullDocument.__init__", "modulename": "fauna.query.models", "qualname": "NullDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(\tref: Union[fauna.query.models.DocumentReference, fauna.query.models.NamedDocumentReference],\tcause: Optional[str] = None)"}, {"fullname": "fauna.query.models.NullDocument.cause", "modulename": "fauna.query.models", "qualname": "NullDocument.cause", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.query.models.NullDocument.ref", "modulename": "fauna.query.models", "qualname": "NullDocument.ref", "kind": "variable", "doc": "

\n", "annotation": ": Union[fauna.query.models.DocumentReference, fauna.query.models.NamedDocumentReference]"}, {"fullname": "fauna.query.models.BaseDocument", "modulename": "fauna.query.models", "qualname": "BaseDocument", "kind": "class", "doc": "

A base document class implementing an immutable mapping.

\n", "bases": "collections.abc.Mapping"}, {"fullname": "fauna.query.models.BaseDocument.__init__", "modulename": "fauna.query.models", "qualname": "BaseDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(*args, **kwargs)"}, {"fullname": "fauna.query.models.Document", "modulename": "fauna.query.models", "qualname": "Document", "kind": "class", "doc": "

A class representing a user document stored in Fauna.

\n\n

User data should be stored directly on the map, while id, ts, and coll should only be stored on the related\nproperties. When working with a Document in code, it should be considered immutable.

\n", "bases": "BaseDocument"}, {"fullname": "fauna.query.models.Document.__init__", "modulename": "fauna.query.models", "qualname": "Document.__init__", "kind": "function", "doc": "

\n", "signature": "(\tid: str,\tts: datetime.datetime,\tcoll: Union[str, fauna.query.models.Module],\tdata: Optional[Mapping] = None)"}, {"fullname": "fauna.query.models.Document.id", "modulename": "fauna.query.models", "qualname": "Document.id", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.Document.ts", "modulename": "fauna.query.models", "qualname": "Document.ts", "kind": "variable", "doc": "

\n", "annotation": ": datetime.datetime"}, {"fullname": "fauna.query.models.Document.coll", "modulename": "fauna.query.models", "qualname": "Document.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.models.NamedDocument", "modulename": "fauna.query.models", "qualname": "NamedDocument", "kind": "class", "doc": "

A class representing a named document stored in Fauna. Examples of named documents include Collection\ndefinitions, Index definitions, and Roles, among others.

\n\n

When working with a NamedDocument in code, it should be considered immutable.

\n", "bases": "BaseDocument"}, {"fullname": "fauna.query.models.NamedDocument.__init__", "modulename": "fauna.query.models", "qualname": "NamedDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(\tname: str,\tts: datetime.datetime,\tcoll: Union[fauna.query.models.Module, str],\tdata: Optional[Mapping] = None)"}, {"fullname": "fauna.query.models.NamedDocument.name", "modulename": "fauna.query.models", "qualname": "NamedDocument.name", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.NamedDocument.ts", "modulename": "fauna.query.models", "qualname": "NamedDocument.ts", "kind": "variable", "doc": "

\n", "annotation": ": datetime.datetime"}, {"fullname": "fauna.query.models.NamedDocument.coll", "modulename": "fauna.query.models", "qualname": "NamedDocument.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.query_builder", "modulename": "fauna.query.query_builder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.query_builder.Fragment", "modulename": "fauna.query.query_builder", "qualname": "Fragment", "kind": "class", "doc": "

An abstract class representing a Fragment of a query.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.query.query_builder.Fragment.get", "modulename": "fauna.query.query_builder", "qualname": "Fragment.get", "kind": "function", "doc": "

An abstract method for returning a stored value.

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.ValueFragment", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment", "kind": "class", "doc": "

A concrete Fragment representing a part of a query that can represent a template variable.\nFor example, if a template contains a variable ${foo}, and an object { \"prop\": 1 } is provided for foo,\nthen { \"prop\": 1 } should be wrapped as a ValueFragment.

\n\n
Parameters
\n\n
    \n
  • Any val: The value to be used as a fragment.
  • \n
\n", "bases": "Fragment"}, {"fullname": "fauna.query.query_builder.ValueFragment.__init__", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment.__init__", "kind": "function", "doc": "

\n", "signature": "(val: Any)"}, {"fullname": "fauna.query.query_builder.ValueFragment.get", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment.get", "kind": "function", "doc": "

Gets the stored value.

\n\n

:returns: The stored value.

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.LiteralFragment", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment", "kind": "class", "doc": "

A concrete Fragment representing a query literal. For example, in the template let x = ${foo},\nthe portion let x = is a query literal and should be wrapped as a LiteralFragment.

\n\n
Parameters
\n\n
    \n
  • str val: The query literal to be used as a fragment.
  • \n
\n", "bases": "Fragment"}, {"fullname": "fauna.query.query_builder.LiteralFragment.__init__", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment.__init__", "kind": "function", "doc": "

\n", "signature": "(val: str)"}, {"fullname": "fauna.query.query_builder.LiteralFragment.get", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment.get", "kind": "function", "doc": "

Returns the stored value.

\n\n

:returns: The stored value.

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.Query", "modulename": "fauna.query.query_builder", "qualname": "Query", "kind": "class", "doc": "

A class for representing a query.

\n\n

e.g. { \"fql\": [...] }

\n"}, {"fullname": "fauna.query.query_builder.Query.__init__", "modulename": "fauna.query.query_builder", "qualname": "Query.__init__", "kind": "function", "doc": "

\n", "signature": "(fragments: Optional[List[fauna.query.query_builder.Fragment]] = None)"}, {"fullname": "fauna.query.query_builder.Query.fragments", "modulename": "fauna.query.query_builder", "qualname": "Query.fragments", "kind": "variable", "doc": "

The list of stored Fragments

\n", "annotation": ": List[fauna.query.query_builder.Fragment]"}, {"fullname": "fauna.query.query_builder.fql", "modulename": "fauna.query.query_builder", "qualname": "fql", "kind": "function", "doc": "

Creates a Query - capable of performing query composition and simple querying. It can accept a\nsimple string query, or can perform composition using a ${}-sigil string template with **kwargs as\nsubstitutions.

\n\n

The **kwargs can be Fauna data types - such as strings, document references, or modules - or embedded\nQuery objects - allowing you to compose arbitrarily complex queries.

\n\n

When providing **kwargs, the following types are accepted:\n - str, int, float, bool, datetime.datetime, datetime.date,\n dict, list, Query, DocumentReference, Module

\n\n
Raises
\n\n
    \n
  • ValueError: If there is an invalid template placeholder or a value that cannot be encoded.\n:returns: A Query that can be passed to the client for evaluation against Fauna.
  • \n
\n\n

Examples:

\n\n
\n
fql('Dogs.byName("Fido")')\n
\n
\n\n
\n
def get_dog(id):\n    return fql('Dogs.byId(${id})', id=id)\n\ndef get_vet_phone(id):\n    return fql('${dog} { .vet_phone_number }', dog=get_dog(id))\n\nget_vet_phone('d123')\n
\n
\n", "signature": "(query: str, **kwargs: Any) -> fauna.query.query_builder.Query:", "funcdef": "def"}, {"fullname": "fauna.query.template", "modulename": "fauna.query.template", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.template.FaunaTemplate", "modulename": "fauna.query.template", "qualname": "FaunaTemplate", "kind": "class", "doc": "

A template class that supports variables marked with a ${}-sigil. Its primary purpose\nis to expose an iterator for the template parts that support composition of FQL queries.

\n\n

Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py

\n\n
Parameters
\n\n
    \n
  • template: A string template e.g. \"${my_var} { name }\"
  • \n
\n"}, {"fullname": "fauna.query.template.FaunaTemplate.__init__", "modulename": "fauna.query.template", "qualname": "FaunaTemplate.__init__", "kind": "function", "doc": "

The initializer

\n", "signature": "(template: str)"}, {"fullname": "fauna.query.template.FaunaTemplate.iter", "modulename": "fauna.query.template", "qualname": "FaunaTemplate.iter", "kind": "function", "doc": "

A method that returns an iterator over tuples representing template parts. The\nfirst value of the tuple, if not None, is a template literal. The second value of\nthe tuple, if not None, is a template variable. If both are not None, then the\ntemplate literal comes before the variable.

\n\n
Raises
\n\n
    \n
  • ValueError: If there is an invalid template placeholder
  • \n
\n\n
Returns
\n\n
\n

An iterator of template parts

\n
\n", "signature": "(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:", "funcdef": "def"}]; + + // mirrored in build-search-index.js (part 1) + // Also split on html tags. this is a cheap heuristic, but good enough. + elasticlunr.tokenizer.setSeperator(/[\s\-.;&_'"=,()]+|<[^>]*>/); + + let searchIndex; + if (docs._isPrebuiltIndex) { + console.info("using precompiled search index"); + searchIndex = elasticlunr.Index.load(docs); + } else { + console.time("building search index"); + // mirrored in build-search-index.js (part 2) + searchIndex = elasticlunr(function () { + this.pipeline.remove(elasticlunr.stemmer); + this.pipeline.remove(elasticlunr.stopWordFilter); + this.addField("qualname"); + this.addField("fullname"); + this.addField("annotation"); + this.addField("default_value"); + this.addField("signature"); + this.addField("bases"); + this.addField("doc"); + this.setRef("fullname"); + }); + for (let doc of docs) { + searchIndex.addDoc(doc); + } + console.timeEnd("building search index"); + } + + return (term) => searchIndex.search(term, { + fields: { + qualname: {boost: 4}, + fullname: {boost: 2}, + annotation: {boost: 2}, + default_value: {boost: 2}, + signature: {boost: 2}, + bases: {boost: 2}, + doc: {boost: 1}, + }, + expand: true + }); +})(); \ No newline at end of file diff --git a/latest b/latest index e3a4f193..cc6612c3 120000 --- a/latest +++ b/latest @@ -1 +1 @@ -2.2.0 \ No newline at end of file +2.3.0 \ No newline at end of file