diff --git a/2.1.0/api/fauna.html b/2.1.0/api/fauna.html
new file mode 100644
index 00000000..bfb9fe91
--- /dev/null
+++ b/2.1.0/api/fauna.html
@@ -0,0 +1,269 @@
+ fauna API documentation
+
+

+fauna

 1__title__ = "Fauna"
+ 2__version__ = "2.1.0"
+ 3__api_version__ = "10"
+ 4__author__ = "Fauna, Inc"
+ 5__license__ = "MPL 2.0"
+ 6__copyright__ = "2023 Fauna, Inc"
+ 7
+ 8from fauna.query import fql, Document, DocumentReference, NamedDocument, NamedDocumentReference, NullDocument, Module, Page
+ 9
+10global_http_client = None
+
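The top-level package re-exports fql, the template function used to build Query values throughout the driver. A minimal sketch of typical usage follows; the Product collection and its fields are illustrative only, not part of this module:

from fauna import fql

# ${...} placeholders are interpolated from keyword arguments and sent as
# encoded query arguments rather than concatenated into the query text.
q = fql("Product.where(.price < ${limit})", limit=100)

# Query values compose: an existing Query can itself be interpolated.
sorted_q = fql("${base}.order(.price)", base=q)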
+ global_http_client = None
+
\ No newline at end of file
diff --git a/2.1.0/api/fauna/client.html b/2.1.0/api/fauna/client.html
new file mode 100644
index 00000000..9e576653
--- /dev/null
+++ b/2.1.0/api/fauna/client.html
@@ -0,0 +1,248 @@
+ fauna.client API documentation
+
+

+fauna.client

1from .client import Client, QueryOptions, StreamOptions
+2from .endpoints import Endpoints
+3from .headers import Header
+
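The Client exported here can also be configured entirely through its constructor rather than environment variables; a brief sketch, where the local address and secret are illustrative:

from fauna.client import Client

# Explicit endpoint and secret; omit both to fall back to the
# FAUNA_ENDPOINT and FAUNA_SECRET environment variables.
local_client = Client(endpoint="http://localhost:8443", secret="secret")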
+ + +
+
+
\ No newline at end of file
diff --git a/2.1.0/api/fauna/client/client.html b/2.1.0/api/fauna/client/client.html
new file mode 100644
index 00000000..f5204f16
--- /dev/null
+++ b/2.1.0/api/fauna/client/client.html
@@ -0,0 +1,2745 @@
+ fauna.client.client API documentation
+
+

+fauna.client.client

  1from dataclasses import dataclass
+  2from datetime import timedelta
+  3from typing import Any, Dict, Iterator, Mapping, Optional, Union
+  4
+  5import fauna
+  6from fauna.client.headers import _DriverEnvironment, _Header, _Auth, Header
+  7from fauna.client.retryable import Retryable
+  8from fauna.client.utils import _Environment, LastTxnTs
+  9from fauna.encoding import FaunaEncoder, FaunaDecoder
+ 10from fauna.encoding import QuerySuccess, QueryTags, QueryStats
+ 11from fauna.errors import FaunaError, ClientError, ProtocolError, \
+ 12  RetryableFaunaException, NetworkError
+ 13from fauna.http.http_client import HTTPClient
+ 14from fauna.query import Query, Page, fql
+ 15from fauna.query.models import StreamToken
+ 16
+ 17DefaultHttpConnectTimeout = timedelta(seconds=5)
+ 18DefaultHttpReadTimeout: Optional[timedelta] = None
+ 19DefaultHttpWriteTimeout = timedelta(seconds=5)
+ 20DefaultHttpPoolTimeout = timedelta(seconds=5)
+ 21DefaultIdleConnectionTimeout = timedelta(seconds=5)
+ 22DefaultQueryTimeout = timedelta(seconds=5)
+ 23DefaultClientBufferTimeout = timedelta(seconds=5)
+ 24DefaultMaxConnections = 20
+ 25DefaultMaxIdleConnections = 20
+ 26
+ 27
+ 28@dataclass
+ 29class QueryOptions:
+ 30  """
+ 31    A dataclass representing options available for a query.
+ 32
+ 33    * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+ 34    * max_contention_retries - The max number of times to retry the query if contention is encountered.
+ 35    * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+ 36    * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+ 37    * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
+ 38    * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+ 39    * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+ 40    """
+ 41
+ 42  linearized: Optional[bool] = None
+ 43  max_contention_retries: Optional[int] = None
+ 44  query_timeout: Optional[timedelta] = DefaultQueryTimeout
+ 45  query_tags: Optional[Mapping[str, str]] = None
+ 46  traceparent: Optional[str] = None
+ 47  typecheck: Optional[bool] = None
+ 48  additional_headers: Optional[Dict[str, str]] = None
+ 49
+ 50
+ 51@dataclass
+ 52class StreamOptions:
+ 53  """
+ 54    A dataclass representing options available for a stream.
+ 55
+ 56    * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+ 57    * max_backoff - The maximum backoff in seconds for an individual retry.
+ 58    * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+ 59    the timestamp.
+ 60    * status_events - Indicates if stream should include status events. Status events are periodic events that
+ 61    update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+ 62    about the cost of maintaining the stream other than the cost of the received events.
+ 63    """
+ 64
+ 65  max_attempts: Optional[int] = None
+ 66  max_backoff: Optional[int] = None
+ 67  start_ts: Optional[int] = None
+ 68  status_events: bool = False
+ 69
+ 70
+ 71class Client:
+ 72
+ 73  def __init__(
+ 74      self,
+ 75      endpoint: Optional[str] = None,
+ 76      secret: Optional[str] = None,
+ 77      http_client: Optional[HTTPClient] = None,
+ 78      query_tags: Optional[Mapping[str, str]] = None,
+ 79      linearized: Optional[bool] = None,
+ 80      max_contention_retries: Optional[int] = None,
+ 81      typecheck: Optional[bool] = None,
+ 82      additional_headers: Optional[Dict[str, str]] = None,
+ 83      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+ 84      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+ 85      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+ 86      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+ 87      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+ 88      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+ 89      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+ 90      max_attempts: int = 3,
+ 91      max_backoff: int = 20,
+ 92  ):
+ 93    """Initializes a Client.
+ 94
+ 95        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+ 96        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+ 97        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+ 98        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+ 99        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+100        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+101        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+102        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+103        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+104        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+105        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+106        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+107        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+108        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+109        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+110        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+111        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+112        """
+113
+114    self._set_endpoint(endpoint)
+115    self._max_attempts = max_attempts
+116    self._max_backoff = max_backoff
+117
+118    if secret is None:
+119      self._auth = _Auth(_Environment.EnvFaunaSecret())
+120    else:
+121      self._auth = _Auth(secret)
+122
+123    self._last_txn_ts = LastTxnTs()
+124
+125    self._query_tags = {}
+126    if query_tags is not None:
+127      self._query_tags.update(query_tags)
+128
+129    if query_timeout is not None:
+130      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+131    else:
+132      self._query_timeout_ms = None
+133
+134    self._headers: Dict[str, str] = {
+135        _Header.AcceptEncoding: "gzip",
+136        _Header.ContentType: "application/json;charset=utf-8",
+137        _Header.Driver: "python",
+138        _Header.DriverEnv: str(_DriverEnvironment()),
+139    }
+140
+141    if typecheck is not None:
+142      self._headers[Header.Typecheck] = str(typecheck).lower()
+143
+144    if linearized is not None:
+145      self._headers[Header.Linearized] = str(linearized).lower()
+146
+147    if max_contention_retries is not None and max_contention_retries > 0:
+148      self._headers[Header.MaxContentionRetries] = \
+149          f"{max_contention_retries}"
+150
+151    if additional_headers is not None:
+152      self._headers = {
+153          **self._headers,
+154          **additional_headers,
+155      }
+156
+157    self._session: HTTPClient
+158
+159    if http_client is not None:
+160      self._session = http_client
+161    else:
+162      if fauna.global_http_client is None:
+163        timeout_s: Optional[float] = None
+164        if query_timeout is not None and client_buffer_timeout is not None:
+165          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+166        read_timeout_s: Optional[float] = None
+167        if http_read_timeout is not None:
+168          read_timeout_s = http_read_timeout.total_seconds()
+169
+170        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+171        ) if http_write_timeout is not None else None
+172        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+173        ) if http_connect_timeout is not None else None
+174        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+175        ) if http_pool_timeout is not None else None
+176        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+177        ) if http_idle_timeout is not None else None
+178
+179        import httpx
+180        from fauna.http.httpx_client import HTTPXClient
+181        c = HTTPXClient(
+182            httpx.Client(
+183                http1=True,
+184                http2=False,
+185                timeout=httpx.Timeout(
+186                    timeout=timeout_s,
+187                    connect=connect_timeout_s,
+188                    read=read_timeout_s,
+189                    write=write_timeout_s,
+190                    pool=pool_timeout_s,
+191                ),
+192                limits=httpx.Limits(
+193                    max_connections=DefaultMaxConnections,
+194                    max_keepalive_connections=DefaultMaxIdleConnections,
+195                    keepalive_expiry=idle_timeout_s,
+196                ),
+197            ))
+198        fauna.global_http_client = c
+199
+200      self._session = fauna.global_http_client
+201
+202  def close(self):
+203    self._session.close()
+204    if self._session == fauna.global_http_client:
+205      fauna.global_http_client = None
+206
+207  def set_last_txn_ts(self, txn_ts: int):
+208    """
+209        Set the last timestamp seen by this client.
+210        This has no effect if the given timestamp is earlier than the stored timestamp.
+211
+212        .. WARNING:: This should be used only when coordinating timestamps across
+213        multiple clients. Moving the timestamp arbitrarily forward into
+214        the future will cause transactions to stall.
+215
+216        :param txn_ts: the new transaction time.
+217        """
+218    self._last_txn_ts.update_txn_time(txn_ts)
+219
+220  def get_last_txn_ts(self) -> Optional[int]:
+221    """
+222        Get the last timestamp seen by this client.
+223        :return: the last transaction timestamp seen by this client, if any.
+224        """
+225    return self._last_txn_ts.time
+226
+227  def get_query_timeout(self) -> Optional[timedelta]:
+228    """
+229        Get the query timeout for all queries.
+230        """
+231    if self._query_timeout_ms is not None:
+232      return timedelta(milliseconds=self._query_timeout_ms)
+233    else:
+234      return None
+235
+236  def paginate(
+237      self,
+238      fql: Query,
+239      opts: Optional[QueryOptions] = None,
+240  ) -> "QueryIterator":
+241    """
+242        Run a query on Fauna and return an iterator of results. If the query
+243        returns a Page, the iterator will fetch additional Pages until the
+244        after token is null. Each call for a page will be retried with exponential
+245        backoff up to the max_attempts set in the client's retry policy in the
+246        event of a 429 or 502.
+247
+248        :param fql: A Query
+249        :param opts: (Optional) Query Options
+250
+251        :return: a :class:`QueryIterator`
+252
+253        :raises NetworkError: HTTP Request failed in transit
+254        :raises ProtocolError: HTTP error not from Fauna
+255        :raises ServiceError: Fauna returned an error
+256        :raises ValueError: Encoding and decoding errors
+257        :raises TypeError: Invalid param types
+258        """
+259
+260    if not isinstance(fql, Query):
+261      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+262                 f"Query by calling fauna.fql()"
+263      raise TypeError(err_msg)
+264
+265    return QueryIterator(self, fql, opts)
+266
+267  def query(
+268      self,
+269      fql: Query,
+270      opts: Optional[QueryOptions] = None,
+271  ) -> QuerySuccess:
+272    """
+273        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+274        up to the max_backoff in the event of a 429.
+275
+276        :param fql: A Query
+277        :param opts: (Optional) Query Options
+278
+279        :return: a :class:`QueryResponse`
+280
+281        :raises NetworkError: HTTP Request failed in transit
+282        :raises ProtocolError: HTTP error not from Fauna
+283        :raises ServiceError: Fauna returned an error
+284        :raises ValueError: Encoding and decoding errors
+285        :raises TypeError: Invalid param types
+286        """
+287
+288    if not isinstance(fql, Query):
+289      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+290                 f"Query by calling fauna.fql()"
+291      raise TypeError(err_msg)
+292
+293    try:
+294      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+295    except Exception as e:
+296      raise ClientError("Failed to encode Query") from e
+297
+298    retryable = Retryable[QuerySuccess](
+299        self._max_attempts,
+300        self._max_backoff,
+301        self._query,
+302        "/query/1",
+303        fql=encoded_query,
+304        opts=opts,
+305    )
+306
+307    r = retryable.run()
+308    r.response.stats.attempts = r.attempts
+309    return r.response
+310
+311  def _query(
+312      self,
+313      path: str,
+314      fql: Mapping[str, Any],
+315      arguments: Optional[Mapping[str, Any]] = None,
+316      opts: Optional[QueryOptions] = None,
+317  ) -> QuerySuccess:
+318
+319    headers = self._headers.copy()
+320    headers[_Header.Format] = "tagged"
+321    headers[_Header.Authorization] = self._auth.bearer()
+322
+323    if self._query_timeout_ms is not None:
+324      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+325
+326    headers.update(self._last_txn_ts.request_header)
+327
+328    query_tags = {}
+329    if self._query_tags is not None:
+330      query_tags.update(self._query_tags)
+331
+332    if opts is not None:
+333      if opts.linearized is not None:
+334        headers[Header.Linearized] = str(opts.linearized).lower()
+335      if opts.max_contention_retries is not None:
+336        headers[Header.MaxContentionRetries] = \
+337            f"{opts.max_contention_retries}"
+338      if opts.traceparent is not None:
+339        headers[Header.Traceparent] = opts.traceparent
+340      if opts.query_timeout is not None:
+341        timeout_ms = f"{int(opts.query_timeout.total_seconds() * 1000)}"
+342        headers[Header.QueryTimeoutMs] = timeout_ms
+343      if opts.query_tags is not None:
+344        query_tags.update(opts.query_tags)
+345      if opts.typecheck is not None:
+346        headers[Header.Typecheck] = str(opts.typecheck).lower()
+347      if opts.additional_headers is not None:
+348        headers.update(opts.additional_headers)
+349
+350    if len(query_tags) > 0:
+351      headers[Header.Tags] = QueryTags.encode(query_tags)
+352
+353    data: dict[str, Any] = {
+354        "query": fql,
+355        "arguments": arguments or {},
+356    }
+357
+358    with self._session.request(
+359        method="POST",
+360        url=self._endpoint + path,
+361        headers=headers,
+362        data=data,
+363    ) as response:
+364      status_code = response.status_code()
+365      response_json = response.json()
+366      headers = response.headers()
+367
+368      self._check_protocol(response_json, status_code)
+369
+370      dec: Any = FaunaDecoder.decode(response_json)
+371
+372      if status_code > 399:
+373        FaunaError.parse_error_and_throw(dec, status_code)
+374
+375      if "txn_ts" in dec:
+376        self.set_last_txn_ts(int(response_json["txn_ts"]))
+377
+378      stats = QueryStats(dec["stats"]) if "stats" in dec else None
+379      summary = dec["summary"] if "summary" in dec else None
+380      query_tags = QueryTags.decode(
+381          dec["query_tags"]) if "query_tags" in dec else None
+382      txn_ts = dec["txn_ts"] if "txn_ts" in dec else None
+383      schema_version = dec["schema_version"] if "schema_version" in dec else None
+384      traceparent = headers.get("traceparent", None)
+385      static_type = dec["static_type"] if "static_type" in dec else None
+386
+387      return QuerySuccess(
+388          data=dec["data"],
+389          query_tags=query_tags,
+390          static_type=static_type,
+391          stats=stats,
+392          summary=summary,
+393          traceparent=traceparent,
+394          txn_ts=txn_ts,
+395          schema_version=schema_version,
+396      )
+397
+398  def stream(
+399      self,
+400      fql: Union[StreamToken, Query],
+401      opts: StreamOptions = StreamOptions()
+402  ) -> "StreamIterator":
+403    """
+404        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+405
+406        :param fql: A StreamToken, or a Query that returns a StreamToken.
+407        :param opts: (Optional) Stream Options.
+408
+409        :return: a :class:`StreamIterator`
+410
+411        :raises NetworkError: HTTP Request failed in transit
+412        :raises ProtocolError: HTTP error not from Fauna
+413        :raises ServiceError: Fauna returned an error
+414        :raises ValueError: Encoding and decoding errors
+415        :raises TypeError: Invalid param types
+416        """
+417
+418    if isinstance(fql, Query):
+419      token = self.query(fql).data
+420    else:
+421      token = fql
+422
+423    if not isinstance(token, StreamToken):
+424      err_msg = f"'fql' must be a StreamToken, or a Query that returns a StreamToken but was a {type(token)}."
+425      raise TypeError(err_msg)
+426
+427    headers = self._headers.copy()
+428    headers[_Header.Format] = "tagged"
+429    headers[_Header.Authorization] = self._auth.bearer()
+430
+431    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+432                          self._max_attempts, self._max_backoff, opts, token)
+433
+434  def _check_protocol(self, response_json: Any, status_code):
+435    # TODO: Logic to validate wire protocol belongs elsewhere.
+436    should_raise = False
+437
+438    # check for QuerySuccess
+439    if status_code <= 399 and "data" not in response_json:
+440      should_raise = True
+441
+442    # check for QueryFailure
+443    if status_code > 399:
+444      if "error" not in response_json:
+445        should_raise = True
+446      else:
+447        e = response_json["error"]
+448        if "code" not in e or "message" not in e:
+449          should_raise = True
+450
+451    if should_raise:
+452      raise ProtocolError(
+453          status_code,
+454          f"Response is in an unknown format: \n{response_json}",
+455      )
+456
+457  def _set_endpoint(self, endpoint):
+458    if endpoint is None:
+459      endpoint = _Environment.EnvFaunaEndpoint()
+460
+461    if endpoint[-1:] == "/":
+462      endpoint = endpoint[:-1]
+463
+464    self._endpoint = endpoint
+465
+466
+467class StreamIterator:
+468  """A class that mixes a ContextManager and an Iterator so we can detect retryable errors."""
+469
+470  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+471               endpoint: str, max_attempts: int, max_backoff: int,
+472               opts: StreamOptions, token: StreamToken):
+473    self._http_client = http_client
+474    self._headers = headers
+475    self._endpoint = endpoint
+476    self._max_attempts = max_attempts
+477    self._max_backoff = max_backoff
+478    self._opts = opts
+479    self._token = token
+480    self._stream = None
+481    self.last_ts = None
+482    self._ctx = self._create_stream()
+483
+484  def __enter__(self):
+485    return self
+486
+487  def __exit__(self, exc_type, exc_value, exc_traceback):
+488    if self._stream is not None:
+489      self._stream.close()
+490
+491    self._ctx.__exit__(exc_type, exc_value, exc_traceback)
+492    return False
+493
+494  def __iter__(self):
+495    return self
+496
+497  def __next__(self):
+498    if self._opts.max_attempts is not None:
+499      max_attempts = self._opts.max_attempts
+500    else:
+501      max_attempts = self._max_attempts
+502
+503    if self._opts.max_backoff is not None:
+504      max_backoff = self._opts.max_backoff
+505    else:
+506      max_backoff = self._max_backoff
+507
+508    retryable = Retryable[Any](max_attempts, max_backoff, self._next_element)
+509    return retryable.run().response
+510
+511  def _next_element(self):
+512    try:
+513      if self._stream is None:
+514        try:
+515          self._stream = self._ctx.__enter__()
+516        except Exception:
+517          self._retry_stream()
+518
+519      if self._stream is not None:
+520        event: Any = FaunaDecoder.decode(next(self._stream))
+521
+522        if event["type"] == "error":
+523          FaunaError.parse_error_and_throw(event, 400)
+524
+525        self.last_ts = event["txn_ts"]
+526
+527        if event["type"] == "start":
+528          return self._next_element()
+529
+530        if not self._opts.status_events and event["type"] == "status":
+531          return self._next_element()
+532
+533        return event
+534
+535      raise StopIteration
+536    except NetworkError:
+537      self._retry_stream()
+538
+539  def _retry_stream(self):
+540    if self._stream is not None:
+541      self._stream.close()
+542
+543    self._stream = None
+544
+545    try:
+546      self._ctx = self._create_stream()
+547    except Exception:
+548      pass
+549    raise RetryableFaunaException
+550
+551  def _create_stream(self):
+552    data: Dict[str, Any] = {"token": self._token.token}
+553    if self.last_ts is not None:
+554      data["start_ts"] = self.last_ts
+555    elif self._opts.start_ts is not None:
+556      data["start_ts"] = self._opts.start_ts
+557
+558    return self._http_client.stream(
+559        url=self._endpoint, headers=self._headers, data=data)
+560
+561  def close(self):
+562    if self._stream is not None:
+563      self._stream.close()
+564
+565
+566class QueryIterator:
+567  """A class to provide an iterator on top of Fauna queries."""
+568
+569  def __init__(self,
+570               client: Client,
+571               fql: Query,
+572               opts: Optional[QueryOptions] = None):
+573    """Initializes the QueryIterator
+574
+575        :param fql: A Query
+576        :param opts: (Optional) Query Options
+577
+578        :raises TypeError: Invalid param types
+579        """
+580    if not isinstance(client, Client):
+581      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+582                  f"Client by calling fauna.client.Client()"
+583      raise TypeError(err_msg)
+584
+585    if not isinstance(fql, Query):
+586      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+587                 f"Query by calling fauna.fql()"
+588      raise TypeError(err_msg)
+589
+590    self.client = client
+591    self.fql = fql
+592    self.opts = opts
+593
+594  def __iter__(self) -> Iterator:
+595    return self.iter()
+596
+597  def iter(self) -> Iterator:
+598    """
+599        A generator function that immediately fetches and yields the results of
+600        the stored query. Yields additional pages on subsequent iterations if
+601        they exist.
+602        """
+603
+604    cursor = None
+605    initial_response = self.client.query(self.fql, self.opts)
+606
+607    if isinstance(initial_response.data, Page):
+608      cursor = initial_response.data.after
+609      yield initial_response.data.data
+610
+611      while cursor is not None:
+612        next_response = self.client.query(
+613            fql("Set.paginate(${after})", after=cursor), self.opts)
+614        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+615        #       so we will get back a plain object that might not have
+616        #       an after property.
+617        cursor = next_response.data.get("after")
+618        yield next_response.data.get("data")
+619
+620    else:
+621      yield [initial_response.data]
+622
+623  def flatten(self) -> Iterator:
+624    """
+625        A generator function that immediately fetches and yields the results of
+626        the stored query. Yields each item individually, rather than a whole
+627        Page at a time. Fetches additional pages as required if they exist.
+628        """
+629
+630    for page in self.iter():
+631      for item in page:
+632        yield item
+
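Putting the module above together, a typical round trip looks roughly like the sketch below; it assumes a FAUNA_SECRET environment variable and an illustrative Product collection:

from fauna import fql
from fauna.client import Client

client = Client()  # endpoint and secret default to FAUNA_ENDPOINT / FAUNA_SECRET

try:
    # query() returns a QuerySuccess; .data is the decoded result, .stats the query stats.
    success = client.query(fql("Product.all().count()"))
    print(success.data, success.stats)

    # paginate() returns a QueryIterator; flatten() yields individual documents
    # across pages instead of one list per page.
    for product in client.paginate(fql("Product.all()")).flatten():
        print(product)
finally:
    client.close()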
+ DefaultHttpConnectTimeout = datetime.timedelta(seconds=5)
+ DefaultHttpReadTimeout: Optional[datetime.timedelta] = None
+ DefaultHttpWriteTimeout = datetime.timedelta(seconds=5)
+ DefaultHttpPoolTimeout = datetime.timedelta(seconds=5)
+ DefaultIdleConnectionTimeout = datetime.timedelta(seconds=5)
+ DefaultQueryTimeout = datetime.timedelta(seconds=5)
+ DefaultClientBufferTimeout = datetime.timedelta(seconds=5)
+ DefaultMaxConnections = 20
+ DefaultMaxIdleConnections = 20
+
@dataclass
+ class QueryOptions:
29@dataclass
+30class QueryOptions:
+31  """
+32    A dataclass representing options available for a query.
+33
+34    * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+35    * max_contention_retries - The max number of times to retry the query if contention is encountered.
+36    * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+37    * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+38    * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
+39    * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+40    * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+41    """
+42
+43  linearized: Optional[bool] = None
+44  max_contention_retries: Optional[int] = None
+45  query_timeout: Optional[timedelta] = DefaultQueryTimeout
+46  query_tags: Optional[Mapping[str, str]] = None
+47  traceparent: Optional[str] = None
+48  typecheck: Optional[bool] = None
+49  additional_headers: Optional[Dict[str, str]] = None
+
+ + +

A dataclass representing options available for a query.

+ +
  • linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • max_contention_retries - The max number of times to retry the query if contention is encountered.
  • query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
  • query_tags - Tags to associate with the query. See logging
  • traceparent - A traceparent to associate with the query. See logging Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
  • typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
  • additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+
+ + +
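A short sketch of passing these options for a single call, overriding the Client-level defaults for that call only; the query and tags are illustrative:

from datetime import timedelta
from fauna import fql
from fauna.client import Client, QueryOptions

client = Client()
opts = QueryOptions(
    query_timeout=timedelta(seconds=10),
    query_tags={"source": "nightly-job"},
    typecheck=True,
)
# The options are applied as per-request headers on top of the Client configuration.
result = client.query(fql("Product.all().count()"), opts)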
+ QueryOptions(linearized: Optional[bool] = None, max_contention_retries: Optional[int] = None, query_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), query_tags: Optional[Mapping[str, str]] = None, traceparent: Optional[str] = None, typecheck: Optional[bool] = None, additional_headers: Optional[Dict[str, str]] = None)
+ linearized: Optional[bool] = None
+ max_contention_retries: Optional[int] = None
+ query_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5)
+ query_tags: Optional[Mapping[str, str]] = None
+ traceparent: Optional[str] = None
+ typecheck: Optional[bool] = None
+ additional_headers: Optional[Dict[str, str]] = None
+
+
+ +
+
@dataclass
+ class StreamOptions:
52@dataclass
+53class StreamOptions:
+54  """
+55    A dataclass representing options available for a stream.
+56
+57    * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+58    * max_backoff - The maximum backoff in seconds for an individual retry.
+59    * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+60    the timestamp.
+61    * status_events - Indicates if stream should include status events. Status events are periodic events that
+62    update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+63    about the cost of maintaining the stream other than the cost of the received events.
+64    """
+65
+66  max_attempts: Optional[int] = None
+67  max_backoff: Optional[int] = None
+68  start_ts: Optional[int] = None
+69  status_events: bool = False
+
+ + +

A dataclass representing options available for a stream.

+ +
  • max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after the timestamp.
  • status_events - Indicates if stream should include status events. Status events are periodic events that update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics about the cost of maintaining the stream other than the cost of the received events.
+
+ + +
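A sketch of consuming a stream with these options; it assumes an FQL set that exposes a stream via toStream() and an illustrative Product collection:

from fauna import fql
from fauna.client import Client, StreamOptions

client = Client()
opts = StreamOptions(max_attempts=5, max_backoff=10, status_events=False)

# stream() accepts a StreamToken, or a Query that returns one (as here).
with client.stream(fql("Product.all().toStream()"), opts) as events:
    for event in events:
        # Each event is a decoded dict; "type" distinguishes document and status events.
        print(event["type"], event.get("data"))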
+ StreamOptions(max_attempts: Optional[int] = None, max_backoff: Optional[int] = None, start_ts: Optional[int] = None, status_events: bool = False)
+ max_attempts: Optional[int] = None
+ max_backoff: Optional[int] = None
+ start_ts: Optional[int] = None
+ status_events: bool = False
+
+
+ +
+ class Client:
 72class Client:
+ 73
+ 74  def __init__(
+ 75      self,
+ 76      endpoint: Optional[str] = None,
+ 77      secret: Optional[str] = None,
+ 78      http_client: Optional[HTTPClient] = None,
+ 79      query_tags: Optional[Mapping[str, str]] = None,
+ 80      linearized: Optional[bool] = None,
+ 81      max_contention_retries: Optional[int] = None,
+ 82      typecheck: Optional[bool] = None,
+ 83      additional_headers: Optional[Dict[str, str]] = None,
+ 84      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+ 85      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+ 86      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+ 87      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+ 88      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+ 89      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+ 90      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+ 91      max_attempts: int = 3,
+ 92      max_backoff: int = 20,
+ 93  ):
+ 94    """Initializes a Client.
+ 95
+ 96        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+ 97        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+ 98        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+ 99        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+100        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+101        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+102        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+103        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+104        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+105        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+106        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+107        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+108        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+109        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+110        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+111        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+112        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+113        """
+114
+115    self._set_endpoint(endpoint)
+116    self._max_attempts = max_attempts
+117    self._max_backoff = max_backoff
+118
+119    if secret is None:
+120      self._auth = _Auth(_Environment.EnvFaunaSecret())
+121    else:
+122      self._auth = _Auth(secret)
+123
+124    self._last_txn_ts = LastTxnTs()
+125
+126    self._query_tags = {}
+127    if query_tags is not None:
+128      self._query_tags.update(query_tags)
+129
+130    if query_timeout is not None:
+131      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+132    else:
+133      self._query_timeout_ms = None
+134
+135    self._headers: Dict[str, str] = {
+136        _Header.AcceptEncoding: "gzip",
+137        _Header.ContentType: "application/json;charset=utf-8",
+138        _Header.Driver: "python",
+139        _Header.DriverEnv: str(_DriverEnvironment()),
+140    }
+141
+142    if typecheck is not None:
+143      self._headers[Header.Typecheck] = str(typecheck).lower()
+144
+145    if linearized is not None:
+146      self._headers[Header.Linearized] = str(linearized).lower()
+147
+148    if max_contention_retries is not None and max_contention_retries > 0:
+149      self._headers[Header.MaxContentionRetries] = \
+150          f"{max_contention_retries}"
+151
+152    if additional_headers is not None:
+153      self._headers = {
+154          **self._headers,
+155          **additional_headers,
+156      }
+157
+158    self._session: HTTPClient
+159
+160    if http_client is not None:
+161      self._session = http_client
+162    else:
+163      if fauna.global_http_client is None:
+164        timeout_s: Optional[float] = None
+165        if query_timeout is not None and client_buffer_timeout is not None:
+166          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+167        read_timeout_s: Optional[float] = None
+168        if http_read_timeout is not None:
+169          read_timeout_s = http_read_timeout.total_seconds()
+170
+171        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+172        ) if http_write_timeout is not None else None
+173        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+174        ) if http_connect_timeout is not None else None
+175        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+176        ) if http_pool_timeout is not None else None
+177        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+178        ) if http_idle_timeout is not None else None
+179
+180        import httpx
+181        from fauna.http.httpx_client import HTTPXClient
+182        c = HTTPXClient(
+183            httpx.Client(
+184                http1=True,
+185                http2=False,
+186                timeout=httpx.Timeout(
+187                    timeout=timeout_s,
+188                    connect=connect_timeout_s,
+189                    read=read_timeout_s,
+190                    write=write_timeout_s,
+191                    pool=pool_timeout_s,
+192                ),
+193                limits=httpx.Limits(
+194                    max_connections=DefaultMaxConnections,
+195                    max_keepalive_connections=DefaultMaxIdleConnections,
+196                    keepalive_expiry=idle_timeout_s,
+197                ),
+198            ))
+199        fauna.global_http_client = c
+200
+201      self._session = fauna.global_http_client
+202
+203  def close(self):
+204    self._session.close()
+205    if self._session == fauna.global_http_client:
+206      fauna.global_http_client = None
+207
+208  def set_last_txn_ts(self, txn_ts: int):
+209    """
+210        Set the last timestamp seen by this client.
+211        This has no effect if the given timestamp is earlier than the stored timestamp.
+212
+213        .. WARNING:: This should be used only when coordinating timestamps across
+214        multiple clients. Moving the timestamp arbitrarily forward into
+215        the future will cause transactions to stall.
+216
+217        :param txn_ts: the new transaction time.
+218        """
+219    self._last_txn_ts.update_txn_time(txn_ts)
+220
+221  def get_last_txn_ts(self) -> Optional[int]:
+222    """
+223        Get the last timestamp seen by this client.
+224        :return: the last transaction timestamp seen by this client, if any.
+225        """
+226    return self._last_txn_ts.time
+227
+228  def get_query_timeout(self) -> Optional[timedelta]:
+229    """
+230        Get the query timeout for all queries.
+231        """
+232    if self._query_timeout_ms is not None:
+233      return timedelta(milliseconds=self._query_timeout_ms)
+234    else:
+235      return None
+236
+237  def paginate(
+238      self,
+239      fql: Query,
+240      opts: Optional[QueryOptions] = None,
+241  ) -> "QueryIterator":
+242    """
+243        Run a query on Fauna and return an iterator of results. If the query
+244        returns a Page, the iterator will fetch additional Pages until the
+245        after token is null. Each call for a page will be retried with exponential
+246        backoff up to the max_attempts set in the client's retry policy in the
+247        event of a 429 or 502.
+248
+249        :param fql: A Query
+250        :param opts: (Optional) Query Options
+251
+252        :return: a :class:`QueryIterator`
+253
+254        :raises NetworkError: HTTP Request failed in transit
+255        :raises ProtocolError: HTTP error not from Fauna
+256        :raises ServiceError: Fauna returned an error
+257        :raises ValueError: Encoding and decoding errors
+258        :raises TypeError: Invalid param types
+259        """
+260
+261    if not isinstance(fql, Query):
+262      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+263                 f"Query by calling fauna.fql()"
+264      raise TypeError(err_msg)
+265
+266    return QueryIterator(self, fql, opts)
+267
+268  def query(
+269      self,
+270      fql: Query,
+271      opts: Optional[QueryOptions] = None,
+272  ) -> QuerySuccess:
+273    """
+274        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+275        up to the max_backoff in the event of a 429.
+276
+277        :param fql: A Query
+278        :param opts: (Optional) Query Options
+279
+280        :return: a :class:`QueryResponse`
+281
+282        :raises NetworkError: HTTP Request failed in transit
+283        :raises ProtocolError: HTTP error not from Fauna
+284        :raises ServiceError: Fauna returned an error
+285        :raises ValueError: Encoding and decoding errors
+286        :raises TypeError: Invalid param types
+287        """
+288
+289    if not isinstance(fql, Query):
+290      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+291                 f"Query by calling fauna.fql()"
+292      raise TypeError(err_msg)
+293
+294    try:
+295      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+296    except Exception as e:
+297      raise ClientError("Failed to encode Query") from e
+298
+299    retryable = Retryable[QuerySuccess](
+300        self._max_attempts,
+301        self._max_backoff,
+302        self._query,
+303        "/query/1",
+304        fql=encoded_query,
+305        opts=opts,
+306    )
+307
+308    r = retryable.run()
+309    r.response.stats.attempts = r.attempts
+310    return r.response
+311
+312  def _query(
+313      self,
+314      path: str,
+315      fql: Mapping[str, Any],
+316      arguments: Optional[Mapping[str, Any]] = None,
+317      opts: Optional[QueryOptions] = None,
+318  ) -> QuerySuccess:
+319
+320    headers = self._headers.copy()
+321    headers[_Header.Format] = "tagged"
+322    headers[_Header.Authorization] = self._auth.bearer()
+323
+324    if self._query_timeout_ms is not None:
+325      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)
+326
+327    headers.update(self._last_txn_ts.request_header)
+328
+329    query_tags = {}
+330    if self._query_tags is not None:
+331      query_tags.update(self._query_tags)
+332
+333    if opts is not None:
+334      if opts.linearized is not None:
+335        headers[Header.Linearized] = str(opts.linearized).lower()
+336      if opts.max_contention_retries is not None:
+337        headers[Header.MaxContentionRetries] = \
+338            f"{opts.max_contention_retries}"
+339      if opts.traceparent is not None:
+340        headers[Header.Traceparent] = opts.traceparent
+341      if opts.query_timeout is not None:
+342        timeout_ms = f"{int(opts.query_timeout.total_seconds() * 1000)}"
+343        headers[Header.QueryTimeoutMs] = timeout_ms
+344      if opts.query_tags is not None:
+345        query_tags.update(opts.query_tags)
+346      if opts.typecheck is not None:
+347        headers[Header.Typecheck] = str(opts.typecheck).lower()
+348      if opts.additional_headers is not None:
+349        headers.update(opts.additional_headers)
+350
+351    if len(query_tags) > 0:
+352      headers[Header.Tags] = QueryTags.encode(query_tags)
+353
+354    data: dict[str, Any] = {
+355        "query": fql,
+356        "arguments": arguments or {},
+357    }
+358
+359    with self._session.request(
+360        method="POST",
+361        url=self._endpoint + path,
+362        headers=headers,
+363        data=data,
+364    ) as response:
+365      status_code = response.status_code()
+366      response_json = response.json()
+367      headers = response.headers()
+368
+369      self._check_protocol(response_json, status_code)
+370
+371      dec: Any = FaunaDecoder.decode(response_json)
+372
+373      if status_code > 399:
+374        FaunaError.parse_error_and_throw(dec, status_code)
+375
+376      if "txn_ts" in dec:
+377        self.set_last_txn_ts(int(response_json["txn_ts"]))
+378
+379      stats = QueryStats(dec["stats"]) if "stats" in dec else None
+380      summary = dec["summary"] if "summary" in dec else None
+381      query_tags = QueryTags.decode(
+382          dec["query_tags"]) if "query_tags" in dec else None
+383      txn_ts = dec["txn_ts"] if "txn_ts" in dec else None
+384      schema_version = dec["schema_version"] if "schema_version" in dec else None
+385      traceparent = headers.get("traceparent", None)
+386      static_type = dec["static_type"] if "static_type" in dec else None
+387
+388      return QuerySuccess(
+389          data=dec["data"],
+390          query_tags=query_tags,
+391          static_type=static_type,
+392          stats=stats,
+393          summary=summary,
+394          traceparent=traceparent,
+395          txn_ts=txn_ts,
+396          schema_version=schema_version,
+397      )
+398
+399  def stream(
+400      self,
+401      fql: Union[StreamToken, Query],
+402      opts: StreamOptions = StreamOptions()
+403  ) -> "StreamIterator":
+404    """
+405        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+406
+407        :param fql: A StreamToken, or a Query that returns a StreamToken.
+408        :param opts: (Optional) Stream Options.
+409
+410        :return: a :class:`StreamIterator`
+411
+412        :raises NetworkError: HTTP Request failed in transit
+413        :raises ProtocolError: HTTP error not from Fauna
+414        :raises ServiceError: Fauna returned an error
+415        :raises ValueError: Encoding and decoding errors
+416        :raises TypeError: Invalid param types
+417        """
+418
+419    if isinstance(fql, Query):
+420      token = self.query(fql).data
+421    else:
+422      token = fql
+423
+424    if not isinstance(token, StreamToken):
+425      err_msg = f"'fql' must be a StreamToken, or a Query that returns a StreamToken but was a {type(token)}."
+426      raise TypeError(err_msg)
+427
+428    headers = self._headers.copy()
+429    headers[_Header.Format] = "tagged"
+430    headers[_Header.Authorization] = self._auth.bearer()
+431
+432    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+433                          self._max_attempts, self._max_backoff, opts, token)
+434
+435  def _check_protocol(self, response_json: Any, status_code):
+436    # TODO: Logic to validate wire protocol belongs elsewhere.
+437    should_raise = False
+438
+439    # check for QuerySuccess
+440    if status_code <= 399 and "data" not in response_json:
+441      should_raise = True
+442
+443    # check for QueryFailure
+444    if status_code > 399:
+445      if "error" not in response_json:
+446        should_raise = True
+447      else:
+448        e = response_json["error"]
+449        if "code" not in e or "message" not in e:
+450          should_raise = True
+451
+452    if should_raise:
+453      raise ProtocolError(
+454          status_code,
+455          f"Response is in an unknown format: \n{response_json}",
+456      )
+457
+458  def _set_endpoint(self, endpoint):
+459    if endpoint is None:
+460      endpoint = _Environment.EnvFaunaEndpoint()
+461
+462    if endpoint[-1:] == "/":
+463      endpoint = endpoint[:-1]
+464
+465    self._endpoint = endpoint
+
+ Client(endpoint: Optional[str] = None, secret: Optional[str] = None, http_client: Optional[fauna.http.http_client.HTTPClient] = None, query_tags: Optional[Mapping[str, str]] = None, linearized: Optional[bool] = None, max_contention_retries: Optional[int] = None, typecheck: Optional[bool] = None, additional_headers: Optional[Dict[str, str]] = None, query_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), client_buffer_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_read_timeout: Optional[datetime.timedelta] = None, http_write_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_connect_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_pool_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), http_idle_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5), max_attempts: int = 3, max_backoff: int = 20)
+ +
 74  def __init__(
+ 75      self,
+ 76      endpoint: Optional[str] = None,
+ 77      secret: Optional[str] = None,
+ 78      http_client: Optional[HTTPClient] = None,
+ 79      query_tags: Optional[Mapping[str, str]] = None,
+ 80      linearized: Optional[bool] = None,
+ 81      max_contention_retries: Optional[int] = None,
+ 82      typecheck: Optional[bool] = None,
+ 83      additional_headers: Optional[Dict[str, str]] = None,
+ 84      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
+ 85      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
+ 86      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
+ 87      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
+ 88      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
+ 89      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
+ 90      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
+ 91      max_attempts: int = 3,
+ 92      max_backoff: int = 20,
+ 93  ):
+ 94    """Initializes a Client.
+ 95
+ 96        :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+ 97        :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+ 98        :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+ 99        :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+100        :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+101        :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+102        :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+103        :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+104        :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+105        :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+106        :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+107        :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+108        :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+109        :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+110        :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+111        :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+112        :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+113        """
+114
+115    self._set_endpoint(endpoint)
+116    self._max_attempts = max_attempts
+117    self._max_backoff = max_backoff
+118
+119    if secret is None:
+120      self._auth = _Auth(_Environment.EnvFaunaSecret())
+121    else:
+122      self._auth = _Auth(secret)
+123
+124    self._last_txn_ts = LastTxnTs()
+125
+126    self._query_tags = {}
+127    if query_tags is not None:
+128      self._query_tags.update(query_tags)
+129
+130    if query_timeout is not None:
+131      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
+132    else:
+133      self._query_timeout_ms = None
+134
+135    self._headers: Dict[str, str] = {
+136        _Header.AcceptEncoding: "gzip",
+137        _Header.ContentType: "application/json;charset=utf-8",
+138        _Header.Driver: "python",
+139        _Header.DriverEnv: str(_DriverEnvironment()),
+140    }
+141
+142    if typecheck is not None:
+143      self._headers[Header.Typecheck] = str(typecheck).lower()
+144
+145    if linearized is not None:
+146      self._headers[Header.Linearized] = str(linearized).lower()
+147
+148    if max_contention_retries is not None and max_contention_retries > 0:
+149      self._headers[Header.MaxContentionRetries] = \
+150          f"{max_contention_retries}"
+151
+152    if additional_headers is not None:
+153      self._headers = {
+154          **self._headers,
+155          **additional_headers,
+156      }
+157
+158    self._session: HTTPClient
+159
+160    if http_client is not None:
+161      self._session = http_client
+162    else:
+163      if fauna.global_http_client is None:
+164        timeout_s: Optional[float] = None
+165        if query_timeout is not None and client_buffer_timeout is not None:
+166          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
+167        read_timeout_s: Optional[float] = None
+168        if http_read_timeout is not None:
+169          read_timeout_s = http_read_timeout.total_seconds()
+170
+171        write_timeout_s: Optional[float] = http_write_timeout.total_seconds(
+172        ) if http_write_timeout is not None else None
+173        connect_timeout_s: Optional[float] = http_connect_timeout.total_seconds(
+174        ) if http_connect_timeout is not None else None
+175        pool_timeout_s: Optional[float] = http_pool_timeout.total_seconds(
+176        ) if http_pool_timeout is not None else None
+177        idle_timeout_s: Optional[float] = http_idle_timeout.total_seconds(
+178        ) if http_idle_timeout is not None else None
+179
+180        import httpx
+181        from fauna.http.httpx_client import HTTPXClient
+182        c = HTTPXClient(
+183            httpx.Client(
+184                http1=True,
+185                http2=False,
+186                timeout=httpx.Timeout(
+187                    timeout=timeout_s,
+188                    connect=connect_timeout_s,
+189                    read=read_timeout_s,
+190                    write=write_timeout_s,
+191                    pool=pool_timeout_s,
+192                ),
+193                limits=httpx.Limits(
+194                    max_connections=DefaultMaxConnections,
+195                    max_keepalive_connections=DefaultMaxIdleConnections,
+196                    keepalive_expiry=idle_timeout_s,
+197                ),
+198            ))
+199        fauna.global_http_client = c
+200
+201      self._session = fauna.global_http_client
+
+ + +

Initializes a Client.

+ +
Parameters
+ +
    +
  • endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the FAUNA_ENDPOINT env variable.
  • +
  • secret: The Fauna Secret to use. Defaults to empty, or the FAUNA_SECRET env variable.
  • +
  • http_client: An HTTPClient implementation. Defaults to a global HTTPXClient.
  • +
  • query_tags: Tags to associate with the query. See logging.
  • +
  • linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • +
  • max_contention_retries: The max number of times to retry the query if contention is encountered.
  • +
  • typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
  • +
  • additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
  • +
  • query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is DefaultQueryTimeout.
  • +
  • client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is DefaultClientBufferTimeout, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
  • +
  • http_read_timeout: Set HTTP Read timeout, default is DefaultHttpReadTimeout.
  • +
  • http_write_timeout: Set HTTP Write timeout, default is DefaultHttpWriteTimeout.
  • +
  • http_connect_timeout: Set HTTP Connect timeout, default is DefaultHttpConnectTimeout.
  • +
  • http_pool_timeout: Set HTTP Pool timeout, default is DefaultHttpPoolTimeout.
  • +
  • http_idle_timeout: Set HTTP Idle timeout, default is DefaultIdleConnectionTimeout.
  • +
  • max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
  • +
  • max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
  • +
+
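A minimal construction sketch using a handful of the parameters above; the secret shown is a placeholder (in practice it usually comes from the FAUNA_SECRET environment variable):

from datetime import timedelta
from fauna.client import Client

client = Client(
    secret="fn_placeholder_secret",        # placeholder; prefer the FAUNA_SECRET env variable
    query_timeout=timedelta(seconds=10),   # overrides DefaultQueryTimeout
    max_attempts=3,                        # attempts for retryable errors (429/502)
    max_backoff=20,                        # cap, in seconds, on an individual retry's backoff
)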
+ + +
+
+ +
+ + def + close(self): + + + +
+ +
203  def close(self):
+204    self._session.close()
+205    if self._session == fauna.global_http_client:
+206      fauna.global_http_client = None
+
+ + + + +
+
+ +
+ + def + set_last_txn_ts(self, txn_ts: int): + + + +
+ +
208  def set_last_txn_ts(self, txn_ts: int):
+209    """
+210        Set the last timestamp seen by this client.
+211        This has no effect if earlier than the stored timestamp.
+212
+213        .. WARNING:: This should be used only when coordinating timestamps across
+214        multiple clients. Moving the timestamp arbitrarily forward into
+215        the future will cause transactions to stall.
+216
+217        :param txn_ts: the new transaction time.
+218        """
+219    self._last_txn_ts.update_txn_time(txn_ts)
+
+ + +

Set the last timestamp seen by this client. This has no effect if earlier than the stored timestamp.

+ +

WARNING: This should be used only when coordinating timestamps across multiple clients. Moving the timestamp arbitrarily forward into the future will cause transactions to stall.

+ +
Parameters
+ +
    +
  • txn_ts: the new transaction time.
  • +
+
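A short sketch of the intended coordination pattern; client_a and client_b are hypothetical Client instances pointed at the same database:

# Carry the last-seen transaction time from one client to another so the
# second client's subsequent reads observe at least that point in time.
ts = client_a.get_last_txn_ts()
if ts is not None:
    client_b.set_last_txn_ts(ts)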
+ + +
+
+ +
+ + def + get_last_txn_ts(self) -> Optional[int]: + + + +
+ +
221  def get_last_txn_ts(self) -> Optional[int]:
+222    """
+223        Get the last timestamp seen by this client.
+224        :return:
+225        """
+226    return self._last_txn_ts.time
+
+ + +

Get the last timestamp seen by this client.

+ +
Returns
+
+ + +
+
+ +
+ + def + get_query_timeout(self) -> Optional[datetime.timedelta]: + + + +
+ +
228  def get_query_timeout(self) -> Optional[timedelta]:
+229    """
+230        Get the query timeout for all queries.
+231        """
+232    if self._query_timeout_ms is not None:
+233      return timedelta(milliseconds=self._query_timeout_ms)
+234    else:
+235      return None
+
+ + +

Get the query timeout for all queries.

+
+ + +
+
+ +
+ + def + paginate( self, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) -> QueryIterator: + + + +
+ +
237  def paginate(
+238      self,
+239      fql: Query,
+240      opts: Optional[QueryOptions] = None,
+241  ) -> "QueryIterator":
+242    """
+243        Run a query on Fauna and return an iterator of results. If the query
+244        returns a Page, the iterator will fetch additional Pages until the
+245        after token is null. Each call for a page will be retried with exponential
+246        backoff up to the max_attempts set in the client's retry policy in the
+247        event of a 429 or 502.
+248
+249        :param fql: A Query
+250        :param opts: (Optional) Query Options
+251
+252        :return: a :class:`QueryIterator`
+253
+254        :raises NetworkError: HTTP Request failed in transit
+255        :raises ProtocolError: HTTP error not from Fauna
+256        :raises ServiceError: Fauna returned an error
+257        :raises ValueError: Encoding and decoding errors
+258        :raises TypeError: Invalid param types
+259        """
+260
+261    if not isinstance(fql, Query):
+262      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+263                 f"Query by calling fauna.fql()"
+264      raise TypeError(err_msg)
+265
+266    return QueryIterator(self, fql, opts)
+
+ + +

Run a query on Fauna and return an iterator of results. If the query returns a Page, the iterator will fetch additional Pages until the after token is null. Each call for a page will be retried with exponential backoff up to the max_attempts set in the client's retry policy in the event of a 429 or 502. A usage sketch follows below.

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Returns
+ +
+

a QueryIterator

+
+ +
Raises
+ +
    +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
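A usage sketch, assuming a hypothetical Product collection; each iteration yields one page of results as a list:

from fauna import fql

for page in client.paginate(fql("Product.all()")):  # each page is a list of decoded values
    for product in page:
        print(product)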
+ + +
+
+ +
+ + def + query( self, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) -> fauna.encoding.wire_protocol.QuerySuccess: + + + +
+ +
268  def query(
+269      self,
+270      fql: Query,
+271      opts: Optional[QueryOptions] = None,
+272  ) -> QuerySuccess:
+273    """
+274        Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
+275        up to the max_backoff in the event of a 429.
+276
+277        :param fql: A Query
+278        :param opts: (Optional) Query Options
+279
+280        :return: a :class:`QuerySuccess`
+281
+282        :raises NetworkError: HTTP Request failed in transit
+283        :raises ProtocolError: HTTP error not from Fauna
+284        :raises ServiceError: Fauna returned an error
+285        :raises ValueError: Encoding and decoding errors
+286        :raises TypeError: Invalid param types
+287        """
+288
+289    if not isinstance(fql, Query):
+290      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+291                 f"Query by calling fauna.fql()"
+292      raise TypeError(err_msg)
+293
+294    try:
+295      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
+296    except Exception as e:
+297      raise ClientError("Failed to encode Query") from e
+298
+299    retryable = Retryable[QuerySuccess](
+300        self._max_attempts,
+301        self._max_backoff,
+302        self._query,
+303        "/query/1",
+304        fql=encoded_query,
+305        opts=opts,
+306    )
+307
+308    r = retryable.run()
+309    r.response.stats.attempts = r.attempts
+310    return r.response
+
+ + +

Run a query on Fauna. A query will be retried max_attempts times with exponential backoff up to the max_backoff in the event of a 429. A usage sketch follows below.

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Returns
+ +
+

a QuerySuccess

+
+ +
Raises
+ +
    +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
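A usage sketch with a hypothetical Product collection; the QuerySuccess returned by query() carries the decoded data and the query stats:

from fauna import fql
from fauna.errors import FaunaError

try:
    result = client.query(fql("Product.all().first()"))
    print(result.data)             # decoded value produced by the query
    print(result.stats.attempts)   # attempts made, including retries
except FaunaError as e:            # assumed base class for Fauna-reported errors
    print(e)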
+ + +
+
+ +
+ + def + stream( self, fql: Union[fauna.query.models.StreamToken, fauna.query.query_builder.Query], opts: StreamOptions = StreamOptions(max_attempts=None, max_backoff=None, start_ts=None, status_events=False)) -> StreamIterator: + + + +
+ +
399  def stream(
+400      self,
+401      fql: Union[StreamToken, Query],
+402      opts: StreamOptions = StreamOptions()
+403  ) -> "StreamIterator":
+404    """
+405        Opens a Stream in Fauna and returns an iterator that consumes Fauna events.
+406
+407        :param fql: A Query that returns a StreamToken or a StreamToken.
+408        :param opts: (Optional) Stream Options.
+409
+410        :return: a :class:`StreamIterator`
+411
+412        :raises NetworkError: HTTP Request failed in transit
+413        :raises ProtocolError: HTTP error not from Fauna
+414        :raises ServiceError: Fauna returned an error
+415        :raises ValueError: Encoding and decoding errors
+416        :raises TypeError: Invalid param types
+417        """
+418
+419    if isinstance(fql, Query):
+420      token = self.query(fql).data
+421    else:
+422      token = fql
+423
+424    if not isinstance(token, StreamToken):
+425      err_msg = f"'fql' must be a StreamToken, or a Query that returns a StreamToken but was a {type(token)}."
+426      raise TypeError(err_msg)
+427
+428    headers = self._headers.copy()
+429    headers[_Header.Format] = "tagged"
+430    headers[_Header.Authorization] = self._auth.bearer()
+431
+432    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
+433                          self._max_attempts, self._max_backoff, opts, token)
+
+ + +

Opens a Stream in Fauna and returns an iterator that consumes Fauna events. A usage sketch follows below.

+ +
Parameters
+ +
    +
  • fql: A Query that returns a StreamToken or a StreamToken.
  • +
  • opts: (Optional) Stream Options.
  • +
+ +
Returns
+ +
+

a StreamIterator

+
+ +
Raises
+ +
    +
  • NetworkError: HTTP Request failed in transit
  • +
  • ProtocolError: HTTP error not from Fauna
  • +
  • ServiceError: Fauna returned an error
  • +
  • ValueError: Encoding and decoding errors
  • +
  • TypeError: Invalid param types
  • +
+
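A usage sketch; the Product collection and its toStream() call are assumptions about the target database, not part of this page. The iterator can be used as a context manager so the underlying stream is closed on exit:

from fauna import fql

with client.stream(fql("Product.all().toStream()")) as events:
    for event in events:
        # each event is a decoded dict carrying at least "type" and "txn_ts"
        print(event["type"], event["txn_ts"])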
+ + +
+
+
+ +
+ + class + StreamIterator: + + + +
+ +
468class StreamIterator:
+469  """A class that mixes a ContextManager and an Iterator so we can detect retryable errors."""
+470
+471  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+472               endpoint: str, max_attempts: int, max_backoff: int,
+473               opts: StreamOptions, token: StreamToken):
+474    self._http_client = http_client
+475    self._headers = headers
+476    self._endpoint = endpoint
+477    self._max_attempts = max_attempts
+478    self._max_backoff = max_backoff
+479    self._opts = opts
+480    self._token = token
+481    self._stream = None
+482    self.last_ts = None
+483    self._ctx = self._create_stream()
+484
+485  def __enter__(self):
+486    return self
+487
+488  def __exit__(self, exc_type, exc_value, exc_traceback):
+489    if self._stream is not None:
+490      self._stream.close()
+491
+492    self._ctx.__exit__(exc_type, exc_value, exc_traceback)
+493    return False
+494
+495  def __iter__(self):
+496    return self
+497
+498  def __next__(self):
+499    if self._opts.max_attempts is not None:
+500      max_attempts = self._opts.max_attempts
+501    else:
+502      max_attempts = self._max_attempts
+503
+504    if self._opts.max_backoff is not None:
+505      max_backoff = self._opts.max_backoff
+506    else:
+507      max_backoff = self._max_backoff
+508
+509    retryable = Retryable[Any](max_attempts, max_backoff, self._next_element)
+510    return retryable.run().response
+511
+512  def _next_element(self):
+513    try:
+514      if self._stream is None:
+515        try:
+516          self._stream = self._ctx.__enter__()
+517        except Exception:
+518          self._retry_stream()
+519
+520      if self._stream is not None:
+521        event: Any = FaunaDecoder.decode(next(self._stream))
+522
+523        if event["type"] == "error":
+524          FaunaError.parse_error_and_throw(event, 400)
+525
+526        self.last_ts = event["txn_ts"]
+527
+528        if event["type"] == "start":
+529          return self._next_element()
+530
+531        if not self._opts.status_events and event["type"] == "status":
+532          return self._next_element()
+533
+534        return event
+535
+536      raise StopIteration
+537    except NetworkError:
+538      self._retry_stream()
+539
+540  def _retry_stream(self):
+541    if self._stream is not None:
+542      self._stream.close()
+543
+544    self._stream = None
+545
+546    try:
+547      self._ctx = self._create_stream()
+548    except Exception:
+549      pass
+550    raise RetryableFaunaException
+551
+552  def _create_stream(self):
+553    data: Dict[str, Any] = {"token": self._token.token}
+554    if self.last_ts is not None:
+555      data["start_ts"] = self.last_ts
+556    elif self._opts.start_ts is not None:
+557      data["start_ts"] = self._opts.start_ts
+558
+559    return self._http_client.stream(
+560        url=self._endpoint, headers=self._headers, data=data)
+561
+562  def close(self):
+563    if self._stream is not None:
+564      self._stream.close()
+
+ + +

A class that mixes a ContextManager and an Iterator so we can detect retryable errors.

+
+ + +
+ +
+ + StreamIterator( http_client: fauna.http.http_client.HTTPClient, headers: Dict[str, str], endpoint: str, max_attempts: int, max_backoff: int, opts: StreamOptions, token: fauna.query.models.StreamToken) + + + +
+ +
471  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
+472               endpoint: str, max_attempts: int, max_backoff: int,
+473               opts: StreamOptions, token: StreamToken):
+474    self._http_client = http_client
+475    self._headers = headers
+476    self._endpoint = endpoint
+477    self._max_attempts = max_attempts
+478    self._max_backoff = max_backoff
+479    self._opts = opts
+480    self._token = token
+481    self._stream = None
+482    self.last_ts = None
+483    self._ctx = self._create_stream()
+
+ + + + +
+
+
+ last_ts + + +
+ + + + +
+
+ +
+ + def + close(self): + + + +
+ +
562  def close(self):
+563    if self._stream is not None:
+564      self._stream.close()
+
+ + + + +
+
+
+ +
+ + class + QueryIterator: + + + +
+ +
567class QueryIterator:
+568  """A class to provide an iterator on top of Fauna queries."""
+569
+570  def __init__(self,
+571               client: Client,
+572               fql: Query,
+573               opts: Optional[QueryOptions] = None):
+574    """Initializes the QueryIterator
+575
+576        :param fql: A Query
+577        :param opts: (Optional) Query Options
+578
+579        :raises TypeError: Invalid param types
+580        """
+581    if not isinstance(client, Client):
+582      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+583                  f"Client by calling fauna.client.Client()"
+584      raise TypeError(err_msg)
+585
+586    if not isinstance(fql, Query):
+587      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+588                 f"Query by calling fauna.fql()"
+589      raise TypeError(err_msg)
+590
+591    self.client = client
+592    self.fql = fql
+593    self.opts = opts
+594
+595  def __iter__(self) -> Iterator:
+596    return self.iter()
+597
+598  def iter(self) -> Iterator:
+599    """
+600        A generator function that immediately fetches and yields the results of
+601        the stored query. Yields additional pages on subsequent iterations if
+602        they exist
+603        """
+604
+605    cursor = None
+606    initial_response = self.client.query(self.fql, self.opts)
+607
+608    if isinstance(initial_response.data, Page):
+609      cursor = initial_response.data.after
+610      yield initial_response.data.data
+611
+612      while cursor is not None:
+613        next_response = self.client.query(
+614            fql("Set.paginate(${after})", after=cursor), self.opts)
+615        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+616        #       so we will get back a plain object that might not have
+617        #       an after property.
+618        cursor = next_response.data.get("after")
+619        yield next_response.data.get("data")
+620
+621    else:
+622      yield [initial_response.data]
+623
+624  def flatten(self) -> Iterator:
+625    """
+626        A generator function that immediately fetches and yields the results of
+627        the stored query. Yields each item individually, rather than a whole
+628        Page at a time. Fetches additional pages as required if they exist.
+629        """
+630
+631    for page in self.iter():
+632      for item in page:
+633        yield item
+
+ + +

A class to provide an iterator on top of Fauna queries.

+
+ + +
+ +
+ + QueryIterator( client: Client, fql: fauna.query.query_builder.Query, opts: Optional[QueryOptions] = None) + + + +
+ +
570  def __init__(self,
+571               client: Client,
+572               fql: Query,
+573               opts: Optional[QueryOptions] = None):
+574    """Initializes the QueryIterator
+575
+576        :param fql: A Query
+577        :param opts: (Optional) Query Options
+578
+579        :raises TypeError: Invalid param types
+580        """
+581    if not isinstance(client, Client):
+582      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
+583                  f"Client by calling fauna.client.Client()"
+584      raise TypeError(err_msg)
+585
+586    if not isinstance(fql, Query):
+587      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+588                 f"Query by calling fauna.fql()"
+589      raise TypeError(err_msg)
+590
+591    self.client = client
+592    self.fql = fql
+593    self.opts = opts
+
+ + +

Initializes the QueryIterator

+ +
Parameters
+ +
    +
  • fql: A Query
  • +
  • opts: (Optional) Query Options
  • +
+ +
Raises
+ +
    +
  • TypeError: Invalid param types
  • +
+
+ + +
+
+
+ client + + +
+ + + + +
+
+
+ fql + + +
+ + + + +
+
+
+ opts + + +
+ + + + +
+
+ +
+ + def + iter(self) -> Iterator: + + + +
+ +
598  def iter(self) -> Iterator:
+599    """
+600        A generator function that immediately fetches and yields the results of
+601        the stored query. Yields additional pages on subsequent iterations if
+602        they exist
+603        """
+604
+605    cursor = None
+606    initial_response = self.client.query(self.fql, self.opts)
+607
+608    if isinstance(initial_response.data, Page):
+609      cursor = initial_response.data.after
+610      yield initial_response.data.data
+611
+612      while cursor is not None:
+613        next_response = self.client.query(
+614            fql("Set.paginate(${after})", after=cursor), self.opts)
+615        # TODO: `Set.paginate` does not yet return a `@set` tagged value
+616        #       so we will get back a plain object that might not have
+617        #       an after property.
+618        cursor = next_response.data.get("after")
+619        yield next_response.data.get("data")
+620
+621    else:
+622      yield [initial_response.data]
+
+ + +

A generator function that immediately fetches and yields the results of the stored query. Yields additional pages on subsequent iterations if they exist.

+
+ + +
+
+ +
+ + def + flatten(self) -> Iterator: + + + +
+ +
624  def flatten(self) -> Iterator:
+625    """
+626        A generator function that immediately fetches and yields the results of
+627        the stored query. Yields each item individually, rather than a whole
+628        Page at a time. Fetches additional pages as required if they exist.
+629        """
+630
+631    for page in self.iter():
+632      for item in page:
+633        yield item
+
+ + +

A generator function that immediately fetches and yields the results of the stored query. Yields each item individually, rather than a whole Page at a time. Fetches additional pages as required if they exist.

+
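A short flatten() sketch with a hypothetical Product collection; items are yielded one document at a time across pages:

from fauna import fql

for product in client.paginate(fql("Product.all()")).flatten():
    print(product)   # one decoded document at a time, across all pages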
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/client/endpoints.html b/2.1.0/api/fauna/client/endpoints.html new file mode 100644 index 00000000..1c4b564d --- /dev/null +++ b/2.1.0/api/fauna/client/endpoints.html @@ -0,0 +1,300 @@ + + + + + + + fauna.client.endpoints API documentation + + + + + + + + + +
+
+

+fauna.client.endpoints

+ + + + + + +
1class Endpoints:
+2  Default = "https://db.fauna.com"
+3  Local = "http://localhost:8443"
+
+ + +
+
+ +
+ + class + Endpoints: + + + +
+ +
2class Endpoints:
+3  Default = "https://db.fauna.com"
+4  Local = "http://localhost:8443"
+
+ + + + +
+
+ Default = +'https://db.fauna.com' + + +
+ + + + +
+
+
+ Local = +'http://localhost:8443' + + +
+ + + + +
+
+
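A small sketch of pointing the client at a local Fauna container rather than the default endpoint; the secret value is a placeholder:

from fauna.client import Client, Endpoints

client = Client(endpoint=Endpoints.Local, secret="secret")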
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/client/headers.html b/2.1.0/api/fauna/client/headers.html new file mode 100644 index 00000000..f13dd66e --- /dev/null +++ b/2.1.0/api/fauna/client/headers.html @@ -0,0 +1,487 @@ + + + + + + + fauna.client.headers API documentation + + + + + + + + + +
+
+

+fauna.client.headers

+ + + + + + +
  1import os
+  2import platform
+  3import sys
+  4from dataclasses import dataclass
+  5from typing import Callable
+  6
+  7from fauna import __version__
+  8
+  9
+ 10class Header:
+ 11  LastTxnTs = "X-Last-Txn-Ts"
+ 12  Linearized = "X-Linearized"
+ 13  MaxContentionRetries = "X-Max-Contention-Retries"
+ 14  QueryTimeoutMs = "X-Query-Timeout-Ms"
+ 15  Typecheck = "X-Typecheck"
+ 16  Tags = "X-Query-Tags"
+ 17  Traceparent = "Traceparent"
+ 18
+ 19
+ 20class _Header:
+ 21  AcceptEncoding = "Accept-Encoding"
+ 22  Authorization = "Authorization"
+ 23  ContentType = "Content-Type"
+ 24  Driver = "X-Driver"
+ 25  DriverEnv = "X-Driver-Env"
+ 26  Format = "X-Format"
+ 27
+ 28
+ 29class _Auth:
+ 30  """Creates an auth helper object"""
+ 31
+ 32  def bearer(self):
+ 33    return "Bearer {}".format(self.secret)
+ 34
+ 35  def __init__(self, secret):
+ 36    self.secret = secret
+ 37
+ 38  def __eq__(self, other):
+ 39    return self.secret == getattr(other, 'secret', None)
+ 40
+ 41  def __ne__(self, other):
+ 42    return not self == other
+ 43
+ 44
+ 45class _DriverEnvironment:
+ 46
+ 47  def __init__(self):
+ 48    self.pythonVersion = "{0}.{1}.{2}-{3}".format(*sys.version_info)
+ 49    self.driverVersion = __version__
+ 50    self.env = self._get_runtime_env()
+ 51    self.os = "{0}-{1}".format(platform.system(), platform.release())
+ 52
+ 53  @staticmethod
+ 54  def _get_runtime_env():
+ 55
+ 56    @dataclass
+ 57    class EnvChecker:
+ 58      name: str
+ 59      check: Callable[[], bool]
+ 60
+ 61    env: list[EnvChecker] = [
+ 62        EnvChecker(
+ 63            name="Netlify",
+ 64            check=lambda: "NETLIFY_IMAGES_CDN_DOMAIN" in os.environ,
+ 65        ),
+ 66        EnvChecker(
+ 67            name="Vercel",
+ 68            check=lambda: "VERCEL" in os.environ,
+ 69        ),
+ 70        EnvChecker(
+ 71            name="Heroku",
+ 72            check=lambda: "PATH" in \
+ 73                os.environ and ".heroku" in os.environ["PATH"],
+ 74        ),
+ 75        EnvChecker(
+ 76            name="AWS Lambda",
+ 77            check=lambda: "AWS_LAMBDA_FUNCTION_VERSION" in os.environ,
+ 78        ),
+ 79        EnvChecker(
+ 80            name="GCP Cloud Functions",
+ 81            check=lambda: "_" in \
+ 82                os.environ and "google" in os.environ["_"],
+ 83        ),
+ 84        EnvChecker(
+ 85            name="GCP Compute Instances",
+ 86            check=lambda: "GOOGLE_CLOUD_PROJECT" in os.environ,
+ 87        ),
+ 88        EnvChecker(
+ 89            name="Azure Cloud Functions",
+ 90            check=lambda: "WEBSITE_FUNCTIONS_AZUREMONITOR_CATEGORIES" in \
+ 91                os.environ,
+ 92        ),
+ 93        EnvChecker(
+ 94            name="Azure Compute",
+ 95            check=lambda: "ORYX_ENV_TYPE" in os.environ and \
+ 96                "WEBSITE_INSTANCE_ID" in os.environ and \
+ 97                os.environ["ORYX_ENV_TYPE"] == "AppService",
+ 98        ),
+ 99    ]
+100
+101    try:
+102      recognized = next(e for e in env if e.check())
+103      if recognized is not None:
+104        return recognized.name
+105    except:
+106      return "Unknown"
+107
+108  def __str__(self):
+109    return "driver=python-{0}; runtime=python-{1} env={2}; os={3}".format(
+110        self.driverVersion, self.pythonVersion, self.env, self.os).lower()
+
+ + +
+ +
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/client/retryable.html b/2.1.0/api/fauna/client/retryable.html new file mode 100644 index 00000000..0b2ccca3 --- /dev/null +++ b/2.1.0/api/fauna/client/retryable.html @@ -0,0 +1,666 @@ + + + + + + + fauna.client.retryable API documentation + + + + + + + + + +
+
+

+fauna.client.retryable

+ + + + + + +
 1import abc
+ 2from dataclasses import dataclass
+ 3from random import random
+ 4from time import sleep
+ 5from typing import Callable, Optional, TypeVar, Generic
+ 6
+ 7from fauna.errors import RetryableFaunaException
+ 8
+ 9
+10class RetryStrategy:
+11
+12  @abc.abstractmethod
+13  def wait(self) -> float:
+14    pass
+15
+16
+17class ExponentialBackoffStrategy(RetryStrategy):
+18
+19  def __init__(self, max_backoff: int):
+20    self._max_backoff = float(max_backoff)
+21    self._i = 0.0
+22
+23  def wait(self) -> float:
+24    """Returns the number of seconds to wait for the next call."""
+25    backoff = random() * (2.0**self._i)
+26    self._i += 1.0
+27    return min(backoff, self._max_backoff)
+28
+29
+30T = TypeVar('T')
+31
+32
+33@dataclass
+34class RetryableResponse(Generic[T]):
+35  attempts: int
+36  response: T
+37
+38
+39class Retryable(Generic[T]):
+40  """
+41    Retryable is a wrapper class that acts on a Callable that returns a T type.
+42    """
+43  _strategy: RetryStrategy
+44  _error: Optional[Exception]
+45
+46  def __init__(
+47      self,
+48      max_attempts: int,
+49      max_backoff: int,
+50      func: Callable[..., T],
+51      *args,
+52      **kwargs,
+53  ):
+54    self._max_attempts = max_attempts
+55    self._strategy = ExponentialBackoffStrategy(max_backoff)
+56    self._func = func
+57    self._args = args
+58    self._kwargs = kwargs
+59    self._error = None
+60
+61  def run(self) -> RetryableResponse[T]:
+62    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+63        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+64
+65        Returns the number of attempts and the response
+66        """
+67    attempt = 0
+68    while True:
+69      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+70      sleep(sleep_time)
+71
+72      try:
+73        attempt += 1
+74        qs = self._func(*self._args, **self._kwargs)
+75        return RetryableResponse[T](attempt, qs)
+76      except RetryableFaunaException as e:
+77        if attempt >= self._max_attempts:
+78          raise e
+
+ + +
+
+ +
+ + class + RetryStrategy: + + + +
+ +
11class RetryStrategy:
+12
+13  @abc.abstractmethod
+14  def wait(self) -> float:
+15    pass
+
+ + + + +
+ +
+
@abc.abstractmethod
+ + def + wait(self) -> float: + + + +
+ +
13  @abc.abstractmethod
+14  def wait(self) -> float:
+15    pass
+
+ + + + +
+
+
+ +
+ + class + ExponentialBackoffStrategy(RetryStrategy): + + + +
+ +
18class ExponentialBackoffStrategy(RetryStrategy):
+19
+20  def __init__(self, max_backoff: int):
+21    self._max_backoff = float(max_backoff)
+22    self._i = 0.0
+23
+24  def wait(self) -> float:
+25    """Returns the number of seconds to wait for the next call."""
+26    backoff = random() * (2.0**self._i)
+27    self._i += 1.0
+28    return min(backoff, self._max_backoff)
+
+ + + + +
+ +
+ + ExponentialBackoffStrategy(max_backoff: int) + + + +
+ +
20  def __init__(self, max_backoff: int):
+21    self._max_backoff = float(max_backoff)
+22    self._i = 0.0
+
+ + + + +
+
+ +
+ + def + wait(self) -> float: + + + +
+ +
24  def wait(self) -> float:
+25    """Returns the number of seconds to wait for the next call."""
+26    backoff = random() * (2.0**self._i)
+27    self._i += 1.0
+28    return min(backoff, self._max_backoff)
+
+ + +

Returns the number of seconds to wait for the next call.

+
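A worked sketch of the jittered backoff: the i-th call (counting from 0) draws uniformly from [0, 2**i) and the result is capped at max_backoff:

from fauna.client.retryable import ExponentialBackoffStrategy

strategy = ExponentialBackoffStrategy(max_backoff=20)
strategy.wait()   # somewhere in [0, 1)
strategy.wait()   # somewhere in [0, 2)
strategy.wait()   # somewhere in [0, 4)
# ... each result is additionally capped at 20 seconds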
+ + +
+
+
+ +
+
@dataclass
+ + class + RetryableResponse(typing.Generic[~T]): + + + +
+ +
34@dataclass
+35class RetryableResponse(Generic[T]):
+36  attempts: int
+37  response: T
+
+ + + + +
+
+ + RetryableResponse(attempts: int, response: ~T) + + +
+ + + + +
+
+
+ attempts: int + + +
+ + + + +
+
+
+ response: ~T + + +
+ + + + +
+
+
+ +
+ + class + Retryable(typing.Generic[~T]): + + + +
+ +
40class Retryable(Generic[T]):
+41  """
+42    Retryable is a wrapper class that acts on a Callable that returns a T type.
+43    """
+44  _strategy: RetryStrategy
+45  _error: Optional[Exception]
+46
+47  def __init__(
+48      self,
+49      max_attempts: int,
+50      max_backoff: int,
+51      func: Callable[..., T],
+52      *args,
+53      **kwargs,
+54  ):
+55    self._max_attempts = max_attempts
+56    self._strategy = ExponentialBackoffStrategy(max_backoff)
+57    self._func = func
+58    self._args = args
+59    self._kwargs = kwargs
+60    self._error = None
+61
+62  def run(self) -> RetryableResponse[T]:
+63    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+64        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+65
+66        Returns the number of attempts and the response
+67        """
+68    attempt = 0
+69    while True:
+70      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+71      sleep(sleep_time)
+72
+73      try:
+74        attempt += 1
+75        qs = self._func(*self._args, **self._kwargs)
+76        return RetryableResponse[T](attempt, qs)
+77      except RetryableFaunaException as e:
+78        if attempt >= self._max_attempts:
+79          raise e
+
+ + +

Retryable is a wrapper class that acts on a Callable that returns a T type.

+
+ + +
+ +
+ + Retryable( max_attempts: int, max_backoff: int, func: Callable[..., ~T], *args, **kwargs) + + + +
+ +
47  def __init__(
+48      self,
+49      max_attempts: int,
+50      max_backoff: int,
+51      func: Callable[..., T],
+52      *args,
+53      **kwargs,
+54  ):
+55    self._max_attempts = max_attempts
+56    self._strategy = ExponentialBackoffStrategy(max_backoff)
+57    self._func = func
+58    self._args = args
+59    self._kwargs = kwargs
+60    self._error = None
+
+ + + + +
+
+ +
+ + def + run(self) -> RetryableResponse[~T]: + + + +
+ +
62  def run(self) -> RetryableResponse[T]:
+63    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
+64        the thrown exception if max_attempts is reached or if a non-retryable is thrown.
+65
+66        Returns the number of attempts and the response
+67        """
+68    attempt = 0
+69    while True:
+70      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
+71      sleep(sleep_time)
+72
+73      try:
+74        attempt += 1
+75        qs = self._func(*self._args, **self._kwargs)
+76        return RetryableResponse[T](attempt, qs)
+77      except RetryableFaunaException as e:
+78        if attempt >= self._max_attempts:
+79          raise e
+
+ + +

Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates the thrown exception if max_attempts is reached or if a non-retryable exception is thrown.

+ +

Returns the number of attempts and the response

+
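A minimal sketch of wrapping an arbitrary callable; the function below is a placeholder for anything that may raise RetryableFaunaException:

from fauna.client.retryable import Retryable

def flaky_call() -> str:
    # Placeholder; a real callable would raise RetryableFaunaException on e.g. a 429.
    return "ok"

result = Retryable[str](3, 20, flaky_call).run()   # max_attempts=3, max_backoff=20
print(result.attempts, result.response)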
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/client/utils.html b/2.1.0/api/fauna/client/utils.html new file mode 100644 index 00000000..3d75f455 --- /dev/null +++ b/2.1.0/api/fauna/client/utils.html @@ -0,0 +1,488 @@ + + + + + + + fauna.client.utils API documentation + + + + + + + + + +
+
+

+fauna.client.utils

+ + + + + + +
 1import os
+ 2import threading
+ 3from typing import Generic, Callable, TypeVar, Optional
+ 4
+ 5from fauna.client.endpoints import Endpoints
+ 6from fauna.client.headers import Header
+ 7
+ 8
+ 9def _fancy_bool_from_str(val: str) -> bool:
+10  return val.lower() in ["1", "true", "yes", "y"]
+11
+12
+13class LastTxnTs(object):
+14  """Wraps tracking the last transaction time supplied from the database."""
+15
+16  def __init__(
+17      self,
+18      time: Optional[int] = None,
+19  ):
+20    self._lock: threading.Lock = threading.Lock()
+21    self._time: Optional[int] = time
+22
+23  @property
+24  def time(self):
+25    """Produces the last transaction time, or, None if not yet updated."""
+26    with self._lock:
+27      return self._time
+28
+29  @property
+30  def request_header(self):
+31    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+32        if one has not yet been set, the empty header dictionary."""
+33    t = self._time
+34    if t is None:
+35      return {}
+36    return {Header.LastTxnTs: str(t)}
+37
+38  def update_txn_time(self, new_txn_time: int):
+39    """Updates the internal transaction time.
+40        In order to maintain a monotonically-increasing value, `newTxnTime`
+41        is discarded if it is behind the current timestamp."""
+42    with self._lock:
+43      self._time = max(self._time or 0, new_txn_time)
+44
+45
+46T = TypeVar('T')
+47
+48
+49class _SettingFromEnviron(Generic[T]):
+50
+51  def __init__(
+52      self,
+53      var_name: str,
+54      default_value: str,
+55      adapt_from_str: Callable[[str], T],
+56  ):
+57    self.__var_name = var_name
+58    self.__default_value = default_value
+59    self.__adapt_from_str = adapt_from_str
+60
+61  def __call__(self) -> T:
+62    return self.__adapt_from_str(
+63        os.environ.get(
+64            self.__var_name,
+65            default=self.__default_value,
+66        ))
+67
+68
+69class _Environment:
+70  EnvFaunaEndpoint = _SettingFromEnviron(
+71      "FAUNA_ENDPOINT",
+72      Endpoints.Default,
+73      str,
+74  )
+75  """environment variable for Fauna Client HTTP endpoint"""
+76
+77  EnvFaunaSecret = _SettingFromEnviron(
+78      "FAUNA_SECRET",
+79      "",
+80      str,
+81  )
+82  """environment variable for Fauna Client authentication"""
+
+ + +
+
+ +
+ + class + LastTxnTs: + + + +
+ +
14class LastTxnTs(object):
+15  """Wraps tracking the last transaction time supplied from the database."""
+16
+17  def __init__(
+18      self,
+19      time: Optional[int] = None,
+20  ):
+21    self._lock: threading.Lock = threading.Lock()
+22    self._time: Optional[int] = time
+23
+24  @property
+25  def time(self):
+26    """Produces the last transaction time, or, None if not yet updated."""
+27    with self._lock:
+28      return self._time
+29
+30  @property
+31  def request_header(self):
+32    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+33        if one has not yet been set, the empty header dictionary."""
+34    t = self._time
+35    if t is None:
+36      return {}
+37    return {Header.LastTxnTs: str(t)}
+38
+39  def update_txn_time(self, new_txn_time: int):
+40    """Updates the internal transaction time.
+41        In order to maintain a monotonically-increasing value, `newTxnTime`
+42        is discarded if it is behind the current timestamp."""
+43    with self._lock:
+44      self._time = max(self._time or 0, new_txn_time)
+
+ + +

Wraps tracking the last transaction time supplied from the database.

+
+ + +
+ +
+ + LastTxnTs(time: Optional[int] = None) + + + +
+ +
17  def __init__(
+18      self,
+19      time: Optional[int] = None,
+20  ):
+21    self._lock: threading.Lock = threading.Lock()
+22    self._time: Optional[int] = time
+
+ + + + +
+
+ +
+ time + + + +
+ +
24  @property
+25  def time(self):
+26    """Produces the last transaction time, or, None if not yet updated."""
+27    with self._lock:
+28      return self._time
+
+ + +

Produces the last transaction time, or None if not yet updated.

+
+ + +
+
+ +
+ request_header + + + +
+ +
30  @property
+31  def request_header(self):
+32    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
+33        if one has not yet been set, the empty header dictionary."""
+34    t = self._time
+35    if t is None:
+36      return {}
+37    return {Header.LastTxnTs: str(t)}
+
+ + +

Produces a dictionary with a non-zero X-Last-Txn-Ts header; or, if one has not yet been set, the empty header dictionary.

+
+ + +
+
+ +
+ + def + update_txn_time(self, new_txn_time: int): + + + +
+ +
39  def update_txn_time(self, new_txn_time: int):
+40    """Updates the internal transaction time.
+41        In order to maintain a monotonically-increasing value, `newTxnTime`
+42        is discarded if it is behind the current timestamp."""
+43    with self._lock:
+44      self._time = max(self._time or 0, new_txn_time)
+
+ + +

Updates the internal transaction time. In order to maintain a monotonically-increasing value, newTxnTime is discarded if it is behind the current timestamp.

+
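A small sketch of the monotonic update and the header it produces; the timestamp values are placeholders:

from fauna.client.utils import LastTxnTs

ts = LastTxnTs()
ts.update_txn_time(1700000000000000)
ts.update_txn_time(1690000000000000)   # older value, discarded
print(ts.time)             # 1700000000000000
print(ts.request_header)   # {'X-Last-Txn-Ts': '1700000000000000'}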
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/encoding.html b/2.1.0/api/fauna/encoding.html new file mode 100644 index 00000000..3a6d5033 --- /dev/null +++ b/2.1.0/api/fauna/encoding.html @@ -0,0 +1,246 @@ + + + + + + + fauna.encoding API documentation + + + + + + + + + +
+
+

+fauna.encoding

+ + + + + + +
1from .decoder import FaunaDecoder
+2from .encoder import FaunaEncoder
+3from .wire_protocol import ConstraintFailure, QueryTags, QueryInfo, QueryStats, QuerySuccess
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/encoding/decoder.html b/2.1.0/api/fauna/encoding/decoder.html new file mode 100644 index 00000000..a4c228ee --- /dev/null +++ b/2.1.0/api/fauna/encoding/decoder.html @@ -0,0 +1,719 @@ + + + + + + + fauna.encoding.decoder API documentation + + + + + + + + + +
+
+

+fauna.encoding.decoder

+ + + + + + +
  1import base64
+  2from typing import Any, List, Union
+  3
+  4from iso8601 import parse_date
+  5
+  6from fauna.query.models import Module, DocumentReference, Document, NamedDocument, NamedDocumentReference, Page, \
+  7  NullDocument, StreamToken
+  8
+  9
+ 10class FaunaDecoder:
+ 11  """Supports the following types:
+ 12
+ 13     +--------------------+---------------+
+ 14     | Python             | Fauna         |
+ 15     +====================+===============+
+ 16     | dict               | object        |
+ 17     +--------------------+---------------+
+ 18     | list, tuple        | array         |
+ 19     +--------------------+---------------+
+ 20     | str                | string        |
+ 21     +--------------------+---------------+
+ 22     | int                | @int          |
+ 23     +--------------------+---------------+
+ 24     | int                | @long         |
+ 25     +--------------------+---------------+
+ 26     | float              | @double       |
+ 27     +--------------------+---------------+
+ 28     | datetime.datetime  | @time         |
+ 29     +--------------------+---------------+
+ 30     | datetime.date      | @date         |
+ 31     +--------------------+---------------+
+ 32     | True               | true          |
+ 33     +--------------------+---------------+
+ 34     | False              | false         |
+ 35     +--------------------+---------------+
+ 36     | None               | null          |
+ 37     +--------------------+---------------+
+ 38     | bytearray          | @bytes        |
+ 39     +--------------------+---------------+
+ 40     | *DocumentReference | @ref          |
+ 41     +--------------------+---------------+
+ 42     | *Document          | @doc          |
+ 43     +--------------------+---------------+
+ 44     | Module             | @mod          |
+ 45     +--------------------+---------------+
+ 46     | Page               | @set          |
+ 47     +--------------------+---------------+
+ 48     | StreamToken        | @stream       |
+ 49     +--------------------+---------------+
+ 50
+ 51     """
+ 52
+ 53  @staticmethod
+ 54  def decode(obj: Any):
+ 55    """Decodes supported objects from the tagged format into untagged.
+ 56
+ 57        Examples:
+ 58            - { "@int": "100" } decodes to 100 of type int
+ 59            - { "@double": "100" } decodes to 100.0 of type float
+ 60            - { "@long": "100" } decodes to 100 of type int
+ 61            - { "@time": "..." } decodes to a datetime
+ 62            - { "@date": "..." } decodes to a date
+ 63            - { "@doc": ... } decodes to a Document or NamedDocument
+ 64            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+ 65            - { "@mod": ... } decodes to a Module
+ 66            - { "@set": ... } decodes to a Page
+ 67            - { "@stream": ... } decodes to a StreamToken
+ 68            - { "@bytes": ... } decodes to a bytearray
+ 69
+ 70        :param obj: the object to decode
+ 71        """
+ 72    return FaunaDecoder._decode(obj)
+ 73
+ 74  @staticmethod
+ 75  def _decode(o: Any, escaped: bool = False):
+ 76    if isinstance(o, (str, bool, int, float)):
+ 77      return o
+ 78    elif isinstance(o, list):
+ 79      return FaunaDecoder._decode_list(o)
+ 80    elif isinstance(o, dict):
+ 81      return FaunaDecoder._decode_dict(o, escaped)
+ 82
+ 83  @staticmethod
+ 84  def _decode_list(lst: List):
+ 85    return [FaunaDecoder._decode(i) for i in lst]
+ 86
+ 87  @staticmethod
+ 88  def _decode_dict(dct: dict, escaped: bool):
+ 89    keys = dct.keys()
+ 90
+ 91    # If escaped, everything is user-specified
+ 92    if escaped:
+ 93      return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+ 94
+ 95    if len(keys) == 1:
+ 96      if "@int" in keys:
+ 97        return int(dct["@int"])
+ 98      if "@long" in keys:
+ 99        return int(dct["@long"])
+100      if "@double" in dct:
+101        return float(dct["@double"])
+102      if "@object" in dct:
+103        return FaunaDecoder._decode(dct["@object"], True)
+104      if "@mod" in dct:
+105        return Module(dct["@mod"])
+106      if "@time" in dct:
+107        return parse_date(dct["@time"])
+108      if "@date" in dct:
+109        return parse_date(dct["@date"]).date()
+110      if "@bytes" in dct:
+111        bts = base64.b64decode(dct["@bytes"])
+112        return bytearray(bts)
+113      if "@doc" in dct:
+114        value = dct["@doc"]
+115        if isinstance(value, str):
+116          # Not distinguishing between DocumentReference and NamedDocumentReference because this shouldn't
+117          # be an issue much longer
+118          return DocumentReference.from_string(value)
+119
+120        contents = FaunaDecoder._decode(value)
+121
+122        if "id" in contents and "coll" in contents and "ts" in contents:
+123          doc_id = contents.pop("id")
+124          doc_coll = contents.pop("coll")
+125          doc_ts = contents.pop("ts")
+126
+127          return Document(
+128              id=doc_id,
+129              coll=doc_coll,
+130              ts=doc_ts,
+131              data=contents,
+132          )
+133        elif "name" in contents and "coll" in contents and "ts" in contents:
+134          doc_name = contents.pop("name")
+135          doc_coll = contents.pop("coll")
+136          doc_ts = contents.pop("ts")
+137
+138          return NamedDocument(
+139              name=doc_name,
+140              coll=doc_coll,
+141              ts=doc_ts,
+142              data=contents,
+143          )
+144        else:
+145          # Unsupported document reference. Return the unwrapped value to futureproof.
+146          return contents
+147
+148      if "@ref" in dct:
+149        value = dct["@ref"]
+150        if "id" not in value and "name" not in value:
+151          # Unsupported document reference. Return the unwrapped value to futureproof.
+152          return value
+153
+154        col = FaunaDecoder._decode(value["coll"])
+155        doc_ref: Union[DocumentReference, NamedDocumentReference]
+156
+157        if "id" in value:
+158          doc_ref = DocumentReference(col, value["id"])
+159        else:
+160          doc_ref = NamedDocumentReference(col, value["name"])
+161
+162        if "exists" in value and not value["exists"]:
+163          cause = value["cause"] if "cause" in value else None
+164          return NullDocument(doc_ref, cause)
+165
+166        return doc_ref
+167
+168      if "@set" in dct:
+169        value = dct["@set"]
+170        if isinstance(value, str):
+171          return Page(after=value)
+172
+173        after = value["after"] if "after" in value else None
+174        data = FaunaDecoder._decode(value["data"]) if "data" in value else None
+175
+176        return Page(data=data, after=after)
+177
+178      if "@stream" in dct:
+179        return StreamToken(dct["@stream"])
+180
+181    return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+
+ + +
+
+ +
+ + class + FaunaDecoder: + + + +
+ +
 11class FaunaDecoder:
+ 12  """Supports the following types:
+ 13
+ 14     +--------------------+---------------+
+ 15     | Python             | Fauna         |
+ 16     +====================+===============+
+ 17     | dict               | object        |
+ 18     +--------------------+---------------+
+ 19     | list, tuple        | array         |
+ 20     +--------------------+---------------+
+ 21     | str                | string        |
+ 22     +--------------------+---------------+
+ 23     | int                | @int          |
+ 24     +--------------------+---------------+
+ 25     | int                | @long         |
+ 26     +--------------------+---------------+
+ 27     | float              | @double       |
+ 28     +--------------------+---------------+
+ 29     | datetime.datetime  | @time         |
+ 30     +--------------------+---------------+
+ 31     | datetime.date      | @date         |
+ 32     +--------------------+---------------+
+ 33     | True               | true          |
+ 34     +--------------------+---------------+
+ 35     | False              | false         |
+ 36     +--------------------+---------------+
+ 37     | None               | null          |
+ 38     +--------------------+---------------+
+ 39     | bytearray          | @bytes        |
+ 40     +--------------------+---------------+
+ 41     | *DocumentReference | @ref          |
+ 42     +--------------------+---------------+
+ 43     | *Document          | @doc          |
+ 44     +--------------------+---------------+
+ 45     | Module             | @mod          |
+ 46     +--------------------+---------------+
+ 47     | Page               | @set          |
+ 48     +--------------------+---------------+
+ 49     | StreamToken        | @stream       |
+ 50     +--------------------+---------------+
+ 51
+ 52     """
+ 53
+ 54  @staticmethod
+ 55  def decode(obj: Any):
+ 56    """Decodes supported objects from the tagged format into untagged.
+ 57
+ 58        Examples:
+ 59            - { "@int": "100" } decodes to 100 of type int
+ 60            - { "@double": "100" } decodes to 100.0 of type float
+ 61            - { "@long": "100" } decodes to 100 of type int
+ 62            - { "@time": "..." } decodes to a datetime
+ 63            - { "@date": "..." } decodes to a date
+ 64            - { "@doc": ... } decodes to a Document or NamedDocument
+ 65            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+ 66            - { "@mod": ... } decodes to a Module
+ 67            - { "@set": ... } decodes to a Page
+ 68            - { "@stream": ... } decodes to a StreamToken
+ 69            - { "@bytes": ... } decodes to a bytearray
+ 70
+ 71        :param obj: the object to decode
+ 72        """
+ 73    return FaunaDecoder._decode(obj)
+ 74
+ 75  @staticmethod
+ 76  def _decode(o: Any, escaped: bool = False):
+ 77    if isinstance(o, (str, bool, int, float)):
+ 78      return o
+ 79    elif isinstance(o, list):
+ 80      return FaunaDecoder._decode_list(o)
+ 81    elif isinstance(o, dict):
+ 82      return FaunaDecoder._decode_dict(o, escaped)
+ 83
+ 84  @staticmethod
+ 85  def _decode_list(lst: List):
+ 86    return [FaunaDecoder._decode(i) for i in lst]
+ 87
+ 88  @staticmethod
+ 89  def _decode_dict(dct: dict, escaped: bool):
+ 90    keys = dct.keys()
+ 91
+ 92    # If escaped, everything is user-specified
+ 93    if escaped:
+ 94      return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+ 95
+ 96    if len(keys) == 1:
+ 97      if "@int" in keys:
+ 98        return int(dct["@int"])
+ 99      if "@long" in keys:
+100        return int(dct["@long"])
+101      if "@double" in dct:
+102        return float(dct["@double"])
+103      if "@object" in dct:
+104        return FaunaDecoder._decode(dct["@object"], True)
+105      if "@mod" in dct:
+106        return Module(dct["@mod"])
+107      if "@time" in dct:
+108        return parse_date(dct["@time"])
+109      if "@date" in dct:
+110        return parse_date(dct["@date"]).date()
+111      if "@bytes" in dct:
+112        bts = base64.b64decode(dct["@bytes"])
+113        return bytearray(bts)
+114      if "@doc" in dct:
+115        value = dct["@doc"]
+116        if isinstance(value, str):
+117          # Not distinguishing between DocumentReference and NamedDocumentReference because this shouldn't
+118          # be an issue much longer
+119          return DocumentReference.from_string(value)
+120
+121        contents = FaunaDecoder._decode(value)
+122
+123        if "id" in contents and "coll" in contents and "ts" in contents:
+124          doc_id = contents.pop("id")
+125          doc_coll = contents.pop("coll")
+126          doc_ts = contents.pop("ts")
+127
+128          return Document(
+129              id=doc_id,
+130              coll=doc_coll,
+131              ts=doc_ts,
+132              data=contents,
+133          )
+134        elif "name" in contents and "coll" in contents and "ts" in contents:
+135          doc_name = contents.pop("name")
+136          doc_coll = contents.pop("coll")
+137          doc_ts = contents.pop("ts")
+138
+139          return NamedDocument(
+140              name=doc_name,
+141              coll=doc_coll,
+142              ts=doc_ts,
+143              data=contents,
+144          )
+145        else:
+146          # Unsupported document reference. Return the unwrapped value to futureproof.
+147          return contents
+148
+149      if "@ref" in dct:
+150        value = dct["@ref"]
+151        if "id" not in value and "name" not in value:
+152          # Unsupported document reference. Return the unwrapped value to futureproof.
+153          return value
+154
+155        col = FaunaDecoder._decode(value["coll"])
+156        doc_ref: Union[DocumentReference, NamedDocumentReference]
+157
+158        if "id" in value:
+159          doc_ref = DocumentReference(col, value["id"])
+160        else:
+161          doc_ref = NamedDocumentReference(col, value["name"])
+162
+163        if "exists" in value and not value["exists"]:
+164          cause = value["cause"] if "cause" in value else None
+165          return NullDocument(doc_ref, cause)
+166
+167        return doc_ref
+168
+169      if "@set" in dct:
+170        value = dct["@set"]
+171        if isinstance(value, str):
+172          return Page(after=value)
+173
+174        after = value["after"] if "after" in value else None
+175        data = FaunaDecoder._decode(value["data"]) if "data" in value else None
+176
+177        return Page(data=data, after=after)
+178
+179      if "@stream" in dct:
+180        return StreamToken(dct["@stream"])
+181
+182    return {k: FaunaDecoder._decode(v) for k, v in dct.items()}
+
+ + +

Supports the following types:

+ +

+--------------------+---------------+
| Python             | Fauna         |
+====================+===============+
| dict               | object        |
+--------------------+---------------+
| list, tuple        | array         |
+--------------------+---------------+
| str                | string        |
+--------------------+---------------+
| int                | @int          |
+--------------------+---------------+
| int                | @long         |
+--------------------+---------------+
| float              | @double       |
+--------------------+---------------+
| datetime.datetime  | @time         |
+--------------------+---------------+
| datetime.date      | @date         |
+--------------------+---------------+
| True               | true          |
+--------------------+---------------+
| False              | false         |
+--------------------+---------------+
| None               | null          |
+--------------------+---------------+
| bytearray          | @bytes        |
+--------------------+---------------+
| *DocumentReference | @ref          |
+--------------------+---------------+
| *Document          | @doc          |
+--------------------+---------------+
| Module             | @mod          |
+--------------------+---------------+
| Page               | @set          |
+--------------------+---------------+
| StreamToken        | @stream       |
+--------------------+---------------+

+
+ + +
+ +
+
@staticmethod
+ + def + decode(obj: Any): + + + +
+ +
54  @staticmethod
+55  def decode(obj: Any):
+56    """Decodes supported objects from the tagged format into untagged.
+57
+58        Examples:
+59            - { "@int": "100" } decodes to 100 of type int
+60            - { "@double": "100" } decodes to 100.0 of type float
+61            - { "@long": "100" } decodes to 100 of type int
+62            - { "@time": "..." } decodes to a datetime
+63            - { "@date": "..." } decodes to a date
+64            - { "@doc": ... } decodes to a Document or NamedDocument
+65            - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
+66            - { "@mod": ... } decodes to a Module
+67            - { "@set": ... } decodes to a Page
+68            - { "@stream": ... } decodes to a StreamToken
+69            - { "@bytes": ... } decodes to a bytearray
+70
+71        :param obj: the object to decode
+72        """
+73    return FaunaDecoder._decode(obj)
+
+ + +

Decodes supported objects from the tagged format into untagged.

+ +

Examples:
    - { "@int": "100" } decodes to 100 of type int
    - { "@double": "100" } decodes to 100.0 of type float
    - { "@long": "100" } decodes to 100 of type int
    - { "@time": "..." } decodes to a datetime
    - { "@date": "..." } decodes to a date
    - { "@doc": ... } decodes to a Document or NamedDocument
    - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
    - { "@mod": ... } decodes to a Module
    - { "@set": ... } decodes to a Page
    - { "@stream": ... } decodes to a StreamToken
    - { "@bytes": ... } decodes to a bytearray

+ +
Parameters
+ +
    +
  • obj: the object to decode
  • +
+
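A short decoding sketch; the tagged payload below is a hand-written stand-in for what the wire protocol returns:

from fauna.encoding import FaunaDecoder

tagged = {
    "name": "pizza",
    "price": {"@double": "9.99"},
    "quantity": {"@int": "3"},
}
print(FaunaDecoder.decode(tagged))
# {'name': 'pizza', 'price': 9.99, 'quantity': 3}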
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/encoding/encoder.html b/2.1.0/api/fauna/encoding/encoder.html new file mode 100644 index 00000000..713597ac --- /dev/null +++ b/2.1.0/api/fauna/encoding/encoder.html @@ -0,0 +1,1222 @@ + + + + + + + fauna.encoding.encoder API documentation + + + + + + + + + +
+
+

+fauna.encoding.encoder

+ + + + + + +
  1import base64
+  2from datetime import datetime, date
+  3from typing import Any, Optional, List, Union
+  4
+  5from fauna.query.models import DocumentReference, Module, Document, NamedDocument, NamedDocumentReference, NullDocument, \
+  6  StreamToken
+  7from fauna.query.query_builder import Query, Fragment, LiteralFragment, ValueFragment
+  8
+  9_RESERVED_TAGS = [
+ 10    "@date",
+ 11    "@doc",
+ 12    "@double",
+ 13    "@int",
+ 14    "@long",
+ 15    "@mod",
+ 16    "@object",
+ 17    "@ref",
+ 18    "@set",
+ 19    "@time",
+ 20]
+ 21
+ 22
+ 23class FaunaEncoder:
+ 24  """Supports the following types:
+ 25
+ 26    +-------------------------------+---------------+
+ 27    | Python                        | Fauna Tags    |
+ 28    +===============================+===============+
+ 29    | dict                          | @object       |
+ 30    +-------------------------------+---------------+
+ 31    | list, tuple                   | array         |
+ 32    +-------------------------------+---------------+
+ 33    | str                           | string        |
+ 34    +-------------------------------+---------------+
+ 35    | int 32-bit signed             | @int          |
+ 36    +-------------------------------+---------------+
+ 37    | int 64-bit signed             | @long         |
+ 38    +-------------------------------+---------------+
+ 39    | float                         | @double       |
+ 40    +-------------------------------+---------------+
+ 41    | datetime.datetime             | @time         |
+ 42    +-------------------------------+---------------+
+ 43    | datetime.date                 | @date         |
+ 44    +-------------------------------+---------------+
+ 45    | True                          | True          |
+ 46    +-------------------------------+---------------+
+ 47    | False                         | False         |
+ 48    +-------------------------------+---------------+
+ 49    | None                          | None          |
+ 50    +-------------------------------+---------------+
+ 51    | bytes / bytearray             | @bytes        |
+ 52    +-------------------------------+---------------+
+ 53    | *Document                     | @ref          |
+ 54    +-------------------------------+---------------+
+ 55    | *DocumentReference            | @ref          |
+ 56    +-------------------------------+---------------+
+ 57    | Module                        | @mod          |
+ 58    +-------------------------------+---------------+
+ 59    | Query                         | fql           |
+ 60    +-------------------------------+---------------+
+ 61    | ValueFragment                 | value         |
+ 62    +-------------------------------+---------------+
+ 63    | TemplateFragment              | string        |
+ 64    +-------------------------------+---------------+
+ 65    | StreamToken                   | string        |
+ 66    +-------------------------------+---------------+
+ 67
+ 68    """
+ 69
+ 70  @staticmethod
+ 71  def encode(obj: Any) -> Any:
+ 72    """Encodes supported objects into the tagged format.
+ 73
+ 74        Examples:
+ 75            - Up to 32-bit ints encode to { "@int": "..." }
+ 76            - Up to 64-bit ints encode to { "@long": "..." }
+ 77            - Floats encode to { "@double": "..." }
+ 78            - datetime encodes to { "@time": "..." }
+ 79            - date encodes to { "@date": "..." }
+ 80            - DocumentReference encodes to { "@doc": "..." }
+ 81            - Module encodes to { "@mod": "..." }
+ 82            - Query encodes to { "fql": [...] }
+ 83            - ValueFragment encodes to { "value": <encoded_val> }
+ 84            - LiteralFragment encodes to a string
+ 85            - StreamToken encodes to a string
+ 86
+ 87        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+ 88        :param obj: the object to encode
+ 89        """
+ 90    return FaunaEncoder._encode(obj)
+ 91
+ 92  @staticmethod
+ 93  def from_int(obj: int):
+ 94    if -2**31 <= obj <= 2**31 - 1:
+ 95      return {"@int": repr(obj)}
+ 96    elif -2**63 <= obj <= 2**63 - 1:
+ 97      return {"@long": repr(obj)}
+ 98    else:
+ 99      raise ValueError("Precision loss when converting int to Fauna type")
+100
+101  @staticmethod
+102  def from_bool(obj: bool):
+103    return obj
+104
+105  @staticmethod
+106  def from_float(obj: float):
+107    return {"@double": repr(obj)}
+108
+109  @staticmethod
+110  def from_str(obj: str):
+111    return obj
+112
+113  @staticmethod
+114  def from_datetime(obj: datetime):
+115    if obj.utcoffset() is None:
+116      raise ValueError("datetimes must be timezone-aware")
+117
+118    return {"@time": obj.isoformat(sep="T")}
+119
+120  @staticmethod
+121  def from_date(obj: date):
+122    return {"@date": obj.isoformat()}
+123
+124  @staticmethod
+125  def from_bytes(obj: Union[bytearray, bytes]):
+126    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+127
+128  @staticmethod
+129  def from_doc_ref(obj: DocumentReference):
+130    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+131
+132  @staticmethod
+133  def from_named_doc_ref(obj: NamedDocumentReference):
+134    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+135
+136  @staticmethod
+137  def from_mod(obj: Module):
+138    return {"@mod": obj.name}
+139
+140  @staticmethod
+141  def from_dict(obj: Any):
+142    return {"@object": obj}
+143
+144  @staticmethod
+145  def from_none():
+146    return None
+147
+148  @staticmethod
+149  def from_fragment(obj: Fragment):
+150    if isinstance(obj, LiteralFragment):
+151      return obj.get()
+152    elif isinstance(obj, ValueFragment):
+153      v = obj.get()
+154      if isinstance(v, Query):
+155        return FaunaEncoder.from_query_interpolation_builder(v)
+156      else:
+157        return {"value": FaunaEncoder.encode(v)}
+158    else:
+159      raise ValueError(f"Unknown fragment type: {type(obj)}")
+160
+161  @staticmethod
+162  def from_query_interpolation_builder(obj: Query):
+163    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+164
+165  @staticmethod
+166  def from_streamtoken(obj: StreamToken):
+167    return {"@stream": obj.token}
+168
+169  @staticmethod
+170  def _encode(o: Any, _markers: Optional[List] = None):
+171    if _markers is None:
+172      _markers = []
+173
+174    if isinstance(o, str):
+175      return FaunaEncoder.from_str(o)
+176    elif o is None:
+177      return FaunaEncoder.from_none()
+178    elif o is True:
+179      return FaunaEncoder.from_bool(o)
+180    elif o is False:
+181      return FaunaEncoder.from_bool(o)
+182    elif isinstance(o, int):
+183      return FaunaEncoder.from_int(o)
+184    elif isinstance(o, float):
+185      return FaunaEncoder.from_float(o)
+186    elif isinstance(o, Module):
+187      return FaunaEncoder.from_mod(o)
+188    elif isinstance(o, DocumentReference):
+189      return FaunaEncoder.from_doc_ref(o)
+190    elif isinstance(o, NamedDocumentReference):
+191      return FaunaEncoder.from_named_doc_ref(o)
+192    elif isinstance(o, datetime):
+193      return FaunaEncoder.from_datetime(o)
+194    elif isinstance(o, date):
+195      return FaunaEncoder.from_date(o)
+196    elif isinstance(o, bytearray) or isinstance(o, bytes):
+197      return FaunaEncoder.from_bytes(o)
+198    elif isinstance(o, Document):
+199      return FaunaEncoder.from_doc_ref(DocumentReference(o.coll, o.id))
+200    elif isinstance(o, NamedDocument):
+201      return FaunaEncoder.from_named_doc_ref(
+202          NamedDocumentReference(o.coll, o.name))
+203    elif isinstance(o, NullDocument):
+204      return FaunaEncoder.encode(o.ref)
+205    elif isinstance(o, (list, tuple)):
+206      return FaunaEncoder._encode_list(o, _markers)
+207    elif isinstance(o, dict):
+208      return FaunaEncoder._encode_dict(o, _markers)
+209    elif isinstance(o, Query):
+210      return FaunaEncoder.from_query_interpolation_builder(o)
+211    elif isinstance(o, StreamToken):
+212      return FaunaEncoder.from_streamtoken(o)
+213    else:
+214      raise ValueError(f"Object {o} of type {type(o)} cannot be encoded")
+215
+216  @staticmethod
+217  def _encode_list(lst, markers):
+218    _id = id(lst)
+219    if _id in markers:
+220      raise ValueError("Circular reference detected")
+221
+222    markers.append(id(lst))
+223    res = [FaunaEncoder._encode(elem, markers) for elem in lst]
+224    markers.pop()
+225    return res
+226
+227  @staticmethod
+228  def _encode_dict(dct, markers):
+229    _id = id(dct)
+230    if _id in markers:
+231      raise ValueError("Circular reference detected")
+232
+233    markers.append(id(dct))
+234    if any(i in _RESERVED_TAGS for i in dct.keys()):
+235      res = {
+236          "@object": {
+237              k: FaunaEncoder._encode(v, markers) for k, v in dct.items()
+238          }
+239      }
+240      markers.pop()
+241      return res
+242    else:
+243      res = {k: FaunaEncoder._encode(v, markers) for k, v in dct.items()}
+244      markers.pop()
+245      return res
+
+ + +
+
+ +
+ + class + FaunaEncoder: + + + +
+ +
 24class FaunaEncoder:
+ 25  """Supports the following types:
+ 26
+ 27    +-------------------------------+---------------+
+ 28    | Python                        | Fauna Tags    |
+ 29    +===============================+===============+
+ 30    | dict                          | @object       |
+ 31    +-------------------------------+---------------+
+ 32    | list, tuple                   | array         |
+ 33    +-------------------------------+---------------+
+ 34    | str                           | string        |
+ 35    +-------------------------------+---------------+
+ 36    | int 32-bit signed             | @int          |
+ 37    +-------------------------------+---------------+
+ 38    | int 64-bit signed             | @long         |
+ 39    +-------------------------------+---------------+
+ 40    | float                         | @double       |
+ 41    +-------------------------------+---------------+
+ 42    | datetime.datetime             | @time         |
+ 43    +-------------------------------+---------------+
+ 44    | datetime.date                 | @date         |
+ 45    +-------------------------------+---------------+
+ 46    | True                          | True          |
+ 47    +-------------------------------+---------------+
+ 48    | False                         | False         |
+ 49    +-------------------------------+---------------+
+ 50    | None                          | None          |
+ 51    +-------------------------------+---------------+
+ 52    | bytes / bytearray             | @bytes        |
+ 53    +-------------------------------+---------------+
+ 54    | *Document                     | @ref          |
+ 55    +-------------------------------+---------------+
+ 56    | *DocumentReference            | @ref          |
+ 57    +-------------------------------+---------------+
+ 58    | Module                        | @mod          |
+ 59    +-------------------------------+---------------+
+ 60    | Query                         | fql           |
+ 61    +-------------------------------+---------------+
+ 62    | ValueFragment                 | value         |
+ 63    +-------------------------------+---------------+
+ 64    | TemplateFragment              | string        |
+ 65    +-------------------------------+---------------+
+ 66    | StreamToken                   | string        |
+ 67    +-------------------------------+---------------+
+ 68
+ 69    """
+ 70
+ 71  @staticmethod
+ 72  def encode(obj: Any) -> Any:
+ 73    """Encodes supported objects into the tagged format.
+ 74
+ 75        Examples:
+ 76            - Up to 32-bit ints encode to { "@int": "..." }
+ 77            - Up to 64-bit ints encode to { "@long": "..." }
+ 78            - Floats encode to { "@double": "..." }
+ 79            - datetime encodes to { "@time": "..." }
+ 80            - date encodes to { "@date": "..." }
+ 81            - DocumentReference encodes to { "@doc": "..." }
+ 82            - Module encodes to { "@mod": "..." }
+ 83            - Query encodes to { "fql": [...] }
+ 84            - ValueFragment encodes to { "value": <encoded_val> }
+ 85            - LiteralFragment encodes to a string
+ 86            - StreamToken encodes to a string
+ 87
+ 88        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+ 89        :param obj: the object to encode
+ 90        """
+ 91    return FaunaEncoder._encode(obj)
+ 92
+ 93  @staticmethod
+ 94  def from_int(obj: int):
+ 95    if -2**31 <= obj <= 2**31 - 1:
+ 96      return {"@int": repr(obj)}
+ 97    elif -2**63 <= obj <= 2**63 - 1:
+ 98      return {"@long": repr(obj)}
+ 99    else:
+100      raise ValueError("Precision loss when converting int to Fauna type")
+101
+102  @staticmethod
+103  def from_bool(obj: bool):
+104    return obj
+105
+106  @staticmethod
+107  def from_float(obj: float):
+108    return {"@double": repr(obj)}
+109
+110  @staticmethod
+111  def from_str(obj: str):
+112    return obj
+113
+114  @staticmethod
+115  def from_datetime(obj: datetime):
+116    if obj.utcoffset() is None:
+117      raise ValueError("datetimes must be timezone-aware")
+118
+119    return {"@time": obj.isoformat(sep="T")}
+120
+121  @staticmethod
+122  def from_date(obj: date):
+123    return {"@date": obj.isoformat()}
+124
+125  @staticmethod
+126  def from_bytes(obj: Union[bytearray, bytes]):
+127    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+128
+129  @staticmethod
+130  def from_doc_ref(obj: DocumentReference):
+131    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+132
+133  @staticmethod
+134  def from_named_doc_ref(obj: NamedDocumentReference):
+135    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+136
+137  @staticmethod
+138  def from_mod(obj: Module):
+139    return {"@mod": obj.name}
+140
+141  @staticmethod
+142  def from_dict(obj: Any):
+143    return {"@object": obj}
+144
+145  @staticmethod
+146  def from_none():
+147    return None
+148
+149  @staticmethod
+150  def from_fragment(obj: Fragment):
+151    if isinstance(obj, LiteralFragment):
+152      return obj.get()
+153    elif isinstance(obj, ValueFragment):
+154      v = obj.get()
+155      if isinstance(v, Query):
+156        return FaunaEncoder.from_query_interpolation_builder(v)
+157      else:
+158        return {"value": FaunaEncoder.encode(v)}
+159    else:
+160      raise ValueError(f"Unknown fragment type: {type(obj)}")
+161
+162  @staticmethod
+163  def from_query_interpolation_builder(obj: Query):
+164    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+165
+166  @staticmethod
+167  def from_streamtoken(obj: StreamToken):
+168    return {"@stream": obj.token}
+169
+170  @staticmethod
+171  def _encode(o: Any, _markers: Optional[List] = None):
+172    if _markers is None:
+173      _markers = []
+174
+175    if isinstance(o, str):
+176      return FaunaEncoder.from_str(o)
+177    elif o is None:
+178      return FaunaEncoder.from_none()
+179    elif o is True:
+180      return FaunaEncoder.from_bool(o)
+181    elif o is False:
+182      return FaunaEncoder.from_bool(o)
+183    elif isinstance(o, int):
+184      return FaunaEncoder.from_int(o)
+185    elif isinstance(o, float):
+186      return FaunaEncoder.from_float(o)
+187    elif isinstance(o, Module):
+188      return FaunaEncoder.from_mod(o)
+189    elif isinstance(o, DocumentReference):
+190      return FaunaEncoder.from_doc_ref(o)
+191    elif isinstance(o, NamedDocumentReference):
+192      return FaunaEncoder.from_named_doc_ref(o)
+193    elif isinstance(o, datetime):
+194      return FaunaEncoder.from_datetime(o)
+195    elif isinstance(o, date):
+196      return FaunaEncoder.from_date(o)
+197    elif isinstance(o, bytearray) or isinstance(o, bytes):
+198      return FaunaEncoder.from_bytes(o)
+199    elif isinstance(o, Document):
+200      return FaunaEncoder.from_doc_ref(DocumentReference(o.coll, o.id))
+201    elif isinstance(o, NamedDocument):
+202      return FaunaEncoder.from_named_doc_ref(
+203          NamedDocumentReference(o.coll, o.name))
+204    elif isinstance(o, NullDocument):
+205      return FaunaEncoder.encode(o.ref)
+206    elif isinstance(o, (list, tuple)):
+207      return FaunaEncoder._encode_list(o, _markers)
+208    elif isinstance(o, dict):
+209      return FaunaEncoder._encode_dict(o, _markers)
+210    elif isinstance(o, Query):
+211      return FaunaEncoder.from_query_interpolation_builder(o)
+212    elif isinstance(o, StreamToken):
+213      return FaunaEncoder.from_streamtoken(o)
+214    else:
+215      raise ValueError(f"Object {o} of type {type(o)} cannot be encoded")
+216
+217  @staticmethod
+218  def _encode_list(lst, markers):
+219    _id = id(lst)
+220    if _id in markers:
+221      raise ValueError("Circular reference detected")
+222
+223    markers.append(id(lst))
+224    res = [FaunaEncoder._encode(elem, markers) for elem in lst]
+225    markers.pop()
+226    return res
+227
+228  @staticmethod
+229  def _encode_dict(dct, markers):
+230    _id = id(dct)
+231    if _id in markers:
+232      raise ValueError("Circular reference detected")
+233
+234    markers.append(id(dct))
+235    if any(i in _RESERVED_TAGS for i in dct.keys()):
+236      res = {
+237          "@object": {
+238              k: FaunaEncoder._encode(v, markers) for k, v in dct.items()
+239          }
+240      }
+241      markers.pop()
+242      return res
+243    else:
+244      res = {k: FaunaEncoder._encode(v, markers) for k, v in dct.items()}
+245      markers.pop()
+246      return res
+
+ + +

Supports the following types:

+ +

+-------------------------------+---------------+
| Python                        | Fauna Tags    |
+===============================+===============+
| dict                          | @object       |
+-------------------------------+---------------+
| list, tuple                   | array         |
+-------------------------------+---------------+
| str                           | string        |
+-------------------------------+---------------+
| int 32-bit signed             | @int          |
+-------------------------------+---------------+
| int 64-bit signed             | @long         |
+-------------------------------+---------------+
| float                         | @double       |
+-------------------------------+---------------+
| datetime.datetime             | @time         |
+-------------------------------+---------------+
| datetime.date                 | @date         |
+-------------------------------+---------------+
| True                          | True          |
+-------------------------------+---------------+
| False                         | False         |
+-------------------------------+---------------+
| None                          | None          |
+-------------------------------+---------------+
| bytes / bytearray             | @bytes        |
+-------------------------------+---------------+
| *Document                     | @ref          |
+-------------------------------+---------------+
| *DocumentReference            | @ref          |
+-------------------------------+---------------+
| Module                        | @mod          |
+-------------------------------+---------------+
| Query                         | fql           |
+-------------------------------+---------------+
| ValueFragment                 | value         |
+-------------------------------+---------------+
| TemplateFragment              | string        |
+-------------------------------+---------------+
| StreamToken                   | string        |
+-------------------------------+---------------+

+
+ + +
+ +
+
@staticmethod
+ + def + encode(obj: Any) -> Any: + + + +
+ +
71  @staticmethod
+72  def encode(obj: Any) -> Any:
+73    """Encodes supported objects into the tagged format.
+74
+75        Examples:
+76            - Up to 32-bit ints encode to { "@int": "..." }
+77            - Up to 64-bit ints encode to { "@long": "..." }
+78            - Floats encode to { "@double": "..." }
+79            - datetime encodes to { "@time": "..." }
+80            - date encodes to { "@date": "..." }
+81            - DocumentReference encodes to { "@doc": "..." }
+82            - Module encodes to { "@mod": "..." }
+83            - Query encodes to { "fql": [...] }
+84            - ValueFragment encodes to { "value": <encoded_val> }
+85            - LiteralFragment encodes to a string
+86            - StreamToken encodes to a string
+87
+88        :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
+89        :param obj: the object to encode
+90        """
+91    return FaunaEncoder._encode(obj)
+
+ + +

Encodes supported objects into the tagged format.

+ +

Examples:

  - Up to 32-bit ints encode to { "@int": "..." }
  - Up to 64-bit ints encode to { "@long": "..." }
  - Floats encode to { "@double": "..." }
  - datetime encodes to { "@time": "..." }
  - date encodes to { "@date": "..." }
  - DocumentReference encodes to { "@doc": "..." }
  - Module encodes to { "@mod": "..." }
  - Query encodes to { "fql": [...] }
  - ValueFragment encodes to { "value": <encoded_val> }
  - LiteralFragment encodes to a string
  - StreamToken encodes to a string

+ +
Raises
+ +
    +
  • ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
  • +
+ +
Parameters
+ +
    +
  • obj: the object to encode
  • +
+
+ + +
+
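As a quick sketch (the document shown is made up), encoding a plain Python dict produces the tagged wire format described in the table above:

from datetime import date
from fauna.encoding import FaunaEncoder

doc = {"count": 7, "ratio": 0.5, "due": date(2023, 6, 1), "tags": ["a", "b"]}
FaunaEncoder.encode(doc)
# Expected shape:
# {"count": {"@int": "7"}, "ratio": {"@double": "0.5"},
#  "due": {"@date": "2023-06-01"}, "tags": ["a", "b"]}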
+ +
+
@staticmethod
+ + def + from_int(obj: int): + + + +
+ +
 93  @staticmethod
+ 94  def from_int(obj: int):
+ 95    if -2**31 <= obj <= 2**31 - 1:
+ 96      return {"@int": repr(obj)}
+ 97    elif -2**63 <= obj <= 2**63 - 1:
+ 98      return {"@long": repr(obj)}
+ 99    else:
+100      raise ValueError("Precision loss when converting int to Fauna type")
+
+ + + + +
+
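A small sketch of the 32-/64-bit boundaries handled above:

from fauna.encoding import FaunaEncoder

FaunaEncoder.from_int(2**31 - 1)  # {"@int": "2147483647"}
FaunaEncoder.from_int(2**31)      # {"@long": "2147483648"}
# Values outside the signed 64-bit range raise ValueError ("Precision loss ...").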
+ +
+
@staticmethod
+ + def + from_bool(obj: bool): + + + +
+ +
102  @staticmethod
+103  def from_bool(obj: bool):
+104    return obj
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_float(obj: float): + + + +
+ +
106  @staticmethod
+107  def from_float(obj: float):
+108    return {"@double": repr(obj)}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_str(obj: str): + + + +
+ +
110  @staticmethod
+111  def from_str(obj: str):
+112    return obj
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_datetime(obj: datetime.datetime): + + + +
+ +
114  @staticmethod
+115  def from_datetime(obj: datetime):
+116    if obj.utcoffset() is None:
+117      raise ValueError("datetimes must be timezone-aware")
+118
+119    return {"@time": obj.isoformat(sep="T")}
+
+ + + + +
+
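A sketch of the timezone requirement enforced above:

from datetime import datetime, timezone
from fauna.encoding import FaunaEncoder

aware = datetime(2023, 6, 1, 12, 0, tzinfo=timezone.utc)
FaunaEncoder.from_datetime(aware)  # {"@time": "2023-06-01T12:00:00+00:00"}
# A naive datetime (no tzinfo) raises ValueError("datetimes must be timezone-aware").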
+ +
+
@staticmethod
+ + def + from_date(obj: datetime.date): + + + +
+ +
121  @staticmethod
+122  def from_date(obj: date):
+123    return {"@date": obj.isoformat()}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_bytes(obj: Union[bytearray, bytes]): + + + +
+ +
125  @staticmethod
+126  def from_bytes(obj: Union[bytearray, bytes]):
+127    return {"@bytes": base64.b64encode(obj).decode('ascii')}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_doc_ref(obj: fauna.query.models.DocumentReference): + + + +
+ +
129  @staticmethod
+130  def from_doc_ref(obj: DocumentReference):
+131    return {"@ref": {"id": obj.id, "coll": FaunaEncoder.from_mod(obj.coll)}}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_named_doc_ref(obj: fauna.query.models.NamedDocumentReference): + + + +
+ +
133  @staticmethod
+134  def from_named_doc_ref(obj: NamedDocumentReference):
+135    return {"@ref": {"name": obj.name, "coll": FaunaEncoder.from_mod(obj.coll)}}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_mod(obj: fauna.query.models.Module): + + + +
+ +
137  @staticmethod
+138  def from_mod(obj: Module):
+139    return {"@mod": obj.name}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_dict(obj: Any): + + + +
+ +
141  @staticmethod
+142  def from_dict(obj: Any):
+143    return {"@object": obj}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_none(): + + + +
+ +
145  @staticmethod
+146  def from_none():
+147    return None
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_fragment(obj: fauna.query.query_builder.Fragment): + + + +
+ +
149  @staticmethod
+150  def from_fragment(obj: Fragment):
+151    if isinstance(obj, LiteralFragment):
+152      return obj.get()
+153    elif isinstance(obj, ValueFragment):
+154      v = obj.get()
+155      if isinstance(v, Query):
+156        return FaunaEncoder.from_query_interpolation_builder(v)
+157      else:
+158        return {"value": FaunaEncoder.encode(v)}
+159    else:
+160      raise ValueError(f"Unknown fragment type: {type(obj)}")
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_query_interpolation_builder(obj: fauna.query.query_builder.Query): + + + +
+ +
162  @staticmethod
+163  def from_query_interpolation_builder(obj: Query):
+164    return {"fql": [FaunaEncoder.from_fragment(f) for f in obj.fragments]}
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + from_streamtoken(obj: fauna.query.models.StreamToken): + + + +
+ +
166  @staticmethod
+167  def from_streamtoken(obj: StreamToken):
+168    return {"@stream": obj.token}
+
+ + + + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/encoding/wire_protocol.html b/2.1.0/api/fauna/encoding/wire_protocol.html new file mode 100644 index 00000000..84170fd2 --- /dev/null +++ b/2.1.0/api/fauna/encoding/wire_protocol.html @@ -0,0 +1,1387 @@ + + + + + + + fauna.encoding.wire_protocol API documentation + + + + + + + + + +
+
+

+fauna.encoding.wire_protocol

+ + + + + + +
  1from dataclasses import dataclass
+  2from typing import Optional, Mapping, Any, List
+  3
+  4
+  5class QueryStats:
+  6  """Query stats"""
+  7
+  8  @property
+  9  def compute_ops(self) -> int:
+ 10    """The amount of Transactional Compute Ops consumed by the query."""
+ 11    return self._compute_ops
+ 12
+ 13  @property
+ 14  def read_ops(self) -> int:
+ 15    """The amount of Transactional Read Ops consumed by the query."""
+ 16    return self._read_ops
+ 17
+ 18  @property
+ 19  def write_ops(self) -> int:
+ 20    """The amount of Transactional Write Ops consumed by the query."""
+ 21    return self._write_ops
+ 22
+ 23  @property
+ 24  def query_time_ms(self) -> int:
+ 25    """The query run time in milliseconds."""
+ 26    return self._query_time_ms
+ 27
+ 28  @property
+ 29  def storage_bytes_read(self) -> int:
+ 30    """The amount of data read from storage, in bytes."""
+ 31    return self._storage_bytes_read
+ 32
+ 33  @property
+ 34  def storage_bytes_write(self) -> int:
+ 35    """The amount of data written to storage, in bytes."""
+ 36    return self._storage_bytes_write
+ 37
+ 38  @property
+ 39  def contention_retries(self) -> int:
+ 40    """The number of times the transaction was retried due to write contention."""
+ 41    return self._contention_retries
+ 42
+ 43  @property
+ 44  def attempts(self) -> int:
+ 45    """The number of attempts made by the client to run the query."""
+ 46    return self._attempts
+ 47
+ 48  @attempts.setter
+ 49  def attempts(self, value):
+ 50    self._attempts = value
+ 51
+ 52  def __init__(self, stats: Mapping[str, Any]):
+ 53    self._compute_ops = stats.get("compute_ops", 0)
+ 54    self._read_ops = stats.get("read_ops", 0)
+ 55    self._write_ops = stats.get("write_ops", 0)
+ 56    self._query_time_ms = stats.get("query_time_ms", 0)
+ 57    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+ 58    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+ 59    self._contention_retries = stats.get("contention_retries", 0)
+ 60    self._attempts = 0
+ 61
+ 62  def __repr__(self):
+ 63    stats = {
+ 64        "compute_ops": self._compute_ops,
+ 65        "read_ops": self._read_ops,
+ 66        "write_ops": self._write_ops,
+ 67        "query_time_ms": self._query_time_ms,
+ 68        "storage_bytes_read": self._storage_bytes_read,
+ 69        "storage_bytes_write": self._storage_bytes_write,
+ 70        "contention_retries": self._contention_retries,
+ 71        "attempts": self._attempts,
+ 72    }
+ 73
+ 74    return f"{self.__class__.__name__}(stats={repr(stats)})"
+ 75
+ 76  def __eq__(self, other):
+ 77    return type(self) == type(other) \
+ 78        and self.compute_ops == other.compute_ops \
+ 79        and self.read_ops == other.read_ops \
+ 80        and self.write_ops == other.write_ops \
+ 81        and self.query_time_ms == other.query_time_ms \
+ 82        and self.storage_bytes_read == other.storage_bytes_read \
+ 83        and self.storage_bytes_write == other.storage_bytes_write \
+ 84        and self.contention_retries == other.contention_retries \
+ 85        and self.attempts == other.attempts
+ 86
+ 87  def __ne__(self, other):
+ 88    return not self.__eq__(other)
+ 89
+ 90
+ 91class QueryInfo:
+ 92
+ 93  @property
+ 94  def query_tags(self) -> Mapping[str, Any]:
+ 95    """The tags associated with the query."""
+ 96    return self._query_tags
+ 97
+ 98  @property
+ 99  def summary(self) -> str:
+100    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+101    return self._summary
+102
+103  @property
+104  def stats(self) -> QueryStats:
+105    """Query stats associated with the query."""
+106    return self._stats
+107
+108  @property
+109  def txn_ts(self) -> int:
+110    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+111    return self._txn_ts
+112
+113  @property
+114  def schema_version(self) -> int:
+115    """The schema version that was used for the query execution."""
+116    return self._schema_version
+117
+118  def __init__(
+119      self,
+120      query_tags: Optional[Mapping[str, str]] = None,
+121      stats: Optional[QueryStats] = None,
+122      summary: Optional[str] = None,
+123      txn_ts: Optional[int] = None,
+124      schema_version: Optional[int] = None,
+125  ):
+126    self._query_tags = query_tags or {}
+127    self._stats = stats or QueryStats({})
+128    self._summary = summary or ""
+129    self._txn_ts = txn_ts or 0
+130    self._schema_version = schema_version or 0
+131
+132  def __repr__(self):
+133    return f"{self.__class__.__name__}(" \
+134           f"query_tags={repr(self.query_tags)}," \
+135           f"stats={repr(self.stats)}," \
+136           f"summary={repr(self.summary)}," \
+137           f"txn_ts={repr(self.txn_ts)}," \
+138           f"schema_version={repr(self.schema_version)})"
+139
+140
+141class QuerySuccess(QueryInfo):
+142  """The result of the query."""
+143
+144  @property
+145  def data(self) -> Any:
+146    """The data returned by the query. This is the result of the FQL query."""
+147    return self._data
+148
+149  @property
+150  def static_type(self) -> Optional[str]:
+151    """The query's inferred static result type, if the query was typechecked."""
+152    return self._static_type
+153
+154  @property
+155  def traceparent(self) -> Optional[str]:
+156    """The traceparent for the query."""
+157    return self._traceparent
+158
+159  def __init__(
+160      self,
+161      data: Any,
+162      query_tags: Optional[Mapping[str, str]],
+163      static_type: Optional[str],
+164      stats: Optional[QueryStats],
+165      summary: Optional[str],
+166      traceparent: Optional[str],
+167      txn_ts: Optional[int],
+168      schema_version: Optional[int],
+169  ):
+170
+171    super().__init__(
+172        query_tags=query_tags,
+173        stats=stats,
+174        summary=summary,
+175        txn_ts=txn_ts,
+176        schema_version=schema_version,
+177    )
+178
+179    self._traceparent = traceparent
+180    self._static_type = static_type
+181    self._data = data
+182
+183  def __repr__(self):
+184    return f"{self.__class__.__name__}(" \
+185           f"query_tags={repr(self.query_tags)}," \
+186           f"static_type={repr(self.static_type)}," \
+187           f"stats={repr(self.stats)}," \
+188           f"summary={repr(self.summary)}," \
+189           f"traceparent={repr(self.traceparent)}," \
+190           f"txn_ts={repr(self.txn_ts)}," \
+191           f"schema_version={repr(self.schema_version)}," \
+192           f"data={repr(self.data)})"
+193
+194
+195@dataclass
+196class ConstraintFailure:
+197  message: str
+198  name: Optional[str] = None
+199  paths: Optional[List[Any]] = None
+200
+201
+202class QueryTags:
+203
+204  @staticmethod
+205  def encode(tags: Mapping[str, str]) -> str:
+206    return ",".join([f"{k}={v}" for k, v in tags.items()])
+207
+208  @staticmethod
+209  def decode(tag_str: str) -> Mapping[str, str]:
+210    res: dict[str, str] = {}
+211    for pair in tag_str.split(","):
+212      kv = pair.split("=")
+213      res[kv[0]] = kv[1]
+214    return res
+
+ + +
+
+ +
+ + class + QueryStats: + + + +
+ +
 6class QueryStats:
+ 7  """Query stats"""
+ 8
+ 9  @property
+10  def compute_ops(self) -> int:
+11    """The amount of Transactional Compute Ops consumed by the query."""
+12    return self._compute_ops
+13
+14  @property
+15  def read_ops(self) -> int:
+16    """The amount of Transactional Read Ops consumed by the query."""
+17    return self._read_ops
+18
+19  @property
+20  def write_ops(self) -> int:
+21    """The amount of Transactional Write Ops consumed by the query."""
+22    return self._write_ops
+23
+24  @property
+25  def query_time_ms(self) -> int:
+26    """The query run time in milliseconds."""
+27    return self._query_time_ms
+28
+29  @property
+30  def storage_bytes_read(self) -> int:
+31    """The amount of data read from storage, in bytes."""
+32    return self._storage_bytes_read
+33
+34  @property
+35  def storage_bytes_write(self) -> int:
+36    """The amount of data written to storage, in bytes."""
+37    return self._storage_bytes_write
+38
+39  @property
+40  def contention_retries(self) -> int:
+41    """The number of times the transaction was retried due to write contention."""
+42    return self._contention_retries
+43
+44  @property
+45  def attempts(self) -> int:
+46    """The number of attempts made by the client to run the query."""
+47    return self._attempts
+48
+49  @attempts.setter
+50  def attempts(self, value):
+51    self._attempts = value
+52
+53  def __init__(self, stats: Mapping[str, Any]):
+54    self._compute_ops = stats.get("compute_ops", 0)
+55    self._read_ops = stats.get("read_ops", 0)
+56    self._write_ops = stats.get("write_ops", 0)
+57    self._query_time_ms = stats.get("query_time_ms", 0)
+58    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+59    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+60    self._contention_retries = stats.get("contention_retries", 0)
+61    self._attempts = 0
+62
+63  def __repr__(self):
+64    stats = {
+65        "compute_ops": self._compute_ops,
+66        "read_ops": self._read_ops,
+67        "write_ops": self._write_ops,
+68        "query_time_ms": self._query_time_ms,
+69        "storage_bytes_read": self._storage_bytes_read,
+70        "storage_bytes_write": self._storage_bytes_write,
+71        "contention_retries": self._contention_retries,
+72        "attempts": self._attempts,
+73    }
+74
+75    return f"{self.__class__.__name__}(stats={repr(stats)})"
+76
+77  def __eq__(self, other):
+78    return type(self) == type(other) \
+79        and self.compute_ops == other.compute_ops \
+80        and self.read_ops == other.read_ops \
+81        and self.write_ops == other.write_ops \
+82        and self.query_time_ms == other.query_time_ms \
+83        and self.storage_bytes_read == other.storage_bytes_read \
+84        and self.storage_bytes_write == other.storage_bytes_write \
+85        and self.contention_retries == other.contention_retries \
+86        and self.attempts == other.attempts
+87
+88  def __ne__(self, other):
+89    return not self.__eq__(other)
+
+ + +

Query stats

+
+ + +
+ +
+ + QueryStats(stats: Mapping[str, Any]) + + + +
+ +
53  def __init__(self, stats: Mapping[str, Any]):
+54    self._compute_ops = stats.get("compute_ops", 0)
+55    self._read_ops = stats.get("read_ops", 0)
+56    self._write_ops = stats.get("write_ops", 0)
+57    self._query_time_ms = stats.get("query_time_ms", 0)
+58    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
+59    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
+60    self._contention_retries = stats.get("contention_retries", 0)
+61    self._attempts = 0
+
+ + + + +
+
+ +
+ compute_ops: int + + + +
+ +
 9  @property
+10  def compute_ops(self) -> int:
+11    """The amount of Transactional Compute Ops consumed by the query."""
+12    return self._compute_ops
+
+ + +

The amount of Transactional Compute Ops consumed by the query.

+
+ + +
+
+ +
+ read_ops: int + + + +
+ +
14  @property
+15  def read_ops(self) -> int:
+16    """The amount of Transactional Read Ops consumed by the query."""
+17    return self._read_ops
+
+ + +

The amount of Transactional Read Ops consumed by the query.

+
+ + +
+
+ +
+ write_ops: int + + + +
+ +
19  @property
+20  def write_ops(self) -> int:
+21    """The amount of Transactional Write Ops consumed by the query."""
+22    return self._write_ops
+
+ + +

The amount of Transactional Write Ops consumed by the query.

+
+ + +
+
+ +
+ query_time_ms: int + + + +
+ +
24  @property
+25  def query_time_ms(self) -> int:
+26    """The query run time in milliseconds."""
+27    return self._query_time_ms
+
+ + +

The query run time in milliseconds.

+
+ + +
+
+ +
+ storage_bytes_read: int + + + +
+ +
29  @property
+30  def storage_bytes_read(self) -> int:
+31    """The amount of data read from storage, in bytes."""
+32    return self._storage_bytes_read
+
+ + +

The amount of data read from storage, in bytes.

+
+ + +
+
+ +
+ storage_bytes_write: int + + + +
+ +
34  @property
+35  def storage_bytes_write(self) -> int:
+36    """The amount of data written to storage, in bytes."""
+37    return self._storage_bytes_write
+
+ + +

The amount of data written to storage, in bytes.

+
+ + +
+
+ +
+ contention_retries: int + + + +
+ +
39  @property
+40  def contention_retries(self) -> int:
+41    """The number of times the transaction was retried due to write contention."""
+42    return self._contention_retries
+
+ + +

The number of times the transaction was retried due to write contention.

+
+ + +
+
+ +
+ attempts: int + + + +
+ +
44  @property
+45  def attempts(self) -> int:
+46    """The number of attempts made by the client to run the query."""
+47    return self._attempts
+
+ + +

The number of attempts made by the client to run the query.

+
+ + +
+
+
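For illustration, a minimal sketch of constructing QueryStats from a stats mapping (the values are made up); keys missing from the mapping default to 0:

from fauna.encoding import QueryStats

stats = QueryStats({"read_ops": 8, "compute_ops": 1, "query_time_ms": 15})
stats.read_ops            # 8
stats.contention_retries  # 0 (not present in the mapping, so it defaults to 0)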
+ +
+ + class + QueryInfo: + + + +
+ +
 92class QueryInfo:
+ 93
+ 94  @property
+ 95  def query_tags(self) -> Mapping[str, Any]:
+ 96    """The tags associated with the query."""
+ 97    return self._query_tags
+ 98
+ 99  @property
+100  def summary(self) -> str:
+101    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+102    return self._summary
+103
+104  @property
+105  def stats(self) -> QueryStats:
+106    """Query stats associated with the query."""
+107    return self._stats
+108
+109  @property
+110  def txn_ts(self) -> int:
+111    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+112    return self._txn_ts
+113
+114  @property
+115  def schema_version(self) -> int:
+116    """The schema version that was used for the query execution."""
+117    return self._schema_version
+118
+119  def __init__(
+120      self,
+121      query_tags: Optional[Mapping[str, str]] = None,
+122      stats: Optional[QueryStats] = None,
+123      summary: Optional[str] = None,
+124      txn_ts: Optional[int] = None,
+125      schema_version: Optional[int] = None,
+126  ):
+127    self._query_tags = query_tags or {}
+128    self._stats = stats or QueryStats({})
+129    self._summary = summary or ""
+130    self._txn_ts = txn_ts or 0
+131    self._schema_version = schema_version or 0
+132
+133  def __repr__(self):
+134    return f"{self.__class__.__name__}(" \
+135           f"query_tags={repr(self.query_tags)}," \
+136           f"stats={repr(self.stats)}," \
+137           f"summary={repr(self.summary)}," \
+138           f"txn_ts={repr(self.txn_ts)}," \
+139           f"schema_version={repr(self.schema_version)})"
+
+ + + + +
+ +
+ + QueryInfo( query_tags: Optional[Mapping[str, str]] = None, stats: Optional[QueryStats] = None, summary: Optional[str] = None, txn_ts: Optional[int] = None, schema_version: Optional[int] = None) + + + +
+ +
119  def __init__(
+120      self,
+121      query_tags: Optional[Mapping[str, str]] = None,
+122      stats: Optional[QueryStats] = None,
+123      summary: Optional[str] = None,
+124      txn_ts: Optional[int] = None,
+125      schema_version: Optional[int] = None,
+126  ):
+127    self._query_tags = query_tags or {}
+128    self._stats = stats or QueryStats({})
+129    self._summary = summary or ""
+130    self._txn_ts = txn_ts or 0
+131    self._schema_version = schema_version or 0
+
+ + + + +
+
+ +
+ query_tags: Mapping[str, Any] + + + +
+ +
94  @property
+95  def query_tags(self) -> Mapping[str, Any]:
+96    """The tags associated with the query."""
+97    return self._query_tags
+
+ + +

The tags associated with the query.

+
+ + +
+
+ +
+ summary: str + + + +
+ +
 99  @property
+100  def summary(self) -> str:
+101    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
+102    return self._summary
+
+ + +

A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query.

+
+ + +
+
+ +
+ stats: QueryStats + + + +
+ +
104  @property
+105  def stats(self) -> QueryStats:
+106    """Query stats associated with the query."""
+107    return self._stats
+
+ + +

Query stats associated with the query.

+
+ + +
+
+ +
+ txn_ts: int + + + +
+ +
109  @property
+110  def txn_ts(self) -> int:
+111    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
+112    return self._txn_ts
+
+ + +

The last transaction timestamp of the query. A Unix epoch in microseconds.

+
+ + +
+
+ +
+ schema_version: int + + + +
+ +
114  @property
+115  def schema_version(self) -> int:
+116    """The schema version that was used for the query execution."""
+117    return self._schema_version
+
+ + +

The schema version that was used for the query execution.

+
+ + +
+
+
+ +
+ + class + QuerySuccess(QueryInfo): + + + +
+ +
142class QuerySuccess(QueryInfo):
+143  """The result of the query."""
+144
+145  @property
+146  def data(self) -> Any:
+147    """The data returned by the query. This is the result of the FQL query."""
+148    return self._data
+149
+150  @property
+151  def static_type(self) -> Optional[str]:
+152    """The query's inferred static result type, if the query was typechecked."""
+153    return self._static_type
+154
+155  @property
+156  def traceparent(self) -> Optional[str]:
+157    """The traceparent for the query."""
+158    return self._traceparent
+159
+160  def __init__(
+161      self,
+162      data: Any,
+163      query_tags: Optional[Mapping[str, str]],
+164      static_type: Optional[str],
+165      stats: Optional[QueryStats],
+166      summary: Optional[str],
+167      traceparent: Optional[str],
+168      txn_ts: Optional[int],
+169      schema_version: Optional[int],
+170  ):
+171
+172    super().__init__(
+173        query_tags=query_tags,
+174        stats=stats,
+175        summary=summary,
+176        txn_ts=txn_ts,
+177        schema_version=schema_version,
+178    )
+179
+180    self._traceparent = traceparent
+181    self._static_type = static_type
+182    self._data = data
+183
+184  def __repr__(self):
+185    return f"{self.__class__.__name__}(" \
+186           f"query_tags={repr(self.query_tags)}," \
+187           f"static_type={repr(self.static_type)}," \
+188           f"stats={repr(self.stats)}," \
+189           f"summary={repr(self.summary)}," \
+190           f"traceparent={repr(self.traceparent)}," \
+191           f"txn_ts={repr(self.txn_ts)}," \
+192           f"schema_version={repr(self.schema_version)}," \
+193           f"data={repr(self.data)})"
+
+ + +

The result of the query.

+
+ + +
+ +
+ + QuerySuccess( data: Any, query_tags: Optional[Mapping[str, str]], static_type: Optional[str], stats: Optional[QueryStats], summary: Optional[str], traceparent: Optional[str], txn_ts: Optional[int], schema_version: Optional[int]) + + + +
+ +
160  def __init__(
+161      self,
+162      data: Any,
+163      query_tags: Optional[Mapping[str, str]],
+164      static_type: Optional[str],
+165      stats: Optional[QueryStats],
+166      summary: Optional[str],
+167      traceparent: Optional[str],
+168      txn_ts: Optional[int],
+169      schema_version: Optional[int],
+170  ):
+171
+172    super().__init__(
+173        query_tags=query_tags,
+174        stats=stats,
+175        summary=summary,
+176        txn_ts=txn_ts,
+177        schema_version=schema_version,
+178    )
+179
+180    self._traceparent = traceparent
+181    self._static_type = static_type
+182    self._data = data
+
+ + + + +
+
+ +
+ data: Any + + + +
+ +
145  @property
+146  def data(self) -> Any:
+147    """The data returned by the query. This is the result of the FQL query."""
+148    return self._data
+
+ + +

The data returned by the query. This is the result of the FQL query.

+
+ + +
+
+ +
+ static_type: Optional[str] + + + +
+ +
150  @property
+151  def static_type(self) -> Optional[str]:
+152    """The query's inferred static result type, if the query was typechecked."""
+153    return self._static_type
+
+ + +

The query's inferred static result type, if the query was typechecked.

+
+ + +
+
+ +
+ traceparent: Optional[str] + + + +
+ +
155  @property
+156  def traceparent(self) -> Optional[str]:
+157    """The traceparent for the query."""
+158    return self._traceparent
+
+ + +

The traceparent for the query.

+
+ + +
+
+
Inherited Members
+
+
QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
+
+
+
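In normal use a QuerySuccess is returned by the client rather than built by hand; purely as a sketch with made-up values, constructing one directly shows how the constructor arguments map onto the properties above:

from fauna.encoding import QuerySuccess, QueryStats

ok = QuerySuccess(
    data={"answer": 42},
    query_tags={"env": "dev"},
    static_type=None,
    stats=QueryStats({"read_ops": 1}),
    summary="",
    traceparent=None,
    txn_ts=1690000000000000,
    schema_version=0,
)
ok.data            # {"answer": 42}
ok.stats.read_ops  # 1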
+ +
+
@dataclass
+ + class + ConstraintFailure: + + + +
+ +
196@dataclass
+197class ConstraintFailure:
+198  message: str
+199  name: Optional[str] = None
+200  paths: Optional[List[Any]] = None
+
+ + + + +
+
+ + ConstraintFailure( message: str, name: Optional[str] = None, paths: Optional[List[Any]] = None) + + +
+ + + + +
+
+
+ message: str + + +
+ + + + +
+
+
+ name: Optional[str] = +None + + +
+ + + + +
+
+
+ paths: Optional[List[Any]] = +None + + +
+ + + + +
+
+
+ +
+ + class + QueryTags: + + + +
+ +
203class QueryTags:
+204
+205  @staticmethod
+206  def encode(tags: Mapping[str, str]) -> str:
+207    return ",".join([f"{k}={v}" for k, v in tags.items()])
+208
+209  @staticmethod
+210  def decode(tag_str: str) -> Mapping[str, str]:
+211    res: dict[str, str] = {}
+212    for pair in tag_str.split(","):
+213      kv = pair.split("=")
+214      res[kv[0]] = kv[1]
+215    return res
+
+ + + + +
+ +
+
@staticmethod
+ + def + encode(tags: Mapping[str, str]) -> str: + + + +
+ +
205  @staticmethod
+206  def encode(tags: Mapping[str, str]) -> str:
+207    return ",".join([f"{k}={v}" for k, v in tags.items()])
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + decode(tag_str: str) -> Mapping[str, str]: + + + +
+ +
209  @staticmethod
+210  def decode(tag_str: str) -> Mapping[str, str]:
+211    res: dict[str, str] = {}
+212    for pair in tag_str.split(","):
+213      kv = pair.split("=")
+214      res[kv[0]] = kv[1]
+215    return res
+
+ + + + +
+
+
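A short sketch of the encode/decode round trip implemented above (the tag names are made up):

from fauna.encoding import QueryTags

header = QueryTags.encode({"env": "dev", "team": "data"})  # "env=dev,team=data"
QueryTags.decode(header)                                   # {"env": "dev", "team": "data"}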
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/errors.html b/2.1.0/api/fauna/errors.html new file mode 100644 index 00000000..e1171662 --- /dev/null +++ b/2.1.0/api/fauna/errors.html @@ -0,0 +1,247 @@ + + + + + + + fauna.errors API documentation + + + + + + + + + +
+
+

+fauna.errors

+ + + + + + +
1from .errors import AuthenticationError, AuthorizationError, QueryCheckError, QueryRuntimeError, \
+2  QueryTimeoutError, ServiceInternalError, ServiceTimeoutError, ThrottlingError, ContendedTransactionError, \
+3  InvalidRequestError, AbortError, RetryableFaunaException
+4from .errors import ClientError, FaunaError, NetworkError
+5from .errors import FaunaException
+6from .errors import ProtocolError, ServiceError
+
+ + +
+
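For illustration, a brief sketch of how these exports relate; the subclass checks below follow the class definitions in fauna.errors.errors:

from fauna.errors import (AbortError, FaunaError, NetworkError,
                          RetryableFaunaException, ServiceError, ThrottlingError)

issubclass(AbortError, ServiceError)                  # True
issubclass(ServiceError, FaunaError)                  # True
issubclass(ThrottlingError, RetryableFaunaException)  # True; marked as retryable
issubclass(NetworkError, FaunaError)                  # False; NetworkError extends FaunaException directly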
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/errors/errors.html b/2.1.0/api/fauna/errors/errors.html new file mode 100644 index 00000000..105b683c --- /dev/null +++ b/2.1.0/api/fauna/errors/errors.html @@ -0,0 +1,2341 @@ + + + + + + + fauna.errors.errors API documentation + + + + + + + + + +
+
+

+fauna.errors.errors

+ + + + + + +
  1from typing import Optional, List, Any, Mapping
+  2
+  3from fauna.encoding import ConstraintFailure, QueryStats, QueryInfo, QueryTags
+  4
+  5
+  6class FaunaException(Exception):
+  7  """Base class for Fauna exceptions."""
+  8  pass
+  9
+ 10
+ 11class RetryableFaunaException(FaunaException):
+ 12  pass
+ 13
+ 14
+ 15class ClientError(FaunaException):
+ 16  """An error representing a failure internal to the client itself.
+ 17    This indicates Fauna was never called - the client failed internally
+ 18    prior to sending the request."""
+ 19  pass
+ 20
+ 21
+ 22class NetworkError(FaunaException):
+ 23  """An error representing a failure due to the network.
+ 24    This indicates Fauna was never reached."""
+ 25  pass
+ 26
+ 27
+ 28class ProtocolError(FaunaException):
+ 29  """An error representing an HTTP failure - but one not directly emitted by Fauna."""
+ 30
+ 31  @property
+ 32  def status_code(self) -> int:
+ 33    return self._status_code
+ 34
+ 35  @property
+ 36  def message(self) -> str:
+ 37    return self._message
+ 38
+ 39  def __init__(self, status_code: int, message: str):
+ 40    self._status_code = status_code
+ 41    self._message = message
+ 42
+ 43  def __str__(self):
+ 44    return f"{self.status_code}: {self.message}"
+ 45
+ 46
+ 47class FaunaError(FaunaException):
+ 48  """Base class for Fauna errors."""
+ 49
+ 50  @property
+ 51  def status_code(self) -> int:
+ 52    return self._status_code
+ 53
+ 54  @property
+ 55  def code(self) -> str:
+ 56    return self._code
+ 57
+ 58  @property
+ 59  def message(self) -> str:
+ 60    return self._message
+ 61
+ 62  @property
+ 63  def abort(self) -> Optional[Any]:
+ 64    return self._abort
+ 65
+ 66  @property
+ 67  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+ 68    return self._constraint_failures
+ 69
+ 70  def __init__(
+ 71      self,
+ 72      status_code: int,
+ 73      code: str,
+ 74      message: str,
+ 75      abort: Optional[Any] = None,
+ 76      constraint_failures: Optional[List['ConstraintFailure']] = None,
+ 77  ):
+ 78    self._status_code = status_code
+ 79    self._code = code
+ 80    self._message = message
+ 81    self._abort = abort
+ 82    self._constraint_failures = constraint_failures
+ 83
+ 84  def __str__(self):
+ 85    return f"{self.status_code}: {self.code}\n{self.message}"
+ 86
+ 87  @staticmethod
+ 88  def parse_error_and_throw(body: Any, status_code: int):
+ 89    err = body["error"]
+ 90    code = err["code"]
+ 91    message = err["message"]
+ 92
+ 93    query_tags = QueryTags.decode(
+ 94        body["query_tags"]) if "query_tags" in body else None
+ 95    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 96    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 97    schema_version = body["schema_version"] if "schema_version" in body else None
+ 98    summary = body["summary"] if "summary" in body else None
+ 99
+100    constraint_failures: Optional[List[ConstraintFailure]] = None
+101    if "constraint_failures" in err:
+102      constraint_failures = [
+103          ConstraintFailure(
+104              message=cf["message"],
+105              name=cf["name"] if "name" in cf else None,
+106              paths=cf["paths"] if "paths" in cf else None,
+107          ) for cf in err["constraint_failures"]
+108      ]
+109
+110    if status_code >= 400 and status_code < 500:
+111      if code == "invalid_query":
+112        raise QueryCheckError(
+113            status_code=400,
+114            code=code,
+115            message=message,
+116            summary=summary,
+117            constraint_failures=constraint_failures,
+118            query_tags=query_tags,
+119            stats=stats,
+120            txn_ts=txn_ts,
+121            schema_version=schema_version,
+122        )
+123      elif code == "invalid_request":
+124        raise InvalidRequestError(
+125            status_code=400,
+126            code=code,
+127            message=message,
+128            summary=summary,
+129            constraint_failures=constraint_failures,
+130            query_tags=query_tags,
+131            stats=stats,
+132            txn_ts=txn_ts,
+133            schema_version=schema_version,
+134        )
+135      elif code == "abort":
+136        abort = err["abort"] if "abort" in err else None
+137        raise AbortError(
+138            status_code=400,
+139            code=code,
+140            message=message,
+141            summary=summary,
+142            abort=abort,
+143            constraint_failures=constraint_failures,
+144            query_tags=query_tags,
+145            stats=stats,
+146            txn_ts=txn_ts,
+147            schema_version=schema_version,
+148        )
+149      elif code == "unauthorized":
+150        raise AuthenticationError(
+151            status_code=401,
+152            code=code,
+153            message=message,
+154            summary=summary,
+155            constraint_failures=constraint_failures,
+156            query_tags=query_tags,
+157            stats=stats,
+158            txn_ts=txn_ts,
+159            schema_version=schema_version,
+160        )
+161      elif code == "forbidden" and status_code == 403:
+162        raise AuthorizationError(
+163            status_code=403,
+164            code=code,
+165            message=message,
+166            summary=summary,
+167            constraint_failures=constraint_failures,
+168            query_tags=query_tags,
+169            stats=stats,
+170            txn_ts=txn_ts,
+171            schema_version=schema_version,
+172        )
+173      elif code == "method_not_allowed":
+174        raise QueryRuntimeError(
+175            status_code=405,
+176            code=code,
+177            message=message,
+178            summary=summary,
+179            constraint_failures=constraint_failures,
+180            query_tags=query_tags,
+181            stats=stats,
+182            txn_ts=txn_ts,
+183            schema_version=schema_version,
+184        )
+185      elif code == "conflict":
+186        raise ContendedTransactionError(
+187            status_code=409,
+188            code=code,
+189            message=message,
+190            summary=summary,
+191            constraint_failures=constraint_failures,
+192            query_tags=query_tags,
+193            stats=stats,
+194            txn_ts=txn_ts,
+195            schema_version=schema_version,
+196        )
+197      elif code == "request_size_exceeded":
+198        raise QueryRuntimeError(
+199            status_code=413,
+200            code=code,
+201            message=message,
+202            summary=summary,
+203            constraint_failures=constraint_failures,
+204            query_tags=query_tags,
+205            stats=stats,
+206            txn_ts=txn_ts,
+207            schema_version=schema_version,
+208        )
+209      elif code == "limit_exceeded":
+210        raise ThrottlingError(
+211            status_code=429,
+212            code=code,
+213            message=message,
+214            summary=summary,
+215            constraint_failures=constraint_failures,
+216            query_tags=query_tags,
+217            stats=stats,
+218            txn_ts=txn_ts,
+219            schema_version=schema_version,
+220        )
+221      elif code == "time_out":
+222        raise QueryTimeoutError(
+223            status_code=440,
+224            code=code,
+225            message=message,
+226            summary=summary,
+227            constraint_failures=constraint_failures,
+228            query_tags=query_tags,
+229            stats=stats,
+230            txn_ts=txn_ts,
+231            schema_version=schema_version,
+232        )
+233      else:
+234        raise QueryRuntimeError(
+235            status_code=status_code,
+236            code=code,
+237            message=message,
+238            summary=summary,
+239            constraint_failures=constraint_failures,
+240            query_tags=query_tags,
+241            stats=stats,
+242            txn_ts=txn_ts,
+243            schema_version=schema_version,
+244        )
+245    elif status_code == 500:
+246      raise ServiceInternalError(
+247          status_code=status_code,
+248          code=code,
+249          message=message,
+250          summary=summary,
+251          constraint_failures=constraint_failures,
+252          query_tags=query_tags,
+253          stats=stats,
+254          txn_ts=txn_ts,
+255          schema_version=schema_version,
+256      )
+257    elif status_code == 503:
+258      raise ServiceTimeoutError(
+259          status_code=status_code,
+260          code=code,
+261          message=message,
+262          summary=summary,
+263          constraint_failures=constraint_failures,
+264          query_tags=query_tags,
+265          stats=stats,
+266          txn_ts=txn_ts,
+267          schema_version=schema_version,
+268      )
+269    else:
+270      raise ServiceError(
+271          status_code=status_code,
+272          code=code,
+273          message=message,
+274          summary=summary,
+275          constraint_failures=constraint_failures,
+276          query_tags=query_tags,
+277          stats=stats,
+278          txn_ts=txn_ts,
+279          schema_version=schema_version,
+280      )
+281
+282
+283class ServiceError(FaunaError, QueryInfo):
+284  """An error representing a query failure returned by Fauna."""
+285
+286  def __init__(
+287      self,
+288      status_code: int,
+289      code: str,
+290      message: str,
+291      summary: Optional[str] = None,
+292      abort: Optional[Any] = None,
+293      constraint_failures: Optional[List['ConstraintFailure']] = None,
+294      query_tags: Optional[Mapping[str, str]] = None,
+295      stats: Optional[QueryStats] = None,
+296      txn_ts: Optional[int] = None,
+297      schema_version: Optional[int] = None,
+298  ):
+299    QueryInfo.__init__(
+300        self,
+301        query_tags=query_tags,
+302        stats=stats,
+303        summary=summary,
+304        txn_ts=txn_ts,
+305        schema_version=schema_version,
+306    )
+307
+308    FaunaError.__init__(
+309        self,
+310        status_code=status_code,
+311        code=code,
+312        message=message,
+313        abort=abort,
+314        constraint_failures=constraint_failures,
+315    )
+316
+317  def __str__(self):
+318    constraint_str = "---"
+319    if self._constraint_failures:
+320      constraint_str = f"---\nconstraint failures: {self._constraint_failures}\n---"
+321
+322    return f"{self._status_code}: {self.code}\n{self.message}\n{constraint_str}\n{self.summary or ''}"
+323
+324
+325class AbortError(ServiceError):
+326  pass
+327
+328
+329class InvalidRequestError(ServiceError):
+330  pass
+331
+332
+333class QueryCheckError(ServiceError):
+334  """An error due to a "compile-time" check of the query failing."""
+335  pass
+336
+337
+338class ContendedTransactionError(ServiceError):
+339  """Transaction is aborted due to concurrent modification."""
+340  pass
+341
+342
+343class QueryRuntimeError(ServiceError):
+344  """An error response that is the result of the query failing during execution.
+345    QueryRuntimeErrors occur when a bug in your query causes an invalid execution
+346    to be requested.
+347    The 'code' field will vary based on the specific error cause."""
+348  pass
+349
+350
+351class AuthenticationError(ServiceError):
+352  """AuthenticationError indicates invalid credentials were used."""
+353  pass
+354
+355
+356class AuthorizationError(ServiceError):
+357  """AuthorizationError indicates the credentials used do not have
+358    permission to perform the requested action."""
+359  pass
+360
+361
+362class ThrottlingError(ServiceError, RetryableFaunaException):
+363  """ThrottlingError indicates some capacity limit was exceeded
+364    and thus the request could not be served."""
+365  pass
+366
+367
+368class QueryTimeoutError(ServiceError):
+369  """A failure due to the timeout being exceeded, but the timeout
+370    was set lower than the query's expected processing time.
+371    This response is distinguished from a ServiceTimeoutError
+372    in that a QueryTimeoutError shows Fauna behaving in an expected manner."""
+373  pass
+374
+375
+376class ServiceInternalError(ServiceError):
+377  """ServiceInternalError indicates Fauna failed unexpectedly."""
+378  pass
+379
+380
+381class ServiceTimeoutError(ServiceError):
+382  """ServiceTimeoutError indicates Fauna was not available to service
+383    the request before the timeout was reached."""
+384  pass
+
+ + +
+
+ +
+ + class + FaunaException(builtins.Exception): + + + +
+ +
7class FaunaException(Exception):
+8  """Base class for Fauna Exceptions"""
+9  pass
+
+ + +

Base class for Fauna Exceptions

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + RetryableFaunaException(FaunaException): + + + +
+ +
12class RetryableFaunaException(FaunaException):
+13  pass
+
+ + +

Base class for Fauna Exceptions

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ClientError(FaunaException): + + + +
+ +
16class ClientError(FaunaException):
+17  """An error representing a failure internal to the client itself.
+18    This indicates Fauna was never called - the client failed internally
+19    prior to sending the request."""
+20  pass
+
+ + +

An error representing a failure internal to the client itself. +This indicates Fauna was never called - the client failed internally +prior to sending the request.

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + NetworkError(FaunaException): + + + +
+ +
23class NetworkError(FaunaException):
+24  """An error representing a failure due to the network.
+25    This indicates Fauna was never reached."""
+26  pass
+
+ + +

An error representing a failure due to the network. +This indicates Fauna was never reached.

+
+ + +
+
Inherited Members
+
+
builtins.Exception
+
Exception
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ProtocolError(FaunaException): + + + +
+ +
29class ProtocolError(FaunaException):
+30  """An error representing an HTTP failure - but one not directly emitted by Fauna."""
+31
+32  @property
+33  def status_code(self) -> int:
+34    return self._status_code
+35
+36  @property
+37  def message(self) -> str:
+38    return self._message
+39
+40  def __init__(self, status_code: int, message: str):
+41    self._status_code = status_code
+42    self._message = message
+43
+44  def __str__(self):
+45    return f"{self.status_code}: {self.message}"
+
+ + +

An error representing an HTTP failure - but one not directly emitted by Fauna.

+
+ + +
+ +
+ + ProtocolError(status_code: int, message: str) + + + +
+ +
40  def __init__(self, status_code: int, message: str):
+41    self._status_code = status_code
+42    self._message = message
+
+ + + + +
+
+ +
+ status_code: int + + + +
+ +
32  @property
+33  def status_code(self) -> int:
+34    return self._status_code
+
+ + + + +
+
+ +
+ message: str + + + +
+ +
36  @property
+37  def message(self) -> str:
+38    return self._message
+
+ + + + +
+
+
Inherited Members
+
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + FaunaError(FaunaException): + + + +
+ +
 48class FaunaError(FaunaException):
+ 49  """Base class for Fauna Errors"""
+ 50
+ 51  @property
+ 52  def status_code(self) -> int:
+ 53    return self._status_code
+ 54
+ 55  @property
+ 56  def code(self) -> str:
+ 57    return self._code
+ 58
+ 59  @property
+ 60  def message(self) -> str:
+ 61    return self._message
+ 62
+ 63  @property
+ 64  def abort(self) -> Optional[Any]:
+ 65    return self._abort
+ 66
+ 67  @property
+ 68  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+ 69    return self._constraint_failures
+ 70
+ 71  def __init__(
+ 72      self,
+ 73      status_code: int,
+ 74      code: str,
+ 75      message: str,
+ 76      abort: Optional[Any] = None,
+ 77      constraint_failures: Optional[List['ConstraintFailure']] = None,
+ 78  ):
+ 79    self._status_code = status_code
+ 80    self._code = code
+ 81    self._message = message
+ 82    self._abort = abort
+ 83    self._constraint_failures = constraint_failures
+ 84
+ 85  def __str__(self):
+ 86    return f"{self.status_code}: {self.code}\n{self.message}"
+ 87
+ 88  @staticmethod
+ 89  def parse_error_and_throw(body: Any, status_code: int):
+ 90    err = body["error"]
+ 91    code = err["code"]
+ 92    message = err["message"]
+ 93
+ 94    query_tags = QueryTags.decode(
+ 95        body["query_tags"]) if "query_tags" in body else None
+ 96    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 97    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 98    schema_version = body["schema_version"] if "schema_version" in body else None
+ 99    summary = body["summary"] if "summary" in body else None
+100
+101    constraint_failures: Optional[List[ConstraintFailure]] = None
+102    if "constraint_failures" in err:
+103      constraint_failures = [
+104          ConstraintFailure(
+105              message=cf["message"],
+106              name=cf["name"] if "name" in cf else None,
+107              paths=cf["paths"] if "paths" in cf else None,
+108          ) for cf in err["constraint_failures"]
+109      ]
+110
+111    if status_code >= 400 and status_code < 500:
+112      if code == "invalid_query":
+113        raise QueryCheckError(
+114            status_code=400,
+115            code=code,
+116            message=message,
+117            summary=summary,
+118            constraint_failures=constraint_failures,
+119            query_tags=query_tags,
+120            stats=stats,
+121            txn_ts=txn_ts,
+122            schema_version=schema_version,
+123        )
+124      elif code == "invalid_request":
+125        raise InvalidRequestError(
+126            status_code=400,
+127            code=code,
+128            message=message,
+129            summary=summary,
+130            constraint_failures=constraint_failures,
+131            query_tags=query_tags,
+132            stats=stats,
+133            txn_ts=txn_ts,
+134            schema_version=schema_version,
+135        )
+136      elif code == "abort":
+137        abort = err["abort"] if "abort" in err else None
+138        raise AbortError(
+139            status_code=400,
+140            code=code,
+141            message=message,
+142            summary=summary,
+143            abort=abort,
+144            constraint_failures=constraint_failures,
+145            query_tags=query_tags,
+146            stats=stats,
+147            txn_ts=txn_ts,
+148            schema_version=schema_version,
+149        )
+150      elif code == "unauthorized":
+151        raise AuthenticationError(
+152            status_code=401,
+153            code=code,
+154            message=message,
+155            summary=summary,
+156            constraint_failures=constraint_failures,
+157            query_tags=query_tags,
+158            stats=stats,
+159            txn_ts=txn_ts,
+160            schema_version=schema_version,
+161        )
+162      elif code == "forbidden" and status_code == 403:
+163        raise AuthorizationError(
+164            status_code=403,
+165            code=code,
+166            message=message,
+167            summary=summary,
+168            constraint_failures=constraint_failures,
+169            query_tags=query_tags,
+170            stats=stats,
+171            txn_ts=txn_ts,
+172            schema_version=schema_version,
+173        )
+174      elif code == "method_not_allowed":
+175        raise QueryRuntimeError(
+176            status_code=405,
+177            code=code,
+178            message=message,
+179            summary=summary,
+180            constraint_failures=constraint_failures,
+181            query_tags=query_tags,
+182            stats=stats,
+183            txn_ts=txn_ts,
+184            schema_version=schema_version,
+185        )
+186      elif code == "conflict":
+187        raise ContendedTransactionError(
+188            status_code=409,
+189            code=code,
+190            message=message,
+191            summary=summary,
+192            constraint_failures=constraint_failures,
+193            query_tags=query_tags,
+194            stats=stats,
+195            txn_ts=txn_ts,
+196            schema_version=schema_version,
+197        )
+198      elif code == "request_size_exceeded":
+199        raise QueryRuntimeError(
+200            status_code=413,
+201            code=code,
+202            message=message,
+203            summary=summary,
+204            constraint_failures=constraint_failures,
+205            query_tags=query_tags,
+206            stats=stats,
+207            txn_ts=txn_ts,
+208            schema_version=schema_version,
+209        )
+210      elif code == "limit_exceeded":
+211        raise ThrottlingError(
+212            status_code=429,
+213            code=code,
+214            message=message,
+215            summary=summary,
+216            constraint_failures=constraint_failures,
+217            query_tags=query_tags,
+218            stats=stats,
+219            txn_ts=txn_ts,
+220            schema_version=schema_version,
+221        )
+222      elif code == "time_out":
+223        raise QueryTimeoutError(
+224            status_code=440,
+225            code=code,
+226            message=message,
+227            summary=summary,
+228            constraint_failures=constraint_failures,
+229            query_tags=query_tags,
+230            stats=stats,
+231            txn_ts=txn_ts,
+232            schema_version=schema_version,
+233        )
+234      else:
+235        raise QueryRuntimeError(
+236            status_code=status_code,
+237            code=code,
+238            message=message,
+239            summary=summary,
+240            constraint_failures=constraint_failures,
+241            query_tags=query_tags,
+242            stats=stats,
+243            txn_ts=txn_ts,
+244            schema_version=schema_version,
+245        )
+246    elif status_code == 500:
+247      raise ServiceInternalError(
+248          status_code=status_code,
+249          code=code,
+250          message=message,
+251          summary=summary,
+252          constraint_failures=constraint_failures,
+253          query_tags=query_tags,
+254          stats=stats,
+255          txn_ts=txn_ts,
+256          schema_version=schema_version,
+257      )
+258    elif status_code == 503:
+259      raise ServiceTimeoutError(
+260          status_code=status_code,
+261          code=code,
+262          message=message,
+263          summary=summary,
+264          constraint_failures=constraint_failures,
+265          query_tags=query_tags,
+266          stats=stats,
+267          txn_ts=txn_ts,
+268          schema_version=schema_version,
+269      )
+270    else:
+271      raise ServiceError(
+272          status_code=status_code,
+273          code=code,
+274          message=message,
+275          summary=summary,
+276          constraint_failures=constraint_failures,
+277          query_tags=query_tags,
+278          stats=stats,
+279          txn_ts=txn_ts,
+280          schema_version=schema_version,
+281      )
+
+ + +

Base class for Fauna Errors

+
+ + +
+ +
+ + FaunaError( status_code: int, code: str, message: str, abort: Optional[Any] = None, constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None) + + + +
+ +
71  def __init__(
+72      self,
+73      status_code: int,
+74      code: str,
+75      message: str,
+76      abort: Optional[Any] = None,
+77      constraint_failures: Optional[List['ConstraintFailure']] = None,
+78  ):
+79    self._status_code = status_code
+80    self._code = code
+81    self._message = message
+82    self._abort = abort
+83    self._constraint_failures = constraint_failures
+
+ + + + +
+
+ +
+ status_code: int + + + +
+ +
51  @property
+52  def status_code(self) -> int:
+53    return self._status_code
+
+ + + + +
+
+ +
+ code: str + + + +
+ +
55  @property
+56  def code(self) -> str:
+57    return self._code
+
+ + + + +
+
+ +
+ message: str + + + +
+ +
59  @property
+60  def message(self) -> str:
+61    return self._message
+
+ + + + +
+
+ +
+ abort: Optional[Any] + + + +
+ +
63  @property
+64  def abort(self) -> Optional[Any]:
+65    return self._abort
+
+ + + + +
+
+ +
+ constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] + + + +
+ +
67  @property
+68  def constraint_failures(self) -> Optional[List['ConstraintFailure']]:
+69    return self._constraint_failures
+
+ + + + +
+
+ +
+
@staticmethod
+ + def + parse_error_and_throw(body: Any, status_code: int): + + + +
+ +
 88  @staticmethod
+ 89  def parse_error_and_throw(body: Any, status_code: int):
+ 90    err = body["error"]
+ 91    code = err["code"]
+ 92    message = err["message"]
+ 93
+ 94    query_tags = QueryTags.decode(
+ 95        body["query_tags"]) if "query_tags" in body else None
+ 96    stats = QueryStats(body["stats"]) if "stats" in body else None
+ 97    txn_ts = body["txn_ts"] if "txn_ts" in body else None
+ 98    schema_version = body["schema_version"] if "schema_version" in body else None
+ 99    summary = body["summary"] if "summary" in body else None
+100
+101    constraint_failures: Optional[List[ConstraintFailure]] = None
+102    if "constraint_failures" in err:
+103      constraint_failures = [
+104          ConstraintFailure(
+105              message=cf["message"],
+106              name=cf["name"] if "name" in cf else None,
+107              paths=cf["paths"] if "paths" in cf else None,
+108          ) for cf in err["constraint_failures"]
+109      ]
+110
+111    if status_code >= 400 and status_code < 500:
+112      if code == "invalid_query":
+113        raise QueryCheckError(
+114            status_code=400,
+115            code=code,
+116            message=message,
+117            summary=summary,
+118            constraint_failures=constraint_failures,
+119            query_tags=query_tags,
+120            stats=stats,
+121            txn_ts=txn_ts,
+122            schema_version=schema_version,
+123        )
+124      elif code == "invalid_request":
+125        raise InvalidRequestError(
+126            status_code=400,
+127            code=code,
+128            message=message,
+129            summary=summary,
+130            constraint_failures=constraint_failures,
+131            query_tags=query_tags,
+132            stats=stats,
+133            txn_ts=txn_ts,
+134            schema_version=schema_version,
+135        )
+136      elif code == "abort":
+137        abort = err["abort"] if "abort" in err else None
+138        raise AbortError(
+139            status_code=400,
+140            code=code,
+141            message=message,
+142            summary=summary,
+143            abort=abort,
+144            constraint_failures=constraint_failures,
+145            query_tags=query_tags,
+146            stats=stats,
+147            txn_ts=txn_ts,
+148            schema_version=schema_version,
+149        )
+150      elif code == "unauthorized":
+151        raise AuthenticationError(
+152            status_code=401,
+153            code=code,
+154            message=message,
+155            summary=summary,
+156            constraint_failures=constraint_failures,
+157            query_tags=query_tags,
+158            stats=stats,
+159            txn_ts=txn_ts,
+160            schema_version=schema_version,
+161        )
+162      elif code == "forbidden" and status_code == 403:
+163        raise AuthorizationError(
+164            status_code=403,
+165            code=code,
+166            message=message,
+167            summary=summary,
+168            constraint_failures=constraint_failures,
+169            query_tags=query_tags,
+170            stats=stats,
+171            txn_ts=txn_ts,
+172            schema_version=schema_version,
+173        )
+174      elif code == "method_not_allowed":
+175        raise QueryRuntimeError(
+176            status_code=405,
+177            code=code,
+178            message=message,
+179            summary=summary,
+180            constraint_failures=constraint_failures,
+181            query_tags=query_tags,
+182            stats=stats,
+183            txn_ts=txn_ts,
+184            schema_version=schema_version,
+185        )
+186      elif code == "conflict":
+187        raise ContendedTransactionError(
+188            status_code=409,
+189            code=code,
+190            message=message,
+191            summary=summary,
+192            constraint_failures=constraint_failures,
+193            query_tags=query_tags,
+194            stats=stats,
+195            txn_ts=txn_ts,
+196            schema_version=schema_version,
+197        )
+198      elif code == "request_size_exceeded":
+199        raise QueryRuntimeError(
+200            status_code=413,
+201            code=code,
+202            message=message,
+203            summary=summary,
+204            constraint_failures=constraint_failures,
+205            query_tags=query_tags,
+206            stats=stats,
+207            txn_ts=txn_ts,
+208            schema_version=schema_version,
+209        )
+210      elif code == "limit_exceeded":
+211        raise ThrottlingError(
+212            status_code=429,
+213            code=code,
+214            message=message,
+215            summary=summary,
+216            constraint_failures=constraint_failures,
+217            query_tags=query_tags,
+218            stats=stats,
+219            txn_ts=txn_ts,
+220            schema_version=schema_version,
+221        )
+222      elif code == "time_out":
+223        raise QueryTimeoutError(
+224            status_code=440,
+225            code=code,
+226            message=message,
+227            summary=summary,
+228            constraint_failures=constraint_failures,
+229            query_tags=query_tags,
+230            stats=stats,
+231            txn_ts=txn_ts,
+232            schema_version=schema_version,
+233        )
+234      else:
+235        raise QueryRuntimeError(
+236            status_code=status_code,
+237            code=code,
+238            message=message,
+239            summary=summary,
+240            constraint_failures=constraint_failures,
+241            query_tags=query_tags,
+242            stats=stats,
+243            txn_ts=txn_ts,
+244            schema_version=schema_version,
+245        )
+246    elif status_code == 500:
+247      raise ServiceInternalError(
+248          status_code=status_code,
+249          code=code,
+250          message=message,
+251          summary=summary,
+252          constraint_failures=constraint_failures,
+253          query_tags=query_tags,
+254          stats=stats,
+255          txn_ts=txn_ts,
+256          schema_version=schema_version,
+257      )
+258    elif status_code == 503:
+259      raise ServiceTimeoutError(
+260          status_code=status_code,
+261          code=code,
+262          message=message,
+263          summary=summary,
+264          constraint_failures=constraint_failures,
+265          query_tags=query_tags,
+266          stats=stats,
+267          txn_ts=txn_ts,
+268          schema_version=schema_version,
+269      )
+270    else:
+271      raise ServiceError(
+272          status_code=status_code,
+273          code=code,
+274          message=message,
+275          summary=summary,
+276          constraint_failures=constraint_failures,
+277          query_tags=query_tags,
+278          stats=stats,
+279          txn_ts=txn_ts,
+280          schema_version=schema_version,
+281      )
+
+ + + + +
+
+
Inherited Members
+
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
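The static parse_error_and_throw helper above maps an error body and HTTP status code onto a concrete exception class. A minimal sketch of that mapping follows; the body shape mirrors what the parser reads, the values are illustrative, and it assumes AbortError is importable from fauna.errors alongside FaunaError.

from fauna.errors import FaunaError, AbortError  # AbortError export assumed

body = {
    "error": {
        "code": "abort",
        "message": "Query aborted.",
        "abort": '{"reason": "out of stock"}',
    },
    "summary": "error: Query aborted.",
}

try:
    FaunaError.parse_error_and_throw(body, 400)
except AbortError as e:
    # A 4xx status with code == "abort" raises AbortError; the abort payload
    # and diagnostic fields are exposed on the exception.
    print(e.code, e.abort, e.summary)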
+ +
+ + class + ServiceError(FaunaError, fauna.encoding.wire_protocol.QueryInfo): + + + +
+ +
284class ServiceError(FaunaError, QueryInfo):
+285  """An error representing a query failure returned by Fauna."""
+286
+287  def __init__(
+288      self,
+289      status_code: int,
+290      code: str,
+291      message: str,
+292      summary: Optional[str] = None,
+293      abort: Optional[Any] = None,
+294      constraint_failures: Optional[List['ConstraintFailure']] = None,
+295      query_tags: Optional[Mapping[str, str]] = None,
+296      stats: Optional[QueryStats] = None,
+297      txn_ts: Optional[int] = None,
+298      schema_version: Optional[int] = None,
+299  ):
+300    QueryInfo.__init__(
+301        self,
+302        query_tags=query_tags,
+303        stats=stats,
+304        summary=summary,
+305        txn_ts=txn_ts,
+306        schema_version=schema_version,
+307    )
+308
+309    FaunaError.__init__(
+310        self,
+311        status_code=status_code,
+312        code=code,
+313        message=message,
+314        abort=abort,
+315        constraint_failures=constraint_failures,
+316    )
+317
+318  def __str__(self):
+319    constraint_str = "---"
+320    if self._constraint_failures:
+321      constraint_str = f"---\nconstraint failures: {self._constraint_failures}\n---"
+322
+323    return f"{self._status_code}: {self.code}\n{self.message}\n{constraint_str}\n{self.summary or ''}"
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+ +
+ + ServiceError( status_code: int, code: str, message: str, summary: Optional[str] = None, abort: Optional[Any] = None, constraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None, query_tags: Optional[Mapping[str, str]] = None, stats: Optional[fauna.encoding.wire_protocol.QueryStats] = None, txn_ts: Optional[int] = None, schema_version: Optional[int] = None) + + + +
+ +
287  def __init__(
+288      self,
+289      status_code: int,
+290      code: str,
+291      message: str,
+292      summary: Optional[str] = None,
+293      abort: Optional[Any] = None,
+294      constraint_failures: Optional[List['ConstraintFailure']] = None,
+295      query_tags: Optional[Mapping[str, str]] = None,
+296      stats: Optional[QueryStats] = None,
+297      txn_ts: Optional[int] = None,
+298      schema_version: Optional[int] = None,
+299  ):
+300    QueryInfo.__init__(
+301        self,
+302        query_tags=query_tags,
+303        stats=stats,
+304        summary=summary,
+305        txn_ts=txn_ts,
+306        schema_version=schema_version,
+307    )
+308
+309    FaunaError.__init__(
+310        self,
+311        status_code=status_code,
+312        code=code,
+313        message=message,
+314        abort=abort,
+315        constraint_failures=constraint_failures,
+316    )
+
+ + + + +
+
+
Inherited Members
+
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
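Because every query failure Fauna returns surfaces as a ServiceError subclass, a single except block can capture the diagnostic fields documented above. The sketch below assumes a Client configured elsewhere (secret and endpoint via environment or constructor arguments) and that ServiceError is importable from fauna.errors; the query itself is just an example that fails typechecking.

from fauna import fql
from fauna.client import Client
from fauna.errors import ServiceError  # export from fauna.errors assumed

client = Client()  # secret/endpoint assumed to be configured via environment

try:
    client.query(fql("undefined_function()"))
except ServiceError as e:
    # status_code, code and message come from FaunaError; summary, stats,
    # txn_ts and schema_version come from QueryInfo.
    print(e.status_code, e.code)
    print(e.summary)
    print(e.stats)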
+ +
+ + class + AbortError(ServiceError): + + + +
+ +
326class AbortError(ServiceError):
+327  pass
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + InvalidRequestError(ServiceError): + + + +
+ +
330class InvalidRequestError(ServiceError):
+331  pass
+
+ + +

An error representing a query failure returned by Fauna.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + QueryCheckError(ServiceError): + + + +
+ +
334class QueryCheckError(ServiceError):
+335  """An error due to a "compile-time" check of the query failing."""
+336  pass
+
+ + +

An error due to a "compile-time" check of the query failing.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ContendedTransactionError(ServiceError): + + + +
+ +
339class ContendedTransactionError(ServiceError):
+340  """Transaction is aborted due to concurrent modification."""
+341  pass
+
+ + +

Transaction is aborted due to concurrent modification.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + QueryRuntimeError(ServiceError): + + + +
+ +
344class QueryRuntimeError(ServiceError):
+345  """An error response that is the result of the query failing during execution.
+346    QueryRuntimeErrors occur when a bug in your query causes an invalid execution
+347    to be requested.
+348    The 'code' field will vary based on the specific error cause."""
+349  pass
+
+ + +

An error response that is the result of the query failing during execution. +QueryRuntimeErrors occur when a bug in your query causes an invalid execution +to be requested. +The 'code' field will vary based on the specific error cause.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + AuthenticationError(ServiceError): + + + +
+ +
352class AuthenticationError(ServiceError):
+353  """AuthenticationError indicates invalid credentials were used."""
+354  pass
+
+ + +

AuthenticationError indicates invalid credentials were used.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + AuthorizationError(ServiceError): + + + +
+ +
357class AuthorizationError(ServiceError):
+358  """AuthorizationError indicates the credentials used do not have
+359    permission to perform the requested action."""
+360  pass
+
+ + +

AuthorizationError indicates the credentials used do not have +permission to perform the requested action.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ThrottlingError(ServiceError, RetryableFaunaException): + + + +
+ +
363class ThrottlingError(ServiceError, RetryableFaunaException):
+364  """ThrottlingError indicates some capacity limit was exceeded
+365    and thus the request could not be served."""
+366  pass
+
+ + +

ThrottlingError indicates some capacity limit was exceeded +and thus the request could not be served.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
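ThrottlingError is the one ServiceError that also derives from RetryableFaunaException, signalling that the request can safely be retried. A small, hedged backoff sketch follows; do_query is a hypothetical callable standing in for an actual query call, and the delays are illustrative rather than the driver's defaults.

import time

from fauna.errors import RetryableFaunaException

def with_retries(do_query, max_attempts=3, base_delay=0.5):
    for attempt in range(1, max_attempts + 1):
        try:
            return do_query()
        except RetryableFaunaException:
            if attempt == max_attempts:
                raise
            # Simple exponential backoff between attempts.
            time.sleep(base_delay * (2 ** (attempt - 1)))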
+ +
+ + class + QueryTimeoutError(ServiceError): + + + +
+ +
369class QueryTimeoutError(ServiceError):
+370  """A failure due to the timeout being exceeded, but the timeout
+371    was set lower than the query's expected processing time.
+372    This response is distinguished from a ServiceTimeoutError
+373    in that a QueryTimeoutError shows Fauna behaving in an expected manner."""
+374  pass
+
+ + +

A failure due to the timeout being exceeded, but the timeout +was set lower than the query's expected processing time. +This response is distinguished from a ServiceTimeoutError +in that a QueryTimeoutError shows Fauna behaving in an expected manner.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ServiceInternalError(ServiceError): + + + +
+ +
377class ServiceInternalError(ServiceError):
+378  """ServiceInternalError indicates Fauna failed unexpectedly."""
+379  pass
+
+ + +

ServiceInternalError indicates Fauna failed unexpectedly.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ +
+ + class + ServiceTimeoutError(ServiceError): + + + +
+ +
382class ServiceTimeoutError(ServiceError):
+383  """ServiceTimeoutError indicates Fauna was not available to service
+384    the request before the timeout was reached."""
+385  pass
+
+ + +

ServiceTimeoutError indicates Fauna was not available to service +the request before the timeout was reached.

+
+ + +
+
Inherited Members
+
+
ServiceError
+
ServiceError
+ +
+
FaunaError
+
status_code
+
code
+
message
+
abort
+
constraint_failures
+
parse_error_and_throw
+ +
+
fauna.encoding.wire_protocol.QueryInfo
+
query_tags
+
summary
+
stats
+
txn_ts
+
schema_version
+ +
+
builtins.BaseException
+
with_traceback
+
args
+ +
+
+
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/http.html b/2.1.0/api/fauna/http.html new file mode 100644 index 00000000..8c163a79 --- /dev/null +++ b/2.1.0/api/fauna/http.html @@ -0,0 +1,244 @@ + + + + + + + fauna.http API documentation + + + + + + + + + +
+
+

+fauna.http

+ + + + + + +
1from .http_client import HTTPClient, HTTPResponse
+2from .httpx_client import HTTPXClient
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/http/http_client.html b/2.1.0/api/fauna/http/http_client.html new file mode 100644 index 00000000..261378ef --- /dev/null +++ b/2.1.0/api/fauna/http/http_client.html @@ -0,0 +1,783 @@ + + + + + + + fauna.http.http_client API documentation + + + + + + + + + +
+
+

+fauna.http.http_client

+ + + + + + +
 1import abc
+ 2import contextlib
+ 3from dataclasses import dataclass
+ 4from typing import Iterator, Mapping, Any
+ 5
+ 6
+ 7@dataclass(frozen=True)
+ 8class ErrorResponse:
+ 9  status_code: int
+10  error_code: str
+11  error_message: str
+12  summary: str
+13
+14
+15class HTTPResponse(abc.ABC):
+16
+17  @abc.abstractmethod
+18  def headers(self) -> Mapping[str, str]:
+19    pass
+20
+21  @abc.abstractmethod
+22  def status_code(self) -> int:
+23    pass
+24
+25  @abc.abstractmethod
+26  def json(self) -> Any:
+27    pass
+28
+29  @abc.abstractmethod
+30  def text(self) -> str:
+31    pass
+32
+33  @abc.abstractmethod
+34  def read(self) -> bytes:
+35    pass
+36
+37  @abc.abstractmethod
+38  def iter_bytes(self) -> Iterator[bytes]:
+39    pass
+40
+41  @abc.abstractmethod
+42  def close(self):
+43    pass
+44
+45  def __enter__(self):
+46    return self
+47
+48  def __exit__(self, exc_type, exc_val, exc_tb):
+49    self.close()
+50
+51
+52class HTTPClient(abc.ABC):
+53
+54  @abc.abstractmethod
+55  def request(
+56      self,
+57      method: str,
+58      url: str,
+59      headers: Mapping[str, str],
+60      data: Mapping[str, Any],
+61  ) -> HTTPResponse:
+62    pass
+63
+64  @abc.abstractmethod
+65  @contextlib.contextmanager
+66  def stream(
+67      self,
+68      url: str,
+69      headers: Mapping[str, str],
+70      data: Mapping[str, Any],
+71  ) -> Iterator[Any]:
+72    pass
+73
+74  @abc.abstractmethod
+75  def close(self):
+76    pass
+
+ + +
+
+ +
+
@dataclass(frozen=True)
+ + class + ErrorResponse: + + + +
+ +
 8@dataclass(frozen=True)
+ 9class ErrorResponse:
+10  status_code: int
+11  error_code: str
+12  error_message: str
+13  summary: str
+
+ + + + +
+
+ + ErrorResponse(status_code: int, error_code: str, error_message: str, summary: str) + + +
+ + + + +
+
+
+ status_code: int + + +
+ + + + +
+
+
+ error_code: str + + +
+ + + + +
+
+
+ error_message: str + + +
+ + + + +
+
+
+ summary: str + + +
+ + + + +
+
+
+ +
+ + class + HTTPResponse(abc.ABC): + + + +
+ +
16class HTTPResponse(abc.ABC):
+17
+18  @abc.abstractmethod
+19  def headers(self) -> Mapping[str, str]:
+20    pass
+21
+22  @abc.abstractmethod
+23  def status_code(self) -> int:
+24    pass
+25
+26  @abc.abstractmethod
+27  def json(self) -> Any:
+28    pass
+29
+30  @abc.abstractmethod
+31  def text(self) -> str:
+32    pass
+33
+34  @abc.abstractmethod
+35  def read(self) -> bytes:
+36    pass
+37
+38  @abc.abstractmethod
+39  def iter_bytes(self) -> Iterator[bytes]:
+40    pass
+41
+42  @abc.abstractmethod
+43  def close(self):
+44    pass
+45
+46  def __enter__(self):
+47    return self
+48
+49  def __exit__(self, exc_type, exc_val, exc_tb):
+50    self.close()
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + headers(self) -> Mapping[str, str]: + + + +
+ +
18  @abc.abstractmethod
+19  def headers(self) -> Mapping[str, str]:
+20    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + status_code(self) -> int: + + + +
+ +
22  @abc.abstractmethod
+23  def status_code(self) -> int:
+24    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + json(self) -> Any: + + + +
+ +
26  @abc.abstractmethod
+27  def json(self) -> Any:
+28    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + text(self) -> str: + + + +
+ +
30  @abc.abstractmethod
+31  def text(self) -> str:
+32    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + read(self) -> bytes: + + + +
+ +
34  @abc.abstractmethod
+35  def read(self) -> bytes:
+36    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + iter_bytes(self) -> Iterator[bytes]: + + + +
+ +
38  @abc.abstractmethod
+39  def iter_bytes(self) -> Iterator[bytes]:
+40    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + close(self): + + + +
+ +
42  @abc.abstractmethod
+43  def close(self):
+44    pass
+
+ + + + +
+
+
+ +
+ + class + HTTPClient(abc.ABC): + + + +
+ +
53class HTTPClient(abc.ABC):
+54
+55  @abc.abstractmethod
+56  def request(
+57      self,
+58      method: str,
+59      url: str,
+60      headers: Mapping[str, str],
+61      data: Mapping[str, Any],
+62  ) -> HTTPResponse:
+63    pass
+64
+65  @abc.abstractmethod
+66  @contextlib.contextmanager
+67  def stream(
+68      self,
+69      url: str,
+70      headers: Mapping[str, str],
+71      data: Mapping[str, Any],
+72  ) -> Iterator[Any]:
+73    pass
+74
+75  @abc.abstractmethod
+76  def close(self):
+77    pass
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + request( self, method: str, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> HTTPResponse: + + + +
+ +
55  @abc.abstractmethod
+56  def request(
+57      self,
+58      method: str,
+59      url: str,
+60      headers: Mapping[str, str],
+61      data: Mapping[str, Any],
+62  ) -> HTTPResponse:
+63    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+
@contextlib.contextmanager
+ + def + stream( self, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> Iterator[Any]: + + + +
+ +
65  @abc.abstractmethod
+66  @contextlib.contextmanager
+67  def stream(
+68      self,
+69      url: str,
+70      headers: Mapping[str, str],
+71      data: Mapping[str, Any],
+72  ) -> Iterator[Any]:
+73    pass
+
+ + + + +
+
+ +
+
@abc.abstractmethod
+ + def + close(self): + + + +
+ +
75  @abc.abstractmethod
+76  def close(self):
+77    pass
+
+ + + + +
+
+
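HTTPClient and HTTPResponse define the abstract surface the driver codes against, so an alternative HTTP library can be plugged in by overriding the methods above. The skeleton below only sketches the required overrides (bodies are stubs); a real adapter would delegate to an actual HTTP library the way HTTPXClient delegates to httpx.

import contextlib
from typing import Any, Iterator, Mapping

from fauna.http import HTTPClient, HTTPResponse

class MyResponse(HTTPResponse):
    def headers(self) -> Mapping[str, str]: ...
    def status_code(self) -> int: ...
    def json(self) -> Any: ...
    def text(self) -> str: ...
    def read(self) -> bytes: ...
    def iter_bytes(self) -> Iterator[bytes]: ...
    def close(self): ...

class MyClient(HTTPClient):
    def request(self, method: str, url: str, headers: Mapping[str, str],
                data: Mapping[str, Any]) -> HTTPResponse: ...

    @contextlib.contextmanager
    def stream(self, url: str, headers: Mapping[str, str],
               data: Mapping[str, Any]) -> Iterator[Any]:
        yield ...

    def close(self): ...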
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/http/httpx_client.html b/2.1.0/api/fauna/http/httpx_client.html new file mode 100644 index 00000000..4d4d5493 --- /dev/null +++ b/2.1.0/api/fauna/http/httpx_client.html @@ -0,0 +1,795 @@ + + + + + + + fauna.http.httpx_client API documentation + + + + + + + + + +
+
+

+fauna.http.httpx_client

+ + + + + + +
  1import json
+  2from contextlib import contextmanager
+  3from json import JSONDecodeError
+  4from typing import Mapping, Any, Optional, Iterator
+  5
+  6import httpx
+  7
+  8from fauna.errors import ClientError, NetworkError
+  9from fauna.http.http_client import HTTPResponse, HTTPClient
+ 10
+ 11
+ 12class HTTPXResponse(HTTPResponse):
+ 13
+ 14  def __init__(self, response: httpx.Response):
+ 15    self._r = response
+ 16
+ 17  def headers(self) -> Mapping[str, str]:
+ 18    h = {}
+ 19    for (k, v) in self._r.headers.items():
+ 20      h[k] = v
+ 21    return h
+ 22
+ 23  def json(self) -> Any:
+ 24    try:
+ 25      decoded = self._r.read().decode("utf-8")
+ 26      return json.loads(decoded)
+ 27    except (JSONDecodeError, UnicodeDecodeError) as e:
+ 28      raise ClientError(
+ 29          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+ 30      ) from e
+ 31
+ 32  def text(self) -> str:
+ 33    return str(self.read(), encoding='utf-8')
+ 34
+ 35  def status_code(self) -> int:
+ 36    return self._r.status_code
+ 37
+ 38  def read(self) -> bytes:
+ 39    return self._r.read()
+ 40
+ 41  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+ 42    return self._r.iter_bytes(size)
+ 43
+ 44  def close(self) -> None:
+ 45    try:
+ 46      self._r.close()
+ 47    except Exception as e:
+ 48      raise ClientError("Error closing response") from e
+ 49
+ 50
+ 51class HTTPXClient(HTTPClient):
+ 52
+ 53  def __init__(self, client: httpx.Client):
+ 54    super(HTTPXClient, self).__init__()
+ 55    self._c = client
+ 56
+ 57  def request(
+ 58      self,
+ 59      method: str,
+ 60      url: str,
+ 61      headers: Mapping[str, str],
+ 62      data: Mapping[str, Any],
+ 63  ) -> HTTPResponse:
+ 64
+ 65    try:
+ 66      request = self._c.build_request(
+ 67          method,
+ 68          url,
+ 69          json=data,
+ 70          headers=headers,
+ 71      )
+ 72    except httpx.InvalidURL as e:
+ 73      raise ClientError("Invalid URL Format") from e
+ 74
+ 75    try:
+ 76      return HTTPXResponse(self._c.send(
+ 77          request,
+ 78          stream=False,
+ 79      ))
+ 80    except (httpx.HTTPError, httpx.InvalidURL) as e:
+ 81      raise NetworkError("Exception re-raised from HTTP request") from e
+ 82
+ 83  @contextmanager
+ 84  def stream(
+ 85      self,
+ 86      url: str,
+ 87      headers: Mapping[str, str],
+ 88      data: Mapping[str, Any],
+ 89  ) -> Iterator[Any]:
+ 90    with self._c.stream(
+ 91        "POST", url=url, headers=headers, json=data) as response:
+ 92      yield self._transform(response)
+ 93
+ 94  def _transform(self, response):
+ 95    try:
+ 96      for line in response.iter_lines():
+ 97        yield json.loads(line)
+ 98    except httpx.ReadTimeout as e:
+ 99      raise NetworkError("Stream timeout") from e
+100    except (httpx.HTTPError, httpx.InvalidURL) as e:
+101      raise NetworkError("Exception re-raised from HTTP request") from e
+102
+103  def close(self):
+104    self._c.close()
+
+ + +
+
+ +
+ + class + HTTPXResponse(fauna.http.http_client.HTTPResponse): + + + +
+ +
13class HTTPXResponse(HTTPResponse):
+14
+15  def __init__(self, response: httpx.Response):
+16    self._r = response
+17
+18  def headers(self) -> Mapping[str, str]:
+19    h = {}
+20    for (k, v) in self._r.headers.items():
+21      h[k] = v
+22    return h
+23
+24  def json(self) -> Any:
+25    try:
+26      decoded = self._r.read().decode("utf-8")
+27      return json.loads(decoded)
+28    except (JSONDecodeError, UnicodeDecodeError) as e:
+29      raise ClientError(
+30          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+31      ) from e
+32
+33  def text(self) -> str:
+34    return str(self.read(), encoding='utf-8')
+35
+36  def status_code(self) -> int:
+37    return self._r.status_code
+38
+39  def read(self) -> bytes:
+40    return self._r.read()
+41
+42  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+43    return self._r.iter_bytes(size)
+44
+45  def close(self) -> None:
+46    try:
+47      self._r.close()
+48    except Exception as e:
+49      raise ClientError("Error closing response") from e
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+ + HTTPXResponse(response: httpx.Response) + + + +
+ +
15  def __init__(self, response: httpx.Response):
+16    self._r = response
+
+ + + + +
+
+ +
+ + def + headers(self) -> Mapping[str, str]: + + + +
+ +
18  def headers(self) -> Mapping[str, str]:
+19    h = {}
+20    for (k, v) in self._r.headers.items():
+21      h[k] = v
+22    return h
+
+ + + + +
+
+ +
+ + def + json(self) -> Any: + + + +
+ +
24  def json(self) -> Any:
+25    try:
+26      decoded = self._r.read().decode("utf-8")
+27      return json.loads(decoded)
+28    except (JSONDecodeError, UnicodeDecodeError) as e:
+29      raise ClientError(
+30          f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
+31      ) from e
+
+ + + + +
+
+ +
+ + def + text(self) -> str: + + + +
+ +
33  def text(self) -> str:
+34    return str(self.read(), encoding='utf-8')
+
+ + + + +
+
+ +
+ + def + status_code(self) -> int: + + + +
+ +
36  def status_code(self) -> int:
+37    return self._r.status_code
+
+ + + + +
+
+ +
+ + def + read(self) -> bytes: + + + +
+ +
39  def read(self) -> bytes:
+40    return self._r.read()
+
+ + + + +
+
+ +
+ + def + iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]: + + + +
+ +
42  def iter_bytes(self, size: Optional[int] = None) -> Iterator[bytes]:
+43    return self._r.iter_bytes(size)
+
+ + + + +
+
+ +
+ + def + close(self) -> None: + + + +
+ +
45  def close(self) -> None:
+46    try:
+47      self._r.close()
+48    except Exception as e:
+49      raise ClientError("Error closing response") from e
+
+ + + + +
+
+
+ +
+ + class + HTTPXClient(fauna.http.http_client.HTTPClient): + + + +
+ +
 52class HTTPXClient(HTTPClient):
+ 53
+ 54  def __init__(self, client: httpx.Client):
+ 55    super(HTTPXClient, self).__init__()
+ 56    self._c = client
+ 57
+ 58  def request(
+ 59      self,
+ 60      method: str,
+ 61      url: str,
+ 62      headers: Mapping[str, str],
+ 63      data: Mapping[str, Any],
+ 64  ) -> HTTPResponse:
+ 65
+ 66    try:
+ 67      request = self._c.build_request(
+ 68          method,
+ 69          url,
+ 70          json=data,
+ 71          headers=headers,
+ 72      )
+ 73    except httpx.InvalidURL as e:
+ 74      raise ClientError("Invalid URL Format") from e
+ 75
+ 76    try:
+ 77      return HTTPXResponse(self._c.send(
+ 78          request,
+ 79          stream=False,
+ 80      ))
+ 81    except (httpx.HTTPError, httpx.InvalidURL) as e:
+ 82      raise NetworkError("Exception re-raised from HTTP request") from e
+ 83
+ 84  @contextmanager
+ 85  def stream(
+ 86      self,
+ 87      url: str,
+ 88      headers: Mapping[str, str],
+ 89      data: Mapping[str, Any],
+ 90  ) -> Iterator[Any]:
+ 91    with self._c.stream(
+ 92        "POST", url=url, headers=headers, json=data) as response:
+ 93      yield self._transform(response)
+ 94
+ 95  def _transform(self, response):
+ 96    try:
+ 97      for line in response.iter_lines():
+ 98        yield json.loads(line)
+ 99    except httpx.ReadTimeout as e:
+100      raise NetworkError("Stream timeout") from e
+101    except (httpx.HTTPError, httpx.InvalidURL) as e:
+102      raise NetworkError("Exception re-raised from HTTP request") from e
+103
+104  def close(self):
+105    self._c.close()
+
+ + +

Helper class that provides a standard way to create an ABC using +inheritance.

+
+ + +
+ +
+ + HTTPXClient(client: httpx.Client) + + + +
+ +
54  def __init__(self, client: httpx.Client):
+55    super(HTTPXClient, self).__init__()
+56    self._c = client
+
+ + + + +
+
+ +
+ + def + request( self, method: str, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse: + + + +
+ +
58  def request(
+59      self,
+60      method: str,
+61      url: str,
+62      headers: Mapping[str, str],
+63      data: Mapping[str, Any],
+64  ) -> HTTPResponse:
+65
+66    try:
+67      request = self._c.build_request(
+68          method,
+69          url,
+70          json=data,
+71          headers=headers,
+72      )
+73    except httpx.InvalidURL as e:
+74      raise ClientError("Invalid URL Format") from e
+75
+76    try:
+77      return HTTPXResponse(self._c.send(
+78          request,
+79          stream=False,
+80      ))
+81    except (httpx.HTTPError, httpx.InvalidURL) as e:
+82      raise NetworkError("Exception re-raised from HTTP request") from e
+
+ + + + +
+
+ +
+
@contextmanager
+ + def + stream( self, url: str, headers: Mapping[str, str], data: Mapping[str, Any]) -> Iterator[Any]: + + + +
+ +
84  @contextmanager
+85  def stream(
+86      self,
+87      url: str,
+88      headers: Mapping[str, str],
+89      data: Mapping[str, Any],
+90  ) -> Iterator[Any]:
+91    with self._c.stream(
+92        "POST", url=url, headers=headers, json=data) as response:
+93      yield self._transform(response)
+
+ + + + +
+
+ +
+ + def + close(self): + + + +
+ +
104  def close(self):
+105    self._c.close()
+
+ + + + +
+
+
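A short sketch of driving HTTPXClient by hand follows. The endpoint path, headers, and request body are placeholders rather than the exact wire format the Fauna client sends; in normal use the driver constructs these for you.

import httpx

from fauna.http import HTTPXClient

http_client = HTTPXClient(httpx.Client())
try:
    # request() returns an HTTPXResponse, which inherits HTTPResponse's
    # context-manager behavior, so close() runs automatically on exit.
    with http_client.request(
        method="POST",
        url="https://db.fauna.com/query/1",   # placeholder endpoint
        headers={"Authorization": "Bearer <secret>"},  # placeholder token
        data={"query": "1 + 1"},               # illustrative body
    ) as response:
        print(response.status_code(), response.json())
finally:
    http_client.close()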
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/query.html b/2.1.0/api/fauna/query.html new file mode 100644 index 00000000..39aeb013 --- /dev/null +++ b/2.1.0/api/fauna/query.html @@ -0,0 +1,245 @@ + + + + + + + fauna.query API documentation + + + + + + + + + +
+
+

+fauna.query

+ + + + + + +
1from .models import Document, DocumentReference, NamedDocument, NamedDocumentReference, NullDocument, Module, Page
+2from .query_builder import fql, Query
+
+ + +
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/query/models.html b/2.1.0/api/fauna/query/models.html new file mode 100644 index 00000000..9e9f0290 --- /dev/null +++ b/2.1.0/api/fauna/query/models.html @@ -0,0 +1,1825 @@ + + + + + + + fauna.query.models API documentation + + + + + + + + + +
+
+

+fauna.query.models

+ + + + + + +
  1from collections.abc import Mapping
+  2from datetime import datetime
+  3from typing import Union, Iterator, Any, Optional, List
+  4
+  5
+  6class Page:
+  7  """A class representing a Set in Fauna."""
+  8
+  9  def __init__(self,
+ 10               data: Optional[List[Any]] = None,
+ 11               after: Optional[str] = None):
+ 12    self.data = data
+ 13    self.after = after
+ 14
+ 15  def __repr__(self):
+ 16    args = []
+ 17    if self.data is not None:
+ 18      args.append(f"data={repr(self.data)}")
+ 19
+ 20    if self.after is not None:
+ 21      args.append(f"after={repr(self.after)}")
+ 22
+ 23    return f"{self.__class__.__name__}({','.join(args)})"
+ 24
+ 25  def __iter__(self) -> Iterator[Any]:
+ 26    return iter(self.data or [])
+ 27
+ 28  def __eq__(self, other):
+ 29    return isinstance(
+ 30        other, Page) and self.data == other.data and self.after == other.after
+ 31
+ 32  def __hash__(self):
+ 33    return hash((type(self), self.data, self.after))
+ 34
+ 35  def __ne__(self, other):
+ 36    return not self.__eq__(other)
+ 37
+ 38
+ 39class StreamToken:
+ 40  """A class representing a Stream in Fauna."""
+ 41
+ 42  def __init__(self, token: str):
+ 43    self.token = token
+ 44
+ 45  def __eq__(self, other):
+ 46    return isinstance(other, StreamToken) and self.token == other.token
+ 47
+ 48  def __hash__(self):
+ 49    return hash(self.token)
+ 50
+ 51
+ 52class Module:
+ 53  """A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined
+ 54    collection, among others.
+ 55
+ 56    Usage:
+ 57
+ 58       dogs = Module("Dogs")
+ 59       query = fql("${col}.all", col=dogs)
+ 60    """
+ 61
+ 62  def __init__(self, name: str):
+ 63    self.name = name
+ 64
+ 65  def __repr__(self):
+ 66    return f"{self.__class__.__name__}(name={repr(self.name)})"
+ 67
+ 68  def __eq__(self, other):
+ 69    return isinstance(other, Module) and str(self) == str(other)
+ 70
+ 71  def __hash__(self):
+ 72    return hash(self.name)
+ 73
+ 74
+ 75class BaseReference:
+ 76  _collection: Module
+ 77
+ 78  @property
+ 79  def coll(self) -> Module:
+ 80    return self._collection
+ 81
+ 82  def __init__(self, coll: Union[str, Module]):
+ 83    if isinstance(coll, Module):
+ 84      self._collection = coll
+ 85    elif isinstance(coll, str):
+ 86      self._collection = Module(coll)
+ 87    else:
+ 88      raise TypeError(
+ 89          f"'coll' should be of type Module or str, but was {type(coll)}")
+ 90
+ 91  def __repr__(self):
+ 92    return f"{self.__class__.__name__}(coll={repr(self._collection)})"
+ 93
+ 94  def __eq__(self, other):
+ 95    return isinstance(other, type(self)) and str(self) == str(other)
+ 96
+ 97
+ 98class DocumentReference(BaseReference):
+ 99  """A class representing a reference to a :class:`Document` stored in Fauna.
+100    """
+101
+102  @property
+103  def id(self) -> str:
+104    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+105
+106        :rtype: str
+107        """
+108    return self._id
+109
+110  def __init__(self, coll: Union[str, Module], id: str):
+111    super().__init__(coll)
+112
+113    if not isinstance(id, str):
+114      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+115    self._id = id
+116
+117  def __hash__(self):
+118    return hash((type(self), self._collection, self._id))
+119
+120  def __repr__(self):
+121    return f"{self.__class__.__name__}(id={repr(self._id)},coll={repr(self._collection)})"
+122
+123  @staticmethod
+124  def from_string(ref: str):
+125    rs = ref.split(":")
+126    if len(rs) != 2:
+127      raise ValueError("Expects string of format <CollectionName>:<ID>")
+128    return DocumentReference(rs[0], rs[1])
+129
+130
+131class NamedDocumentReference(BaseReference):
+132  """A class representing a reference to a :class:`NamedDocument` stored in Fauna.
+133    """
+134
+135  @property
+136  def name(self) -> str:
+137    """The name of the :class:`NamedDocument`.
+138
+139        :rtype: str
+140        """
+141    return self._name
+142
+143  def __init__(self, coll: Union[str, Module], name: str):
+144    super().__init__(coll)
+145
+146    if not isinstance(name, str):
+147      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+148
+149    self._name = name
+150
+151  def __hash__(self):
+152    return hash((type(self), self._collection, self._name))
+153
+154  def __repr__(self):
+155    return f"{self.__class__.__name__}(name={repr(self._name)},coll={repr(self._collection)})"
+156
+157
+158class NullDocument:
+159
+160  @property
+161  def cause(self) -> Optional[str]:
+162    return self._cause
+163
+164  @property
+165  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+166    return self._ref
+167
+168  def __init__(
+169      self,
+170      ref: Union[DocumentReference, NamedDocumentReference],
+171      cause: Optional[str] = None,
+172  ):
+173    self._cause = cause
+174    self._ref = ref
+175
+176  def __repr__(self):
+177    return f"{self.__class__.__name__}(ref={repr(self.ref)},cause={repr(self._cause)})"
+178
+179  def __eq__(self, other):
+180    if not isinstance(other, type(self)):
+181      return False
+182
+183    return self.ref == other.ref and self.cause == other.cause
+184
+185  def __ne__(self, other):
+186    return not self == other
+187
+188
+189class BaseDocument(Mapping):
+190  """A base document class implementing an immutable mapping.
+191    """
+192
+193  def __init__(self, *args, **kwargs):
+194    self._store = dict(*args, **kwargs)
+195
+196  def __getitem__(self, __k: str) -> Any:
+197    return self._store[__k]
+198
+199  def __len__(self) -> int:
+200    return len(self._store)
+201
+202  def __iter__(self) -> Iterator[Any]:
+203    return iter(self._store)
+204
+205  def __eq__(self, other):
+206    if not isinstance(other, type(self)):
+207      return False
+208
+209    if len(self) != len(other):
+210      return False
+211
+212    for k, v in self.items():
+213      if k not in other:
+214        return False
+215      if self[k] != other[k]:
+216        return False
+217
+218    return True
+219
+220  def __ne__(self, other):
+221    return not self.__eq__(other)
+222
+223
+224class Document(BaseDocument):
+225  """A class representing a user document stored in Fauna.
+226
+227    User data should be stored directly on the map, while id, ts, and coll should only be stored on the related
+228    properties. When working with a :class:`Document` in code, it should be considered immutable.
+229    """
+230
+231  @property
+232  def id(self) -> str:
+233    return self._id
+234
+235  @property
+236  def ts(self) -> datetime:
+237    return self._ts
+238
+239  @property
+240  def coll(self) -> Module:
+241    return self._coll
+242
+243  def __init__(self,
+244               id: str,
+245               ts: datetime,
+246               coll: Union[str, Module],
+247               data: Optional[Mapping] = None):
+248    if not isinstance(id, str):
+249      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+250
+251    if not isinstance(ts, datetime):
+252      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+253
+254    if not (isinstance(coll, str) or isinstance(coll, Module)):
+255      raise TypeError(
+256          f"'coll' should be of type Module or str, but was {type(coll)}")
+257
+258    if isinstance(coll, str):
+259      coll = Module(coll)
+260
+261    self._id = id
+262    self._ts = ts
+263    self._coll = coll
+264
+265    super().__init__(data or {})
+266
+267  def __eq__(self, other):
+268    return type(self) == type(other) \
+269        and self.id == other.id \
+270        and self.coll == other.coll \
+271        and self.ts == other.ts \
+272        and super().__eq__(other)
+273
+274  def __ne__(self, other):
+275    return not self.__eq__(other)
+276
+277  def __repr__(self):
+278    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+279
+280    return f"{self.__class__.__name__}(" \
+281           f"id={repr(self.id)}," \
+282           f"coll={repr(self.coll)}," \
+283           f"ts={repr(self.ts)}," \
+284           f"data={{{kvs}}})"
+285
+286
+287class NamedDocument(BaseDocument):
+288  """A class representing a named document stored in Fauna. Examples of named documents include Collection
+289    definitions, Index definitions, and Roles, among others.
+290
+291    When working with a :class:`NamedDocument` in code, it should be considered immutable.
+292    """
+293
+294  @property
+295  def name(self) -> str:
+296    return self._name
+297
+298  @property
+299  def ts(self) -> datetime:
+300    return self._ts
+301
+302  @property
+303  def coll(self) -> Module:
+304    return self._coll
+305
+306  def __init__(self,
+307               name: str,
+308               ts: datetime,
+309               coll: Union[Module, str],
+310               data: Optional[Mapping] = None):
+311    if not isinstance(name, str):
+312      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+313
+314    if not isinstance(ts, datetime):
+315      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+316
+317    if not (isinstance(coll, str) or isinstance(coll, Module)):
+318      raise TypeError(
+319          f"'coll' should be of type Module or str, but was {type(coll)}")
+320
+321    if isinstance(coll, str):
+322      coll = Module(coll)
+323
+324    self._name = name
+325    self._ts = ts
+326    self._coll = coll
+327
+328    super().__init__(data or {})
+329
+330  def __eq__(self, other):
+331    return type(self) == type(other) \
+332        and self.name == other.name \
+333        and self.coll == other.coll \
+334        and self.ts == other.ts \
+335        and super().__eq__(other)
+336
+337  def __ne__(self, other):
+338    return not self.__eq__(other)
+339
+340  def __repr__(self):
+341    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+342
+343    return f"{self.__class__.__name__}(" \
+344           f"name={repr(self.name)}," \
+345           f"coll={repr(self.coll)}," \
+346           f"ts={repr(self.ts)}," \
+347           f"data={{{kvs}}})"
+
+ + +
+
+ +
+ + class + Page: + + + +
+ +
 7class Page:
+ 8  """A class representing a Set in Fauna."""
+ 9
+10  def __init__(self,
+11               data: Optional[List[Any]] = None,
+12               after: Optional[str] = None):
+13    self.data = data
+14    self.after = after
+15
+16  def __repr__(self):
+17    args = []
+18    if self.data is not None:
+19      args.append(f"data={repr(self.data)}")
+20
+21    if self.after is not None:
+22      args.append(f"after={repr(self.after)}")
+23
+24    return f"{self.__class__.__name__}({','.join(args)})"
+25
+26  def __iter__(self) -> Iterator[Any]:
+27    return iter(self.data or [])
+28
+29  def __eq__(self, other):
+30    return isinstance(
+31        other, Page) and self.data == other.data and self.after == other.after
+32
+33  def __hash__(self):
+34    return hash((type(self), self.data, self.after))
+35
+36  def __ne__(self, other):
+37    return not self.__eq__(other)
+
+ + +

A class representing a Set in Fauna.

+
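A minimal sketch of building and iterating a Page by hand (the values and cursor below are hypothetical; in practice Page instances are typically produced from query responses):

from fauna.query import Page

# A page of results plus an opaque cursor for the next page (or None).
page = Page(data=[{"name": "Fido"}, {"name": "Rex"}], after="next_page_cursor")

for item in page:      # __iter__ walks the items in data
    print(item)

print(page.after)      # "next_page_cursor"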
+ + +
+ +
+ + Page(data: Optional[List[Any]] = None, after: Optional[str] = None) + + + +
+ +
10  def __init__(self,
+11               data: Optional[List[Any]] = None,
+12               after: Optional[str] = None):
+13    self.data = data
+14    self.after = after
+
+ + + + +
+
+
+ data + + +
+ + + + +
+
+
+ after + + +
+ + + + +
+
+
+ +
+ + class + StreamToken: + + + +
+ +
40class StreamToken:
+41  """A class representing a Stream in Fauna."""
+42
+43  def __init__(self, token: str):
+44    self.token = token
+45
+46  def __eq__(self, other):
+47    return isinstance(other, StreamToken) and self.token == other.token
+48
+49  def __hash__(self):
+50    return hash(self.token)
+
+ + +

A class representing a Stream in Fauna.

+
+ + +
+ +
+ + StreamToken(token: str) + + + +
+ +
43  def __init__(self, token: str):
+44    self.token = token
+
+ + + + +
+
+
+ token + + +
+ + + + +
+
+
+ +
+ + class + Module: + + + +
+ +
53class Module:
+54  """A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined
+55    collection, among others.
+56
+57    Usage:
+58
+59       dogs = Module("Dogs")
+60       query = fql("${col}.all", col=dogs)
+61    """
+62
+63  def __init__(self, name: str):
+64    self.name = name
+65
+66  def __repr__(self):
+67    return f"{self.__class__.__name__}(name={repr(self.name)})"
+68
+69  def __eq__(self, other):
+70    return isinstance(other, Module) and str(self) == str(other)
+71
+72  def __hash__(self):
+73    return hash(self.name)
+
+ + +

A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined collection, among others.

+ +

Usage:

+ +

dogs = Module("Dogs")
query = fql("${col}.all", col=dogs)

+
+ + +
+ +
+ + Module(name: str) + + + +
+ +
63  def __init__(self, name: str):
+64    self.name = name
+
+ + + + +
+
+
+ name + + +
+ + + + +
+
+
+ +
+ + class + BaseReference: + + + +
+ +
76class BaseReference:
+77  _collection: Module
+78
+79  @property
+80  def coll(self) -> Module:
+81    return self._collection
+82
+83  def __init__(self, coll: Union[str, Module]):
+84    if isinstance(coll, Module):
+85      self._collection = coll
+86    elif isinstance(coll, str):
+87      self._collection = Module(coll)
+88    else:
+89      raise TypeError(
+90          f"'coll' should be of type Module or str, but was {type(coll)}")
+91
+92  def __repr__(self):
+93    return f"{self.__class__.__name__}(coll={repr(self._collection)})"
+94
+95  def __eq__(self, other):
+96    return isinstance(other, type(self)) and str(self) == str(other)
+
+ + + + +
+ +
+ + BaseReference(coll: Union[str, Module]) + + + +
+ +
83  def __init__(self, coll: Union[str, Module]):
+84    if isinstance(coll, Module):
+85      self._collection = coll
+86    elif isinstance(coll, str):
+87      self._collection = Module(coll)
+88    else:
+89      raise TypeError(
+90          f"'coll' should be of type Module or str, but was {type(coll)}")
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
79  @property
+80  def coll(self) -> Module:
+81    return self._collection
+
+ + + + +
+
+
+ +
+ + class + DocumentReference(BaseReference): + + + +
+ +
 99class DocumentReference(BaseReference):
+100  """A class representing a reference to a :class:`Document` stored in Fauna.
+101    """
+102
+103  @property
+104  def id(self) -> str:
+105    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+106
+107        :rtype: str
+108        """
+109    return self._id
+110
+111  def __init__(self, coll: Union[str, Module], id: str):
+112    super().__init__(coll)
+113
+114    if not isinstance(id, str):
+115      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+116    self._id = id
+117
+118  def __hash__(self):
+119    return hash((type(self), self._collection, self._id))
+120
+121  def __repr__(self):
+122    return f"{self.__class__.__name__}(id={repr(self._id)},coll={repr(self._collection)})"
+123
+124  @staticmethod
+125  def from_string(ref: str):
+126    rs = ref.split(":")
+127    if len(rs) != 2:
+128      raise ValueError("Expects string of format <CollectionName>:<ID>")
+129    return DocumentReference(rs[0], rs[1])
+
+ + +

A class representing a reference to a Document stored in Fauna.

+
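A small sketch, using a hypothetical collection name and ID, showing that a reference can be built from its parts or parsed from a "<CollectionName>:<ID>" string:

from fauna.query import DocumentReference, Module

ref = DocumentReference("Product", "388093019035992065")
same = DocumentReference.from_string("Product:388093019035992065")

assert ref == same                     # equality is based on type and repr
assert ref.coll == Module("Product")   # string collection names are wrapped in Module
assert ref.id == "388093019035992065"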
+ + +
+ +
+ + DocumentReference(coll: Union[str, Module], id: str) + + + +
+ +
111  def __init__(self, coll: Union[str, Module], id: str):
+112    super().__init__(coll)
+113
+114    if not isinstance(id, str):
+115      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+116    self._id = id
+
+ + + + +
+
+ +
+ id: str + + + +
+ +
103  @property
+104  def id(self) -> str:
+105    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.
+106
+107        :rtype: str
+108        """
+109    return self._id
+
+ + +

The ID for the Document. Valid IDs are 64-bit integers, stored as strings.

+
+ + +
+
+ +
+
@staticmethod
+ + def + from_string(ref: str): + + + +
+ +
124  @staticmethod
+125  def from_string(ref: str):
+126    rs = ref.split(":")
+127    if len(rs) != 2:
+128      raise ValueError("Expects string of format <CollectionName>:<ID>")
+129    return DocumentReference(rs[0], rs[1])
+
+ + + + +
+
+
Inherited Members
+
+
BaseReference
+
coll
+ +
+
+
+
+
+ +
+ + class + NamedDocumentReference(BaseReference): + + + +
+ +
132class NamedDocumentReference(BaseReference):
+133  """A class representing a reference to a :class:`NamedDocument` stored in Fauna.
+134    """
+135
+136  @property
+137  def name(self) -> str:
+138    """The name of the :class:`NamedDocument`.
+139
+140        :rtype: str
+141        """
+142    return self._name
+143
+144  def __init__(self, coll: Union[str, Module], name: str):
+145    super().__init__(coll)
+146
+147    if not isinstance(name, str):
+148      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+149
+150    self._name = name
+151
+152  def __hash__(self):
+153    return hash((type(self), self._collection, self._name))
+154
+155  def __repr__(self):
+156    return f"{self.__class__.__name__}(name={repr(self._name)},coll={repr(self._collection)})"
+
+ + +

A class representing a reference to a NamedDocument stored in Fauna.

+
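For example (hypothetical names), a reference to the Collection definition named "Product" could be written as:

from fauna.query import NamedDocumentReference

ref = NamedDocumentReference("Collection", "Product")
assert ref.name == "Product"
assert ref.coll.name == "Collection"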
+ + +
+ +
+ + NamedDocumentReference(coll: Union[str, Module], name: str) + + + +
+ +
144  def __init__(self, coll: Union[str, Module], name: str):
+145    super().__init__(coll)
+146
+147    if not isinstance(name, str):
+148      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+149
+150    self._name = name
+
+ + + + +
+
+ +
+ name: str + + + +
+ +
136  @property
+137  def name(self) -> str:
+138    """The name of the :class:`NamedDocument`.
+139
+140        :rtype: str
+141        """
+142    return self._name
+
+ + +

The name of the NamedDocument.

+
+ + +
+
+
Inherited Members
+
+
BaseReference
+
coll
+ +
+
+
+
+
+ +
+ + class + NullDocument: + + + +
+ +
159class NullDocument:
+160
+161  @property
+162  def cause(self) -> Optional[str]:
+163    return self._cause
+164
+165  @property
+166  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+167    return self._ref
+168
+169  def __init__(
+170      self,
+171      ref: Union[DocumentReference, NamedDocumentReference],
+172      cause: Optional[str] = None,
+173  ):
+174    self._cause = cause
+175    self._ref = ref
+176
+177  def __repr__(self):
+178    return f"{self.__class__.__name__}(ref={repr(self.ref)},cause={repr(self._cause)})"
+179
+180  def __eq__(self, other):
+181    if not isinstance(other, type(self)):
+182      return False
+183
+184    return self.ref == other.ref and self.cause == other.cause
+185
+186  def __ne__(self, other):
+187    return not self == other
+
+ + + + +
+ +
+ + NullDocument( ref: Union[DocumentReference, NamedDocumentReference], cause: Optional[str] = None) + + + +
+ +
169  def __init__(
+170      self,
+171      ref: Union[DocumentReference, NamedDocumentReference],
+172      cause: Optional[str] = None,
+173  ):
+174    self._cause = cause
+175    self._ref = ref
+
+ + + + +
+
+ +
+ cause: Optional[str] + + + +
+ +
161  @property
+162  def cause(self) -> Optional[str]:
+163    return self._cause
+
+ + + + +
+
+ +
+ ref: Union[DocumentReference, NamedDocumentReference] + + + +
+ +
165  @property
+166  def ref(self) -> Union[DocumentReference, NamedDocumentReference]:
+167    return self._ref
+
+ + + + +
+
+
+ +
+ + class + BaseDocument(collections.abc.Mapping): + + + +
+ +
190class BaseDocument(Mapping):
+191  """A base document class implementing an immutable mapping.
+192    """
+193
+194  def __init__(self, *args, **kwargs):
+195    self._store = dict(*args, **kwargs)
+196
+197  def __getitem__(self, __k: str) -> Any:
+198    return self._store[__k]
+199
+200  def __len__(self) -> int:
+201    return len(self._store)
+202
+203  def __iter__(self) -> Iterator[Any]:
+204    return iter(self._store)
+205
+206  def __eq__(self, other):
+207    if not isinstance(other, type(self)):
+208      return False
+209
+210    if len(self) != len(other):
+211      return False
+212
+213    for k, v in self.items():
+214      if k not in other:
+215        return False
+216      if self[k] != other[k]:
+217        return False
+218
+219    return True
+220
+221  def __ne__(self, other):
+222    return not self.__eq__(other)
+
+ + +

A base document class implementing an immutable mapping.

+
+ + +
+ +
+ + BaseDocument(*args, **kwargs) + + + +
+ +
194  def __init__(self, *args, **kwargs):
+195    self._store = dict(*args, **kwargs)
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ +
+ + class + Document(BaseDocument): + + + +
+ +
225class Document(BaseDocument):
+226  """A class representing a user document stored in Fauna.
+227
+228    User data should be stored directly on the map, while id, ts, and coll should only be stored on the related
+229    properties. When working with a :class:`Document` in code, it should be considered immutable.
+230    """
+231
+232  @property
+233  def id(self) -> str:
+234    return self._id
+235
+236  @property
+237  def ts(self) -> datetime:
+238    return self._ts
+239
+240  @property
+241  def coll(self) -> Module:
+242    return self._coll
+243
+244  def __init__(self,
+245               id: str,
+246               ts: datetime,
+247               coll: Union[str, Module],
+248               data: Optional[Mapping] = None):
+249    if not isinstance(id, str):
+250      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+251
+252    if not isinstance(ts, datetime):
+253      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+254
+255    if not (isinstance(coll, str) or isinstance(coll, Module)):
+256      raise TypeError(
+257          f"'coll' should be of type Module or str, but was {type(coll)}")
+258
+259    if isinstance(coll, str):
+260      coll = Module(coll)
+261
+262    self._id = id
+263    self._ts = ts
+264    self._coll = coll
+265
+266    super().__init__(data or {})
+267
+268  def __eq__(self, other):
+269    return type(self) == type(other) \
+270        and self.id == other.id \
+271        and self.coll == other.coll \
+272        and self.ts == other.ts \
+273        and super().__eq__(other)
+274
+275  def __ne__(self, other):
+276    return not self.__eq__(other)
+277
+278  def __repr__(self):
+279    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+280
+281    return f"{self.__class__.__name__}(" \
+282           f"id={repr(self.id)}," \
+283           f"coll={repr(self.coll)}," \
+284           f"ts={repr(self.ts)}," \
+285           f"data={{{kvs}}})"
+
+ + +

A class representing a user document stored in Fauna.

+ +

User data should be stored directly on the map, while id, ts, and coll should only be stored on the related properties. When working with a Document in code, it should be considered immutable.

+
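A hedged sketch of constructing a Document by hand and reading it like a mapping (the ID and field values are hypothetical; the driver normally builds these from query responses):

from datetime import datetime, timezone
from fauna.query import Document, Module

doc = Document(
    id="388093019035992065",
    ts=datetime(2023, 1, 1, tzinfo=timezone.utc),
    coll="Product",                      # plain strings are coerced to Module
    data={"name": "Widget", "price": 10},
)

assert doc.coll == Module("Product")
assert doc["name"] == "Widget"           # user data is exposed via the mapping
assert list(doc.keys()) == ["name", "price"]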
+ + +
+ +
+ + Document( id: str, ts: datetime.datetime, coll: Union[str, Module], data: Optional[collections.abc.Mapping] = None) + + + +
+ +
244  def __init__(self,
+245               id: str,
+246               ts: datetime,
+247               coll: Union[str, Module],
+248               data: Optional[Mapping] = None):
+249    if not isinstance(id, str):
+250      raise TypeError(f"'id' should be of type str, but was {type(id)}")
+251
+252    if not isinstance(ts, datetime):
+253      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+254
+255    if not (isinstance(coll, str) or isinstance(coll, Module)):
+256      raise TypeError(
+257          f"'coll' should be of type Module or str, but was {type(coll)}")
+258
+259    if isinstance(coll, str):
+260      coll = Module(coll)
+261
+262    self._id = id
+263    self._ts = ts
+264    self._coll = coll
+265
+266    super().__init__(data or {})
+
+ + + + +
+
+ +
+ id: str + + + +
+ +
232  @property
+233  def id(self) -> str:
+234    return self._id
+
+ + + + +
+
+ +
+ ts: datetime.datetime + + + +
+ +
236  @property
+237  def ts(self) -> datetime:
+238    return self._ts
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
240  @property
+241  def coll(self) -> Module:
+242    return self._coll
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ +
+ + class + NamedDocument(BaseDocument): + + + +
+ +
288class NamedDocument(BaseDocument):
+289  """A class representing a named document stored in Fauna. Examples of named documents include Collection
+290    definitions, Index definitions, and Roles, among others.
+291
+292    When working with a :class:`NamedDocument` in code, it should be considered immutable.
+293    """
+294
+295  @property
+296  def name(self) -> str:
+297    return self._name
+298
+299  @property
+300  def ts(self) -> datetime:
+301    return self._ts
+302
+303  @property
+304  def coll(self) -> Module:
+305    return self._coll
+306
+307  def __init__(self,
+308               name: str,
+309               ts: datetime,
+310               coll: Union[Module, str],
+311               data: Optional[Mapping] = None):
+312    if not isinstance(name, str):
+313      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+314
+315    if not isinstance(ts, datetime):
+316      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+317
+318    if not (isinstance(coll, str) or isinstance(coll, Module)):
+319      raise TypeError(
+320          f"'coll' should be of type Module or str, but was {type(coll)}")
+321
+322    if isinstance(coll, str):
+323      coll = Module(coll)
+324
+325    self._name = name
+326    self._ts = ts
+327    self._coll = coll
+328
+329    super().__init__(data or {})
+330
+331  def __eq__(self, other):
+332    return type(self) == type(other) \
+333        and self.name == other.name \
+334        and self.coll == other.coll \
+335        and self.ts == other.ts \
+336        and super().__eq__(other)
+337
+338  def __ne__(self, other):
+339    return not self.__eq__(other)
+340
+341  def __repr__(self):
+342    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])
+343
+344    return f"{self.__class__.__name__}(" \
+345           f"name={repr(self.name)}," \
+346           f"coll={repr(self.coll)}," \
+347           f"ts={repr(self.ts)}," \
+348           f"data={{{kvs}}})"
+
+ + +

A class representing a named document stored in Fauna. Examples of named documents include Collection definitions, Index definitions, and Roles, among others.

+ +

When working with a NamedDocument in code, it should be considered immutable.

+
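For instance (hypothetical values), a Collection definition fetched from Fauna could be represented as:

from datetime import datetime, timezone
from fauna.query import NamedDocument

coll_def = NamedDocument(
    name="Product",
    ts=datetime(2023, 1, 1, tzinfo=timezone.utc),
    coll="Collection",
)

assert coll_def.name == "Product"
assert len(coll_def) == 0   # no additional field data was provided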
+ + +
+ +
+ + NamedDocument( name: str, ts: datetime.datetime, coll: Union[Module, str], data: Optional[collections.abc.Mapping] = None) + + + +
+ +
307  def __init__(self,
+308               name: str,
+309               ts: datetime,
+310               coll: Union[Module, str],
+311               data: Optional[Mapping] = None):
+312    if not isinstance(name, str):
+313      raise TypeError(f"'name' should be of type str, but was {type(name)}")
+314
+315    if not isinstance(ts, datetime):
+316      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")
+317
+318    if not (isinstance(coll, str) or isinstance(coll, Module)):
+319      raise TypeError(
+320          f"'coll' should be of type Module or str, but was {type(coll)}")
+321
+322    if isinstance(coll, str):
+323      coll = Module(coll)
+324
+325    self._name = name
+326    self._ts = ts
+327    self._coll = coll
+328
+329    super().__init__(data or {})
+
+ + + + +
+
+ +
+ name: str + + + +
+ +
295  @property
+296  def name(self) -> str:
+297    return self._name
+
+ + + + +
+
+ +
+ ts: datetime.datetime + + + +
+ +
299  @property
+300  def ts(self) -> datetime:
+301    return self._ts
+
+ + + + +
+
+ +
+ coll: Module + + + +
+ +
303  @property
+304  def coll(self) -> Module:
+305    return self._coll
+
+ + + + +
+
+
Inherited Members
+
+
collections.abc.Mapping
+
get
+
keys
+
items
+
values
+ +
+
+
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/query/query_builder.html b/2.1.0/api/fauna/query/query_builder.html new file mode 100644 index 00000000..35841807 --- /dev/null +++ b/2.1.0/api/fauna/query/query_builder.html @@ -0,0 +1,831 @@ + + + + + + + fauna.query.query_builder API documentation + + + + + + + + + +
+
+

+fauna.query.query_builder

+ + + + + + +
  1import abc
+  2from typing import Any, Optional, List
+  3
+  4from .template import FaunaTemplate
+  5
+  6
+  7class Fragment(abc.ABC):
+  8  """An abstract class representing a Fragment of a query.
+  9    """
+ 10
+ 11  @abc.abstractmethod
+ 12  def get(self) -> Any:
+ 13    """An abstract method for returning a stored value.
+ 14        """
+ 15    pass
+ 16
+ 17
+ 18class ValueFragment(Fragment):
+ 19  """A concrete :class:`Fragment` representing a part of a query that can represent a template variable.
+ 20    For example, if a template contains a variable ``${foo}``, and an object ``{ "prop": 1 }`` is provided for foo,
+ 21    then ``{ "prop": 1 }`` should be wrapped as a :class:`ValueFragment`.
+ 22
+ 23    :param Any val: The value to be used as a fragment.
+ 24    """
+ 25
+ 26  def __init__(self, val: Any):
+ 27    self._val = val
+ 28
+ 29  def get(self) -> Any:
+ 30    """Gets the stored value.
+ 31
+ 32        :returns: The stored value.
+ 33        """
+ 34    return self._val
+ 35
+ 36
+ 37class LiteralFragment(Fragment):
+ 38  """A concrete :class:`Fragment` representing a query literal. For example, in the template ```let x = ${foo}```,
+ 39    the portion ```let x = ``` is a query literal and should be wrapped as a :class:`LiteralFragment`.
+ 40
+ 41    :param str val: The query literal to be used as a fragment.
+ 42    """
+ 43
+ 44  def __init__(self, val: str):
+ 45    self._val = val
+ 46
+ 47  def get(self) -> str:
+ 48    """Returns the stored value.
+ 49
+ 50        :returns: The stored value.
+ 51        """
+ 52    return self._val
+ 53
+ 54
+ 55class Query:
+ 56  """A class for representing a query.
+ 57
+ 58       e.g. { "fql": [...] }
+ 59    """
+ 60  _fragments: List[Fragment]
+ 61
+ 62  def __init__(self, fragments: Optional[List[Fragment]] = None):
+ 63    self._fragments = fragments or []
+ 64
+ 65  @property
+ 66  def fragments(self) -> List[Fragment]:
+ 67    """The list of stored Fragments"""
+ 68    return self._fragments
+ 69
+ 70  def __str__(self) -> str:
+ 71    res = ""
+ 72    for f in self._fragments:
+ 73      res += str(f.get())
+ 74
+ 75    return res
+ 76
+ 77
+ 78def fql(query: str, **kwargs: Any) -> Query:
+ 79  """Creates a Query - capable of performing query composition and simple querying. It can accept a
+ 80    simple string query, or can perform composition using ``${}`` sigil string template with ``**kwargs`` as
+ 81    substitutions.
+ 82
+ 83    The ``**kwargs`` can be Fauna data types - such as strings, document references, or modules - and embedded
+ 84    Query - allowing you to compose arbitrarily complex queries.
+ 85
+ 86    When providing ``**kwargs``, the following types are accepted:
+ 87        - :class:`str`, :class:`int`, :class:`float`, :class:`bool`, :class:`datetime.datetime`, :class:`datetime.date`,
+ 88          :class:`dict`, :class:`list`, :class:`Query`, :class:`DocumentReference`, :class:`Module`
+ 89
+ 90    :raises ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
+ 91    :returns: A :class:`Query` that can be passed to the client for evaluation against Fauna.
+ 92
+ 93    Examples:
+ 94
+ 95    .. code-block:: python
+ 96        :name: Simple-FQL-Example
+ 97        :caption: Simple query declaration using this function.
+ 98
+ 99        fql('Dogs.byName("Fido")')
+100
+101    .. code-block:: python
+102        :name: Composition-FQL-Example
+103        :caption: Query composition using this function.
+104
+105        def get_dog(id):
+106            return fql('Dogs.byId(${id})', id=id)
+107
+108        def get_vet_phone(id):
+109            return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+110
+111        get_vet_phone('d123')
+112
+113    """
+114
+115  fragments: List[Any] = []
+116  template = FaunaTemplate(query)
+117  for text, field_name in template.iter():
+118    if text is not None and len(text) > 0:
+119      fragments.append(LiteralFragment(text))
+120
+121    if field_name is not None:
+122      if field_name not in kwargs:
+123        raise ValueError(
+124            f"template variable `{field_name}` not found in provided kwargs")
+125
+126      # TODO: Reject if it's already a fragment, or accept *Fragment? Decide on API here
+127      fragments.append(ValueFragment(kwargs[field_name]))
+128  return Query(fragments)
+
+ + +
+
+ +
+ + class + Fragment(abc.ABC): + + + +
+ +
 8class Fragment(abc.ABC):
+ 9  """An abstract class representing a Fragment of a query.
+10    """
+11
+12  @abc.abstractmethod
+13  def get(self) -> Any:
+14    """An abstract method for returning a stored value.
+15        """
+16    pass
+
+ + +

An abstract class representing a Fragment of a query.

+
+ + +
+ +
+
@abc.abstractmethod
+ + def + get(self) -> Any: + + + +
+ +
12  @abc.abstractmethod
+13  def get(self) -> Any:
+14    """An abstract method for returning a stored value.
+15        """
+16    pass
+
+ + +

An abstract method for returning a stored value.

+
+ + +
+
+
+ +
+ + class + ValueFragment(Fragment): + + + +
+ +
19class ValueFragment(Fragment):
+20  """A concrete :class:`Fragment` representing a part of a query that can represent a template variable.
+21    For example, if a template contains a variable ``${foo}``, and an object ``{ "prop": 1 }`` is provided for foo,
+22    then ``{ "prop": 1 }`` should be wrapped as a :class:`ValueFragment`.
+23
+24    :param Any val: The value to be used as a fragment.
+25    """
+26
+27  def __init__(self, val: Any):
+28    self._val = val
+29
+30  def get(self) -> Any:
+31    """Gets the stored value.
+32
+33        :returns: The stored value.
+34        """
+35    return self._val
+
+ + +

A concrete Fragment representing a part of a query that can represent a template variable. For example, if a template contains a variable ${foo}, and an object { "prop": 1 } is provided for foo, then { "prop": 1 } should be wrapped as a ValueFragment.

+ +
Parameters
+ +
    +
  • Any val: The value to be used as a fragment.
  • +
+
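A minimal illustration of the wrapping described above:

from fauna.query.query_builder import ValueFragment

frag = ValueFragment({"prop": 1})
assert frag.get() == {"prop": 1}   # the stored value is returned unchanged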
+ + +
+ +
+ + ValueFragment(val: Any) + + + +
+ +
27  def __init__(self, val: Any):
+28    self._val = val
+
+ + + + +
+
+ +
+ + def + get(self) -> Any: + + + +
+ +
30  def get(self) -> Any:
+31    """Gets the stored value.
+32
+33        :returns: The stored value.
+34        """
+35    return self._val
+
+ + +

Gets the stored value.

+ +

:returns: The stored value.

+
+ + +
+
+
+ +
+ + class + LiteralFragment(Fragment): + + + +
+ +
38class LiteralFragment(Fragment):
+39  """A concrete :class:`Fragment` representing a query literal. For example, in the template ```let x = ${foo}```,
+40    the portion ```let x = ``` is a query literal and should be wrapped as a :class:`LiteralFragment`.
+41
+42    :param str val: The query literal to be used as a fragment.
+43    """
+44
+45  def __init__(self, val: str):
+46    self._val = val
+47
+48  def get(self) -> str:
+49    """Returns the stored value.
+50
+51        :returns: The stored value.
+52        """
+53    return self._val
+
+ + +

A concrete Fragment representing a query literal. For example, in the template let x = ${foo}, the portion let x = is a query literal and should be wrapped as a LiteralFragment.

+ +
Parameters
+ +
    +
  • str val: The query literal to be used as a fragment.
  • +
+
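Likewise, a minimal illustration of holding the literal portion of a template:

from fauna.query.query_builder import LiteralFragment

lit = LiteralFragment("let x = ")
assert lit.get() == "let x = "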
+ + +
+ +
+ + LiteralFragment(val: str) + + + +
+ +
45  def __init__(self, val: str):
+46    self._val = val
+
+ + + + +
+
+ +
+ + def + get(self) -> str: + + + +
+ +
48  def get(self) -> str:
+49    """Returns the stored value.
+50
+51        :returns: The stored value.
+52        """
+53    return self._val
+
+ + +

Returns the stored value.

+ +

:returns: The stored value.

+
+ + +
+
+
+ +
+ + class + Query: + + + +
+ +
56class Query:
+57  """A class for representing a query.
+58
+59       e.g. { "fql": [...] }
+60    """
+61  _fragments: List[Fragment]
+62
+63  def __init__(self, fragments: Optional[List[Fragment]] = None):
+64    self._fragments = fragments or []
+65
+66  @property
+67  def fragments(self) -> List[Fragment]:
+68    """The list of stored Fragments"""
+69    return self._fragments
+70
+71  def __str__(self) -> str:
+72    res = ""
+73    for f in self._fragments:
+74      res += str(f.get())
+75
+76    return res
+
+ + +

A class for representing a query.

+ +

e.g. { "fql": [...] }

+
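A Query is normally produced by fql() rather than constructed directly. A short sketch (hypothetical collection and ID) showing how its fragments are stored and concatenated by __str__:

from fauna.query import fql
from fauna.query.query_builder import LiteralFragment, ValueFragment

q = fql("Dogs.byId(${id})", id="d123")

# A literal before the variable, the substituted value, then the trailing ")".
assert isinstance(q.fragments[0], LiteralFragment)
assert isinstance(q.fragments[1], ValueFragment)
assert isinstance(q.fragments[2], LiteralFragment)
assert str(q) == "Dogs.byId(d123)"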
+ + +
+ +
+ + Query(fragments: Optional[List[Fragment]] = None) + + + +
+ +
63  def __init__(self, fragments: Optional[List[Fragment]] = None):
+64    self._fragments = fragments or []
+
+ + + + +
+
+ +
+ fragments: List[Fragment] + + + +
+ +
66  @property
+67  def fragments(self) -> List[Fragment]:
+68    """The list of stored Fragments"""
+69    return self._fragments
+
+ + +

The list of stored Fragments

+
+ + +
+
+
+ +
+ + def + fql(query: str, **kwargs: Any) -> Query: + + + +
+ +
 79def fql(query: str, **kwargs: Any) -> Query:
+ 80  """Creates a Query - capable of performing query composition and simple querying. It can accept a
+ 81    simple string query, or can perform composition using ``${}`` sigil string template with ``**kwargs`` as
+ 82    substitutions.
+ 83
+ 84    The ``**kwargs`` can be Fauna data types - such as strings, document references, or modules - and embedded
+ 85    Query - allowing you to compose arbitrarily complex queries.
+ 86
+ 87    When providing ``**kwargs``, the following types are accepted:
+ 88        - :class:`str`, :class:`int`, :class:`float`, :class:`bool`, :class:`datetime.datetime`, :class:`datetime.date`,
+ 89          :class:`dict`, :class:`list`, :class:`Query`, :class:`DocumentReference`, :class:`Module`
+ 90
+ 91    :raises ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
+ 92    :returns: A :class:`Query` that can be passed to the client for evaluation against Fauna.
+ 93
+ 94    Examples:
+ 95
+ 96    .. code-block:: python
+ 97        :name: Simple-FQL-Example
+ 98        :caption: Simple query declaration using this function.
+ 99
+100        fql('Dogs.byName("Fido")')
+101
+102    .. code-block:: python
+103        :name: Composition-FQL-Example
+104        :caption: Query composition using this function.
+105
+106        def get_dog(id):
+107            return fql('Dogs.byId(${id})', id=id)
+108
+109        def get_vet_phone(id):
+110            return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+111
+112        get_vet_phone('d123')
+113
+114    """
+115
+116  fragments: List[Any] = []
+117  template = FaunaTemplate(query)
+118  for text, field_name in template.iter():
+119    if text is not None and len(text) > 0:
+120      fragments.append(LiteralFragment(text))
+121
+122    if field_name is not None:
+123      if field_name not in kwargs:
+124        raise ValueError(
+125            f"template variable `{field_name}` not found in provided kwargs")
+126
+127      # TODO: Reject if it's already a fragment, or accept *Fragment? Decide on API here
+128      fragments.append(ValueFragment(kwargs[field_name]))
+129  return Query(fragments)
+
+ + +

Creates a Query - capable of performing query composition and simple querying. It can accept a simple string query, or can perform composition using ${} sigil string template with **kwargs as substitutions.

+ +

The **kwargs can be Fauna data types - such as strings, document references, or modules - and embedded Query - allowing you to compose arbitrarily complex queries.

+ +

When providing **kwargs, the following types are accepted: str, int, float, bool, datetime.datetime, datetime.date, dict, list, Query, DocumentReference, Module. An additional example showing substitution of these value types follows the examples below.

+ +
Raises
+ +
    +
  • ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
Returns: A Query that can be passed to the client for evaluation against Fauna.
  • +
+ +

Examples:

+ +
+
fql('Dogs.byName("Fido")')
+
+
+ +
+
def get_dog(id):
+    return fql('Dogs.byId(${id})', id=id)
+
+def get_vet_phone(id):
+    return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+
+get_vet_phone('d123')
+
+
+
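As an additional hedged sketch (hypothetical collection and field names), non-string values such as dicts and datetimes can also be substituted; each is wrapped as a ValueFragment rather than interpolated as raw query text:

from datetime import datetime, timezone
from fauna.query import fql

new_product = {
    "name": "Widget",
    "created_at": datetime(2023, 1, 1, tzinfo=timezone.utc),
}

query = fql("Products.create(${data})", data=new_product)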
+ + +
+
+ + \ No newline at end of file diff --git a/2.1.0/api/fauna/query/template.html b/2.1.0/api/fauna/query/template.html new file mode 100644 index 00000000..219e30a2 --- /dev/null +++ b/2.1.0/api/fauna/query/template.html @@ -0,0 +1,533 @@ + + + + + + + fauna.query.template API documentation + + + + + + + + + +
+
+

+fauna.query.template

+ + + + + + +
 1import re as _re
+ 2from typing import Optional, Tuple, Iterator, Match
+ 3
+ 4
+ 5class FaunaTemplate:
+ 6  """A template class that supports variables marked with a ${}-sigil. Its primary purpose
+ 7    is to expose an iterator for the template parts that support composition of FQL queries.
+ 8
+ 9    Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py
+10
+11    :param template: A string template e.g. "${my_var} { name }"
+12    :type template: str
+13    """
+14
+15  _delimiter = '$'
+16  _idpattern = r'[_a-zA-Z][_a-zA-Z0-9]*'
+17  _flags = _re.VERBOSE
+18
+19  def __init__(self, template: str):
+20    """The initializer"""
+21    delim = _re.escape(self._delimiter)
+22    pattern = fr"""
+23        {delim}(?:
+24          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+25          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+26          (?P<invalid>)             # Other ill-formed delimiter exprs
+27        ) 
+28        """
+29    self._pattern = _re.compile(pattern, self._flags)
+30    self._template = template
+31
+32  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+33    """A method that returns an iterator over tuples representing template parts. The
+34        first value of the tuple, if not None, is a template literal. The second value of
+35        the tuple, if not None, is a template variable. If both are not None, then the
+36        template literal comes *before* the variable.
+37
+38        :raises ValueError: If there is an invalid template placeholder
+39
+40        :return: An iterator of template parts
+41        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+42        """
+43    match_objects = self._pattern.finditer(self._template)
+44    cur_pos = 0
+45    for mo in match_objects:
+46      if mo.group("invalid") is not None:
+47        self._handle_invalid(mo)
+48
+49      span_start_pos = mo.span()[0]
+50      span_end_pos = mo.span()[1]
+51      escaped_part = mo.group("escaped") or ""
+52      variable_part = mo.group("braced")
+53      literal_part: Optional[str] = None
+54
+55      if cur_pos != span_start_pos:
+56        literal_part = \
+57            self._template[cur_pos:span_start_pos] \
+58                + escaped_part
+59
+60      cur_pos = span_end_pos
+61
+62      yield literal_part, variable_part
+63
+64    if cur_pos != len(self._template):
+65      yield self._template[cur_pos:], None
+66
+67  def _handle_invalid(self, mo: Match) -> None:
+68    i = mo.start("invalid")
+69    lines = self._template[:i].splitlines(keepends=True)
+70
+71    if not lines:
+72      colno = 1
+73      lineno = 1
+74    else:
+75      colno = i - len(''.join(lines[:-1]))
+76      lineno = len(lines)
+77
+78    raise ValueError(
+79        f"Invalid placeholder in template: line {lineno}, col {colno}")
+
+ + +
+
+ +
+ + class + FaunaTemplate: + + + +
+ +
 6class FaunaTemplate:
+ 7  """A template class that supports variables marked with a ${}-sigil. Its primary purpose
+ 8    is to expose an iterator for the template parts that support composition of FQL queries.
+ 9
+10    Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py
+11
+12    :param template: A string template e.g. "${my_var} { name }"
+13    :type template: str
+14    """
+15
+16  _delimiter = '$'
+17  _idpattern = r'[_a-zA-Z][_a-zA-Z0-9]*'
+18  _flags = _re.VERBOSE
+19
+20  def __init__(self, template: str):
+21    """The initializer"""
+22    delim = _re.escape(self._delimiter)
+23    pattern = fr"""
+24        {delim}(?:
+25          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+26          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+27          (?P<invalid>)             # Other ill-formed delimiter exprs
+28        ) 
+29        """
+30    self._pattern = _re.compile(pattern, self._flags)
+31    self._template = template
+32
+33  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+34    """A method that returns an iterator over tuples representing template parts. The
+35        first value of the tuple, if not None, is a template literal. The second value of
+36        the tuple, if not None, is a template variable. If both are not None, then the
+37        template literal comes *before* the variable.
+38
+39        :raises ValueError: If there is an invalid template placeholder
+40
+41        :return: An iterator of template parts
+42        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+43        """
+44    match_objects = self._pattern.finditer(self._template)
+45    cur_pos = 0
+46    for mo in match_objects:
+47      if mo.group("invalid") is not None:
+48        self._handle_invalid(mo)
+49
+50      span_start_pos = mo.span()[0]
+51      span_end_pos = mo.span()[1]
+52      escaped_part = mo.group("escaped") or ""
+53      variable_part = mo.group("braced")
+54      literal_part: Optional[str] = None
+55
+56      if cur_pos != span_start_pos:
+57        literal_part = \
+58            self._template[cur_pos:span_start_pos] \
+59                + escaped_part
+60
+61      cur_pos = span_end_pos
+62
+63      yield literal_part, variable_part
+64
+65    if cur_pos != len(self._template):
+66      yield self._template[cur_pos:], None
+67
+68  def _handle_invalid(self, mo: Match) -> None:
+69    i = mo.start("invalid")
+70    lines = self._template[:i].splitlines(keepends=True)
+71
+72    if not lines:
+73      colno = 1
+74      lineno = 1
+75    else:
+76      colno = i - len(''.join(lines[:-1]))
+77      lineno = len(lines)
+78
+79    raise ValueError(
+80        f"Invalid placeholder in template: line {lineno}, col {colno}")
+
+ + +

A template class that supports variables marked with a ${}-sigil. Its primary purpose is to expose an iterator for the template parts that support composition of FQL queries.

+ +

Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py

+ +
Parameters
+ +
    +
  • template: A string template e.g. "${my_var} { name }"
  • +
+
+ + +
+ +
+ + FaunaTemplate(template: str) + + + +
+ +
20  def __init__(self, template: str):
+21    """The initializer"""
+22    delim = _re.escape(self._delimiter)
+23    pattern = fr"""
+24        {delim}(?:
+25          (?P<escaped>{delim})  |   # Escape sequence of two delimiters
+26          {{(?P<braced>{self._idpattern})}} |   # delimiter and a braced identifier
+27          (?P<invalid>)             # Other ill-formed delimiter exprs
+28        ) 
+29        """
+30    self._pattern = _re.compile(pattern, self._flags)
+31    self._template = template
+
+ + +

The initializer

+
+ + +
+
+ +
+ + def + iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]: + + + +
+ +
33  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+34    """A method that returns an iterator over tuples representing template parts. The
+35        first value of the tuple, if not None, is a template literal. The second value of
+36        the tuple, if not None, is a template variable. If both are not None, then the
+37        template literal comes *before* the variable.
+38
+39        :raises ValueError: If there is an invalid template placeholder
+40
+41        :return: An iterator of template parts
+42        :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+43        """
+44    match_objects = self._pattern.finditer(self._template)
+45    cur_pos = 0
+46    for mo in match_objects:
+47      if mo.group("invalid") is not None:
+48        self._handle_invalid(mo)
+49
+50      span_start_pos = mo.span()[0]
+51      span_end_pos = mo.span()[1]
+52      escaped_part = mo.group("escaped") or ""
+53      variable_part = mo.group("braced")
+54      literal_part: Optional[str] = None
+55
+56      if cur_pos != span_start_pos:
+57        literal_part = \
+58            self._template[cur_pos:span_start_pos] \
+59                + escaped_part
+60
+61      cur_pos = span_end_pos
+62
+63      yield literal_part, variable_part
+64
+65    if cur_pos != len(self._template):
+66      yield self._template[cur_pos:], None
+
+ + +

A method that returns an iterator over tuples representing template parts. The first value of the tuple, if not None, is a template literal. The second value of the tuple, if not None, is a template variable. If both are not None, then the template literal comes before the variable.

+ +
Raises
+ +
    +
  • ValueError: If there is an invalid template placeholder
  • +
+ +
Returns
+ +
+

An iterator of template parts

+
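A short sketch of what iter() yields for the example template given above:

from fauna.query.template import FaunaTemplate

parts = list(FaunaTemplate("${my_var} { name }").iter())

# First tuple: no leading literal, the variable "my_var";
# second tuple: the trailing literal " { name }", no variable.
assert parts == [(None, "my_var"), (" { name }", None)]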
+
+ + +
+
+
+ + \ No newline at end of file diff --git a/2.1.0/api/index.html b/2.1.0/api/index.html new file mode 100644 index 00000000..cd7f994c --- /dev/null +++ b/2.1.0/api/index.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/2.1.0/api/search.js b/2.1.0/api/search.js new file mode 100644 index 00000000..465075e2 --- /dev/null +++ b/2.1.0/api/search.js @@ -0,0 +1,46 @@ +window.pdocSearch = (function(){ +/** elasticlunr - http://weixsong.github.io * Copyright (C) 2017 Oliver Nightingale * Copyright (C) 2017 Wei Song * MIT Licensed */!function(){function e(e){if(null===e||"object"!=typeof e)return e;var t=e.constructor();for(var n in e)e.hasOwnProperty(n)&&(t[n]=e[n]);return t}var t=function(e){var n=new t.Index;return n.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),e&&e.call(n,n),n};t.version="0.9.5",lunr=t,t.utils={},t.utils.warn=function(e){return function(t){e.console&&console.warn&&console.warn(t)}}(this),t.utils.toString=function(e){return void 0===e||null===e?"":e.toString()},t.EventEmitter=function(){this.events={}},t.EventEmitter.prototype.addListener=function(){var e=Array.prototype.slice.call(arguments),t=e.pop(),n=e;if("function"!=typeof t)throw new TypeError("last argument must be a function");n.forEach(function(e){this.hasHandler(e)||(this.events[e]=[]),this.events[e].push(t)},this)},t.EventEmitter.prototype.removeListener=function(e,t){if(this.hasHandler(e)){var n=this.events[e].indexOf(t);-1!==n&&(this.events[e].splice(n,1),0==this.events[e].length&&delete this.events[e])}},t.EventEmitter.prototype.emit=function(e){if(this.hasHandler(e)){var t=Array.prototype.slice.call(arguments,1);this.events[e].forEach(function(e){e.apply(void 0,t)},this)}},t.EventEmitter.prototype.hasHandler=function(e){return e in this.events},t.tokenizer=function(e){if(!arguments.length||null===e||void 0===e)return[];if(Array.isArray(e)){var n=e.filter(function(e){return null===e||void 0===e?!1:!0});n=n.map(function(e){return t.utils.toString(e).toLowerCase()});var i=[];return n.forEach(function(e){var n=e.split(t.tokenizer.seperator);i=i.concat(n)},this),i}return e.toString().trim().toLowerCase().split(t.tokenizer.seperator)},t.tokenizer.defaultSeperator=/[\s\-]+/,t.tokenizer.seperator=t.tokenizer.defaultSeperator,t.tokenizer.setSeperator=function(e){null!==e&&void 0!==e&&"object"==typeof e&&(t.tokenizer.seperator=e)},t.tokenizer.resetSeperator=function(){t.tokenizer.seperator=t.tokenizer.defaultSeperator},t.tokenizer.getSeperator=function(){return t.tokenizer.seperator},t.Pipeline=function(){this._queue=[]},t.Pipeline.registeredFunctions={},t.Pipeline.registerFunction=function(e,n){n in t.Pipeline.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+n),e.label=n,t.Pipeline.registeredFunctions[n]=e},t.Pipeline.getRegisteredFunction=function(e){return e in t.Pipeline.registeredFunctions!=!0?null:t.Pipeline.registeredFunctions[e]},t.Pipeline.warnIfFunctionNotRegistered=function(e){var n=e.label&&e.label in this.registeredFunctions;n||t.utils.warn("Function is not registered with pipeline. 
This may cause problems when serialising the index.\n",e)},t.Pipeline.load=function(e){var n=new t.Pipeline;return e.forEach(function(e){var i=t.Pipeline.getRegisteredFunction(e);if(!i)throw new Error("Cannot load un-registered function: "+e);n.add(i)}),n},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(e){t.Pipeline.warnIfFunctionNotRegistered(e),this._queue.push(e)},this)},t.Pipeline.prototype.after=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var i=this._queue.indexOf(e);if(-1===i)throw new Error("Cannot find existingFn");this._queue.splice(i+1,0,n)},t.Pipeline.prototype.before=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var i=this._queue.indexOf(e);if(-1===i)throw new Error("Cannot find existingFn");this._queue.splice(i,0,n)},t.Pipeline.prototype.remove=function(e){var t=this._queue.indexOf(e);-1!==t&&this._queue.splice(t,1)},t.Pipeline.prototype.run=function(e){for(var t=[],n=e.length,i=this._queue.length,o=0;n>o;o++){for(var r=e[o],s=0;i>s&&(r=this._queue[s](r,o,e),void 0!==r&&null!==r);s++);void 0!==r&&null!==r&&t.push(r)}return t},t.Pipeline.prototype.reset=function(){this._queue=[]},t.Pipeline.prototype.get=function(){return this._queue},t.Pipeline.prototype.toJSON=function(){return this._queue.map(function(e){return t.Pipeline.warnIfFunctionNotRegistered(e),e.label})},t.Index=function(){this._fields=[],this._ref="id",this.pipeline=new t.Pipeline,this.documentStore=new t.DocumentStore,this.index={},this.eventEmitter=new t.EventEmitter,this._idfCache={},this.on("add","remove","update",function(){this._idfCache={}}.bind(this))},t.Index.prototype.on=function(){var e=Array.prototype.slice.call(arguments);return this.eventEmitter.addListener.apply(this.eventEmitter,e)},t.Index.prototype.off=function(e,t){return this.eventEmitter.removeListener(e,t)},t.Index.load=function(e){e.version!==t.version&&t.utils.warn("version mismatch: current "+t.version+" importing "+e.version);var n=new this;n._fields=e.fields,n._ref=e.ref,n.documentStore=t.DocumentStore.load(e.documentStore),n.pipeline=t.Pipeline.load(e.pipeline),n.index={};for(var i in e.index)n.index[i]=t.InvertedIndex.load(e.index[i]);return n},t.Index.prototype.addField=function(e){return this._fields.push(e),this.index[e]=new t.InvertedIndex,this},t.Index.prototype.setRef=function(e){return this._ref=e,this},t.Index.prototype.saveDocument=function(e){return this.documentStore=new t.DocumentStore(e),this},t.Index.prototype.addDoc=function(e,n){if(e){var n=void 0===n?!0:n,i=e[this._ref];this.documentStore.addDoc(i,e),this._fields.forEach(function(n){var o=this.pipeline.run(t.tokenizer(e[n]));this.documentStore.addFieldLength(i,n,o.length);var r={};o.forEach(function(e){e in r?r[e]+=1:r[e]=1},this);for(var s in r){var u=r[s];u=Math.sqrt(u),this.index[n].addToken(s,{ref:i,tf:u})}},this),n&&this.eventEmitter.emit("add",e,this)}},t.Index.prototype.removeDocByRef=function(e){if(e&&this.documentStore.isDocStored()!==!1&&this.documentStore.hasDoc(e)){var t=this.documentStore.getDoc(e);this.removeDoc(t,!1)}},t.Index.prototype.removeDoc=function(e,n){if(e){var n=void 0===n?!0:n,i=e[this._ref];this.documentStore.hasDoc(i)&&(this.documentStore.removeDoc(i),this._fields.forEach(function(n){var o=this.pipeline.run(t.tokenizer(e[n]));o.forEach(function(e){this.index[n].removeToken(e,i)},this)},this),n&&this.eventEmitter.emit("remove",e,this))}},t.Index.prototype.updateDoc=function(e,t){var t=void 
0===t?!0:t;this.removeDocByRef(e[this._ref],!1),this.addDoc(e,!1),t&&this.eventEmitter.emit("update",e,this)},t.Index.prototype.idf=function(e,t){var n="@"+t+"/"+e;if(Object.prototype.hasOwnProperty.call(this._idfCache,n))return this._idfCache[n];var i=this.index[t].getDocFreq(e),o=1+Math.log(this.documentStore.length/(i+1));return this._idfCache[n]=o,o},t.Index.prototype.getFields=function(){return this._fields.slice()},t.Index.prototype.search=function(e,n){if(!e)return[];e="string"==typeof e?{any:e}:JSON.parse(JSON.stringify(e));var i=null;null!=n&&(i=JSON.stringify(n));for(var o=new t.Configuration(i,this.getFields()).get(),r={},s=Object.keys(e),u=0;u0&&t.push(e);for(var i in n)"docs"!==i&&"df"!==i&&this.expandToken(e+i,t,n[i]);return t},t.InvertedIndex.prototype.toJSON=function(){return{root:this.root}},t.Configuration=function(e,n){var e=e||"";if(void 0==n||null==n)throw new Error("fields should not be null");this.config={};var i;try{i=JSON.parse(e),this.buildUserConfig(i,n)}catch(o){t.utils.warn("user configuration parse failed, will use default configuration"),this.buildDefaultConfig(n)}},t.Configuration.prototype.buildDefaultConfig=function(e){this.reset(),e.forEach(function(e){this.config[e]={boost:1,bool:"OR",expand:!1}},this)},t.Configuration.prototype.buildUserConfig=function(e,n){var i="OR",o=!1;if(this.reset(),"bool"in e&&(i=e.bool||i),"expand"in e&&(o=e.expand||o),"fields"in e)for(var r in e.fields)if(n.indexOf(r)>-1){var s=e.fields[r],u=o;void 0!=s.expand&&(u=s.expand),this.config[r]={boost:s.boost||0===s.boost?s.boost:1,bool:s.bool||i,expand:u}}else t.utils.warn("field name in user configuration not found in index instance fields");else this.addAllFields2UserConfig(i,o,n)},t.Configuration.prototype.addAllFields2UserConfig=function(e,t,n){n.forEach(function(n){this.config[n]={boost:1,bool:e,expand:t}},this)},t.Configuration.prototype.get=function(){return this.config},t.Configuration.prototype.reset=function(){this.config={}},lunr.SortedSet=function(){this.length=0,this.elements=[]},lunr.SortedSet.load=function(e){var t=new this;return t.elements=e,t.length=e.length,t},lunr.SortedSet.prototype.add=function(){var e,t;for(e=0;e1;){if(r===e)return o;e>r&&(t=o),r>e&&(n=o),i=n-t,o=t+Math.floor(i/2),r=this.elements[o]}return r===e?o:-1},lunr.SortedSet.prototype.locationFor=function(e){for(var t=0,n=this.elements.length,i=n-t,o=t+Math.floor(i/2),r=this.elements[o];i>1;)e>r&&(t=o),r>e&&(n=o),i=n-t,o=t+Math.floor(i/2),r=this.elements[o];return r>e?o:e>r?o+1:void 0},lunr.SortedSet.prototype.intersect=function(e){for(var t=new lunr.SortedSet,n=0,i=0,o=this.length,r=e.length,s=this.elements,u=e.elements;;){if(n>o-1||i>r-1)break;s[n]!==u[i]?s[n]u[i]&&i++:(t.add(s[n]),n++,i++)}return t},lunr.SortedSet.prototype.clone=function(){var e=new lunr.SortedSet;return e.elements=this.toArray(),e.length=e.elements.length,e},lunr.SortedSet.prototype.union=function(e){var t,n,i;this.length>=e.length?(t=this,n=e):(t=e,n=this),i=t.clone();for(var o=0,r=n.toArray();o

\n"}, {"fullname": "fauna.global_http_client", "modulename": "fauna", "qualname": "global_http_client", "kind": "variable", "doc": "

\n", "default_value": "None"}, {"fullname": "fauna.client", "modulename": "fauna.client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.client", "modulename": "fauna.client.client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.client.DefaultHttpConnectTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpConnectTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultHttpReadTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpReadTimeout", "kind": "variable", "doc": "

\n", "annotation": ": Optional[datetime.timedelta]", "default_value": "None"}, {"fullname": "fauna.client.client.DefaultHttpWriteTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpWriteTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultHttpPoolTimeout", "modulename": "fauna.client.client", "qualname": "DefaultHttpPoolTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultIdleConnectionTimeout", "modulename": "fauna.client.client", "qualname": "DefaultIdleConnectionTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultQueryTimeout", "modulename": "fauna.client.client", "qualname": "DefaultQueryTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultClientBufferTimeout", "modulename": "fauna.client.client", "qualname": "DefaultClientBufferTimeout", "kind": "variable", "doc": "

\n", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.DefaultMaxConnections", "modulename": "fauna.client.client", "qualname": "DefaultMaxConnections", "kind": "variable", "doc": "

\n", "default_value": "20"}, {"fullname": "fauna.client.client.DefaultMaxIdleConnections", "modulename": "fauna.client.client", "qualname": "DefaultMaxIdleConnections", "kind": "variable", "doc": "

\n", "default_value": "20"}, {"fullname": "fauna.client.client.QueryOptions", "modulename": "fauna.client.client", "qualname": "QueryOptions", "kind": "class", "doc": "

A dataclass representing options available for a query.

\n\n
    \n
  • linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • \n
  • max_contention_retries - The max number of times to retry the query if contention is encountered.
  • \n
  • query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
  • \n
  • query_tags - Tags to associate with the query. See logging
  • \n
  • traceparent - A traceparent to associate with the query. See logging Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
  • \n
  • typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the \"typechecked\" flag on the database configuration.
  • \n
  • additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
  • \n
\n"}, {"fullname": "fauna.client.client.QueryOptions.__init__", "modulename": "fauna.client.client", "qualname": "QueryOptions.__init__", "kind": "function", "doc": "

\n", "signature": "(\tlinearized: Optional[bool] = None,\tmax_contention_retries: Optional[int] = None,\tquery_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tquery_tags: Optional[Mapping[str, str]] = None,\ttraceparent: Optional[str] = None,\ttypecheck: Optional[bool] = None,\tadditional_headers: Optional[Dict[str, str]] = None)"}, {"fullname": "fauna.client.client.QueryOptions.linearized", "modulename": "fauna.client.client", "qualname": "QueryOptions.linearized", "kind": "variable", "doc": "

\n", "annotation": ": Optional[bool]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.max_contention_retries", "modulename": "fauna.client.client", "qualname": "QueryOptions.max_contention_retries", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.query_timeout", "modulename": "fauna.client.client", "qualname": "QueryOptions.query_timeout", "kind": "variable", "doc": "

\n", "annotation": ": Optional[datetime.timedelta]", "default_value": "datetime.timedelta(seconds=5)"}, {"fullname": "fauna.client.client.QueryOptions.query_tags", "modulename": "fauna.client.client", "qualname": "QueryOptions.query_tags", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Mapping[str, str]]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.traceparent", "modulename": "fauna.client.client", "qualname": "QueryOptions.traceparent", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.typecheck", "modulename": "fauna.client.client", "qualname": "QueryOptions.typecheck", "kind": "variable", "doc": "

\n", "annotation": ": Optional[bool]", "default_value": "None"}, {"fullname": "fauna.client.client.QueryOptions.additional_headers", "modulename": "fauna.client.client", "qualname": "QueryOptions.additional_headers", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Dict[str, str]]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions", "modulename": "fauna.client.client", "qualname": "StreamOptions", "kind": "class", "doc": "

A dataclass representing options available for a stream.

\n\n
    \n
  • max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
  • \n
  • max_backoff - The maximum backoff in seconds for an individual retry.
  • \n
  • start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after\nthe timestamp.
  • \n
  • status_events - Indicates if stream should include status events. Status events are periodic events that\nupdate the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics\nabout the cost of maintaining the stream other than the cost of the received events.
  • \n
\n"}, {"fullname": "fauna.client.client.StreamOptions.__init__", "modulename": "fauna.client.client", "qualname": "StreamOptions.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmax_attempts: Optional[int] = None,\tmax_backoff: Optional[int] = None,\tstart_ts: Optional[int] = None,\tstatus_events: bool = False)"}, {"fullname": "fauna.client.client.StreamOptions.max_attempts", "modulename": "fauna.client.client", "qualname": "StreamOptions.max_attempts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.max_backoff", "modulename": "fauna.client.client", "qualname": "StreamOptions.max_backoff", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.start_ts", "modulename": "fauna.client.client", "qualname": "StreamOptions.start_ts", "kind": "variable", "doc": "

\n", "annotation": ": Optional[int]", "default_value": "None"}, {"fullname": "fauna.client.client.StreamOptions.status_events", "modulename": "fauna.client.client", "qualname": "StreamOptions.status_events", "kind": "variable", "doc": "

\n", "annotation": ": bool", "default_value": "False"}, {"fullname": "fauna.client.client.Client", "modulename": "fauna.client.client", "qualname": "Client", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.client.Client.__init__", "modulename": "fauna.client.client", "qualname": "Client.__init__", "kind": "function", "doc": "

Initializes a Client.

\n\n
Parameters
\n\n
    \n
  • endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the FAUNA_ENDPOINT env variable.
  • \n
  • secret: The Fauna Secret to use. Defaults to empty, or the FAUNA_SECRET env variable.
  • \n
  • http_client: An HTTPClient implementation. Defaults to a global HTTPXClient.
  • \n
  • query_tags: Tags to associate with the query. See logging
  • \n
  • linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  • \n
  • max_contention_retries: The max number of times to retry the query if contention is encountered.
  • \n
  • typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the \"typechecked\" flag on the database configuration.
  • \n
  • additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
  • \n
  • query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is DefaultQueryTimeout.
  • \n
  • client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is DefaultClientBufferTimeout, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
  • \n
  • http_read_timeout: Set HTTP Read timeout, default is DefaultHttpReadTimeout.
  • \n
  • http_write_timeout: Set HTTP Write timeout, default is DefaultHttpWriteTimeout.
  • \n
  • http_connect_timeout: Set HTTP Connect timeout, default is DefaultHttpConnectTimeout.
  • \n
  • http_pool_timeout: Set HTTP Pool timeout, default is DefaultHttpPoolTimeout.
  • \n
  • http_idle_timeout: Set HTTP Idle timeout, default is DefaultIdleConnectionTimeout.
  • \n
  • max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
  • \n
  • max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
  • \n
\n", "signature": "(\tendpoint: Optional[str] = None,\tsecret: Optional[str] = None,\thttp_client: Optional[fauna.http.http_client.HTTPClient] = None,\tquery_tags: Optional[Mapping[str, str]] = None,\tlinearized: Optional[bool] = None,\tmax_contention_retries: Optional[int] = None,\ttypecheck: Optional[bool] = None,\tadditional_headers: Optional[Dict[str, str]] = None,\tquery_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tclient_buffer_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_read_timeout: Optional[datetime.timedelta] = None,\thttp_write_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_connect_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_pool_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\thttp_idle_timeout: Optional[datetime.timedelta] = datetime.timedelta(seconds=5),\tmax_attempts: int = 3,\tmax_backoff: int = 20)"}, {"fullname": "fauna.client.client.Client.close", "modulename": "fauna.client.client", "qualname": "Client.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.set_last_txn_ts", "modulename": "fauna.client.client", "qualname": "Client.set_last_txn_ts", "kind": "function", "doc": "

Set the last timestamp seen by this client.\nThis has no effect if it is earlier than the stored timestamp.

\n\n

WARNING: This should be used only when coordinating timestamps across\nmultiple clients. Moving the timestamp arbitrarily forward into\nthe future will cause transactions to stall.

\n\n
Parameters
\n\n
    \n
  • txn_ts: the new transaction time.
  • \n
\n", "signature": "(self, txn_ts: int):", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.get_last_txn_ts", "modulename": "fauna.client.client", "qualname": "Client.get_last_txn_ts", "kind": "function", "doc": "

Get the last timestamp seen by this client.

\n\n
Returns
\n", "signature": "(self) -> Optional[int]:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.get_query_timeout", "modulename": "fauna.client.client", "qualname": "Client.get_query_timeout", "kind": "function", "doc": "

Get the query timeout for all queries.

\n", "signature": "(self) -> Optional[datetime.timedelta]:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.paginate", "modulename": "fauna.client.client", "qualname": "Client.paginate", "kind": "function", "doc": "

Run a query on Fauna and return an iterator of results. If the query\nreturns a Page, the iterator will fetch additional Pages until the\nafter token is null. Each call for a page will be retried with exponential\nbackoff up to the max_attempts set in the client's retry policy in the\nevent of a 429 or 502.

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Returns
\n\n
\n

a QueryIterator

\n
\n\n
Raises
\n\n
    \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None) -> fauna.client.client.QueryIterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.query", "modulename": "fauna.client.client", "qualname": "Client.query", "kind": "function", "doc": "

Run a query on Fauna. A query will be retried max_attempts times with exponential backoff\nup to the max_backoff in the event of a 429.

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Returns
\n\n
\n

a QuerySuccess

\n
\n\n
Raises
\n\n
    \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None) -> fauna.encoding.wire_protocol.QuerySuccess:", "funcdef": "def"}, {"fullname": "fauna.client.client.Client.stream", "modulename": "fauna.client.client", "qualname": "Client.stream", "kind": "function", "doc": "

Opens a Stream in Fauna and returns an iterator that consumes Fauna events.

\n\n
Parameters
\n\n
    \n
  • fql: A Query that returns a StreamToken, or a StreamToken.
  • \n
  • opts: (Optional) Stream Options.
  • \n
\n\n
Returns
\n\n
\n

a StreamIterator

\n
\n\n
Raises
\n\n
    \n
  • NetworkError: HTTP Request failed in transit
  • \n
  • ProtocolError: HTTP error not from Fauna
  • \n
  • ServiceError: Fauna returned an error
  • \n
  • ValueError: Encoding and decoding errors
  • \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tself,\tfql: Union[fauna.query.models.StreamToken, fauna.query.query_builder.Query],\topts: fauna.client.client.StreamOptions = StreamOptions(max_attempts=None, max_backoff=None, start_ts=None, status_events=False)) -> fauna.client.client.StreamIterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.StreamIterator", "modulename": "fauna.client.client", "qualname": "StreamIterator", "kind": "class", "doc": "

A class that mixes a ContextManager and an Iterator so we can detect retryable errors.

\n"}, {"fullname": "fauna.client.client.StreamIterator.__init__", "modulename": "fauna.client.client", "qualname": "StreamIterator.__init__", "kind": "function", "doc": "

\n", "signature": "(\thttp_client: fauna.http.http_client.HTTPClient,\theaders: Dict[str, str],\tendpoint: str,\tmax_attempts: int,\tmax_backoff: int,\topts: fauna.client.client.StreamOptions,\ttoken: fauna.query.models.StreamToken)"}, {"fullname": "fauna.client.client.StreamIterator.last_ts", "modulename": "fauna.client.client", "qualname": "StreamIterator.last_ts", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.StreamIterator.close", "modulename": "fauna.client.client", "qualname": "StreamIterator.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.client.client.QueryIterator", "modulename": "fauna.client.client", "qualname": "QueryIterator", "kind": "class", "doc": "

A class that provides an iterator on top of Fauna queries.

\n"}, {"fullname": "fauna.client.client.QueryIterator.__init__", "modulename": "fauna.client.client", "qualname": "QueryIterator.__init__", "kind": "function", "doc": "

Initializes the QueryIterator

\n\n
Parameters
\n\n
    \n
  • fql: A Query
  • \n
  • opts: (Optional) Query Options
  • \n
\n\n
Raises
\n\n
    \n
  • TypeError: Invalid param types
  • \n
\n", "signature": "(\tclient: fauna.client.client.Client,\tfql: fauna.query.query_builder.Query,\topts: Optional[fauna.client.client.QueryOptions] = None)"}, {"fullname": "fauna.client.client.QueryIterator.client", "modulename": "fauna.client.client", "qualname": "QueryIterator.client", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.fql", "modulename": "fauna.client.client", "qualname": "QueryIterator.fql", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.opts", "modulename": "fauna.client.client", "qualname": "QueryIterator.opts", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.client.client.QueryIterator.iter", "modulename": "fauna.client.client", "qualname": "QueryIterator.iter", "kind": "function", "doc": "

A generator function that immediately fetches and yields the results of\nthe stored query. Yields additional pages on subsequent iterations if\nthey exist.

\n", "signature": "(self) -> Iterator:", "funcdef": "def"}, {"fullname": "fauna.client.client.QueryIterator.flatten", "modulename": "fauna.client.client", "qualname": "QueryIterator.flatten", "kind": "function", "doc": "

A generator function that immediately fetches and yields the results of\nthe stored query. Yields each item individually, rather than a whole\nPage at a time. Fetches additional pages as required if they exist.

\n", "signature": "(self) -> Iterator:", "funcdef": "def"}, {"fullname": "fauna.client.endpoints", "modulename": "fauna.client.endpoints", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.endpoints.Endpoints", "modulename": "fauna.client.endpoints", "qualname": "Endpoints", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.endpoints.Endpoints.Default", "modulename": "fauna.client.endpoints", "qualname": "Endpoints.Default", "kind": "variable", "doc": "

\n", "default_value": "'https://db.fauna.com'"}, {"fullname": "fauna.client.endpoints.Endpoints.Local", "modulename": "fauna.client.endpoints", "qualname": "Endpoints.Local", "kind": "variable", "doc": "

\n", "default_value": "'http://localhost:8443'"}, {"fullname": "fauna.client.headers", "modulename": "fauna.client.headers", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.headers.Header", "modulename": "fauna.client.headers", "qualname": "Header", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.headers.Header.LastTxnTs", "modulename": "fauna.client.headers", "qualname": "Header.LastTxnTs", "kind": "variable", "doc": "

\n", "default_value": "'X-Last-Txn-Ts'"}, {"fullname": "fauna.client.headers.Header.Linearized", "modulename": "fauna.client.headers", "qualname": "Header.Linearized", "kind": "variable", "doc": "

\n", "default_value": "'X-Linearized'"}, {"fullname": "fauna.client.headers.Header.MaxContentionRetries", "modulename": "fauna.client.headers", "qualname": "Header.MaxContentionRetries", "kind": "variable", "doc": "

\n", "default_value": "'X-Max-Contention-Retries'"}, {"fullname": "fauna.client.headers.Header.QueryTimeoutMs", "modulename": "fauna.client.headers", "qualname": "Header.QueryTimeoutMs", "kind": "variable", "doc": "

\n", "default_value": "'X-Query-Timeout-Ms'"}, {"fullname": "fauna.client.headers.Header.Typecheck", "modulename": "fauna.client.headers", "qualname": "Header.Typecheck", "kind": "variable", "doc": "

\n", "default_value": "'X-Typecheck'"}, {"fullname": "fauna.client.headers.Header.Tags", "modulename": "fauna.client.headers", "qualname": "Header.Tags", "kind": "variable", "doc": "

\n", "default_value": "'X-Query-Tags'"}, {"fullname": "fauna.client.headers.Header.Traceparent", "modulename": "fauna.client.headers", "qualname": "Header.Traceparent", "kind": "variable", "doc": "

\n", "default_value": "'Traceparent'"}, {"fullname": "fauna.client.retryable", "modulename": "fauna.client.retryable", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.retryable.RetryStrategy", "modulename": "fauna.client.retryable", "qualname": "RetryStrategy", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.client.retryable.RetryStrategy.wait", "modulename": "fauna.client.retryable", "qualname": "RetryStrategy.wait", "kind": "function", "doc": "

\n", "signature": "(self) -> float:", "funcdef": "def"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy", "kind": "class", "doc": "

\n", "bases": "RetryStrategy"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy.__init__", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy.__init__", "kind": "function", "doc": "

\n", "signature": "(max_backoff: int)"}, {"fullname": "fauna.client.retryable.ExponentialBackoffStrategy.wait", "modulename": "fauna.client.retryable", "qualname": "ExponentialBackoffStrategy.wait", "kind": "function", "doc": "

Returns the number of seconds to wait for the next call.

\n", "signature": "(self) -> float:", "funcdef": "def"}, {"fullname": "fauna.client.retryable.RetryableResponse", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse", "kind": "class", "doc": "

\n", "bases": "typing.Generic[~T]"}, {"fullname": "fauna.client.retryable.RetryableResponse.__init__", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(attempts: int, response: ~T)"}, {"fullname": "fauna.client.retryable.RetryableResponse.attempts", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.attempts", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.client.retryable.RetryableResponse.response", "modulename": "fauna.client.retryable", "qualname": "RetryableResponse.response", "kind": "variable", "doc": "

\n", "annotation": ": ~T"}, {"fullname": "fauna.client.retryable.Retryable", "modulename": "fauna.client.retryable", "qualname": "Retryable", "kind": "class", "doc": "

Retryable is a wrapper class that acts on a Callable that returns a T type.

\n", "bases": "typing.Generic[~T]"}, {"fullname": "fauna.client.retryable.Retryable.__init__", "modulename": "fauna.client.retryable", "qualname": "Retryable.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmax_attempts: int,\tmax_backoff: int,\tfunc: Callable[..., ~T],\t*args,\t**kwargs)"}, {"fullname": "fauna.client.retryable.Retryable.run", "modulename": "fauna.client.retryable", "qualname": "Retryable.run", "kind": "function", "doc": "

Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates\nthe thrown exception if max_attempts is reached or if a non-retryable exception is thrown.

\n\n

Returns the number of attempts and the response

\n", "signature": "(self) -> fauna.client.retryable.RetryableResponse[~T]:", "funcdef": "def"}, {"fullname": "fauna.client.utils", "modulename": "fauna.client.utils", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.client.utils.LastTxnTs", "modulename": "fauna.client.utils", "qualname": "LastTxnTs", "kind": "class", "doc": "

Wraps tracking the last transaction time supplied from the database.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.__init__", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.__init__", "kind": "function", "doc": "

\n", "signature": "(time: Optional[int] = None)"}, {"fullname": "fauna.client.utils.LastTxnTs.time", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.time", "kind": "variable", "doc": "

Produces the last transaction time, or None if not yet updated.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.request_header", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.request_header", "kind": "variable", "doc": "

Produces a dictionary with a non-zero X-Last-Txn-Ts header; or,\nif one has not yet been set, the empty header dictionary.

\n"}, {"fullname": "fauna.client.utils.LastTxnTs.update_txn_time", "modulename": "fauna.client.utils", "qualname": "LastTxnTs.update_txn_time", "kind": "function", "doc": "

Updates the internal transaction time.\nIn order to maintain a monotonically-increasing value, new_txn_time\nis discarded if it is behind the current timestamp.

\n", "signature": "(self, new_txn_time: int):", "funcdef": "def"}, {"fullname": "fauna.encoding", "modulename": "fauna.encoding", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.decoder", "modulename": "fauna.encoding.decoder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.decoder.FaunaDecoder", "modulename": "fauna.encoding.decoder", "qualname": "FaunaDecoder", "kind": "class", "doc": "

Supports the following types:

\n\n

+--------------------+---------------+\n| Python | Fauna |\n+====================+===============+\n| dict | object |\n+--------------------+---------------+\n| list, tuple | array |\n+--------------------+---------------+\n| str | string |\n+--------------------+---------------+\n| int | @int |\n+--------------------+---------------+\n| int | @long |\n+--------------------+---------------+\n| float | @double |\n+--------------------+---------------+\n| datetime.datetime | @time |\n+--------------------+---------------+\n| datetime.date | @date |\n+--------------------+---------------+\n| True | true |\n+--------------------+---------------+\n| False | false |\n+--------------------+---------------+\n| None | null |\n+--------------------+---------------+\n| bytearray | @bytes |\n+--------------------+---------------+\n| *DocumentReference | @ref |\n+--------------------+---------------+\n| *Document | @doc |\n+--------------------+---------------+\n| Module | @mod |\n+--------------------+---------------+\n| Page | @set |\n+--------------------+---------------+\n| StreamToken | @stream |\n+--------------------+---------------+

\n"}, {"fullname": "fauna.encoding.decoder.FaunaDecoder.decode", "modulename": "fauna.encoding.decoder", "qualname": "FaunaDecoder.decode", "kind": "function", "doc": "

Decodes supported objects from the tagged format into untagged Python values.

\n\n

Examples:\n - { \"@int\": \"100\" } decodes to 100 of type int\n - { \"@double\": \"100\" } decodes to 100.0 of type float\n - { \"@long\": \"100\" } decodes to 100 of type int\n - { \"@time\": \"...\" } decodes to a datetime\n - { \"@date\": \"...\" } decodes to a date\n - { \"@doc\": ... } decodes to a Document or NamedDocument\n - { \"@ref\": ... } decodes to a DocumentReference or NamedDocumentReference\n - { \"@mod\": ... } decodes to a Module\n - { \"@set\": ... } decodes to a Page\n - { \"@stream\": ... } decodes to a StreamToken\n - { \"@bytes\": ... } decodes to a bytearray

\n\n
Parameters
\n\n
    \n
  • obj: the object to decode
  • \n
\n", "signature": "(obj: Any):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder", "modulename": "fauna.encoding.encoder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder", "kind": "class", "doc": "

Supports the following types:

\n\n

+-------------------------------+---------------+\n| Python | Fauna Tags |\n+===============================+===============+\n| dict | @object |\n+-------------------------------+---------------+\n| list, tuple | array |\n+-------------------------------+---------------+\n| str | string |\n+-------------------------------+---------------+\n| int 32-bit signed | @int |\n+-------------------------------+---------------+\n| int 64-bit signed | @long |\n+-------------------------------+---------------+\n| float | @double |\n+-------------------------------+---------------+\n| datetime.datetime | @time |\n+-------------------------------+---------------+\n| datetime.date | @date |\n+-------------------------------+---------------+\n| True | True |\n+-------------------------------+---------------+\n| False | False |\n+-------------------------------+---------------+\n| None | None |\n+-------------------------------+---------------+\n| bytes / bytearray | @bytes |\n+-------------------------------+---------------+\n| *Document | @ref |\n+-------------------------------+---------------+\n| *DocumentReference | @ref |\n+-------------------------------+---------------+\n| Module | @mod |\n+-------------------------------+---------------+\n| Query | fql |\n+-------------------------------+---------------+\n| ValueFragment | value |\n+-------------------------------+---------------+\n| TemplateFragment | string |\n+-------------------------------+---------------+\n| StreamToken | string |\n+-------------------------------+---------------+

\n"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.encode", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.encode", "kind": "function", "doc": "

Encodes supported objects into the tagged format.

\n\n

Examples:\n - Up to 32-bit ints encode to { \"@int\": \"...\" }\n - Up to 64-bit ints encode to { \"@long\": \"...\" }\n - Floats encode to { \"@double\": \"...\" }\n - datetime encodes to { \"@time\": \"...\" }\n - date encodes to { \"@date\": \"...\" }\n - DocumentReference encodes to { \"@doc\": \"...\" }\n - Module encodes to { \"@mod\": \"...\" }\n - Query encodes to { \"fql\": [...] }\n - ValueFragment encodes to { \"value\": }\n - LiteralFragment encodes to a string\n - StreamToken encodes to a string

\n\n
Raises
\n\n
    \n
  • ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
  • \n
\n\n
Parameters
\n\n
    \n
  • obj: the object to encode
  • \n
\n", "signature": "(obj: Any) -> Any:", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_int", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_int", "kind": "function", "doc": "

\n", "signature": "(obj: int):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_bool", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_bool", "kind": "function", "doc": "

\n", "signature": "(obj: bool):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_float", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_float", "kind": "function", "doc": "

\n", "signature": "(obj: float):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_str", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_str", "kind": "function", "doc": "

\n", "signature": "(obj: str):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_datetime", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_datetime", "kind": "function", "doc": "

\n", "signature": "(obj: datetime.datetime):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_date", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_date", "kind": "function", "doc": "

\n", "signature": "(obj: datetime.date):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_bytes", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_bytes", "kind": "function", "doc": "

\n", "signature": "(obj: Union[bytearray, bytes]):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_doc_ref", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_doc_ref", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.DocumentReference):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_named_doc_ref", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_named_doc_ref", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.NamedDocumentReference):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_mod", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_mod", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.Module):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_dict", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_dict", "kind": "function", "doc": "

\n", "signature": "(obj: Any):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_none", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_none", "kind": "function", "doc": "

\n", "signature": "():", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_fragment", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_fragment", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.query_builder.Fragment):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_query_interpolation_builder", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_query_interpolation_builder", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.query_builder.Query):", "funcdef": "def"}, {"fullname": "fauna.encoding.encoder.FaunaEncoder.from_streamtoken", "modulename": "fauna.encoding.encoder", "qualname": "FaunaEncoder.from_streamtoken", "kind": "function", "doc": "

\n", "signature": "(obj: fauna.query.models.StreamToken):", "funcdef": "def"}, {"fullname": "fauna.encoding.wire_protocol", "modulename": "fauna.encoding.wire_protocol", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats", "kind": "class", "doc": "

Query stats

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.__init__", "kind": "function", "doc": "

\n", "signature": "(stats: Mapping[str, Any])"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.compute_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.compute_ops", "kind": "variable", "doc": "

The amount of Transactional Compute Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.read_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.read_ops", "kind": "variable", "doc": "

The amount of Transactional Read Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.write_ops", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.write_ops", "kind": "variable", "doc": "

The amount of Transactional Write Ops consumed by the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.query_time_ms", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.query_time_ms", "kind": "variable", "doc": "

The query run time in milliseconds.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.storage_bytes_read", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.storage_bytes_read", "kind": "variable", "doc": "

The amount of data read from storage, in bytes.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.storage_bytes_write", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.storage_bytes_write", "kind": "variable", "doc": "

The amount of data written to storage, in bytes.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.contention_retries", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.contention_retries", "kind": "variable", "doc": "

The number of times the transaction was retried due to write contention.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryStats.attempts", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryStats.attempts", "kind": "variable", "doc": "

The number of attempts made by the client to run the query.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.__init__", "kind": "function", "doc": "

\n", "signature": "(\tquery_tags: Optional[Mapping[str, str]] = None,\tstats: Optional[fauna.encoding.wire_protocol.QueryStats] = None,\tsummary: Optional[str] = None,\ttxn_ts: Optional[int] = None,\tschema_version: Optional[int] = None)"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.query_tags", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.query_tags", "kind": "variable", "doc": "

The tags associated with the query.

\n", "annotation": ": Mapping[str, Any]"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.summary", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.summary", "kind": "variable", "doc": "

A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query.

\n", "annotation": ": str"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.stats", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.stats", "kind": "variable", "doc": "

Query stats associated with the query.

\n", "annotation": ": fauna.encoding.wire_protocol.QueryStats"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.txn_ts", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.txn_ts", "kind": "variable", "doc": "

The last transaction timestamp of the query. A Unix epoch in microseconds.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QueryInfo.schema_version", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryInfo.schema_version", "kind": "variable", "doc": "

The schema version that was used for the query execution.

\n", "annotation": ": int"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess", "kind": "class", "doc": "

The result of the query.

\n", "bases": "QueryInfo"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.__init__", "kind": "function", "doc": "

\n", "signature": "(\tdata: Any,\tquery_tags: Optional[Mapping[str, str]],\tstatic_type: Optional[str],\tstats: Optional[fauna.encoding.wire_protocol.QueryStats],\tsummary: Optional[str],\ttraceparent: Optional[str],\ttxn_ts: Optional[int],\tschema_version: Optional[int])"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.data", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.data", "kind": "variable", "doc": "

The data returned by the query. This is the result of the FQL query.

\n", "annotation": ": Any"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.static_type", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.static_type", "kind": "variable", "doc": "

The query's inferred static result type, if the query was typechecked.

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.encoding.wire_protocol.QuerySuccess.traceparent", "modulename": "fauna.encoding.wire_protocol", "qualname": "QuerySuccess.traceparent", "kind": "variable", "doc": "

The traceparent for the query.

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.__init__", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.__init__", "kind": "function", "doc": "

\n", "signature": "(\tmessage: str,\tname: Optional[str] = None,\tpaths: Optional[List[Any]] = None)"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.message", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.name", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.name", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]", "default_value": "None"}, {"fullname": "fauna.encoding.wire_protocol.ConstraintFailure.paths", "modulename": "fauna.encoding.wire_protocol", "qualname": "ConstraintFailure.paths", "kind": "variable", "doc": "

\n", "annotation": ": Optional[List[Any]]", "default_value": "None"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags.encode", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags.encode", "kind": "function", "doc": "

\n", "signature": "(tags: Mapping[str, str]) -> str:", "funcdef": "def"}, {"fullname": "fauna.encoding.wire_protocol.QueryTags.decode", "modulename": "fauna.encoding.wire_protocol", "qualname": "QueryTags.decode", "kind": "function", "doc": "

\n", "signature": "(tag_str: str) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.errors", "modulename": "fauna.errors", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.errors.errors", "modulename": "fauna.errors.errors", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.errors.errors.FaunaException", "modulename": "fauna.errors.errors", "qualname": "FaunaException", "kind": "class", "doc": "

Base class for Fauna exceptions.

\n", "bases": "builtins.Exception"}, {"fullname": "fauna.errors.errors.RetryableFaunaException", "modulename": "fauna.errors.errors", "qualname": "RetryableFaunaException", "kind": "class", "doc": "

Base class for retryable Fauna exceptions; the client may retry queries that raise these.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ClientError", "modulename": "fauna.errors.errors", "qualname": "ClientError", "kind": "class", "doc": "

An error representing a failure internal to the client itself.\nThis indicates Fauna was never called - the client failed internally\nprior to sending the request.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.NetworkError", "modulename": "fauna.errors.errors", "qualname": "NetworkError", "kind": "class", "doc": "

An error representing a failure due to the network.\nThis indicates Fauna was never reached.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ProtocolError", "modulename": "fauna.errors.errors", "qualname": "ProtocolError", "kind": "class", "doc": "

An error representing an HTTP failure - but one not directly emitted by Fauna.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.ProtocolError.__init__", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.__init__", "kind": "function", "doc": "

\n", "signature": "(status_code: int, message: str)"}, {"fullname": "fauna.errors.errors.ProtocolError.status_code", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.errors.errors.ProtocolError.message", "modulename": "fauna.errors.errors", "qualname": "ProtocolError.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError", "modulename": "fauna.errors.errors", "qualname": "FaunaError", "kind": "class", "doc": "

Base class for Fauna errors.

\n", "bases": "FaunaException"}, {"fullname": "fauna.errors.errors.FaunaError.__init__", "modulename": "fauna.errors.errors", "qualname": "FaunaError.__init__", "kind": "function", "doc": "

\n", "signature": "(\tstatus_code: int,\tcode: str,\tmessage: str,\tabort: Optional[Any] = None,\tconstraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None)"}, {"fullname": "fauna.errors.errors.FaunaError.status_code", "modulename": "fauna.errors.errors", "qualname": "FaunaError.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.errors.errors.FaunaError.code", "modulename": "fauna.errors.errors", "qualname": "FaunaError.code", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError.message", "modulename": "fauna.errors.errors", "qualname": "FaunaError.message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.errors.errors.FaunaError.abort", "modulename": "fauna.errors.errors", "qualname": "FaunaError.abort", "kind": "variable", "doc": "

\n", "annotation": ": Optional[Any]"}, {"fullname": "fauna.errors.errors.FaunaError.constraint_failures", "modulename": "fauna.errors.errors", "qualname": "FaunaError.constraint_failures", "kind": "variable", "doc": "

\n", "annotation": ": Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]]"}, {"fullname": "fauna.errors.errors.FaunaError.parse_error_and_throw", "modulename": "fauna.errors.errors", "qualname": "FaunaError.parse_error_and_throw", "kind": "function", "doc": "

\n", "signature": "(body: Any, status_code: int):", "funcdef": "def"}, {"fullname": "fauna.errors.errors.ServiceError", "modulename": "fauna.errors.errors", "qualname": "ServiceError", "kind": "class", "doc": "

An error representing a query failure returned by Fauna.

\n", "bases": "FaunaError, fauna.encoding.wire_protocol.QueryInfo"}, {"fullname": "fauna.errors.errors.ServiceError.__init__", "modulename": "fauna.errors.errors", "qualname": "ServiceError.__init__", "kind": "function", "doc": "

\n", "signature": "(\tstatus_code: int,\tcode: str,\tmessage: str,\tsummary: Optional[str] = None,\tabort: Optional[Any] = None,\tconstraint_failures: Optional[List[fauna.encoding.wire_protocol.ConstraintFailure]] = None,\tquery_tags: Optional[Mapping[str, str]] = None,\tstats: Optional[fauna.encoding.wire_protocol.QueryStats] = None,\ttxn_ts: Optional[int] = None,\tschema_version: Optional[int] = None)"}, {"fullname": "fauna.errors.errors.AbortError", "modulename": "fauna.errors.errors", "qualname": "AbortError", "kind": "class", "doc": "

An error raised when the query is aborted by an FQL abort() call; the aborted value is available on the error's abort attribute.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.InvalidRequestError", "modulename": "fauna.errors.errors", "qualname": "InvalidRequestError", "kind": "class", "doc": "

An error representing a query failure returned by Fauna.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.QueryCheckError", "modulename": "fauna.errors.errors", "qualname": "QueryCheckError", "kind": "class", "doc": "

An error due to a \"compile-time\" check of the query failing.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ContendedTransactionError", "modulename": "fauna.errors.errors", "qualname": "ContendedTransactionError", "kind": "class", "doc": "

Transaction is aborted due to concurrent modification.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.QueryRuntimeError", "modulename": "fauna.errors.errors", "qualname": "QueryRuntimeError", "kind": "class", "doc": "

An error response that is the result of the query failing during execution.\nQueryRuntimeErrors occur when a bug in your query causes an invalid execution\nto be requested.\nThe 'code' field will vary based on the specific error cause.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.AuthenticationError", "modulename": "fauna.errors.errors", "qualname": "AuthenticationError", "kind": "class", "doc": "

AuthenticationError indicates invalid credentials were used.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.AuthorizationError", "modulename": "fauna.errors.errors", "qualname": "AuthorizationError", "kind": "class", "doc": "

AuthorizationError indicates the credentials used do not have\npermission to perform the requested action.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ThrottlingError", "modulename": "fauna.errors.errors", "qualname": "ThrottlingError", "kind": "class", "doc": "

ThrottlingError indicates some capacity limit was exceeded\nand thus the request could not be served.

\n", "bases": "ServiceError, RetryableFaunaException"}, {"fullname": "fauna.errors.errors.QueryTimeoutError", "modulename": "fauna.errors.errors", "qualname": "QueryTimeoutError", "kind": "class", "doc": "

A failure due to the timeout being exceeded, but the timeout\nwas set lower than the query's expected processing time.\nThis response is distinguished from a ServiceTimeoutError\nin that a QueryTimeoutError shows Fauna behaving in an expected manner.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ServiceInternalError", "modulename": "fauna.errors.errors", "qualname": "ServiceInternalError", "kind": "class", "doc": "

ServiceInternalError indicates Fauna failed unexpectedly.

\n", "bases": "ServiceError"}, {"fullname": "fauna.errors.errors.ServiceTimeoutError", "modulename": "fauna.errors.errors", "qualname": "ServiceTimeoutError", "kind": "class", "doc": "

ServiceTimeoutError indicates Fauna was not available to service\nthe request before the timeout was reached.

\n", "bases": "ServiceError"}, {"fullname": "fauna.http", "modulename": "fauna.http", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.http_client", "modulename": "fauna.http.http_client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.http_client.ErrorResponse", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.http.http_client.ErrorResponse.__init__", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(status_code: int, error_code: str, error_message: str, summary: str)"}, {"fullname": "fauna.http.http_client.ErrorResponse.status_code", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.status_code", "kind": "variable", "doc": "

\n", "annotation": ": int"}, {"fullname": "fauna.http.http_client.ErrorResponse.error_code", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.error_code", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.ErrorResponse.error_message", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.error_message", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.ErrorResponse.summary", "modulename": "fauna.http.http_client", "qualname": "ErrorResponse.summary", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.http.http_client.HTTPResponse", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.http.http_client.HTTPResponse.headers", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.headers", "kind": "function", "doc": "

\n", "signature": "(self) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.status_code", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.status_code", "kind": "function", "doc": "

\n", "signature": "(self) -> int:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.json", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.json", "kind": "function", "doc": "

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.text", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.text", "kind": "function", "doc": "

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.read", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.read", "kind": "function", "doc": "

\n", "signature": "(self) -> bytes:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.iter_bytes", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.iter_bytes", "kind": "function", "doc": "

\n", "signature": "(self) -> Iterator[bytes]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPResponse.close", "modulename": "fauna.http.http_client", "qualname": "HTTPResponse.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient", "modulename": "fauna.http.http_client", "qualname": "HTTPClient", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.http.http_client.HTTPClient.request", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.request", "kind": "function", "doc": "

\n", "signature": "(\tself,\tmethod: str,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient.stream", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.stream", "kind": "function", "doc": "

\n", "signature": "(\tself,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> Iterator[Any]:", "funcdef": "def"}, {"fullname": "fauna.http.http_client.HTTPClient.close", "modulename": "fauna.http.http_client", "qualname": "HTTPClient.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client", "modulename": "fauna.http.httpx_client", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "fauna.http.http_client.HTTPResponse"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.__init__", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.__init__", "kind": "function", "doc": "

\n", "signature": "(response: httpx.Response)"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.headers", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.headers", "kind": "function", "doc": "

\n", "signature": "(self) -> Mapping[str, str]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.json", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.json", "kind": "function", "doc": "

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.text", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.text", "kind": "function", "doc": "

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.status_code", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.status_code", "kind": "function", "doc": "

\n", "signature": "(self) -> int:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.read", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.read", "kind": "function", "doc": "

\n", "signature": "(self) -> bytes:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.iter_bytes", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.iter_bytes", "kind": "function", "doc": "

\n", "signature": "(self, size: Optional[int] = None) -> Iterator[bytes]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXResponse.close", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXResponse.close", "kind": "function", "doc": "

\n", "signature": "(self) -> None:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient", "kind": "class", "doc": "

Helper class that provides a standard way to create an ABC using\ninheritance.

\n", "bases": "fauna.http.http_client.HTTPClient"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.__init__", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.__init__", "kind": "function", "doc": "

\n", "signature": "(client: httpx.Client)"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.request", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.request", "kind": "function", "doc": "

\n", "signature": "(\tself,\tmethod: str,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> fauna.http.http_client.HTTPResponse:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.stream", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.stream", "kind": "function", "doc": "

\n", "signature": "(\tself,\turl: str,\theaders: Mapping[str, str],\tdata: Mapping[str, Any]) -> Iterator[Any]:", "funcdef": "def"}, {"fullname": "fauna.http.httpx_client.HTTPXClient.close", "modulename": "fauna.http.httpx_client", "qualname": "HTTPXClient.close", "kind": "function", "doc": "

\n", "signature": "(self):", "funcdef": "def"}, {"fullname": "fauna.query", "modulename": "fauna.query", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.models", "modulename": "fauna.query.models", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.models.Page", "modulename": "fauna.query.models", "qualname": "Page", "kind": "class", "doc": "

A class representing a Set in Fauna.

\n"}, {"fullname": "fauna.query.models.Page.__init__", "modulename": "fauna.query.models", "qualname": "Page.__init__", "kind": "function", "doc": "

\n", "signature": "(data: Optional[List[Any]] = None, after: Optional[str] = None)"}, {"fullname": "fauna.query.models.Page.data", "modulename": "fauna.query.models", "qualname": "Page.data", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.Page.after", "modulename": "fauna.query.models", "qualname": "Page.after", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.StreamToken", "modulename": "fauna.query.models", "qualname": "StreamToken", "kind": "class", "doc": "

A class representing a Stream in Fauna.

\n"}, {"fullname": "fauna.query.models.StreamToken.__init__", "modulename": "fauna.query.models", "qualname": "StreamToken.__init__", "kind": "function", "doc": "

\n", "signature": "(token: str)"}, {"fullname": "fauna.query.models.StreamToken.token", "modulename": "fauna.query.models", "qualname": "StreamToken.token", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.Module", "modulename": "fauna.query.models", "qualname": "Module", "kind": "class", "doc": "

A class representing a Module in Fauna. Examples of modules include Collection, Math, and a user-defined\ncollection, among others.

\n\n

Usage:

\n\n

dogs = Module(\"Dogs\")\n query = fql(\"${col}.all\", col=dogs)

\n"}, {"fullname": "fauna.query.models.Module.__init__", "modulename": "fauna.query.models", "qualname": "Module.__init__", "kind": "function", "doc": "

\n", "signature": "(name: str)"}, {"fullname": "fauna.query.models.Module.name", "modulename": "fauna.query.models", "qualname": "Module.name", "kind": "variable", "doc": "

\n"}, {"fullname": "fauna.query.models.BaseReference", "modulename": "fauna.query.models", "qualname": "BaseReference", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.query.models.BaseReference.__init__", "modulename": "fauna.query.models", "qualname": "BaseReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module])"}, {"fullname": "fauna.query.models.BaseReference.coll", "modulename": "fauna.query.models", "qualname": "BaseReference.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.models.DocumentReference", "modulename": "fauna.query.models", "qualname": "DocumentReference", "kind": "class", "doc": "

A class representing a reference to a Document stored in Fauna.
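A rough sketch (hypothetical collection and ID) of constructing a reference from a collection and a document ID:

    from fauna import DocumentReference

    ref = DocumentReference("Dogs", "12345")
    ref.coll  # the collection, exposed as a Module
    ref.id    # "12345"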

\n", "bases": "BaseReference"}, {"fullname": "fauna.query.models.DocumentReference.__init__", "modulename": "fauna.query.models", "qualname": "DocumentReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module], id: str)"}, {"fullname": "fauna.query.models.DocumentReference.id", "modulename": "fauna.query.models", "qualname": "DocumentReference.id", "kind": "variable", "doc": "

The ID for the Document. Valid IDs are 64-bit integers, stored as strings.

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.DocumentReference.from_string", "modulename": "fauna.query.models", "qualname": "DocumentReference.from_string", "kind": "function", "doc": "

\n", "signature": "(ref: str):", "funcdef": "def"}, {"fullname": "fauna.query.models.NamedDocumentReference", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference", "kind": "class", "doc": "

A class representing a reference to a NamedDocument stored in Fauna.

\n", "bases": "BaseReference"}, {"fullname": "fauna.query.models.NamedDocumentReference.__init__", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference.__init__", "kind": "function", "doc": "

\n", "signature": "(coll: Union[str, fauna.query.models.Module], name: str)"}, {"fullname": "fauna.query.models.NamedDocumentReference.name", "modulename": "fauna.query.models", "qualname": "NamedDocumentReference.name", "kind": "variable", "doc": "

The name of the NamedDocument.

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.NullDocument", "modulename": "fauna.query.models", "qualname": "NullDocument", "kind": "class", "doc": "

\n"}, {"fullname": "fauna.query.models.NullDocument.__init__", "modulename": "fauna.query.models", "qualname": "NullDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(\tref: Union[fauna.query.models.DocumentReference, fauna.query.models.NamedDocumentReference],\tcause: Optional[str] = None)"}, {"fullname": "fauna.query.models.NullDocument.cause", "modulename": "fauna.query.models", "qualname": "NullDocument.cause", "kind": "variable", "doc": "

\n", "annotation": ": Optional[str]"}, {"fullname": "fauna.query.models.NullDocument.ref", "modulename": "fauna.query.models", "qualname": "NullDocument.ref", "kind": "variable", "doc": "

\n", "annotation": ": Union[fauna.query.models.DocumentReference, fauna.query.models.NamedDocumentReference]"}, {"fullname": "fauna.query.models.BaseDocument", "modulename": "fauna.query.models", "qualname": "BaseDocument", "kind": "class", "doc": "

A base document class implementing an immutable mapping.

\n", "bases": "collections.abc.Mapping"}, {"fullname": "fauna.query.models.BaseDocument.__init__", "modulename": "fauna.query.models", "qualname": "BaseDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(*args, **kwargs)"}, {"fullname": "fauna.query.models.Document", "modulename": "fauna.query.models", "qualname": "Document", "kind": "class", "doc": "

A class representing a user document stored in Fauna.

\n\n

User data should be stored directly on the map, while id, ts, and coll should only be stored on the related\nproperties. When working with a Document in code, it should be considered immutable.
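A minimal sketch (hypothetical values) of that behavior: user data is read through the mapping interface, while metadata lives on properties.

    from datetime import datetime
    from fauna import Document

    doc = Document(id="123", ts=datetime.now(), coll="Dogs", data={"name": "Fido"})
    doc["name"]  # user data, accessed via the mapping interface
    doc.id       # metadata, accessed via properties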

\n", "bases": "BaseDocument"}, {"fullname": "fauna.query.models.Document.__init__", "modulename": "fauna.query.models", "qualname": "Document.__init__", "kind": "function", "doc": "

\n", "signature": "(\tid: str,\tts: datetime.datetime,\tcoll: Union[str, fauna.query.models.Module],\tdata: Optional[collections.abc.Mapping] = None)"}, {"fullname": "fauna.query.models.Document.id", "modulename": "fauna.query.models", "qualname": "Document.id", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.Document.ts", "modulename": "fauna.query.models", "qualname": "Document.ts", "kind": "variable", "doc": "

\n", "annotation": ": datetime.datetime"}, {"fullname": "fauna.query.models.Document.coll", "modulename": "fauna.query.models", "qualname": "Document.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.models.NamedDocument", "modulename": "fauna.query.models", "qualname": "NamedDocument", "kind": "class", "doc": "

A class representing a named document stored in Fauna. Examples of named documents include Collection\ndefinitions, Index definitions, and Roles, among others.

\n\n

When working with a NamedDocument in code, it should be considered immutable.

\n", "bases": "BaseDocument"}, {"fullname": "fauna.query.models.NamedDocument.__init__", "modulename": "fauna.query.models", "qualname": "NamedDocument.__init__", "kind": "function", "doc": "

\n", "signature": "(\tname: str,\tts: datetime.datetime,\tcoll: Union[fauna.query.models.Module, str],\tdata: Optional[collections.abc.Mapping] = None)"}, {"fullname": "fauna.query.models.NamedDocument.name", "modulename": "fauna.query.models", "qualname": "NamedDocument.name", "kind": "variable", "doc": "

\n", "annotation": ": str"}, {"fullname": "fauna.query.models.NamedDocument.ts", "modulename": "fauna.query.models", "qualname": "NamedDocument.ts", "kind": "variable", "doc": "

\n", "annotation": ": datetime.datetime"}, {"fullname": "fauna.query.models.NamedDocument.coll", "modulename": "fauna.query.models", "qualname": "NamedDocument.coll", "kind": "variable", "doc": "

\n", "annotation": ": fauna.query.models.Module"}, {"fullname": "fauna.query.query_builder", "modulename": "fauna.query.query_builder", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.query_builder.Fragment", "modulename": "fauna.query.query_builder", "qualname": "Fragment", "kind": "class", "doc": "

An abstract class representing a Fragment of a query.

\n", "bases": "abc.ABC"}, {"fullname": "fauna.query.query_builder.Fragment.get", "modulename": "fauna.query.query_builder", "qualname": "Fragment.get", "kind": "function", "doc": "

An abstract method for returning a stored value.

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.ValueFragment", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment", "kind": "class", "doc": "

A concrete Fragment representing a part of a query that can stand in for a template variable.\nFor example, if a template contains a variable ${foo}, and an object { \"prop\": 1 } is provided for foo,\nthen { \"prop\": 1 } should be wrapped as a ValueFragment.

\n\n
Parameters
\n\n
    \n
  • Any val: The value to be used as a fragment.
  • \n
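A brief sketch of the wrapping described above (normally performed for you when building queries with fql):

    from fauna.query.query_builder import ValueFragment

    frag = ValueFragment({"prop": 1})
    frag.get()  # returns the wrapped value, {"prop": 1}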
\n", "bases": "Fragment"}, {"fullname": "fauna.query.query_builder.ValueFragment.__init__", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment.__init__", "kind": "function", "doc": "

\n", "signature": "(val: Any)"}, {"fullname": "fauna.query.query_builder.ValueFragment.get", "modulename": "fauna.query.query_builder", "qualname": "ValueFragment.get", "kind": "function", "doc": "

Gets the stored value.

\n\n

:returns: The stored value.

\n", "signature": "(self) -> Any:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.LiteralFragment", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment", "kind": "class", "doc": "

A concrete Fragment representing a query literal. For example, in the template let x = ${foo},\nthe portion let x = is a query literal and should be wrapped as a LiteralFragment.

\n\n
Parameters
\n\n
    \n
  • str val: The query literal to be used as a fragment.
  • \n
\n", "bases": "Fragment"}, {"fullname": "fauna.query.query_builder.LiteralFragment.__init__", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment.__init__", "kind": "function", "doc": "

\n", "signature": "(val: str)"}, {"fullname": "fauna.query.query_builder.LiteralFragment.get", "modulename": "fauna.query.query_builder", "qualname": "LiteralFragment.get", "kind": "function", "doc": "

Returns the stored value.

\n\n

:returns: The stored value.

\n", "signature": "(self) -> str:", "funcdef": "def"}, {"fullname": "fauna.query.query_builder.Query", "modulename": "fauna.query.query_builder", "qualname": "Query", "kind": "class", "doc": "

A class for representing a query.

\n\n

e.g. { \"fql\": [...] }
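A hedged sketch of how a Query relates to its fragments (assuming fql from fauna.query and the fragments property documented below):

    from fauna.query import fql

    q = fql("let x = ${foo}", foo=1)
    q.fragments  # the literal and value fragments that make up the query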

\n"}, {"fullname": "fauna.query.query_builder.Query.__init__", "modulename": "fauna.query.query_builder", "qualname": "Query.__init__", "kind": "function", "doc": "

\n", "signature": "(fragments: Optional[List[fauna.query.query_builder.Fragment]] = None)"}, {"fullname": "fauna.query.query_builder.Query.fragments", "modulename": "fauna.query.query_builder", "qualname": "Query.fragments", "kind": "variable", "doc": "

The list of stored Fragments

\n", "annotation": ": List[fauna.query.query_builder.Fragment]"}, {"fullname": "fauna.query.query_builder.fql", "modulename": "fauna.query.query_builder", "qualname": "fql", "kind": "function", "doc": "

Creates a Query capable of performing query composition and simple querying. It can accept a\nsimple string query, or can perform composition using a ${}-sigil string template with **kwargs as\nsubstitutions.

\n\n

The **kwargs can be Fauna data types, such as strings, document references, or modules, as well as embedded\nQuery objects, allowing you to compose arbitrarily complex queries.

\n\n

When providing **kwargs, the following types are accepted:\n - str, int, float, bool, datetime.datetime, datetime.date,\n dict, list, Query, DocumentReference, Module

\n\n
Raises
\n\n
    \n
  • ValueError: If there is an invalid template placeholder or a value that cannot be encoded.\n:returns: A Query that can be passed to the client for evaluation against Fauna.
  • \n
\n\n

Examples:

\n\n
\n
fql('Dogs.byName("Fido")')\n
\n
\n\n
\n
def get_dog(id):\n    return fql('Dogs.byId(${id})', id=id)\n\ndef get_vet_phone(id):\n    return fql('${dog} { .vet_phone_number }', dog=get_dog(id))\n\nget_vet_phone('d123')\n
\n
\n", "signature": "(query: str, **kwargs: Any) -> fauna.query.query_builder.Query:", "funcdef": "def"}, {"fullname": "fauna.query.template", "modulename": "fauna.query.template", "kind": "module", "doc": "

\n"}, {"fullname": "fauna.query.template.FaunaTemplate", "modulename": "fauna.query.template", "qualname": "FaunaTemplate", "kind": "class", "doc": "

A template class that supports variables marked with a ${}-sigil. Its primary purpose\nis to expose an iterator over the template parts, supporting composition of FQL queries.

\n\n

Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py

\n\n
Parameters
\n\n
    \n
  • template: A string template e.g. \"${my_var} { name }\"
  • \n
\n"}, {"fullname": "fauna.query.template.FaunaTemplate.__init__", "modulename": "fauna.query.template", "qualname": "FaunaTemplate.__init__", "kind": "function", "doc": "

The initializer

\n", "signature": "(template: str)"}, {"fullname": "fauna.query.template.FaunaTemplate.iter", "modulename": "fauna.query.template", "qualname": "FaunaTemplate.iter", "kind": "function", "doc": "

A method that returns an iterator over tuples representing template parts. The\nfirst value of the tuple, if not None, is a template literal. The second value of\nthe tuple, if not None, is a template variable. If both are not None, then the\ntemplate literal comes before the variable.

\n\n
Raises
\n\n
    \n
  • ValueError: If there is an invalid template placeholder
  • \n
\n\n
Returns
\n\n
\n

An iterator of template parts

\n
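A small usage sketch, reusing the template example above:

    from fauna.query.template import FaunaTemplate

    tmpl = FaunaTemplate("${my_var} { name }")
    for literal, variable in tmpl.iter():
        # each tuple holds an optional literal and an optional variable name;
        # when both are present, the literal precedes the variable
        print(literal, variable)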
\n", "signature": "(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:", "funcdef": "def"}]; + + // mirrored in build-search-index.js (part 1) + // Also split on html tags. this is a cheap heuristic, but good enough. + elasticlunr.tokenizer.setSeperator(/[\s\-.;&_'"=,()]+|<[^>]*>/); + + let searchIndex; + if (docs._isPrebuiltIndex) { + console.info("using precompiled search index"); + searchIndex = elasticlunr.Index.load(docs); + } else { + console.time("building search index"); + // mirrored in build-search-index.js (part 2) + searchIndex = elasticlunr(function () { + this.pipeline.remove(elasticlunr.stemmer); + this.pipeline.remove(elasticlunr.stopWordFilter); + this.addField("qualname"); + this.addField("fullname"); + this.addField("annotation"); + this.addField("default_value"); + this.addField("signature"); + this.addField("bases"); + this.addField("doc"); + this.setRef("fullname"); + }); + for (let doc of docs) { + searchIndex.addDoc(doc); + } + console.timeEnd("building search index"); + } + + return (term) => searchIndex.search(term, { + fields: { + qualname: {boost: 4}, + fullname: {boost: 2}, + annotation: {boost: 2}, + default_value: {boost: 2}, + signature: {boost: 2}, + bases: {boost: 2}, + doc: {boost: 1}, + }, + expand: true + }); +})(); \ No newline at end of file diff --git a/latest b/latest index 359a5b95..50aea0e7 120000 --- a/latest +++ b/latest @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of file