import logging
from dataclasses import dataclass
from datetime import timedelta
from typing import Any, Dict, Iterator, Mapping, Optional, Union, List

import fauna
from fauna.client.headers import _DriverEnvironment, _Header, _Auth, Header
from fauna.client.retryable import Retryable
from fauna.client.utils import _Environment, LastTxnTs
from fauna.encoding import FaunaEncoder, FaunaDecoder
from fauna.encoding import QuerySuccess, QueryTags, QueryStats
from fauna.errors import FaunaError, ClientError, ProtocolError, \
    RetryableFaunaException, NetworkError
from fauna.http.http_client import HTTPClient
from fauna.query import EventSource, Query, Page, fql

logger = logging.getLogger("fauna")

DefaultHttpConnectTimeout = timedelta(seconds=5)
DefaultHttpReadTimeout: Optional[timedelta] = None
DefaultHttpWriteTimeout = timedelta(seconds=5)
DefaultHttpPoolTimeout = timedelta(seconds=5)
DefaultIdleConnectionTimeout = timedelta(seconds=5)
DefaultQueryTimeout = timedelta(seconds=5)
DefaultClientBufferTimeout = timedelta(seconds=5)
DefaultMaxConnections = 20
DefaultMaxIdleConnections = 20


@dataclass
class QueryOptions:
  """
  A dataclass representing options available for a query.

  * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
  * max_contention_retries - The max number of times to retry the query if contention is encountered.
  * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
  * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
  * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
  * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
  * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
  """

  linearized: Optional[bool] = None
  max_contention_retries: Optional[int] = None
  query_timeout: Optional[timedelta] = DefaultQueryTimeout
  query_tags: Optional[Mapping[str, str]] = None
  traceparent: Optional[str] = None
  typecheck: Optional[bool] = None
  additional_headers: Optional[Dict[str, str]] = None

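# A minimal sketch of per-query configuration (illustrative only; assumes a
# constructed Client named `client` and that the FQL shown is valid for your
# database):
#
#   opts = QueryOptions(
#       query_timeout=timedelta(seconds=10),
#       query_tags={"env": "staging"},
#       typecheck=True,
#   )
#   client.query(fql("Collection.all()"), opts)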

@dataclass
class StreamOptions:
  """
  A dataclass representing options available for a stream.

  * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
  * max_backoff - The maximum backoff in seconds for an individual retry.
  * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
    the timestamp.
  * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  * status_events - Indicates if stream should include status events. Status events are periodic events that
    update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
    about the cost of maintaining the stream other than the cost of the received events.
  """

  max_attempts: Optional[int] = None
  max_backoff: Optional[int] = None
  start_ts: Optional[int] = None
  cursor: Optional[str] = None
  status_events: bool = False

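# A minimal sketch of stream configuration (illustrative only). Note that
# 'start_ts' and 'cursor' are mutually exclusive; StreamIterator below raises
# a TypeError when both are set:
#
#   opts = StreamOptions(max_attempts=5, status_events=True)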

@dataclass
class FeedOptions:
  """
  A dataclass representing options available for an Event Feed.

  * max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
  * max_backoff - The maximum backoff in seconds for an individual retry.
  * query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
  * start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after
    the timestamp.
  * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
  * page_size - The desired number of events per page.
  """
  max_attempts: Optional[int] = None
  max_backoff: Optional[int] = None
  query_timeout: Optional[timedelta] = None
  page_size: Optional[int] = None
  start_ts: Optional[int] = None
  cursor: Optional[str] = None

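# A minimal sketch of Event Feed configuration (illustrative only). As with
# StreamOptions, 'start_ts' and 'cursor' are mutually exclusive; FeedIterator
# below raises a TypeError when both are set:
#
#   opts = FeedOptions(page_size=100, query_timeout=timedelta(seconds=10))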

class Client:

  def __init__(
      self,
      endpoint: Optional[str] = None,
      secret: Optional[str] = None,
      http_client: Optional[HTTPClient] = None,
      query_tags: Optional[Mapping[str, str]] = None,
      linearized: Optional[bool] = None,
      max_contention_retries: Optional[int] = None,
      typecheck: Optional[bool] = None,
      additional_headers: Optional[Dict[str, str]] = None,
      query_timeout: Optional[timedelta] = DefaultQueryTimeout,
      client_buffer_timeout: Optional[timedelta] = DefaultClientBufferTimeout,
      http_read_timeout: Optional[timedelta] = DefaultHttpReadTimeout,
      http_write_timeout: Optional[timedelta] = DefaultHttpWriteTimeout,
      http_connect_timeout: Optional[timedelta] = DefaultHttpConnectTimeout,
      http_pool_timeout: Optional[timedelta] = DefaultHttpPoolTimeout,
      http_idle_timeout: Optional[timedelta] = DefaultIdleConnectionTimeout,
      max_attempts: int = 3,
      max_backoff: int = 20,
  ):
    """Initializes a Client.

    :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
    :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
    :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
    :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
    :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
    :param max_contention_retries: The max number of times to retry the query if contention is encountered.
    :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
    :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
    :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
    :param client_buffer_timeout: Time beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
    :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
    :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
    :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
    :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
    :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
    :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
    :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
    """

    self._set_endpoint(endpoint)
    self._max_attempts = max_attempts
    self._max_backoff = max_backoff

    if secret is None:
      self._auth = _Auth(_Environment.EnvFaunaSecret())
    else:
      self._auth = _Auth(secret)

    self._last_txn_ts = LastTxnTs()

    self._query_tags = {}
    if query_tags is not None:
      self._query_tags.update(query_tags)

    if query_timeout is not None:
      self._query_timeout_ms = int(query_timeout.total_seconds() * 1000)
    else:
      self._query_timeout_ms = None

    self._headers: Dict[str, str] = {
        _Header.AcceptEncoding: "gzip",
        _Header.ContentType: "application/json;charset=utf-8",
        _Header.Driver: "python",
        _Header.DriverEnv: str(_DriverEnvironment()),
    }

    if typecheck is not None:
      self._headers[Header.Typecheck] = str(typecheck).lower()

    if linearized is not None:
      self._headers[Header.Linearized] = str(linearized).lower()

    if max_contention_retries is not None and max_contention_retries > 0:
      self._headers[Header.MaxContentionRetries] = f"{max_contention_retries}"

    if additional_headers is not None:
      self._headers = {
          **self._headers,
          **additional_headers,
      }

    self._session: HTTPClient

    if http_client is not None:
      self._session = http_client
    else:
      if fauna.global_http_client is None:
        timeout_s: Optional[float] = None
        if query_timeout is not None and client_buffer_timeout is not None:
          timeout_s = (query_timeout + client_buffer_timeout).total_seconds()
        read_timeout_s: Optional[float] = None
        if http_read_timeout is not None:
          read_timeout_s = http_read_timeout.total_seconds()

        write_timeout_s: Optional[float] = (
            http_write_timeout.total_seconds()
            if http_write_timeout is not None else None)
        connect_timeout_s: Optional[float] = (
            http_connect_timeout.total_seconds()
            if http_connect_timeout is not None else None)
        pool_timeout_s: Optional[float] = (
            http_pool_timeout.total_seconds()
            if http_pool_timeout is not None else None)
        idle_timeout_s: Optional[float] = (
            http_idle_timeout.total_seconds()
            if http_idle_timeout is not None else None)

        import httpx
        from fauna.http.httpx_client import HTTPXClient
        c = HTTPXClient(
            httpx.Client(
                http1=True,
                http2=False,
                timeout=httpx.Timeout(
                    timeout=timeout_s,
                    connect=connect_timeout_s,
                    read=read_timeout_s,
                    write=write_timeout_s,
                    pool=pool_timeout_s,
                ),
                limits=httpx.Limits(
                    max_connections=DefaultMaxConnections,
                    max_keepalive_connections=DefaultMaxIdleConnections,
                    keepalive_expiry=idle_timeout_s,
                ),
            ), logger)
        fauna.global_http_client = c

      self._session = fauna.global_http_client

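  # A minimal construction sketch (illustrative only; the secret below is a
  # placeholder, and both 'endpoint' and 'secret' fall back to the
  # FAUNA_ENDPOINT and FAUNA_SECRET environment variables when omitted):
  #
  #   client = Client(
  #       secret="your-secret-here",
  #       query_timeout=timedelta(seconds=10),
  #       max_attempts=5,
  #   )
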
  def close(self):
    self._session.close()
    if self._session == fauna.global_http_client:
      fauna.global_http_client = None

  def set_last_txn_ts(self, txn_ts: int):
    """
    Set the last timestamp seen by this client.
    This has no effect if the given timestamp is earlier than the stored timestamp.

    .. WARNING:: This should be used only when coordinating timestamps across
      multiple clients. Moving the timestamp arbitrarily forward into
      the future will cause transactions to stall.

    :param txn_ts: the new transaction time.
    """
    self._last_txn_ts.update_txn_time(txn_ts)

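  # A minimal coordination sketch (illustrative only): carry the most recent
  # transaction time from one client to another so that the second client's
  # reads observe the first client's writes.
  #
  #   ts = client_a.get_last_txn_ts()
  #   if ts is not None:
  #       client_b.set_last_txn_ts(ts)
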
  def get_last_txn_ts(self) -> Optional[int]:
    """
    Get the last timestamp seen by this client.

    :return: the last transaction timestamp, or None if no transaction has been observed.
    """
    return self._last_txn_ts.time

  def get_query_timeout(self) -> Optional[timedelta]:
    """
    Get the query timeout for all queries.
    """
    if self._query_timeout_ms is not None:
      return timedelta(milliseconds=self._query_timeout_ms)
    else:
      return None

  def paginate(
      self,
      fql: Query,
      opts: Optional[QueryOptions] = None,
  ) -> "QueryIterator":
    """
    Run a query on Fauna and return an iterator of results. If the query
    returns a Page, the iterator will fetch additional Pages until the
    after token is null. Each call for a page will be retried with exponential
    backoff up to the max_attempts set in the client's retry policy in the
    event of a 429 or 502.

    :param fql: A Query
    :param opts: (Optional) Query Options

    :return: a :class:`QueryIterator`

    :raises NetworkError: HTTP Request failed in transit
    :raises ProtocolError: HTTP error not from Fauna
    :raises ServiceError: Fauna returned an error
    :raises ValueError: Encoding and decoding errors
    :raises TypeError: Invalid param types
    """

    if not isinstance(fql, Query):
      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
                f"Query by calling fauna.fql()"
      raise TypeError(err_msg)

    return QueryIterator(self, fql, opts)

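  # A minimal pagination sketch (illustrative only; assumes the query returns
  # a Set, e.g. from Collection.all()):
  #
  #   for page in client.paginate(fql("Collection.all()")):
  #       for item in page:
  #           print(item)
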
  def query(
      self,
      fql: Query,
      opts: Optional[QueryOptions] = None,
  ) -> QuerySuccess:
    """
    Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
    up to the max_backoff in the event of a 429.

    :param fql: A Query
    :param opts: (Optional) Query Options

    :return: a :class:`QuerySuccess`

    :raises NetworkError: HTTP Request failed in transit
    :raises ProtocolError: HTTP error not from Fauna
    :raises ServiceError: Fauna returned an error
    :raises ValueError: Encoding and decoding errors
    :raises TypeError: Invalid param types
    """

    if not isinstance(fql, Query):
      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
                f"Query by calling fauna.fql()"
      raise TypeError(err_msg)

    try:
      encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
    except Exception as e:
      raise ClientError("Failed to encode Query") from e

    retryable = Retryable[QuerySuccess](
        self._max_attempts,
        self._max_backoff,
        self._query,
        "/query/1",
        fql=encoded_query,
        opts=opts,
    )

    r = retryable.run()
    r.response.stats.attempts = r.attempts
    return r.response

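  # A minimal query sketch (illustrative only; the collection name is a
  # placeholder):
  #
  #   success = client.query(fql("Product.all().take(5)"))
  #   print(success.data, success.stats)
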
  def _query(
      self,
      path: str,
      fql: Mapping[str, Any],
      arguments: Optional[Mapping[str, Any]] = None,
      opts: Optional[QueryOptions] = None,
  ) -> QuerySuccess:

    headers = self._headers.copy()
    headers[_Header.Format] = "tagged"
    headers[_Header.Authorization] = self._auth.bearer()

    if self._query_timeout_ms is not None:
      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)

    headers.update(self._last_txn_ts.request_header)

    query_tags = {}
    if self._query_tags is not None:
      query_tags.update(self._query_tags)

    if opts is not None:
      if opts.linearized is not None:
        headers[Header.Linearized] = str(opts.linearized).lower()
      if opts.max_contention_retries is not None:
        headers[Header.MaxContentionRetries] = f"{opts.max_contention_retries}"
      if opts.traceparent is not None:
        headers[Header.Traceparent] = opts.traceparent
      if opts.query_timeout is not None:
        timeout_ms = f"{int(opts.query_timeout.total_seconds() * 1000)}"
        headers[Header.QueryTimeoutMs] = timeout_ms
      if opts.query_tags is not None:
        query_tags.update(opts.query_tags)
      if opts.typecheck is not None:
        headers[Header.Typecheck] = str(opts.typecheck).lower()
      if opts.additional_headers is not None:
        headers.update(opts.additional_headers)

    if len(query_tags) > 0:
      headers[Header.Tags] = QueryTags.encode(query_tags)

    data: dict[str, Any] = {
        "query": fql,
        "arguments": arguments or {},
    }

    with self._session.request(
        method="POST",
        url=self._endpoint + path,
        headers=headers,
        data=data,
    ) as response:
      status_code = response.status_code()
      response_json = response.json()
      headers = response.headers()

    self._check_protocol(response_json, status_code)

    dec: Any = FaunaDecoder.decode(response_json)

    if status_code > 399:
      FaunaError.parse_error_and_throw(dec, status_code)

    if "txn_ts" in dec:
      self.set_last_txn_ts(int(response_json["txn_ts"]))

    stats = QueryStats(dec["stats"]) if "stats" in dec else None
    summary = dec["summary"] if "summary" in dec else None
    query_tags = QueryTags.decode(
        dec["query_tags"]) if "query_tags" in dec else None
    txn_ts = dec["txn_ts"] if "txn_ts" in dec else None
    schema_version = dec["schema_version"] if "schema_version" in dec else None
    traceparent = headers.get("traceparent", None)
    static_type = dec["static_type"] if "static_type" in dec else None

    return QuerySuccess(
        data=dec["data"],
        query_tags=query_tags,
        static_type=static_type,
        stats=stats,
        summary=summary,
        traceparent=traceparent,
        txn_ts=txn_ts,
        schema_version=schema_version,
    )

  def stream(
      self,
      fql: Union[EventSource, Query],
      opts: StreamOptions = StreamOptions()
  ) -> "StreamIterator":
    """
    Opens a Stream in Fauna and returns an iterator that consumes Fauna events.

    :param fql: An EventSource or a Query that returns an EventSource.
    :param opts: (Optional) Stream Options.

    :return: a :class:`StreamIterator`

    :raises ClientError: Invalid options provided
    :raises NetworkError: HTTP Request failed in transit
    :raises ProtocolError: HTTP error not from Fauna
    :raises ServiceError: Fauna returned an error
    :raises ValueError: Encoding and decoding errors
    :raises TypeError: Invalid param types
    """

    if isinstance(fql, Query):
      if opts.cursor is not None:
        raise ClientError(
            "The 'cursor' configuration can only be used with an event source.")

      source = self.query(fql).data
    else:
      source = fql

    if not isinstance(source, EventSource):
      err_msg = f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
      raise TypeError(err_msg)

    headers = self._headers.copy()
    headers[_Header.Format] = "tagged"
    headers[_Header.Authorization] = self._auth.bearer()

    return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
                          self._max_attempts, self._max_backoff, opts, source)

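  # A minimal streaming sketch (illustrative only; the collection name is a
  # placeholder and the query is assumed to yield an event source).
  # StreamIterator is also a context manager:
  #
  #   with client.stream(fql("Product.all().eventSource()")) as events:
  #       for event in events:
  #           print(event["type"])
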
  def feed(
      self,
      source: Union[EventSource, Query],
      opts: FeedOptions = FeedOptions(),
  ) -> "FeedIterator":
    """
    Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.

    :param source: An EventSource or a Query that returns an EventSource.
    :param opts: (Optional) Event Feed options.

    :return: a :class:`FeedIterator`

    :raises ClientError: Invalid options provided
    :raises NetworkError: HTTP Request failed in transit
    :raises ProtocolError: HTTP error not from Fauna
    :raises ServiceError: Fauna returned an error
    :raises ValueError: Encoding and decoding errors
    :raises TypeError: Invalid param types
    """

    if isinstance(source, Query):
      source = self.query(source).data

    if not isinstance(source, EventSource):
      err_msg = f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
      raise TypeError(err_msg)

    headers = self._headers.copy()
    headers[_Header.Format] = "tagged"
    headers[_Header.Authorization] = self._auth.bearer()

    if opts.query_timeout is not None:
      query_timeout_ms = int(opts.query_timeout.total_seconds() * 1000)
      headers[Header.QueryTimeoutMs] = str(query_timeout_ms)
    elif self._query_timeout_ms is not None:
      headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)

    return FeedIterator(self._session, headers, self._endpoint + "/feed/1",
                        self._max_attempts, self._max_backoff, opts, source)

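  # A minimal Event Feed sketch (illustrative only; the collection name is a
  # placeholder). Pages can be walked explicitly, or flattened into a single
  # event iterator with FeedIterator.flatten():
  #
  #   feed = client.feed(fql("Product.all().eventSource()"))
  #   for page in feed:
  #       for event in page:
  #           print(event["type"])
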
  def _check_protocol(self, response_json: Any, status_code):
    # TODO: Logic to validate wire protocol belongs elsewhere.
    should_raise = False

    # check for QuerySuccess
    if status_code <= 399 and "data" not in response_json:
      should_raise = True

    # check for QueryFailure
    if status_code > 399:
      if "error" not in response_json:
        should_raise = True
      else:
        e = response_json["error"]
        if "code" not in e or "message" not in e:
          should_raise = True

    if should_raise:
      raise ProtocolError(
          status_code,
          f"Response is in an unknown format: \n{response_json}",
      )

  def _set_endpoint(self, endpoint):
    if endpoint is None:
      endpoint = _Environment.EnvFaunaEndpoint()

    if endpoint[-1:] == "/":
      endpoint = endpoint[:-1]

    self._endpoint = endpoint


class StreamIterator:
  """A class that mixes a ContextManager and an Iterator so we can detect retryable errors."""

  def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
               endpoint: str, max_attempts: int, max_backoff: int,
               opts: StreamOptions, source: EventSource):
    self._http_client = http_client
    self._headers = headers
    self._endpoint = endpoint
    self._max_attempts = max_attempts
    self._max_backoff = max_backoff
    self._opts = opts
    self._source = source
    self._stream = None
    self.last_ts = None
    self.last_cursor = None
    self._ctx = self._create_stream()

    if opts.start_ts is not None and opts.cursor is not None:
      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the StreamOptions."
      raise TypeError(err_msg)

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, exc_traceback):
    if self._stream is not None:
      self._stream.close()

    self._ctx.__exit__(exc_type, exc_value, exc_traceback)
    return False

  def __iter__(self):
    return self

  def __next__(self):
    if self._opts.max_attempts is not None:
      max_attempts = self._opts.max_attempts
    else:
      max_attempts = self._max_attempts

    if self._opts.max_backoff is not None:
      max_backoff = self._opts.max_backoff
    else:
      max_backoff = self._max_backoff

    retryable = Retryable[Any](max_attempts, max_backoff, self._next_element)
    return retryable.run().response

  def _next_element(self):
    try:
      if self._stream is None:
        try:
          self._stream = self._ctx.__enter__()
        except Exception:
          self._retry_stream()

      if self._stream is not None:
        event: Any = FaunaDecoder.decode(next(self._stream))

        if event["type"] == "error":
          FaunaError.parse_error_and_throw(event, 400)

        self.last_ts = event["txn_ts"]
        self.last_cursor = event.get('cursor')

        if event["type"] == "start":
          return self._next_element()

        if not self._opts.status_events and event["type"] == "status":
          return self._next_element()

        return event

      raise StopIteration
    except NetworkError:
      self._retry_stream()

  def _retry_stream(self):
    if self._stream is not None:
      self._stream.close()

    self._stream = None

    try:
      self._ctx = self._create_stream()
    except Exception:
      pass
    raise RetryableFaunaException

  def _create_stream(self):
    data: Dict[str, Any] = {"token": self._source.token}
    if self.last_cursor is not None:
      data["cursor"] = self.last_cursor
    elif self._opts.cursor is not None:
      data["cursor"] = self._opts.cursor
    elif self._opts.start_ts is not None:
      data["start_ts"] = self._opts.start_ts

    return self._http_client.stream(
        url=self._endpoint, headers=self._headers, data=data)

  def close(self):
    if self._stream is not None:
      self._stream.close()


class FeedPage:

  def __init__(self, events: List[Any], cursor: str, stats: QueryStats):
    self._events = events
    self.cursor = cursor
    self.stats = stats

  def __len__(self):
    return len(self._events)

  def __iter__(self) -> Iterator[Any]:
    for event in self._events:
      if event["type"] == "error":
        FaunaError.parse_error_and_throw(event, 400)
      yield event


class FeedIterator:
  """A class to provide an iterator on top of Event Feed pages."""

  def __init__(self, http: HTTPClient, headers: Dict[str, str], endpoint: str,
               max_attempts: int, max_backoff: int, opts: FeedOptions,
               source: EventSource):
    self._http = http
    self._headers = headers
    self._endpoint = endpoint
    self._max_attempts = opts.max_attempts or max_attempts
    self._max_backoff = opts.max_backoff or max_backoff
    self._request: Dict[str, Any] = {"token": source.token}
    self._is_done = False

    if opts.start_ts is not None and opts.cursor is not None:
      err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the FeedOptions."
      raise TypeError(err_msg)

    if opts.page_size is not None:
      self._request["page_size"] = opts.page_size

    if opts.cursor is not None:
      self._request["cursor"] = opts.cursor
    elif opts.start_ts is not None:
      self._request["start_ts"] = opts.start_ts

  def __iter__(self) -> Iterator[FeedPage]:
    self._is_done = False
    return self

  def __next__(self) -> FeedPage:
    if self._is_done:
      raise StopIteration

    retryable = Retryable[Any](self._max_attempts, self._max_backoff,
                               self._next_page)
    return retryable.run().response

  def _next_page(self) -> FeedPage:
    with self._http.request(
        method="POST",
        url=self._endpoint,
        headers=self._headers,
        data=self._request,
    ) as response:
      status_code = response.status_code()
      decoded: Any = FaunaDecoder.decode(response.json())

    if status_code > 399:
      FaunaError.parse_error_and_throw(decoded, status_code)

    self._is_done = not decoded["has_next"]
    self._request["cursor"] = decoded["cursor"]

    if "start_ts" in self._request:
      del self._request["start_ts"]

    return FeedPage(decoded["events"], decoded["cursor"],
                    QueryStats(decoded["stats"]))

  def flatten(self) -> Iterator:
    """A generator that yields events instead of pages of events."""
    for page in self:
      for event in page:
        yield event


class QueryIterator:
  """A class to provide an iterator on top of Fauna queries."""

  def __init__(self,
               client: Client,
               fql: Query,
               opts: Optional[QueryOptions] = None):
    """Initializes the QueryIterator

    :param client: The Client to run the query with
    :param fql: A Query
    :param opts: (Optional) Query Options

    :raises TypeError: Invalid param types
    """
    if not isinstance(client, Client):
      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
                f"Client by calling fauna.client.Client()"
      raise TypeError(err_msg)

    if not isinstance(fql, Query):
      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
                f"Query by calling fauna.fql()"
      raise TypeError(err_msg)

    self.client = client
    self.fql = fql
    self.opts = opts

  def __iter__(self) -> Iterator:
    return self.iter()

  def iter(self) -> Iterator:
    """
    A generator function that immediately fetches and yields the results of
    the stored query. Yields additional pages on subsequent iterations if
    they exist.
    """

    cursor = None
    initial_response = self.client.query(self.fql, self.opts)

    if isinstance(initial_response.data, Page):
      cursor = initial_response.data.after
      yield initial_response.data.data

      while cursor is not None:
        next_response = self.client.query(
            fql("Set.paginate(${after})", after=cursor), self.opts)
        # TODO: `Set.paginate` does not yet return a `@set` tagged value
        #       so we will get back a plain object that might not have
        #       an after property.
        cursor = next_response.data.get("after")
        yield next_response.data.get("data")

    else:
      yield [initial_response.data]

  def flatten(self) -> Iterator:
    """
    A generator function that immediately fetches and yields the results of
    the stored query. Yields each item individually, rather than a whole
    Page at a time. Fetches additional pages as required if they exist.
    """

    for page in self.iter():
      for item in page:
        yield item
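
# A minimal sketch of flattening paginated results (illustrative only):
#
#   iterator = client.paginate(fql("Collection.all()"))
#   for item in iterator.flatten():
#       print(item)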
+
31@dataclass
+32classQueryOptions:
+33"""
+34 A dataclass representing options available for a query.
+35
+36 * linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+37 * max_contention_retries - The max number of times to retry the query if contention is encountered.
+38 * query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+39 * query_tags - Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+40 * traceparent - A traceparent to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_ Must match format: https://www.w3.org/TR/trace-context/#traceparent-header
+41 * typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+42 * additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
+43 """
+44
+45linearized:Optional[bool]=None
+46max_contention_retries:Optional[int]=None
+47query_timeout:Optional[timedelta]=DefaultQueryTimeout
+48query_tags:Optional[Mapping[str,str]]=None
+49traceparent:Optional[str]=None
+50typecheck:Optional[bool]=None
+51additional_headers:Optional[Dict[str,str]]=None
+
+
+
+
A dataclass representing options available for a query.
+
+
+
linearized - If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+
max_contention_retries - The max number of times to retry the query if contention is encountered.
+
query_timeout - Controls the maximum amount of time Fauna will execute your query before marking it failed.
+
query_tags - Tags to associate with the query. See logging
typecheck - Enable or disable typechecking of the query before evaluation. If not set, the value configured on the Client will be used. If neither is set, Fauna will use the value of the "typechecked" flag on the database configuration.
+
additional_headers - Add/update HTTP request headers for the query. In general, this should not be necessary.
54@dataclass
+55classStreamOptions:
+56"""
+57 A dataclass representing options available for a stream.
+58
+59 * max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+60 * max_backoff - The maximum backoff in seconds for an individual retry.
+61 * start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+62 the timestamp.
+63 * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+64 * status_events - Indicates if stream should include status events. Status events are periodic events that
+65 update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+66 about the cost of maintaining the stream other than the cost of the received events.
+67 """
+68
+69max_attempts:Optional[int]=None
+70max_backoff:Optional[int]=None
+71start_ts:Optional[int]=None
+72cursor:Optional[str]=None
+73status_events:bool=False
+
+
+
+
A dataclass representing options available for a stream.
+
+
+
max_attempts - The maximum number of times to attempt a stream query when a retryable exception is thrown.
+
max_backoff - The maximum backoff in seconds for an individual retry.
+
start_ts - The starting timestamp of the stream, exclusive. If set, Fauna will return events starting after
+the timestamp.
+
cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+
status_events - Indicates if stream should include status events. Status events are periodic events that
+update the client with the latest valid timestamp (in the event of a dropped connection) as well as metrics
+about the cost of maintaining the stream other than the cost of the received events.
76@dataclass
+77classFeedOptions:
+78"""
+79 A dataclass representing options available for an Event Feed.
+80
+81 * max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
+82 * max_backoff - The maximum backoff in seconds for an individual retry.
+83 * query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
+84 * start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after
+85 the timestamp.
+86 * cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+87 * page_size - The desired number of events per page.
+88 """
+89max_attempts:Optional[int]=None
+90max_backoff:Optional[int]=None
+91query_timeout:Optional[timedelta]=None
+92page_size:Optional[int]=None
+93start_ts:Optional[int]=None
+94cursor:Optional[str]=None
+
+
+
+
A dataclass representing options available for an Event Feed.
+
+
+
max_attempts - The maximum number of times to attempt an Event Feed query when a retryable exception is thrown.
+
max_backoff - The maximum backoff in seconds for an individual retry.
+
query_timeout - Controls the maximum amount of time Fauna will execute a query before returning a page of events.
+
start_ts - The starting timestamp of the Event Feed, exclusive. If set, Fauna will return events starting after
+the timestamp.
+
cursor - The starting event cursor, exclusive. If set, Fauna will return events starting after the cursor.
+
page_size - The desired number of events per page.
97classClient:
+ 98
+ 99def__init__(
+100self,
+101endpoint:Optional[str]=None,
+102secret:Optional[str]=None,
+103http_client:Optional[HTTPClient]=None,
+104query_tags:Optional[Mapping[str,str]]=None,
+105linearized:Optional[bool]=None,
+106max_contention_retries:Optional[int]=None,
+107typecheck:Optional[bool]=None,
+108additional_headers:Optional[Dict[str,str]]=None,
+109query_timeout:Optional[timedelta]=DefaultQueryTimeout,
+110client_buffer_timeout:Optional[timedelta]=DefaultClientBufferTimeout,
+111http_read_timeout:Optional[timedelta]=DefaultHttpReadTimeout,
+112http_write_timeout:Optional[timedelta]=DefaultHttpWriteTimeout,
+113http_connect_timeout:Optional[timedelta]=DefaultHttpConnectTimeout,
+114http_pool_timeout:Optional[timedelta]=DefaultHttpPoolTimeout,
+115http_idle_timeout:Optional[timedelta]=DefaultIdleConnectionTimeout,
+116max_attempts:int=3,
+117max_backoff:int=20,
+118):
+119"""Initializes a Client.
+120
+121 :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+122 :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+123 :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+124 :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+125 :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+126 :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+127 :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+128 :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+129 :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+130 :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+131 :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+132 :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+133 :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+134 :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+135 :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+136 :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+137 :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+138 """
+139
+140self._set_endpoint(endpoint)
+141self._max_attempts=max_attempts
+142self._max_backoff=max_backoff
+143
+144ifsecretisNone:
+145self._auth=_Auth(_Environment.EnvFaunaSecret())
+146else:
+147self._auth=_Auth(secret)
+148
+149self._last_txn_ts=LastTxnTs()
+150
+151self._query_tags={}
+152ifquery_tagsisnotNone:
+153self._query_tags.update(query_tags)
+154
+155ifquery_timeoutisnotNone:
+156self._query_timeout_ms=int(query_timeout.total_seconds()*1000)
+157else:
+158self._query_timeout_ms=None
+159
+160self._headers:Dict[str,str]={
+161_Header.AcceptEncoding:"gzip",
+162_Header.ContentType:"application/json;charset=utf-8",
+163_Header.Driver:"python",
+164_Header.DriverEnv:str(_DriverEnvironment()),
+165}
+166
+167iftypecheckisnotNone:
+168self._headers[Header.Typecheck]=str(typecheck).lower()
+169
+170iflinearizedisnotNone:
+171self._headers[Header.Linearized]=str(linearized).lower()
+172
+173ifmax_contention_retriesisnotNoneandmax_contention_retries>0:
+174self._headers[Header.MaxContentionRetries]= \
+175f"{max_contention_retries}"
+176
+177ifadditional_headersisnotNone:
+178self._headers={
+179**self._headers,
+180**additional_headers,
+181}
+182
+183self._session:HTTPClient
+184
+185ifhttp_clientisnotNone:
+186self._session=http_client
+187else:
+188iffauna.global_http_clientisNone:
+189timeout_s:Optional[float]=None
+190ifquery_timeoutisnotNoneandclient_buffer_timeoutisnotNone:
+191timeout_s=(query_timeout+client_buffer_timeout).total_seconds()
+192read_timeout_s:Optional[float]=None
+193ifhttp_read_timeoutisnotNone:
+194read_timeout_s=http_read_timeout.total_seconds()
+195
+196write_timeout_s:Optional[float]=http_write_timeout.total_seconds(
+197)ifhttp_write_timeoutisnotNoneelseNone
+198connect_timeout_s:Optional[float]=http_connect_timeout.total_seconds(
+199)ifhttp_connect_timeoutisnotNoneelseNone
+200pool_timeout_s:Optional[float]=http_pool_timeout.total_seconds(
+201)ifhttp_pool_timeoutisnotNoneelseNone
+202idle_timeout_s:Optional[float]=http_idle_timeout.total_seconds(
+203)ifhttp_idle_timeoutisnotNoneelseNone
+204
+205importhttpx
+206fromfauna.http.httpx_clientimportHTTPXClient
+207c=HTTPXClient(
+208httpx.Client(
+209http1=True,
+210http2=False,
+211timeout=httpx.Timeout(
+212timeout=timeout_s,
+213connect=connect_timeout_s,
+214read=read_timeout_s,
+215write=write_timeout_s,
+216pool=pool_timeout_s,
+217),
+218limits=httpx.Limits(
+219max_connections=DefaultMaxConnections,
+220max_keepalive_connections=DefaultMaxIdleConnections,
+221keepalive_expiry=idle_timeout_s,
+222),
+223),logger)
+224fauna.global_http_client=c
+225
+226self._session=fauna.global_http_client
+227
+228defclose(self):
+229self._session.close()
+230ifself._session==fauna.global_http_client:
+231fauna.global_http_client=None
+232
+233defset_last_txn_ts(self,txn_ts:int):
+234"""
+235 Set the last timestamp seen by this client.
+236 This has no effect if earlier than stored timestamp.
+237
+238 .. WARNING:: This should be used only when coordinating timestamps across
+239 multiple clients. Moving the timestamp arbitrarily forward into
+240 the future will cause transactions to stall.
+241
+242 :param txn_ts: the new transaction time.
+243 """
+244self._last_txn_ts.update_txn_time(txn_ts)
+245
+246defget_last_txn_ts(self)->Optional[int]:
+247"""
+248 Get the last timestamp seen by this client.
+249 :return:
+250 """
+251returnself._last_txn_ts.time
+252
+253defget_query_timeout(self)->Optional[timedelta]:
+254"""
+255 Get the query timeout for all queries.
+256 """
+257ifself._query_timeout_msisnotNone:
+258returntimedelta(milliseconds=self._query_timeout_ms)
+259else:
+260returnNone
+261
+262defpaginate(
+263self,
+264fql:Query,
+265opts:Optional[QueryOptions]=None,
+266)->"QueryIterator":
+267"""
+268 Run a query on Fauna and returning an iterator of results. If the query
+269 returns a Page, the iterator will fetch additional Pages until the
+270 after token is null. Each call for a page will be retried with exponential
+271 backoff up to the max_attempts set in the client's retry policy in the
+272 event of a 429 or 502.
+273
+274 :param fql: A Query
+275 :param opts: (Optional) Query Options
+276
+277 :return: a :class:`QueryResponse`
+278
+279 :raises NetworkError: HTTP Request failed in transit
+280 :raises ProtocolError: HTTP error not from Fauna
+281 :raises ServiceError: Fauna returned an error
+282 :raises ValueError: Encoding and decoding errors
+283 :raises TypeError: Invalid param types
+284 """
+285
+286ifnotisinstance(fql,Query):
+287err_msg=f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+288f"Query by calling fauna.fql()"
+289raiseTypeError(err_msg)
+290
+291returnQueryIterator(self,fql,opts)
+292
+293defquery(
+294self,
+295fql:Query,
+296opts:Optional[QueryOptions]=None,
+297)->QuerySuccess:
+298"""
+299 Run a query on Fauna. A query will be retried max_attempt times with exponential backoff
+300 up to the max_backoff in the event of a 429.
+301
+302 :param fql: A Query
+303 :param opts: (Optional) Query Options
+304
+305 :return: a :class:`QueryResponse`
+306
+307 :raises NetworkError: HTTP Request failed in transit
+308 :raises ProtocolError: HTTP error not from Fauna
+309 :raises ServiceError: Fauna returned an error
+310 :raises ValueError: Encoding and decoding errors
+311 :raises TypeError: Invalid param types
+312 """
+313
+314ifnotisinstance(fql,Query):
+315err_msg=f"'fql' must be a Query but was a {type(fql)}. You can build a " \
+316f"Query by calling fauna.fql()"
+317raiseTypeError(err_msg)
+318
+319try:
+320encoded_query:Mapping[str,Any]=FaunaEncoder.encode(fql)
+321exceptExceptionase:
+322raiseClientError("Failed to encode Query")frome
+323
+324retryable=Retryable[QuerySuccess](
+325self._max_attempts,
+326self._max_backoff,
+327self._query,
+328"/query/1",
+329fql=encoded_query,
+330opts=opts,
+331)
+332
+333r=retryable.run()
+334r.response.stats.attempts=r.attempts
+335returnr.response
+336
+337def_query(
+338self,
+339path:str,
+340fql:Mapping[str,Any],
+341arguments:Optional[Mapping[str,Any]]=None,
+342opts:Optional[QueryOptions]=None,
+343)->QuerySuccess:
+344
+345headers=self._headers.copy()
+346headers[_Header.Format]="tagged"
+347headers[_Header.Authorization]=self._auth.bearer()
+348
+349ifself._query_timeout_msisnotNone:
+350headers[Header.QueryTimeoutMs]=str(self._query_timeout_ms)
+351
+352headers.update(self._last_txn_ts.request_header)
+353
+354query_tags={}
+355ifself._query_tagsisnotNone:
+356query_tags.update(self._query_tags)
+357
+358ifoptsisnotNone:
+359ifopts.linearizedisnotNone:
+360headers[Header.Linearized]=str(opts.linearized).lower()
+361ifopts.max_contention_retriesisnotNone:
+362headers[Header.MaxContentionRetries]= \
+363f"{opts.max_contention_retries}"
+364ifopts.traceparentisnotNone:
+365headers[Header.Traceparent]=opts.traceparent
+366ifopts.query_timeoutisnotNone:
+367timeout_ms=f"{int(opts.query_timeout.total_seconds()*1000)}"
+368headers[Header.QueryTimeoutMs]=timeout_ms
+369ifopts.query_tagsisnotNone:
+370query_tags.update(opts.query_tags)
+371ifopts.typecheckisnotNone:
+372headers[Header.Typecheck]=str(opts.typecheck).lower()
+373ifopts.additional_headersisnotNone:
+374headers.update(opts.additional_headers)
+375
+376iflen(query_tags)>0:
+377headers[Header.Tags]=QueryTags.encode(query_tags)
+378
+379data:dict[str,Any]={
+380"query":fql,
+381"arguments":argumentsor{},
+382}
+383
+384withself._session.request(
+385method="POST",
+386url=self._endpoint+path,
+387headers=headers,
+388data=data,
+389)asresponse:
+390status_code=response.status_code()
+391response_json=response.json()
+392headers=response.headers()
+393
+394self._check_protocol(response_json,status_code)
+395
+396dec:Any=FaunaDecoder.decode(response_json)
+397
+398ifstatus_code>399:
+399FaunaError.parse_error_and_throw(dec,status_code)
+400
+401if"txn_ts"indec:
+402self.set_last_txn_ts(int(response_json["txn_ts"]))
+403
+404stats=QueryStats(dec["stats"])if"stats"indecelseNone
+405summary=dec["summary"]if"summary"indecelseNone
+406query_tags=QueryTags.decode(
+407dec["query_tags"])if"query_tags"indecelseNone
+408txn_ts=dec["txn_ts"]if"txn_ts"indecelseNone
+409schema_version=dec["schema_version"]if"schema_version"indecelseNone
+410traceparent=headers.get("traceparent",None)
+411static_type=dec["static_type"]if"static_type"indecelseNone
+412
+413returnQuerySuccess(
+414data=dec["data"],
+415query_tags=query_tags,
+416static_type=static_type,
+417stats=stats,
+418summary=summary,
+419traceparent=traceparent,
+420txn_ts=txn_ts,
+421schema_version=schema_version,
+422)
+423
+424defstream(
+425self,
+426fql:Union[EventSource,Query],
+427opts:StreamOptions=StreamOptions()
+428)->"StreamIterator":
+429"""
+430 Opens a Stream in Fauna and returns an iterator that consume Fauna events.
+431
+432 :param fql: An EventSource or a Query that returns an EventSource.
+433 :param opts: (Optional) Stream Options.
+434
+435 :return: a :class:`StreamIterator`
+436
+437 :raises ClientError: Invalid options provided
+438 :raises NetworkError: HTTP Request failed in transit
+439 :raises ProtocolError: HTTP error not from Fauna
+440 :raises ServiceError: Fauna returned an error
+441 :raises ValueError: Encoding and decoding errors
+442 :raises TypeError: Invalid param types
+443 """
+444
+445ifisinstance(fql,Query):
+446ifopts.cursorisnotNone:
+447raiseClientError(
+448"The 'cursor' configuration can only be used with an event source.")
+449
+450source=self.query(fql).data
+451else:
+452source=fql
+453
+454ifnotisinstance(source,EventSource):
+455err_msg=f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+456raiseTypeError(err_msg)
+457
+458headers=self._headers.copy()
+459headers[_Header.Format]="tagged"
+460headers[_Header.Authorization]=self._auth.bearer()
+461
+462returnStreamIterator(self._session,headers,self._endpoint+"/stream/1",
+463self._max_attempts,self._max_backoff,opts,source)
+464
+465deffeed(
+466self,
+467source:Union[EventSource,Query],
+468opts:FeedOptions=FeedOptions(),
+469)->"FeedIterator":
+470"""
+471 Opens an Event Feed in Fauna and returns an iterator that consume Fauna events.
+472
+473 :param source: An EventSource or a Query that returns an EventSource.
+474 :param opts: (Optional) Event Feed options.
+475
+476 :return: a :class:`FeedIterator`
+477
+478 :raises ClientError: Invalid options provided
+479 :raises NetworkError: HTTP Request failed in transit
+480 :raises ProtocolError: HTTP error not from Fauna
+481 :raises ServiceError: Fauna returned an error
+482 :raises ValueError: Encoding and decoding errors
+483 :raises TypeError: Invalid param types
+484 """
+485
+486ifisinstance(source,Query):
+487source=self.query(source).data
+488
+489ifnotisinstance(source,EventSource):
+490err_msg=f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
+491raiseTypeError(err_msg)
+492
+493headers=self._headers.copy()
+494headers[_Header.Format]="tagged"
+495headers[_Header.Authorization]=self._auth.bearer()
+496
+497ifopts.query_timeoutisnotNone:
+498query_timeout_ms=int(opts.query_timeout.total_seconds()*1000)
+499headers[Header.QueryTimeoutMs]=str(query_timeout_ms)
+500elifself._query_timeout_msisnotNone:
+501headers[Header.QueryTimeoutMs]=str(self._query_timeout_ms)
+502
+503returnFeedIterator(self._session,headers,self._endpoint+"/feed/1",
+504self._max_attempts,self._max_backoff,opts,source)
+505
+506def_check_protocol(self,response_json:Any,status_code):
+507# TODO: Logic to validate wire protocol belongs elsewhere.
+508should_raise=False
+509
+510# check for QuerySuccess
+511ifstatus_code<=399and"data"notinresponse_json:
+512should_raise=True
+513
+514# check for QueryFailure
+515ifstatus_code>399:
+516if"error"notinresponse_json:
+517should_raise=True
+518else:
+519e=response_json["error"]
+520if"code"notineor"message"notine:
+521should_raise=True
+522
+523ifshould_raise:
+524raiseProtocolError(
+525status_code,
+526f"Response is in an unknown format: \n{response_json}",
+527)
+528
+529def_set_endpoint(self,endpoint):
+530ifendpointisNone:
+531endpoint=_Environment.EnvFaunaEndpoint()
+532
+533ifendpoint[-1:]=="/":
+534endpoint=endpoint[:-1]
+535
+536self._endpoint=endpoint
+
99def__init__(
+100self,
+101endpoint:Optional[str]=None,
+102secret:Optional[str]=None,
+103http_client:Optional[HTTPClient]=None,
+104query_tags:Optional[Mapping[str,str]]=None,
+105linearized:Optional[bool]=None,
+106max_contention_retries:Optional[int]=None,
+107typecheck:Optional[bool]=None,
+108additional_headers:Optional[Dict[str,str]]=None,
+109query_timeout:Optional[timedelta]=DefaultQueryTimeout,
+110client_buffer_timeout:Optional[timedelta]=DefaultClientBufferTimeout,
+111http_read_timeout:Optional[timedelta]=DefaultHttpReadTimeout,
+112http_write_timeout:Optional[timedelta]=DefaultHttpWriteTimeout,
+113http_connect_timeout:Optional[timedelta]=DefaultHttpConnectTimeout,
+114http_pool_timeout:Optional[timedelta]=DefaultHttpPoolTimeout,
+115http_idle_timeout:Optional[timedelta]=DefaultIdleConnectionTimeout,
+116max_attempts:int=3,
+117max_backoff:int=20,
+118):
+119"""Initializes a Client.
+120
+121 :param endpoint: The Fauna Endpoint to use. Defaults to https://db.fauna.com, or the `FAUNA_ENDPOINT` env variable.
+122 :param secret: The Fauna Secret to use. Defaults to empty, or the `FAUNA_SECRET` env variable.
+123 :param http_client: An :class:`HTTPClient` implementation. Defaults to a global :class:`HTTPXClient`.
+124 :param query_tags: Tags to associate with the query. See `logging <https://docs.fauna.com/fauna/current/build/logs/query_log/>`_
+125 :param linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+126 :param max_contention_retries: The max number of times to retry the query if contention is encountered.
+127 :param typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+128 :param additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+129 :param query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is :py:data:`DefaultQueryTimeout`.
+130 :param client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is :py:data:`DefaultClientBufferTimeout`, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
+131 :param http_read_timeout: Set HTTP Read timeout, default is :py:data:`DefaultHttpReadTimeout`.
+132 :param http_write_timeout: Set HTTP Write timeout, default is :py:data:`DefaultHttpWriteTimeout`.
+133 :param http_connect_timeout: Set HTTP Connect timeout, default is :py:data:`DefaultHttpConnectTimeout`.
+134 :param http_pool_timeout: Set HTTP Pool timeout, default is :py:data:`DefaultHttpPoolTimeout`.
+135 :param http_idle_timeout: Set HTTP Idle timeout, default is :py:data:`DefaultIdleConnectionTimeout`.
+136 :param max_attempts: The maximum number of times to attempt a query when a retryable exception is thrown. Defaults to 3.
+137 :param max_backoff: The maximum backoff in seconds for an individual retry. Defaults to 20.
+138 """
+139
+140self._set_endpoint(endpoint)
+141self._max_attempts=max_attempts
+142self._max_backoff=max_backoff
+143
+144ifsecretisNone:
+145self._auth=_Auth(_Environment.EnvFaunaSecret())
+146else:
+147self._auth=_Auth(secret)
+148
+149self._last_txn_ts=LastTxnTs()
+150
+151self._query_tags={}
+152ifquery_tagsisnotNone:
+153self._query_tags.update(query_tags)
+154
+155ifquery_timeoutisnotNone:
+156self._query_timeout_ms=int(query_timeout.total_seconds()*1000)
+157else:
+158self._query_timeout_ms=None
+159
+160self._headers:Dict[str,str]={
+161_Header.AcceptEncoding:"gzip",
+162_Header.ContentType:"application/json;charset=utf-8",
+163_Header.Driver:"python",
+164_Header.DriverEnv:str(_DriverEnvironment()),
+165}
+166
+167iftypecheckisnotNone:
+168self._headers[Header.Typecheck]=str(typecheck).lower()
+169
+170iflinearizedisnotNone:
+171self._headers[Header.Linearized]=str(linearized).lower()
+172
+173ifmax_contention_retriesisnotNoneandmax_contention_retries>0:
+174self._headers[Header.MaxContentionRetries]= \
+175f"{max_contention_retries}"
+176
+177ifadditional_headersisnotNone:
+178self._headers={
+179**self._headers,
+180**additional_headers,
+181}
+182
+183self._session:HTTPClient
+184
+185ifhttp_clientisnotNone:
+186self._session=http_client
+187else:
+188iffauna.global_http_clientisNone:
+189timeout_s:Optional[float]=None
+190ifquery_timeoutisnotNoneandclient_buffer_timeoutisnotNone:
+191timeout_s=(query_timeout+client_buffer_timeout).total_seconds()
+192read_timeout_s:Optional[float]=None
+193ifhttp_read_timeoutisnotNone:
+194read_timeout_s=http_read_timeout.total_seconds()
+195
+196write_timeout_s:Optional[float]=http_write_timeout.total_seconds(
+197)ifhttp_write_timeoutisnotNoneelseNone
+198connect_timeout_s:Optional[float]=http_connect_timeout.total_seconds(
+199)ifhttp_connect_timeoutisnotNoneelseNone
+200pool_timeout_s:Optional[float]=http_pool_timeout.total_seconds(
+201)ifhttp_pool_timeoutisnotNoneelseNone
+202idle_timeout_s:Optional[float]=http_idle_timeout.total_seconds(
+203)ifhttp_idle_timeoutisnotNoneelseNone
+204
+205importhttpx
+206fromfauna.http.httpx_clientimportHTTPXClient
+207c=HTTPXClient(
+208httpx.Client(
+209http1=True,
+210http2=False,
+211timeout=httpx.Timeout(
+212timeout=timeout_s,
+213connect=connect_timeout_s,
+214read=read_timeout_s,
+215write=write_timeout_s,
+216pool=pool_timeout_s,
+217),
+218limits=httpx.Limits(
+219max_connections=DefaultMaxConnections,
+220max_keepalive_connections=DefaultMaxIdleConnections,
+221keepalive_expiry=idle_timeout_s,
+222),
+223),logger)
+224fauna.global_http_client=c
+225
+226self._session=fauna.global_http_client
+
+
+
+
Initializes a Client.
+
+
Parameters
+
+
+
endpoint: The Fauna Endpoint to use. Defaults to https: //db.fauna.com, or the FAUNA_ENDPOINT env variable.
+
secret: The Fauna Secret to use. Defaults to empty, or the FAUNA_SECRET env variable.
+
http_client: An HTTPClient implementation. Defaults to a global HTTPXClient.
+
**query_tags: Tags to associate with the query. See logging
+
linearized: If true, unconditionally run the query as strictly serialized. This affects read-only transactions. Transactions which write will always be strictly serialized.
+
max_contention_retries: The max number of times to retry the query if contention is encountered.
+
typecheck: Enable or disable typechecking of the query before evaluation. If not set, Fauna will use the value of the "typechecked" flag on the database configuration.
+
additional_headers: Add/update HTTP request headers for the query. In general, this should not be necessary.
+
query_timeout: Controls the maximum amount of time Fauna will execute your query before marking it failed, default is DefaultQueryTimeout.
+
client_buffer_timeout: Time in milliseconds beyond query_timeout at which the client will abort a request if it has not received a response. The default is DefaultClientBufferTimeout, which should account for network latency for most clients. The value must be greater than zero. The closer to zero the value is, the more likely the client is to abort the request before the server can report a legitimate response or error.
233defset_last_txn_ts(self,txn_ts:int):
+234"""
+235 Set the last timestamp seen by this client.
+236 This has no effect if earlier than stored timestamp.
+237
+238 .. WARNING:: This should be used only when coordinating timestamps across
+239 multiple clients. Moving the timestamp arbitrarily forward into
+240 the future will cause transactions to stall.
+241
+242 :param txn_ts: the new transaction time.
+243 """
+244self._last_txn_ts.update_txn_time(txn_ts)
def get_last_txn_ts(self) -> Optional[int]:
  """
  Get the last timestamp seen by this client.

  :return: the last transaction timestamp, or None if no transaction has been seen yet.
  """
  return self._last_txn_ts.time

def get_query_timeout(self) -> Optional[timedelta]:
  """
  Get the query timeout for all queries.
  """
  if self._query_timeout_ms is not None:
    return timedelta(milliseconds=self._query_timeout_ms)
  else:
    return None

def paginate(
    self,
    fql: Query,
    opts: Optional[QueryOptions] = None,
) -> "QueryIterator":
  """
  Run a query on Fauna and return an iterator of results. If the query
  returns a Page, the iterator will fetch additional Pages until the
  after token is null. Each call for a page will be retried with exponential
  backoff up to the max_attempts set in the client's retry policy in the
  event of a 429 or 502.

  :param fql: A Query
  :param opts: (Optional) Query Options

  :return: a :class:`QueryIterator`

  :raises NetworkError: HTTP Request failed in transit
  :raises ProtocolError: HTTP error not from Fauna
  :raises ServiceError: Fauna returned an error
  :raises ValueError: Encoding and decoding errors
  :raises TypeError: Invalid param types
  """

  if not isinstance(fql, Query):
    err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
              f"Query by calling fauna.fql()"
    raise TypeError(err_msg)

  return QueryIterator(self, fql, opts)
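
A usage sketch, assuming a hypothetical Product collection; each iteration yields one page (a list) of results:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client

  client = Client()  # endpoint/secret from FAUNA_ENDPOINT / FAUNA_SECRET

  for page in client.paginate(fql("Product.all()")):
    for product in page:
      print(product)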
def query(
    self,
    fql: Query,
    opts: Optional[QueryOptions] = None,
) -> QuerySuccess:
  """
  Run a query on Fauna. A query will be retried max_attempts times with exponential backoff
  up to the max_backoff in the event of a 429.

  :param fql: A Query
  :param opts: (Optional) Query Options

  :return: a :class:`QuerySuccess`

  :raises NetworkError: HTTP Request failed in transit
  :raises ProtocolError: HTTP error not from Fauna
  :raises ServiceError: Fauna returned an error
  :raises ValueError: Encoding and decoding errors
  :raises TypeError: Invalid param types
  """

  if not isinstance(fql, Query):
    err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
              f"Query by calling fauna.fql()"
    raise TypeError(err_msg)

  try:
    encoded_query: Mapping[str, Any] = FaunaEncoder.encode(fql)
  except Exception as e:
    raise ClientError("Failed to encode Query") from e

  retryable = Retryable[QuerySuccess](
      self._max_attempts,
      self._max_backoff,
      self._query,
      "/query/1",
      fql=encoded_query,
      opts=opts,
  )

  r = retryable.run()
  r.response.stats.attempts = r.attempts
  return r.response
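
A sketch of a one-off query. This assumes QueryOptions is exported from fauna.client alongside Client, and that a Product collection with a byName index exists:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client, QueryOptions

  client = Client()
  res = client.query(
      fql("Product.byName(${name})", name="limes"),
      QueryOptions(query_tags={"source": "example"}),
  )
  print(res.data)
  print(res.stats.attempts, res.stats.query_time_ms)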
def stream(
    self,
    fql: Union[EventSource, Query],
    opts: StreamOptions = StreamOptions()
) -> "StreamIterator":
  """
  Opens a Stream in Fauna and returns an iterator that consumes Fauna events.

  :param fql: An EventSource or a Query that returns an EventSource.
  :param opts: (Optional) Stream Options.

  :return: a :class:`StreamIterator`

  :raises ClientError: Invalid options provided
  :raises NetworkError: HTTP Request failed in transit
  :raises ProtocolError: HTTP error not from Fauna
  :raises ServiceError: Fauna returned an error
  :raises ValueError: Encoding and decoding errors
  :raises TypeError: Invalid param types
  """

  if isinstance(fql, Query):
    if opts.cursor is not None:
      raise ClientError(
          "The 'cursor' configuration can only be used with an event source.")

    source = self.query(fql).data
  else:
    source = fql

  if not isinstance(source, EventSource):
    err_msg = f"'fql' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
    raise TypeError(err_msg)

  headers = self._headers.copy()
  headers[_Header.Format] = "tagged"
  headers[_Header.Authorization] = self._auth.bearer()

  return StreamIterator(self._session, headers, self._endpoint + "/stream/1",
                        self._max_attempts, self._max_backoff, opts, source)
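
A consumption sketch; the FQL shown is hypothetical, and using the iterator as a context manager is an assumption based on the driver's documented usage:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client

  client = Client()
  with client.stream(fql("Product.all().eventSource()")) as events:
    for event in events:
      print(event["type"])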
def feed(
    self,
    source: Union[EventSource, Query],
    opts: FeedOptions = FeedOptions(),
) -> "FeedIterator":
  """
  Opens an Event Feed in Fauna and returns an iterator that consumes Fauna events.

  :param source: An EventSource or a Query that returns an EventSource.
  :param opts: (Optional) Event Feed options.

  :return: a :class:`FeedIterator`

  :raises ClientError: Invalid options provided
  :raises NetworkError: HTTP Request failed in transit
  :raises ProtocolError: HTTP error not from Fauna
  :raises ServiceError: Fauna returned an error
  :raises ValueError: Encoding and decoding errors
  :raises TypeError: Invalid param types
  """

  if isinstance(source, Query):
    source = self.query(source).data

  if not isinstance(source, EventSource):
    err_msg = f"'source' must be an EventSource, or a Query that returns an EventSource but was a {type(source)}."
    raise TypeError(err_msg)

  headers = self._headers.copy()
  headers[_Header.Format] = "tagged"
  headers[_Header.Authorization] = self._auth.bearer()

  if opts.query_timeout is not None:
    query_timeout_ms = int(opts.query_timeout.total_seconds() * 1000)
    headers[Header.QueryTimeoutMs] = str(query_timeout_ms)
  elif self._query_timeout_ms is not None:
    headers[Header.QueryTimeoutMs] = str(self._query_timeout_ms)

  return FeedIterator(self._session, headers, self._endpoint + "/feed/1",
                      self._max_attempts, self._max_backoff, opts, source)
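
A sketch that pages through an Event Feed; the FQL is hypothetical, and FeedOptions being exported from fauna.client is an assumption. flatten() (shown further below) yields individual events rather than pages:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client, FeedOptions

  client = Client()
  feed = client.feed(
      fql("Product.all().eventSource()"),  # hypothetical FQL
      FeedOptions(page_size=10),
  )
  for event in feed.flatten():
    print(event["type"])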
def __init__(self, http_client: HTTPClient, headers: Dict[str, str],
             endpoint: str, max_attempts: int, max_backoff: int,
             opts: StreamOptions, source: EventSource):
  self._http_client = http_client
  self._headers = headers
  self._endpoint = endpoint
  self._max_attempts = max_attempts
  self._max_backoff = max_backoff
  self._opts = opts
  self._source = source
  self._stream = None
  self.last_ts = None
  self.last_cursor = None
  self._ctx = self._create_stream()

  if opts.start_ts is not None and opts.cursor is not None:
    err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the StreamOptions."
    raise TypeError(err_msg)

def __init__(self, http: HTTPClient, headers: Dict[str, str], endpoint: str,
             max_attempts: int, max_backoff: int, opts: FeedOptions,
             source: EventSource):
  self._http = http
  self._headers = headers
  self._endpoint = endpoint
  self._max_attempts = opts.max_attempts or max_attempts
  self._max_backoff = opts.max_backoff or max_backoff
  self._request: Dict[str, Any] = {"token": source.token}
  self._is_done = False

  if opts.start_ts is not None and opts.cursor is not None:
    err_msg = "Only one of 'start_ts' or 'cursor' can be defined in the FeedOptions."
    raise TypeError(err_msg)

  if opts.page_size is not None:
    self._request["page_size"] = opts.page_size

  if opts.cursor is not None:
    self._request["cursor"] = opts.cursor
  elif opts.start_ts is not None:
    self._request["start_ts"] = opts.start_ts

def flatten(self) -> Iterator:
  """A generator that yields events instead of pages of events."""
  for page in self:
    for event in page:
      yield event

class QueryIterator:
  """A class to provide an iterator on top of Fauna queries."""

  def __init__(self,
               client: Client,
               fql: Query,
               opts: Optional[QueryOptions] = None):
    """Initializes the QueryIterator

    :param fql: A Query
    :param opts: (Optional) Query Options

    :raises TypeError: Invalid param types
    """
    if not isinstance(client, Client):
      err_msg = f"'client' must be a Client but was a {type(client)}. You can build a " \
                f"Client by calling fauna.client.Client()"
      raise TypeError(err_msg)

    if not isinstance(fql, Query):
      err_msg = f"'fql' must be a Query but was a {type(fql)}. You can build a " \
                f"Query by calling fauna.fql()"
      raise TypeError(err_msg)

    self.client = client
    self.fql = fql
    self.opts = opts

  def __iter__(self) -> Iterator:
    return self.iter()

  def iter(self) -> Iterator:
    """
    A generator function that immediately fetches and yields the results of
    the stored query. Yields additional pages on subsequent iterations if
    they exist.
    """

    cursor = None
    initial_response = self.client.query(self.fql, self.opts)

    if isinstance(initial_response.data, Page):
      cursor = initial_response.data.after
      yield initial_response.data.data

      while cursor is not None:
        next_response = self.client.query(
            fql("Set.paginate(${after})", after=cursor), self.opts)
        # TODO: `Set.paginate` does not yet return a `@set` tagged value
        #       so we will get back a plain object that might not have
        #       an after property.
        cursor = next_response.data.get("after")
        yield next_response.data.get("data")

    else:
      yield [initial_response.data]

  def flatten(self) -> Iterator:
    """
    A generator function that immediately fetches and yields the results of
    the stored query. Yields each item individually, rather than a whole
    Page at a time. Fetches additional pages as required if they exist.
    """

    for page in self.iter():
      for item in page:
        yield item
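
flatten() hides the page boundaries; a sketch with the same hypothetical collection as above:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client

  client = Client()
  for product in client.paginate(fql("Product.all()")).flatten():
    print(product)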
diff --git a/2.3.0/fauna/client/endpoints.html b/2.3.0/fauna/client/endpoints.html
fauna.client.endpoints API documentation

import abc
from dataclasses import dataclass
from random import random
from time import sleep
from typing import Callable, Optional, TypeVar, Generic

from fauna.errors import RetryableFaunaException


class RetryStrategy:

  @abc.abstractmethod
  def wait(self) -> float:
    pass


class ExponentialBackoffStrategy(RetryStrategy):

  def __init__(self, max_backoff: int):
    self._max_backoff = float(max_backoff)
    self._i = 0.0

  def wait(self) -> float:
    """Returns the number of seconds to wait for the next call."""
    backoff = random() * (2.0**self._i)
    self._i += 1.0
    return min(backoff, self._max_backoff)


T = TypeVar('T')


@dataclass
class RetryableResponse(Generic[T]):
  attempts: int
  response: T


class Retryable(Generic[T]):
  """
  Retryable is a wrapper class that acts on a Callable that returns a T type.
  """
  _strategy: RetryStrategy
  _error: Optional[Exception]

  def __init__(
      self,
      max_attempts: int,
      max_backoff: int,
      func: Callable[..., T],
      *args,
      **kwargs,
  ):
    self._max_attempts = max_attempts
    self._strategy = ExponentialBackoffStrategy(max_backoff)
    self._func = func
    self._args = args
    self._kwargs = kwargs
    self._error = None

  def run(self) -> RetryableResponse[T]:
    """Runs the wrapped function. Retries up to max_attempts if the function throws a RetryableFaunaException. It propagates
    the thrown exception if max_attempts is reached or if a non-retryable is thrown.

    Returns the number of attempts and the response
    """
    attempt = 0
    while True:
      sleep_time = 0.0 if attempt == 0 else self._strategy.wait()
      sleep(sleep_time)

      try:
        attempt += 1
        qs = self._func(*self._args, **self._kwargs)
        return RetryableResponse[T](attempt, qs)
      except RetryableFaunaException as e:
        if attempt >= self._max_attempts:
          raise e

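wait() implements full-jitter exponential backoff: a uniform draw from [0, 2**i) seconds, capped at max_backoff. A standalone sketch of the wrapper; the flaky function is contrived, and raising RetryableFaunaException with no arguments is an assumption:

.. code-block:: python

  import random

  from fauna.client.retryable import Retryable
  from fauna.errors import RetryableFaunaException

  def flaky() -> str:
    # Simulated transient failure to exercise the retry loop.
    if random.random() < 0.5:
      raise RetryableFaunaException()
    return "ok"

  result = Retryable[str](max_attempts=3, max_backoff=20, func=flaky).run()
  print(result.attempts, result.response)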
diff --git a/2.3.0/fauna/client/utils.html b/2.3.0/fauna/client/utils.html
fauna.client.utils API documentation

import os
import threading
from typing import Generic, Callable, TypeVar, Optional

from fauna.client.endpoints import Endpoints
from fauna.client.headers import Header


def _fancy_bool_from_str(val: str) -> bool:
  return val.lower() in ["1", "true", "yes", "y"]


class LastTxnTs(object):
  """Wraps tracking the last transaction time supplied from the database."""

  def __init__(
      self,
      time: Optional[int] = None,
  ):
    self._lock: threading.Lock = threading.Lock()
    self._time: Optional[int] = time

  @property
  def time(self):
    """Produces the last transaction time, or, None if not yet updated."""
    with self._lock:
      return self._time

  @property
  def request_header(self):
    """Produces a dictionary with a non-zero `X-Last-Seen-Txn` header; or,
    if one has not yet been set, the empty header dictionary."""
    t = self._time
    if t is None:
      return {}
    return {Header.LastTxnTs: str(t)}

  def update_txn_time(self, new_txn_time: int):
    """Updates the internal transaction time.
    In order to maintain a monotonically-increasing value, `newTxnTime`
    is discarded if it is behind the current timestamp."""
    with self._lock:
      self._time = max(self._time or 0, new_txn_time)


T = TypeVar('T')


class _SettingFromEnviron(Generic[T]):

  def __init__(
      self,
      var_name: str,
      default_value: str,
      adapt_from_str: Callable[[str], T],
  ):
    self.__var_name = var_name
    self.__default_value = default_value
    self.__adapt_from_str = adapt_from_str

  def __call__(self) -> T:
    return self.__adapt_from_str(
        os.environ.get(
            self.__var_name,
            default=self.__default_value,
        ))


class _Environment:
  EnvFaunaEndpoint = _SettingFromEnviron(
      "FAUNA_ENDPOINT",
      Endpoints.Default,
      str,
  )
  """environment variable for Fauna Client HTTP endpoint"""

  EnvFaunaSecret = _SettingFromEnviron(
      "FAUNA_SECRET",
      "",
      str,
  )
  """environment variable for Fauna Client authentication"""

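A sketch of the monotonic high-water-mark behavior; the header name comes from the request_header docstring above:

.. code-block:: python

  from fauna.client.utils import LastTxnTs

  ts = LastTxnTs()
  ts.update_txn_time(100)
  ts.update_txn_time(50)  # discarded: behind the stored timestamp
  assert ts.time == 100
  assert ts.request_header == {"X-Last-Seen-Txn": "100"}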
diff --git a/2.3.0/fauna/encoding.html b/2.3.0/fauna/encoding.html
fauna.encoding API documentation

@staticmethod
def decode(obj: Any):
  """Decodes supported objects from the tagged type into untagged.

  Examples:
   - { "@int": "100" } decodes to 100 of type int
   - { "@double": "100" } decodes to 100.0 of type float
   - { "@long": "100" } decodes to 100 of type int
   - { "@time": "..." } decodes to a datetime
   - { "@date": "..." } decodes to a date
   - { "@doc": ... } decodes to a Document or NamedDocument
   - { "@ref": ... } decodes to a DocumentReference or NamedDocumentReference
   - { "@mod": ... } decodes to a Module
   - { "@set": ... } decodes to a Page
   - { "@stream": ... } decodes to an EventSource
   - { "@bytes": ... } decodes to a bytearray

  :param obj: the object to decode
  """
  return FaunaDecoder._decode(obj)

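A sketch of decoding a few tagged wire values (the timestamp format shown is assumed to be accepted):

.. code-block:: python

  from fauna.encoding import FaunaDecoder

  assert FaunaDecoder.decode({"@int": "100"}) == 100
  assert FaunaDecoder.decode({"@double": "1.5"}) == 1.5
  print(FaunaDecoder.decode({"@time": "2024-01-01T00:00:00Z"}))  # a datetime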
diff --git a/2.3.0/fauna/encoding/encoder.html b/2.3.0/fauna/encoding/encoder.html
fauna.encoding.encoder API documentation

@staticmethod
def encode(obj: Any) -> Any:
  """Encodes supported objects into the tagged format.

  Examples:
   - Up to 32-bit ints encode to { "@int": "..." }
   - Up to 64-bit ints encode to { "@long": "..." }
   - Floats encode to { "@double": "..." }
   - datetime encodes to { "@time": "..." }
   - date encodes to { "@date": "..." }
   - DocumentReference encodes to { "@doc": "..." }
   - Module encodes to { "@mod": "..." }
   - Query encodes to { "fql": [...] }
   - ValueFragment encodes to { "value": <encoded_val> }
   - LiteralFragment encodes to a string
   - EventSource encodes to a string

  :raises ValueError: If value cannot be encoded, cannot be encoded safely, or there's a circular reference.
  :param obj: the object to encode
  """
  return FaunaEncoder._encode(obj)

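A sketch of the int/long split and float encoding; the exact wire strings are assumptions based on from_int below:

.. code-block:: python

  from fauna.encoding import FaunaEncoder

  assert FaunaEncoder.encode(100) == {"@int": "100"}
  assert FaunaEncoder.encode(2**40) == {"@long": str(2**40)}
  assert FaunaEncoder.encode(1.5) == {"@double": "1.5"}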
@staticmethod
def from_int(obj: int):
  if -2**31 <= obj <= 2**31 - 1:
    return {"@int": repr(obj)}
  elif -2**63 <= obj <= 2**63 - 1:
    return {"@long": repr(obj)}
  else:
    raise ValueError("Precision loss when converting int to Fauna type")

@staticmethod
def from_datetime(obj: datetime):
  if obj.utcoffset() is None:
    raise ValueError("datetimes must be timezone-aware")

  return {"@time": obj.isoformat(sep="T")}

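Per from_datetime above, naive datetimes are rejected; a sketch:

.. code-block:: python

  from datetime import datetime, timezone

  from fauna.encoding import FaunaEncoder

  aware = datetime(2024, 1, 1, tzinfo=timezone.utc)
  print(FaunaEncoder.encode(aware))  # {'@time': '2024-01-01T00:00:00+00:00'}

  naive = datetime(2024, 1, 1)
  # FaunaEncoder.encode(naive) raises ValueError: datetimes must be timezone-aware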
from dataclasses import dataclass
from typing import Optional, Mapping, Any, List


class QueryStats:
  """Query stats"""

  @property
  def compute_ops(self) -> int:
    """The amount of Transactional Compute Ops consumed by the query."""
    return self._compute_ops

  @property
  def read_ops(self) -> int:
    """The amount of Transactional Read Ops consumed by the query."""
    return self._read_ops

  @property
  def write_ops(self) -> int:
    """The amount of Transactional Write Ops consumed by the query."""
    return self._write_ops

  @property
  def query_time_ms(self) -> int:
    """The query run time in milliseconds."""
    return self._query_time_ms

  @property
  def storage_bytes_read(self) -> int:
    """The amount of data read from storage, in bytes."""
    return self._storage_bytes_read

  @property
  def storage_bytes_write(self) -> int:
    """The amount of data written to storage, in bytes."""
    return self._storage_bytes_write

  @property
  def contention_retries(self) -> int:
    """The number of times the transaction was retried due to write contention."""
    return self._contention_retries

  @property
  def attempts(self) -> int:
    """The number of attempts made by the client to run the query."""
    return self._attempts

  @attempts.setter
  def attempts(self, value):
    self._attempts = value

  def __init__(self, stats: Mapping[str, Any]):
    self._compute_ops = stats.get("compute_ops", 0)
    self._read_ops = stats.get("read_ops", 0)
    self._write_ops = stats.get("write_ops", 0)
    self._query_time_ms = stats.get("query_time_ms", 0)
    self._storage_bytes_read = stats.get("storage_bytes_read", 0)
    self._storage_bytes_write = stats.get("storage_bytes_write", 0)
    self._contention_retries = stats.get("contention_retries", 0)
    self._attempts = 0

  def __repr__(self):
    stats = {
        "compute_ops": self._compute_ops,
        "read_ops": self._read_ops,
        "write_ops": self._write_ops,
        "query_time_ms": self._query_time_ms,
        "storage_bytes_read": self._storage_bytes_read,
        "storage_bytes_write": self._storage_bytes_write,
        "contention_retries": self._contention_retries,
        "attempts": self._attempts,
    }

    return f"{self.__class__.__name__}(stats={repr(stats)})"

  def __eq__(self, other):
    return type(self) == type(other) \
        and self.compute_ops == other.compute_ops \
        and self.read_ops == other.read_ops \
        and self.write_ops == other.write_ops \
        and self.query_time_ms == other.query_time_ms \
        and self.storage_bytes_read == other.storage_bytes_read \
        and self.storage_bytes_write == other.storage_bytes_write \
        and self.contention_retries == other.contention_retries \
        and self.attempts == other.attempts

  def __ne__(self, other):
    return not self.__eq__(other)


class QueryInfo:

  @property
  def query_tags(self) -> Mapping[str, Any]:
    """The tags associated with the query."""
    return self._query_tags

  @property
  def summary(self) -> str:
    """A comprehensive, human readable summary of any errors, warnings and/or logs returned from the query."""
    return self._summary

  @property
  def stats(self) -> QueryStats:
    """Query stats associated with the query."""
    return self._stats

  @property
  def txn_ts(self) -> int:
    """The last transaction timestamp of the query. A Unix epoch in microseconds."""
    return self._txn_ts

  @property
  def schema_version(self) -> int:
    """The schema version that was used for the query execution."""
    return self._schema_version

  def __init__(
      self,
      query_tags: Optional[Mapping[str, str]] = None,
      stats: Optional[QueryStats] = None,
      summary: Optional[str] = None,
      txn_ts: Optional[int] = None,
      schema_version: Optional[int] = None,
  ):
    self._query_tags = query_tags or {}
    self._stats = stats or QueryStats({})
    self._summary = summary or ""
    self._txn_ts = txn_ts or 0
    self._schema_version = schema_version or 0

  def __repr__(self):
    return f"{self.__class__.__name__}(" \
           f"query_tags={repr(self.query_tags)}," \
           f"stats={repr(self.stats)}," \
           f"summary={repr(self.summary)}," \
           f"txn_ts={repr(self.txn_ts)}," \
           f"schema_version={repr(self.schema_version)})"


class QuerySuccess(QueryInfo):
  """The result of the query."""

  @property
  def data(self) -> Any:
    """The data returned by the query. This is the result of the FQL query."""
    return self._data

  @property
  def static_type(self) -> Optional[str]:
    """The query's inferred static result type, if the query was typechecked."""
    return self._static_type

  @property
  def traceparent(self) -> Optional[str]:
    """The traceparent for the query."""
    return self._traceparent

  def __init__(
      self,
      data: Any,
      query_tags: Optional[Mapping[str, str]],
      static_type: Optional[str],
      stats: Optional[QueryStats],
      summary: Optional[str],
      traceparent: Optional[str],
      txn_ts: Optional[int],
      schema_version: Optional[int],
  ):

    super().__init__(
        query_tags=query_tags,
        stats=stats,
        summary=summary,
        txn_ts=txn_ts,
        schema_version=schema_version,
    )

    self._traceparent = traceparent
    self._static_type = static_type
    self._data = data

  def __repr__(self):
    return f"{self.__class__.__name__}(" \
           f"query_tags={repr(self.query_tags)}," \
           f"static_type={repr(self.static_type)}," \
           f"stats={repr(self.stats)}," \
           f"summary={repr(self.summary)}," \
           f"traceparent={repr(self.traceparent)}," \
           f"txn_ts={repr(self.txn_ts)}," \
           f"schema_version={repr(self.schema_version)}," \
           f"data={repr(self.data)})"


@dataclass
class ConstraintFailure:
  message: str
  name: Optional[str] = None
  paths: Optional[List[Any]] = None


class QueryTags:

  @staticmethod
  def encode(tags: Mapping[str, str]) -> str:
    return ",".join([f"{k}={v}" for k, v in tags.items()])

  @staticmethod
  def decode(tag_str: str) -> Mapping[str, str]:
    res: dict[str, str] = {}
    for pair in tag_str.split(","):
      kv = pair.split("=")
      res[kv[0]] = kv[1]
    return res

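A round-trip sketch of the tag wire format (comma-separated key=value pairs):

.. code-block:: python

  from fauna.encoding import QueryTags

  s = QueryTags.encode({"env": "dev", "team": "data"})
  assert s == "env=dev,team=data"
  assert QueryTags.decode(s) == {"env": "dev", "team": "data"}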
class ClientError(FaunaException):
  """An error representing a failure internal to the client, itself.
  This indicates Fauna was never called - the client failed internally
  prior to sending the request."""
  pass


class NetworkError(FaunaException):
  """An error representing a failure due to the network.
  This indicates Fauna was never reached."""
  pass


class QueryRuntimeError(ServiceError):
  """An error response that is the result of the query failing during execution.
  QueryRuntimeErrors occur when a bug in your query causes an invalid execution
  to be requested.
  The 'code' field will vary based on the specific error cause."""
  pass


class AuthorizationError(ServiceError):
  """AuthorizationError indicates the credentials used do not have
  permission to perform the requested action."""
  pass


class ThrottlingError(ServiceError, RetryableFaunaException):
  """ThrottlingError indicates some capacity limit was exceeded
  and thus the request could not be served."""
  pass


class QueryTimeoutError(ServiceError):
  """A failure due to the timeout being exceeded, but the timeout
  was set lower than the query's expected processing time.
  This response is distinguished from a ServiceTimeoutException
  in that a QueryTimeoutError shows Fauna behaving in an expected manner."""
  pass


class ServiceTimeoutError(ServiceError):
  """ServiceTimeoutError indicates Fauna was not available to service
  the request before the timeout was reached."""
  pass

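A sketch of layering handlers from most to least specific; per the docstrings above, ClientError and NetworkError are raised before Fauna produces a response, while error responses surface as FaunaError subclasses:

.. code-block:: python

  from fauna import fql
  from fauna.client import Client
  from fauna.errors import ClientError, NetworkError, FaunaError

  client = Client()
  try:
    res = client.query(fql("Product.all()"))  # hypothetical collection
  except NetworkError:
    ...  # Fauna was never reached
  except ClientError:
    ...  # the client failed internally before sending the request
  except FaunaError:
    ...  # Fauna returned an error response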
def json(self) -> Any:
  try:
    decoded = self._r.read().decode("utf-8")
    return json.loads(decoded)
  except (JSONDecodeError, UnicodeDecodeError) as e:
    raise ClientError(
        f"Unable to decode response from endpoint {self._r.request.url}. Check that your endpoint is valid."
    ) from e

def __init__(self, coll: Union[str, Module]):
  if isinstance(coll, Module):
    self._collection = coll
  elif isinstance(coll, str):
    self._collection = Module(coll)
  else:
    raise TypeError(
        f"'coll' should be of type Module or str, but was {type(coll)}")

class DocumentReference(BaseReference):
  """A class representing a reference to a :class:`Document` stored in Fauna.
  """

  @property
  def id(self) -> str:
    """The ID for the :class:`Document`. Valid IDs are 64-bit integers, stored as strings.

    :rtype: str
    """
    return self._id

  def __init__(self, coll: Union[str, Module], id: str):
    super().__init__(coll)

    if not isinstance(id, str):
      raise TypeError(f"'id' should be of type str, but was {type(id)}")
    self._id = id

  def __hash__(self):
    return hash((type(self), self._collection, self._id))

  def __repr__(self):
    return f"{self.__class__.__name__}(id={repr(self._id)},coll={repr(self._collection)})"

  @staticmethod
  def from_string(ref: str):
    rs = ref.split(":")
    if len(rs) != 2:
      raise ValueError("Expects string of format <CollectionName>:<ID>")
    return DocumentReference(rs[0], rs[1])
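
A sketch of from_string; the import path is an assumption about where these model classes live in the driver:

.. code-block:: python

  from fauna.query.models import DocumentReference  # assumed path

  ref = DocumentReference.from_string("Product:401670531121152609")
  print(ref.id)  # '401670531121152609'

  # DocumentReference.from_string("not-a-ref") raises ValueError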
class NamedDocumentReference(BaseReference):
  """A class representing a reference to a :class:`NamedDocument` stored in Fauna.
  """

  @property
  def name(self) -> str:
    """The name of the :class:`NamedDocument`.

    :rtype: str
    """
    return self._name

  def __init__(self, coll: Union[str, Module], name: str):
    super().__init__(coll)

    if not isinstance(name, str):
      raise TypeError(f"'name' should be of type str, but was {type(name)}")

    self._name = name

  def __hash__(self):
    return hash((type(self), self._collection, self._name))

  def __repr__(self):
    return f"{self.__class__.__name__}(name={repr(self._name)},coll={repr(self._collection)})"
class Document(BaseDocument):
  """A class representing a user document stored in Fauna.

  User data should be stored directly on the map, while id, ts, and coll should only be stored on the related
  properties. When working with a :class:`Document` in code, it should be considered immutable.
  """

  @property
  def id(self) -> str:
    return self._id

  @property
  def ts(self) -> datetime:
    return self._ts

  @property
  def coll(self) -> Module:
    return self._coll

  def __init__(self,
               id: str,
               ts: datetime,
               coll: Union[str, Module],
               data: Optional[Mapping] = None):
    if not isinstance(id, str):
      raise TypeError(f"'id' should be of type str, but was {type(id)}")

    if not isinstance(ts, datetime):
      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")

    if not (isinstance(coll, str) or isinstance(coll, Module)):
      raise TypeError(
          f"'coll' should be of type Module or str, but was {type(coll)}")

    if isinstance(coll, str):
      coll = Module(coll)

    self._id = id
    self._ts = ts
    self._coll = coll

    super().__init__(data or {})

  def __eq__(self, other):
    return type(self) == type(other) \
        and self.id == other.id \
        and self.coll == other.coll \
        and self.ts == other.ts \
        and super().__eq__(other)

  def __ne__(self, other):
    return not self.__eq__(other)

  def __repr__(self):
    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])

    return f"{self.__class__.__name__}(" \
           f"id={repr(self.id)}," \
           f"coll={repr(self.coll)}," \
           f"ts={repr(self.ts)}," \
           f"data={{{kvs}}})"
class NamedDocument(BaseDocument):
  """A class representing a named document stored in Fauna. Examples of named documents include Collection
  definitions, Index definitions, and Roles, among others.

  When working with a :class:`NamedDocument` in code, it should be considered immutable.
  """

  @property
  def name(self) -> str:
    return self._name

  @property
  def ts(self) -> datetime:
    return self._ts

  @property
  def coll(self) -> Module:
    return self._coll

  def __init__(self,
               name: str,
               ts: datetime,
               coll: Union[Module, str],
               data: Optional[Mapping] = None):
    if not isinstance(name, str):
      raise TypeError(f"'name' should be of type str, but was {type(name)}")

    if not isinstance(ts, datetime):
      raise TypeError(f"'ts' should be of type datetime, but was {type(ts)}")

    if not (isinstance(coll, str) or isinstance(coll, Module)):
      raise TypeError(
          f"'coll' should be of type Module or str, but was {type(coll)}")

    if isinstance(coll, str):
      coll = Module(coll)

    self._name = name
    self._ts = ts
    self._coll = coll

    super().__init__(data or {})

  def __eq__(self, other):
    return type(self) == type(other) \
        and self.name == other.name \
        and self.coll == other.coll \
        and self.ts == other.ts \
        and super().__eq__(other)

  def __ne__(self, other):
    return not self.__eq__(other)

  def __repr__(self):
    kvs = ",".join([f"{repr(k)}:{repr(v)}" for k, v in self.items()])

    return f"{self.__class__.__name__}(" \
           f"name={repr(self.name)}," \
           f"coll={repr(self.coll)}," \
           f"ts={repr(self.ts)}," \
           f"data={{{kvs}}})"
1importabc
+ 2fromtypingimportAny,Optional,List
+ 3
+ 4from.templateimportFaunaTemplate
+ 5
+ 6
+ 7classFragment(abc.ABC):
+ 8"""An abstract class representing a Fragment of a query.
+ 9 """
+ 10
+ 11@abc.abstractmethod
+ 12defget(self)->Any:
+ 13"""An abstract method for returning a stored value.
+ 14 """
+ 15pass
+ 16
+ 17
+ 18classValueFragment(Fragment):
+ 19"""A concrete :class:`Fragment` representing a part of a query that can represent a template variable.
+ 20 For example, if a template contains a variable ``${foo}``, and an object ``{ "prop": 1 }`` is provided for foo,
+ 21 then ``{ "prop": 1 }`` should be wrapped as a :class:`ValueFragment`.
+ 22
+ 23 :param Any val: The value to be used as a fragment.
+ 24 """
+ 25
+ 26def__init__(self,val:Any):
+ 27self._val=val
+ 28
+ 29defget(self)->Any:
+ 30"""Gets the stored value.
+ 31
+ 32 :returns: The stored value.
+ 33 """
+ 34returnself._val
+ 35
+ 36
+ 37classLiteralFragment(Fragment):
+ 38"""A concrete :class:`Fragment` representing a query literal For example, in the template ```let x = ${foo}```,
+ 39 the portion ```let x = ``` is a query literal and should be wrapped as a :class:`LiteralFragment`.
+ 40
+ 41 :param str val: The query literal to be used as a fragment.
+ 42 """
+ 43
+ 44def__init__(self,val:str):
+ 45self._val=val
+ 46
+ 47defget(self)->str:
+ 48"""Returns the stored value.
+ 49
+ 50 :returns: The stored value.
+ 51 """
+ 52returnself._val
+ 53
+ 54
+ 55classQuery:
+ 56"""A class for representing a query.
+ 57
+ 58 e.g. { "fql": [...] }
+ 59 """
+ 60_fragments:List[Fragment]
+ 61
+ 62def__init__(self,fragments:Optional[List[Fragment]]=None):
+ 63self._fragments=fragmentsor[]
+ 64
+ 65@property
+ 66deffragments(self)->List[Fragment]:
+ 67"""The list of stored Fragments"""
+ 68returnself._fragments
+ 69
+ 70def__str__(self)->str:
+ 71res=""
+ 72forfinself._fragments:
+ 73res+=str(f.get())
+ 74
+ 75returnres
+ 76
+ 77
+def fql(query: str, **kwargs: Any) -> Query:
+  """Creates a Query - capable of performing query composition and simple querying. It can accept a
+  simple string query, or can perform composition using a ``${}``-sigil string template with ``**kwargs`` as
+  substitutions.
+
+  The ``**kwargs`` can be Fauna data types - such as strings, document references, or modules - or embedded
+  Query objects - allowing you to compose arbitrarily complex queries.
+
+  When providing ``**kwargs``, the following types are accepted:
+  - :class:`str`, :class:`int`, :class:`float`, :class:`bool`, :class:`datetime.datetime`, :class:`datetime.date`,
+    :class:`dict`, :class:`list`, :class:`Query`, :class:`DocumentReference`, :class:`Module`
+
+  :raises ValueError: If there is an invalid template placeholder or a value that cannot be encoded.
+  :returns: A :class:`Query` that can be passed to the client for evaluation against Fauna.
+
+  Examples:
+
+  .. code-block:: python
+    :name: Simple-FQL-Example
+    :caption: Simple query declaration using this function.
+
+    fql('Dogs.byName("Fido")')
+
+  .. code-block:: python
+    :name: Composition-FQL-Example
+    :caption: Query composition using this function.
+
+    def get_dog(id):
+      return fql('Dogs.byId(${id})', id=id)
+
+    def get_vet_phone(id):
+      return fql('${dog} { .vet_phone_number }', dog=get_dog(id))
+
+    get_vet_phone('d123')
+
+  """
+
+  fragments: List[Any] = []
+  template = FaunaTemplate(query)
+  for text, field_name in template.iter():
+    if text is not None and len(text) > 0:
+      fragments.append(LiteralFragment(text))
+
+    if field_name is not None:
+      if field_name not in kwargs:
+        raise ValueError(
+            f"template variable `{field_name}` not found in provided kwargs")
+
+      # TODO: Reject if it's already a fragment, or accept *Fragment? Decide on API here
+      fragments.append(ValueFragment(kwargs[field_name]))
+  return Query(fragments)
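Tracing the loop above on the composition example (a sketch; the fragment list shown is worked out by hand from the template iterator's documented behavior):

    q = fql('Dogs.byId(${id})', id='d123')
    # FaunaTemplate.iter() yields ('Dogs.byId(', 'id') and then (')', None), so q.fragments is
    # [LiteralFragment('Dogs.byId('), ValueFragment('d123'), LiteralFragment(')')]

    fql('Dogs.byId(${id})')
    # ValueError: template variable `id` not found in provided kwargs
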
+import re as _re
+from typing import Optional, Tuple, Iterator, Match
+
+
+class FaunaTemplate:
+  """A template class that supports variables marked with a ${}-sigil. Its primary purpose
+  is to expose an iterator for the template parts that supports composition of FQL queries.
+
+  Implementation adapted from https://github.com/python/cpython/blob/main/Lib/string.py
+
+  :param template: A string template e.g. "${my_var} { name }"
+  :type template: str
+  """
+
+  _delimiter = '$'
+  _idpattern = r'[_a-zA-Z][_a-zA-Z0-9]*'
+  _flags = _re.VERBOSE
+
+  def __init__(self, template: str):
+    """The initializer"""
+    delim = _re.escape(self._delimiter)
+    pattern = fr"""
+    {delim}(?:
+      (?P<escaped>{delim}) |               # Escape sequence of two delimiters
+      {{(?P<braced>{self._idpattern})}} |  # delimiter and a braced identifier
+      (?P<invalid>)                        # Other ill-formed delimiter exprs
+    )
+    """
+    self._pattern = _re.compile(pattern, self._flags)
+    self._template = template
+
+  def iter(self) -> Iterator[Tuple[Optional[str], Optional[str]]]:
+    """A method that returns an iterator over tuples representing template parts. The
+    first value of the tuple, if not None, is a template literal. The second value of
+    the tuple, if not None, is a template variable. If both are not None, then the
+    template literal comes *before* the variable.
+
+    :raises ValueError: If there is an invalid template placeholder
+
+    :return: An iterator of template parts
+    :rtype: collections.Iterable[Tuple[Optional[str], Optional[str]]]
+    """
+    match_objects = self._pattern.finditer(self._template)
+    cur_pos = 0
+    for mo in match_objects:
+      if mo.group("invalid") is not None:
+        self._handle_invalid(mo)
+
+      span_start_pos = mo.span()[0]
+      span_end_pos = mo.span()[1]
+      escaped_part = mo.group("escaped") or ""
+      variable_part = mo.group("braced")
+      literal_part: Optional[str] = None
+
+      if cur_pos != span_start_pos:
+        literal_part = \
+            self._template[cur_pos:span_start_pos] \
+            + escaped_part
+
+      cur_pos = span_end_pos
+
+      yield literal_part, variable_part
+
+    if cur_pos != len(self._template):
+      yield self._template[cur_pos:], None
+
+  def _handle_invalid(self, mo: Match) -> None:
+    i = mo.start("invalid")
+    lines = self._template[:i].splitlines(keepends=True)
+
+    if not lines:
+      colno = 1
+      lineno = 1
+    else:
+      colno = i - len(''.join(lines[:-1]))
+      lineno = len(lines)
+
+    raise ValueError(
+        f"Invalid placeholder in template: line {lineno}, col {colno}")
\ No newline at end of file
diff --git a/2.3.0/index.html b/2.3.0/index.html
new file mode 100644
index 00000000..cd7f994c
--- /dev/null
+++ b/2.3.0/index.html
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/2.3.0/search.js b/2.3.0/search.js
new file mode 100644
index 00000000..4e699e36
--- /dev/null
+++ b/2.3.0/search.js
@@ -0,0 +1,46 @@
+window.pdocSearch = (function(){
+ /** elasticlunr - http://weixsong.github.io * Copyright (C) 2017 Oliver Nightingale * Copyright (C) 2017 Wei Song * MIT Licensed */
+ // [minified elasticlunr library source omitted]
+
+ const docs = [
+   // [prebuilt search-index records omitted; each record carries the fullname,
+   //  signature, and docstring of a documented member, duplicating the module
+   //  sources shown above]
+ ];
+
+ // mirrored in build-search-index.js (part 1)
+ // Also split on html tags. this is a cheap heuristic, but good enough.
+ elasticlunr.tokenizer.setSeperator(/[\s\-.;&_'"=,()]+|<[^>]*>/);
+
+ let searchIndex;
+ if (docs._isPrebuiltIndex) {
+ console.info("using precompiled search index");
+ searchIndex = elasticlunr.Index.load(docs);
+ } else {
+ console.time("building search index");
+ // mirrored in build-search-index.js (part 2)
+ searchIndex = elasticlunr(function () {
+ this.pipeline.remove(elasticlunr.stemmer);
+ this.pipeline.remove(elasticlunr.stopWordFilter);
+ this.addField("qualname");
+ this.addField("fullname");
+ this.addField("annotation");
+ this.addField("default_value");
+ this.addField("signature");
+ this.addField("bases");
+ this.addField("doc");
+ this.setRef("fullname");
+ });
+ for (let doc of docs) {
+ searchIndex.addDoc(doc);
+ }
+ console.timeEnd("building search index");
+ }
+
+ return (term) => searchIndex.search(term, {
+ fields: {
+ qualname: {boost: 4},
+ fullname: {boost: 2},
+ annotation: {boost: 2},
+ default_value: {boost: 2},
+ signature: {boost: 2},
+ bases: {boost: 2},
+ doc: {boost: 1},
+ },
+ expand: true
+ });
+})();
\ No newline at end of file
diff --git a/latest b/latest
index e3a4f193..cc6612c3 120000
--- a/latest
+++ b/latest
@@ -1 +1 @@
-2.2.0
\ No newline at end of file
+2.3.0
\ No newline at end of file