analysis.py
import concurrent.futures
import logging
import random

from faker import Faker

from core import run_query, authenticate, do_work

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Faker instance used to generate realistic client request ids (SSN-style strings).
engine = Faker()
# GraphQL mutation for registering a new analysis request.
add_ar_query = """
mutation AddAnalysisRequest ($payload: AnalysisRequestInputType!) {
    createAnalysisRequest(payload: $payload) {
        ... on AnalysisRequestWithSamples {
            uid
        }
        ... on OperationError {
            error
        }
    }
}
"""
def gen_sample():
    """Build one random sample entry, guaranteeing at least one analysis or profile."""
    randoms = [
        {
            "sampletypes": [2, 3],
            "analyses": [None, 1, 2, 3],
            "profiles": [None, 1, 2, 3, 4],
        },
        {
            "sampletypes": [4],
            "analyses": [3],
            "profiles": [None],
        },
    ]
    while True:
        selected = random.choice(randoms)
        s_typ = random.choice(selected.get("sampletypes"))
        anal = random.choice(selected.get("analyses"))
        prof = random.choice(selected.get("profiles"))
        if anal or prof:
            break
    return {
        "sampleType": s_typ,
        "profiles": [prof] if prof else [],
        "analyses": [anal] if anal else [],
    }
# List of lists: each inner list of 100 payloads is run in its own thread,
# simulating analysis-request registrations from many concurrent users.
ar_variables = [
    [
        {
            "payload": {
                "clientRequestId": engine.ssn(),
                "clientUid": random.randint(1, 1500),
                "clientContactUid": 1,
                "patientUid": random.randint(1, 210196),
                "priority": random.choice([0, 2]),
                "samples": [gen_sample() for _ in range(random.randint(1, 3))],
            }
        }
        for _ in range(100)
    ]
    for _ in range(25000)
]
# def do_work1(var_list):
#     auth_headers = authenticate()
#
#     for variables in var_list:
#         run_query(query=add_patient_query, variables=variables, headers=auth_headers)
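
# Assumption: `do_work` (imported from core) is expected to follow the same pattern
# as the commented-out do_work1 above, i.e. authenticate once per batch and then
# send each payload in the list with run_query.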
def start_ar_reg():
    """Submit each batch of analysis-request payloads on its own worker thread."""
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = (
            executor.submit(do_work, add_ar_query, variables)
            for variables in ar_variables
        )
        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
                logger.info("Done")
            except Exception as exc:
                logger.error(exc)
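
# Assumed entry point: run the registration load test when this script is
# executed directly (the script itself never calls start_ar_reg otherwise).
if __name__ == "__main__":
    start_ar_reg()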