run.py
#!/usr/bin/env python3
from functions import *
from colorama import Fore, Style
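
# The wildcard import supplies the local helpers used below: auth, validate,
# get_user_self, crawler, post_recipes_create_url, and (presumably) a "requests"
# helper that also exposes post_recipes_create_url -- not the PyPI requests library.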
# Load Auth-Token and API-URL or prompt for user auth input if not available
token, api_url = auth.load_auth_input_from_file()

if not token:
    # Prompt the user to enter API-Token
    token = validate.user_input("Enter your API-Token: ", is_text=True).strip()
    auth.save_auth_input_to_file(token, api_url)

if not api_url:
    # Prompt the user to enter API-URL
    api_url = validate.user_input(
        "\nEnter your API-URL (without path - e.g. http://mealie.dev:9925): ",
        is_url=True,
    ).strip()
    auth.save_auth_input_to_file(token, api_url)

# Check connection and auth data
get_user_self(api_url, token)

# Prompt for script selection input
selected_script = validate.user_input(
    f"""\nWhich script do you want to run?
1 - Bulk Create new recipes from URLs (with/without Tags)
2 - {Fore.RED}ALPHA{Style.RESET_ALL} - Crawl custom website and create new recipes based on crawl results (with/without Tags)
Select your option (1-2): """,
    valid_options=["1", "2"],
)

# Bulk Create new recipes from URLs (with/without Tags)
if selected_script == "1":
    # Prompt for include tags input
    include_tags = validate.user_input(
        "\nDo you want to import original keywords as tags? (true, false): ",
        valid_options=["true", "false"],
    )
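    # NOTE: include_tags stays the literal string "true"/"false"; the helper
    # presumably forwards it to Mealie's create-url endpoint as its include-tags flag.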
    # Prompt the user to input URLs separated by commas
    user_input_urls = (
        validate.user_input("\nEnter URLs separated by commas: ", is_url=True)
        .strip()
        .split(",")
    )
    # Loop through each URL
    for url in user_input_urls:
        # Check if the URL starts with "http://" or "https://"
        if url.strip().startswith(("http://", "https://")):
            # Send a POST request for each URL
            print(f"{Fore.BLUE}\nChecking {url}{Style.RESET_ALL}")
            requests.post_recipes_create_url(url.strip(), include_tags, token, api_url)
        else:
            print(
                f"\n{Fore.RED}Invalid URL: {url} (URL should start with http:// or https://){Style.RESET_ALL}"
            )

# Crawl custom website and create new recipes based on crawl results (with/without Tags)
elif selected_script == "2":
    print(
        f"\n{Fore.RED}ALPHA - The crawler has a moderate chance of finding bad URLs that can't be parsed! The Mealie API will throw an error (400); however, this won't break anything.{Style.RESET_ALL}"
    )
    # Prompt for URL input
    url = validate.user_input(
        "\nInput your external recipe URL to crawl (e.g. https://www.kitchenstories.com/en/categories/breakfast): ",
        is_url=True,
    ).strip()
    # Prompt for keyword input
    keyword = validate.user_input(
        '\nEnter crawler keyword - Crawler will search for all results within your external recipe URL containing this keyword (e.g. "recipes"): ',
        is_text=True,
    )
    # Prompt for include tags input
    include_tags = validate.user_input(
        "\nDo you want to import original keywords as tags? (true, false): ",
        valid_options=["true", "false"],
    )
    # Extract recipe links
    links = crawler(url, f"/{keyword}/")
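    # The keyword is wrapped in slashes so the crawler only keeps links whose
    # URL path contains "/<keyword>/" (e.g. ".../recipes/...").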
    # Loop through each URL and create POST
    for link in links:
        post_recipes_create_url(link, include_tags, token, api_url)