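"""Bookmarks-to-Notion importer.

Reads URLs from a bookmarks file, scrapes each page's title and meta
description, asks OpenAI to rewrite them into a Notion-friendly title and
description, and creates one page per URL in a Notion database.
Failed URLs are appended to a dated CSV log. All configuration is read
from a .env file.
"""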

import csv
import datetime
import os
import re
import sys
import time
from urllib.parse import urlparse

import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv
from notion_client import Client
from openai import OpenAI

load_dotenv()

def extract_urls_from_file(file_path):
    # Extracts every http(s) URL from a text file
    url_pattern = re.compile(
        r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"
    )
    try:
        with open(file_path, "r") as file:
            data = file.read()
    except UnicodeDecodeError:
        raise Exception(
            f"Cannot process '{file_path}'. It appears to be a binary file."
        )
    return url_pattern.findall(data)
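# Illustrative example (hypothetical file, not part of the script): for a
# browser bookmarks export containing '<A HREF="https://example.com/docs">Docs</A>',
# extract_urls_from_file("bookmarks.html") should return ['https://example.com/docs'],
# since the closing quote falls outside the regex's character classes and ends the match.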

def print_ts(message=""):
    # Prints a message with a timestamp; an empty message prints a blank line
    if not message:
        print()
        return
    timestamp = datetime.datetime.now().strftime("[%H:%M:%S]")
    print(f"{timestamp}: {message}")

def validate_environment_variable(variable_name):
    # Returns the value of an environment variable, raising if it is unset
    variable = os.getenv(variable_name)
    if variable is None:
        raise EnvironmentError(
            f"Couldn't load the env variable '{variable_name}', check the .env file"
        )
    return variable
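# A sketch of the .env file this script expects; the values are placeholders.
# BOOKMARKS_FILE, OPENAI_API_KEY, NOTION_API_KEY and NOTION_DB_ID are checked
# up front in main(); OPENAI_API_MODEL and OPENAI_API_MAX_TOKENS are required
# later in process_info_with_ai(); the rest are optional with defaults:
#
#   BOOKMARKS_FILE=bookmarks.html
#   OPENAI_API_KEY=sk-...
#   NOTION_API_KEY=secret_...
#   NOTION_DB_ID=<database id>
#   OPENAI_API_MODEL=gpt-4o-mini
#   OPENAI_API_MAX_TOKENS=1000
#   OPENAI_API_TEMPERATURE=0.3         # optional, defaults to 0.3
#   OPENAI_API_FREQUENCY_PENALTY=0.5   # optional, defaults to 0.5
#   FAILED_URLS_FILE=failed_jobs.csv   # optional, defaults to failed_jobs.csv
#   USER_AGENT=Mozilla/5.0 ...         # optional, defaults to a Chrome UA
#   LANGUAGE=english                   # optional, defaults to english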

def log_failed_url(url, error):
    # Appends a failed URL and its error message to a dated CSV log.
    # FAILED_URLS_FILE is optional; the error logger itself must never
    # raise on missing config, so fall back to a default filename
    file = os.getenv("FAILED_URLS_FILE") or "failed_jobs.csv"
    # Split the filename into name and extension
    base, extension = os.path.splitext(file)
    # Append the date, e.g. 'failed_jobs_2021-01-31.csv'
    file = f"{base}_{datetime.datetime.now().strftime('%Y-%m-%d')}{extension}"
    # Check if the file is new or empty so headers are written exactly once
    is_file_empty = not os.path.exists(file) or os.path.getsize(file) == 0
    with open(file, "a", newline="") as f:
        writer = csv.writer(f)
        if is_file_empty:
            writer.writerow(["Time", "URL", "Error"])
        # Rows look like '12:00:00,https://www.google.com,<error message>'
        writer.writerow([datetime.datetime.now().strftime("%H:%M:%S"), url, str(error)])
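# The resulting log (e.g. 'failed_jobs_2024-01-31.csv') would contain rows like
# the following (illustrative values):
#   Time,URL,Error
#   12:00:00,https://www.google.com,Web scraping error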

def get_web_info(url):
    # Scrapes a webpage and returns its title and meta description
    user_agent = os.getenv("USER_AGENT")
    if user_agent is None:
        user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
    headers = {"User-Agent": user_agent}
    # A timeout keeps one unresponsive site from stalling the whole run
    response = requests.get(url, headers=headers, timeout=30)
    soup = BeautifulSoup(response.text, "html.parser")
    # soup.title.string can be None (e.g. nested tags), so default to ""
    title = soup.title.string if soup.title and soup.title.string else ""
    meta_description = soup.find("meta", attrs={"name": "description"})
    description = meta_description["content"] if meta_description else ""
    return title, description

def process_info_with_ai(url, title, description):
    # Processes webpage info with an AI and returns a response
    language = os.getenv("LANGUAGE")
    if language is None:
        language = "english"
    parsed_url = urlparse(url)
    hostname = parsed_url.netloc
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    ai_messages = [
        {
            "role": "assistant",
            "content": "I'm your Notion page creator. Please provide the details of the website you're working with and I will provide you a title and description suitable for a Notion page.",
        },
        {
            "role": "user",
            "content": f"Using the provided information about the website titled '{title}', which is hosted at '{hostname}', and described as '{description}', along with any pre-existing knowledge you have, I need you to perform a couple of tasks. First, if necessary, simplify the website's title for better suitability on a Notion page. Next, create a detailed and engaging description that fits the Notion page's context. This description should avoid including the URL or hostname, stay within 400 to 550 characters, and preserve the original brand names and terminologies. If there's not enough information for a detailed description, please summarize with 'No information found'. Remember, the final output should be strictly in the format: '{title}|||{description}'. Make sure the content aligns with {language} language standards.",
        },
    ]
    ai_model = os.getenv("OPENAI_API_MODEL")
    if ai_model is None:
        raise EnvironmentError(
            "Couldn't load the env variable 'OPENAI_API_MODEL', check the .env file"
        )
    ai_temperature = os.getenv("OPENAI_API_TEMPERATURE")
    # float(None) raises TypeError, not ValueError, so catch both and
    # fall back to 0.3 when the variable is unset or unparseable
    try:
        ai_temperature = float(ai_temperature)
    except (TypeError, ValueError):
        ai_temperature = 0.3
    ai_max_tokens = os.getenv("OPENAI_API_MAX_TOKENS")
    # Max tokens is mandatory: abort if it is unset or not an int
    try:
        ai_max_tokens = int(ai_max_tokens)
    except (TypeError, ValueError):
        raise EnvironmentError(
            "Couldn't load or parse the env variable 'OPENAI_API_MAX_TOKENS', check the .env file"
        )
    ai_frequency_penalty = os.getenv("OPENAI_API_FREQUENCY_PENALTY")
    # Fall back to 0.5 when the penalty is unset or unparseable
    try:
        ai_frequency_penalty = float(ai_frequency_penalty)
    except (TypeError, ValueError):
        ai_frequency_penalty = 0.5
print_ts(f"Generating best page title and description with AI...")
# Start the timer
start_time = time.time()
try:
# Assuming the completion method or equivalent for detailed content generation
response = client.chat.completions.create(
model=ai_model,
messages=ai_messages,
temperature=ai_temperature,
max_tokens=ai_max_tokens,
frequency_penalty=ai_frequency_penalty,
)
# End the timer
end_time = time.time()
# Convert to milliseconds
elapsed_time = round((end_time - start_time) * 1000, 0)
if response.choices and response.choices[0].message.content.strip():
return response.choices[0].message.content.strip()
else:
raise Exception("OpenAI API unknown error")
except Exception as e:
raise Exception(f"Unexpected Error: {str(e)}")

def create_notion_page(url, title, description_content):
    # Creates a new page in the Notion database
    print_ts("Creating Notion page...")
    notion_api_key = os.getenv("NOTION_API_KEY")
    notion = Client(auth=notion_api_key)
    notion_db_id = os.getenv("NOTION_DB_ID")
    page = notion.pages.create(
        parent={"database_id": notion_db_id},
        properties={
            "Name": {"title": [{"text": {"content": title}}]},
            "URL": {"url": url},
        },
        # Page content: a single paragraph block holding the description
        children=[
            {
                "object": "block",
                "type": "paragraph",
                "paragraph": {
                    "rich_text": [{"text": {"content": description_content}}]
                },
            }
        ],
    )
    return page
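# Note: the target database is assumed to have a title property named 'Name'
# and a URL property named 'URL'; if either is named differently in Notion,
# notion.pages.create will likely reject the request.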

def main():
    print_ts()
    print_ts("Starting the script...")
    print_ts()
    print_ts("Validating indispensable environment variables...")
    try:
        bookmarks_file = validate_environment_variable("BOOKMARKS_FILE")
        print_ts("BOOKMARKS_FILE: ✓")
        validate_environment_variable("OPENAI_API_KEY")
        print_ts("OPENAI_API_KEY: ✓")
        validate_environment_variable("NOTION_API_KEY")
        print_ts("NOTION_API_KEY: ✓")
        validate_environment_variable("NOTION_DB_ID")
        print_ts("NOTION_DB_ID: ✓")
    except EnvironmentError as e:
        print_ts(str(e))
        sys.exit()
    print_ts("Indispensable environment variables loaded successfully!")
    print_ts()
    try:
        print_ts("Trying to load your bookmarks file...")
        urls = extract_urls_from_file(bookmarks_file)
        if len(urls) == 0:
            raise Exception("No URLs found in the file")
        print_ts("URLs loaded successfully!")
        print_ts()
    except FileNotFoundError:
        print_ts(f"Failed to load your bookmarks file '{bookmarks_file}'")
        sys.exit()
    except Exception as e:
        # Covers the "No URLs found" case so the script aborts with a message
        # instead of an unhandled traceback
        print_ts(str(e))
        sys.exit()
print_ts("Starting the main process...")
print_ts()
failed = 0
succeeded = 0
for url in urls:
# Remove newline characters
url = url.strip()
try:
print_ts(f"[ ======= {url} ======= ]")
#
# Scrapes a webpage, processes the info with an AI and creates a new page in Notion
print_ts(f"Scraping web info...")
title, description = get_web_info(url)
if len(title) == 0 and len(description) == 0:
raise Exception("Web scraping error")
if "cloudflare" in title.lower() or "cloudflare" in description.lower():
raise Exception("Web scraping error")
processed_info = process_info_with_ai(url, title, description)
processed_title, processed_description = processed_info.split("|||")
create_notion_page(url, processed_title, processed_description)
print_ts("Successfully created Notion page")
succeeded += 1
except Exception as e:
failed += 1
print_ts(f"Failed to create a Notion page. Error: {e}")
log_failed_url(url, e)
finally:
print_ts()
print_ts(f"Total URL's: {urls.__len__()}")
print_ts(f"Failed: {failed} | Succeeded: {succeeded}")
print_ts()
print_ts("Shutting down the script...")
sys.exit()

# Run the main function
if __name__ == "__main__":
    main()