From 95362fc3b6b4ca0d4956acaf74c2b52d12aabce4 Mon Sep 17 00:00:00 2001
From: KKlochko
Date: Sat, 9 Jul 2022 13:30:23 +0300
Subject: [PATCH] (New feature) 0.4.2 - check for a new url and for the updated
 state

---
 CHANGELOG.org |  3 +++
 src/db.py     |  2 +-
 src/main.py   | 52 ++++++++++++++++++++++++++++++++++++++++-----------
 3 files changed, 45 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.org b/CHANGELOG.org
index 9cf5d43..0e1c3ae 100644
--- a/CHANGELOG.org
+++ b/CHANGELOG.org
@@ -11,3 +11,6 @@
    New feature: get an anime info from page
 ** 0.3.0 <2022-07-09>
    New feature: send a notification
+** 0.4.1 <2022-07-09>
+   New feature: check for new url or update state
+   New small feature: add the urls config file
diff --git a/src/db.py b/src/db.py
index 1a80172..d9cb658 100644
--- a/src/db.py
+++ b/src/db.py
@@ -68,7 +68,7 @@ class DataBase:
         """Add entry of a content."""
         connect = sqlite3.connect(self.PATH)
         cursor = connect.cursor()
-        cursor.execute(f"UPDATE {self.TABLE} SET Status = {new_status} WHERE Url = '{url}'")
+        cursor.execute(f"UPDATE {self.TABLE} SET Status = ? WHERE Url = ?", (new_status, url))
         connect.commit()
         cursor.close()
         connect.close()
diff --git a/src/main.py b/src/main.py
index e35704c..9f3b2e4 100644
--- a/src/main.py
+++ b/src/main.py
@@ -18,20 +18,50 @@
 # . #
 ##########################################################################
 from db import DataBase
+from scraper import Scraper
+from notify import Notification
+
+def get_urls(file_path):
+    """Read a file that contains urls (one per line) and return them as a list."""
+    urls = []
+    with open(file_path, 'r') as file:
+        for line in file:
+            if '\n' in line:
+                line = line.replace('\n', '')
+            urls.append(line)
+    return urls
 
 def main():
+    # Const section.
+
+    # Here you can change testing headers to yours.
+    HEADERS = {
+        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0'
+    }
+
+    MESSAGE = "New episode."
+    URL_FILE = "urls"
+    WAITING_PERIOD = 60 # seconds
+
+    # Initialising objects
+    scr = Scraper(HEADERS)
     db = DataBase()
-    # Test data
-    #db.add_anime("test.url", "The anime", "1")
-    #db.update_anime_status("test.url", "2")
-    #data = db.get_entry("test.url")
-    #print(f"{data=}")
-    r = db.add_anime_if("test.url", "The anime", "1")
-    print(f"1{r=}")
-    r = db.add_anime_if("test.url", "The anime", "1")
-    print(f"2{r=}")
-    r = db.add_anime_if("test.url", "The anime", "2")
-    print(f"3{r=}")
+
+    # Reading the URL_FILE for making list
+    urls = get_urls(URL_FILE)
+    #print(f"{urls}")
+
+    # Check for new urls in the file and add them with the current state.
+    # If one of the pages has updated, then notify.
+    for url in urls:
+        data = scr.get_anime(url)
+        if data is None:
+            continue
+        url, title, status = data
+        r = db.add_anime_if(url, title, status)
+        if r == -1:
+            n = Notification(title, MESSAGE)
+            n.send()
 
 if __name__ == "__main__":
     main()