Implement logging and adjust the database location

Changes include creating the database directory and database file in the image before the script runs.
This commit is contained in:
雲華
2021-08-17 19:36:26 -04:00
parent 12bd0027f9
commit aaea16ab99
3 changed files with 8 additions and 3 deletions

View File

@@ -7,4 +7,7 @@ WORKDIR /lanews
 RUN python3 -m venv venv && \
     venv/bin/pip install -r requirements.txt
+RUN mkdir database && \
+    touch database/news.db
 CMD ["venv/bin/python", "main.py"]

View File

@@ -7,7 +7,9 @@ import os
 import asyncio
 import json
 import time
+import logging
+
+logging.basicConfig(filename="lanews.log", level=logging.DEBUG)

 dotenv_path = join(dirname(__file__), '.env')
 load_dotenv(dotenv_path)
@@ -16,7 +18,7 @@ loop = asyncio.get_event_loop()
 async def publish_news():
-    print('Running web scrape...')
+    logging.debug('Running web scrape...')
     la_news = NewsScraper(loop=loop)
     articles = await la_news.news_articles()
@@ -57,6 +59,6 @@ def run_async(coroutine):
 schedule.every().hour.do(run_async, publish_news)
 while True:
-    print('Checking schedule...')
+    logging.debug('Checking schedule...')
     schedule.run_pending()
     time.sleep(300)

View File

@@ -24,7 +24,7 @@ class NewsScraper:
             raise Exception()
         self.client = client if None else ClientSession(loop=loop)
-        self.database = _create_connection('news.db') if database is None else database
+        self.database = _create_connection('database/news.db') if database is None else database
         self._md5 = hashlib.new('md5', usedforsecurity=False)
async def _fetch_url(self, url): async def _fetch_url(self, url):