From 6944544ae7c81d4ff6429dab826a150aa1bdeb92 Mon Sep 17 00:00:00 2001
From: Simon Rebers <srebers@uni-osnabrueck.de>
Date: Fri, 24 Nov 2023 14:01:05 +0000
Subject: [PATCH] Delete crawler_alt.py

---
 crawler_alt.py | 36 ------------------------------------
 1 file changed, 36 deletions(-)
 delete mode 100644 crawler_alt.py

diff --git a/crawler_alt.py b/crawler_alt.py
deleted file mode 100644
index 420487d..0000000
--- a/crawler_alt.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from queue import Queue
-from urllib.parse import urljoin
-
-import requests
-from bs4 import BeautifulSoup
-
-# Simple breadth-first crawler restricted to the test site below.
-start_site = 'https://vm009.rz.uos.de/crawl/index.html'
-base_site = 'https://vm009.rz.uos.de/crawl/'
-
-queue = Queue()
-visited_links = []
-queue.put(start_site)
-visited_links.append(start_site)
-
-while not queue.empty():
-    link = queue.get()
-    r = requests.get(link)
-    soup = BeautifulSoup(r.content, 'html.parser')
-    # Follow every anchor tag on the current page.
-    for anchor in soup.find_all("a", href=True):
-        # Resolve relative hrefs against the page they appear on.
-        url = urljoin(link, anchor['href'])
-        # Only queue links that stay on the test site and have not been seen yet.
-        if url not in visited_links and url.startswith(base_site):
-            print(url)
-            print(anchor.text)
-            queue.put(url)
-            visited_links.append(url)
-
-# Report every page that was reached.
-print(visited_links)
-
-
-
-
-- 
GitLab