
Commit 649860a

Merge pull request souravjain540#77 from ralucaginga/main
Added Tic Tac Toe using MiniMax algorithm + Reddit Scraper
2 parents e38f85f + 53f57f7 commit 649860a

2 files changed

Lines changed: 144 additions & 0 deletions


Player.py

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
import math
import random


class Player():
    def __init__(self, player):
        self.player = player

    def get_move(self, game):
        pass


class Human(Player):
    def __init__(self, player):
        super().__init__(player)

    def get_move(self, game):
        valid_square = False
        val = None
        while not valid_square:
            square = input(self.player + ' turn. Please introduce a move (1-9): ')
            try:
                val = int(square) - 1
                if val not in game.remaining_moves():
                    raise ValueError
                valid_square = True
            except ValueError:
                print('Invalid square. Try again.')
        return val


class RandomComputer(Player):
    def __init__(self, player):
        super().__init__(player)

    def get_move(self, game):
        square = random.choice(game.remaining_moves())
        return square


class SmartComputer(Player):
    def __init__(self, player):
        super().__init__(player)

    def get_move(self, game):
        # opening move: any square is as good as another, so pick at random;
        # otherwise search the game tree with minimax
        if len(game.remaining_moves()) == 9:
            square = random.choice(game.remaining_moves())
        else:
            square = self.minimax(game, self.player)['position']
        return square

    def minimax(self, state, player):
        max_player = self.player
        min_player = '0' if player == 'X' else 'X'  # opponent of `player`, i.e. the one who just moved

        # check whether the previous move won the game; score by the number of
        # empty squares left so that faster wins (and slower losses) score better
        if state.actual_winner == min_player:
            return {'position': None,
                    'score': 1 * (state.number_null_squares() + 1) if min_player == max_player
                    else -1 * (state.number_null_squares() + 1)}
        elif not state.null_squares():
            # no empty squares left and no winner: draw
            return {'position': None, 'score': 0}

        if player == max_player:
            best = {'position': None, 'score': -math.inf}
        else:
            best = {'position': None, 'score': math.inf}

        for possible_move in state.remaining_moves():
            state.make_a_move(possible_move, player)
            sim_score = self.minimax(state, min_player)

            # undo move
            state.board[possible_move] = ' '
            state.actual_winner = None
            sim_score['position'] = possible_move

            if player == max_player:
                if sim_score['score'] > best['score']:
                    best = sim_score
            else:
                if sim_score['score'] < best['score']:
                    best = sim_score
        return best
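
Note: Player.py only defines the players; the game object that get_move and minimax operate on is not part of this diff. Below is a minimal sketch of the interface those calls assume. The attribute and method names (board, actual_winner, remaining_moves, null_squares, number_null_squares, make_a_move) are taken from the calls above, while the class name TicTacToe and the winner helper are hypothetical and the bodies are illustrative only, not the repository's actual game class.

class TicTacToe:
    """Illustrative game class matching the interface used by Player.py."""
    def __init__(self):
        self.board = [' '] * 9          # squares 0-8, ' ' means empty
        self.actual_winner = None       # set to the winning player's marker

    def remaining_moves(self):
        return [i for i, spot in enumerate(self.board) if spot == ' ']

    def null_squares(self):
        return ' ' in self.board

    def number_null_squares(self):
        return self.board.count(' ')

    def make_a_move(self, square, player):
        if self.board[square] == ' ':
            self.board[square] = player
            if self.winner(square, player):
                self.actual_winner = player
            return True
        return False

    def winner(self, square, player):
        # hypothetical helper: check the row, column, and (if relevant) diagonals through `square`
        row = self.board[(square // 3) * 3:(square // 3) * 3 + 3]
        col = [self.board[square % 3 + i * 3] for i in range(3)]
        if all(s == player for s in row) or all(s == player for s in col):
            return True
        if square % 2 == 0:  # only even-indexed squares lie on a diagonal
            if all(self.board[i] == player for i in [0, 4, 8]):
                return True
            if all(self.board[i] == player for i in [2, 4, 6]):
                return True
        return False

A game loop would then alternate square = player.get_move(game) with game.make_a_move(square, player.player), using 'X' and '0' as the markers that minimax compares against.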

reddit_scraper.py

Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
import requests
import csv
import time
from bs4 import BeautifulSoup


class HaikuScraper:
    """
    Scrapes haikus (Japanese poems) from Reddit.
    """
    def __init__(self, url: str, headers: dict):
        self.url = url
        self.headers = headers

    def make_request(self):
        time.sleep(3)
        page = requests.get(self.url, headers=self.headers)
        soup = BeautifulSoup(page.text, 'html.parser')
        return soup

    def get_next_page(self, soup: BeautifulSoup):
        time.sleep(3)
        # old reddit's pagination control; raises AttributeError if there is no next page
        next_button = soup.find('span', class_='next-button')
        next_page_link = next_button.find("a").attrs['href']
        return next_page_link

    def get_haikus(self, soup: BeautifulSoup):
        # post titles on old reddit are the anchor tags carrying these classes
        haikus = [str(title.text) for title in soup.find_all("a", class_="title may-blank ")]
        return haikus

    def write_haikus_to_csv(self, haikus: list):
        # append one haiku per row; the with-block closes the file handle
        with open('scraped_haikus_v2.txt', 'a') as f:
            writer = csv.writer(f)
            for haiku in haikus:
                writer.writerow([haiku])


url = "https://old.reddit.com/r/haiku/"
# Headers to mimic a browser visit
headers = {'User-Agent': 'Mozilla/5.0'}

scraper = HaikuScraper(url, headers)
soup = scraper.make_request()

haikus = scraper.get_haikus(soup)
scraper.write_haikus_to_csv(haikus)

counter = 1

while counter <= 2500:
    time.sleep(2)
    link = scraper.get_next_page(soup)
    print(f"Page {counter + 1}. Link {link}.")
    scraper = HaikuScraper(link, headers)
    soup = scraper.make_request()
    haikus = scraper.get_haikus(soup)
    scraper.write_haikus_to_csv(haikus)
    counter += 1
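
Note: get_next_page assumes old reddit always renders a next-button span; on the last page of the listing it is absent and next_button.find("a") raises AttributeError, stopping the 2500-iteration loop with a traceback. A small, illustrative variation of the same driver loop that checks for the button and exits cleanly, using only the classes and calls already defined above:

# Variation of the driver loop that stops when no "next" button exists
# instead of raising AttributeError (illustrative sketch).
counter = 1
while counter <= 2500:
    time.sleep(2)
    next_button = soup.find('span', class_='next-button')
    if next_button is None:  # last page reached
        print(f"No next page after page {counter}; stopping.")
        break
    link = next_button.find("a").attrs['href']
    print(f"Page {counter + 1}. Link {link}.")
    scraper = HaikuScraper(link, headers)
    soup = scraper.make_request()
    haikus = scraper.get_haikus(soup)
    scraper.write_haikus_to_csv(haikus)
    counter += 1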
