forked from mrlan/EnglishPal
WIP:[REFACTOR]:main.py,create_clozeTest.py: refactor
parent 0117470ab5
commit 9d5175d0a6
@@ -1,7 +1,9 @@
 import random
-import sqlite3,re
+import sqlite3
+import re
 from nltk.corpus import wordnet as wn
 
+
 class Essay:
     def __init__(self):
         self._article_id = 0
@@ -16,18 +18,19 @@ class Essay:
         pass
 
     @article_id.setter
-    def article_id(self,article_id):
+    def article_id(self, article_id):
         self._article_id = article_id
         self.find_essay_in_database(self._article_id)
 
-    #Fetch the article and its level from the database
-    def find_essay_in_database(self,id):
+    # Fetch the article and its level from the database
+    def find_essay_in_database(self, id):
         try:
             # Connect to the database
             conn = sqlite3.connect("static/wordfreqapp.db")
             # Create a cursor
             cursor = conn.cursor()
-            cursor.execute("select text,level from article where article_id = "+str(id))
+            cursor.execute(
+                "select text,level from article where article_id = "+str(id))
             results = cursor.fetchall()
             conn.commit()
             cursor.close()
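
Side note on the query above: building SQL by concatenating `str(id)` works, but sqlite3's parameter binding is safer and avoids injection. A minimal sketch of the same article lookup with a bound parameter (table and column names taken from the query above; the standalone function is hypothetical):

import sqlite3

def fetch_article(article_id):
    # Same lookup as in find_essay_in_database, but the "?" placeholder
    # lets sqlite3 bind the value instead of splicing it into the string.
    conn = sqlite3.connect("static/wordfreqapp.db")
    cursor = conn.cursor()
    cursor.execute(
        "select text, level from article where article_id = ?",
        (article_id,))
    results = cursor.fetchall()
    cursor.close()
    conn.close()
    return results

The same pattern applies to the difficulty query in find_same_difficulty_words below.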
@@ -38,14 +41,14 @@ class Essay:
         except Exception as e:
             print(e)
 
-    #Split the article into a list of words
+    # Split the article into a list of words
     def split_essay_to_word(self):
         article = "".join(self._essay)
         words = re.split(r"\b[\.,\s\n\r\n\$\']+?\b", article)
         word_list = [word.lower() for word in words]
         return word_list
 
-    #Look up words in the database with the same difficulty as the article
+    # Look up words in the database with the same difficulty as the article
     def find_same_difficulty_words(self):
         result_list = []
         try:
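
For reference, the `re.split` pattern above splits on runs of punctuation and whitespace that sit between word boundaries. A quick check of what it produces (illustrative sentence, not from the repository):

import re

article = "The cat sat, quietly, on the mat."
words = re.split(r"\b[\.,\s\n\r\n\$\']+?\b", article)
print([word.lower() for word in words])
# ['the', 'cat', 'sat', 'quietly', 'on', 'the', 'mat.']
# Note the final 'mat.' keeps its period: punctuation at the very end
# of the string has no closing word boundary, so it is never split off.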
@@ -53,7 +56,8 @@ class Essay:
             conn = sqlite3.connect("static/wordfreqapp.db")
             # Create a cursor
             cursor = conn.cursor()
-            cursor.execute("select word from words where difficulty = " + str(self._difficulty))
+            cursor.execute(
+                "select word from words where difficulty = " + str(self._difficulty))
             results = cursor.fetchall()
             conn.commit()
             cursor.close()
@@ -65,16 +69,16 @@ class Essay:
         except Exception as e:
             print(e)
 
-    #Get synonyms of a word
-    def get_word_synsets(self,word):
+    # Get synonyms of a word
+    def get_word_synsets(self, word):
         synsets_set = wn.synsets(word)
         synset_list = []
         for synset in synsets_set:
             synset_list.append(synset.name().split(".")[0])
         return synset_list
 
-    #Generate the cloze test
-    def create_clozeTest(self,essay):
+    # Generate the cloze test
+    def create_clozeTest(self, essay):
         essay.article_id = self._article_id
         word = []  # holds the words in the article whose difficulty matches the article's
         answers = []  # holds the correct answers
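
`Synset.name()` returns identifiers of the form lemma.pos.number (e.g. 'dog.n.01'), so splitting on '.' and keeping index 0 recovers just the head lemma. A small illustration (requires the NLTK wordnet corpus, e.g. via nltk.download('wordnet')):

from nltk.corpus import wordnet as wn

for synset in wn.synsets("dog"):
    print(synset.name())                # e.g. 'dog.n.01', 'frump.n.01', ...
    print(synset.name().split(".")[0])  # e.g. 'dog', 'frump', ...

Strictly, these are the names of synsets containing the word, so the list mixes synonyms with other senses; presumably that is why create_clozeTest deduplicates with set() before building the options.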
@@ -82,26 +86,27 @@ class Essay:
 
         database_words = essay.find_same_difficulty_words()
         essay_words = essay.split_essay_to_word()
         # Find the words in the article with the same difficulty as the article and store them in word[]
         for essay_word in essay_words:
             if database_words.__contains__(essay_word) and essay_word not in word:
                 word.append(essay_word)
 
+        # Handle the case where the article is too short and yields fewer than 10 questions
         if len(word) <= 10:
             answers = word
-        else:
+        else:  # Store the words found as the correct answers in answers[]
             for i in range(0, 10):
                 w = word[random.randint(0, len(word) - 1)]
                 if not answers.__contains__(w):
                     answers.append(w)
 
         self._answers = answers
 
         # Replace the words in the article with question numbers
         No = 1
         for answer in answers:
             questions.append(list(answer.split(",")))
-            self._essay = self._essay.replace(answer,'('+str(No)+')____', 1)
+            self._essay = self._essay.replace(answer, '('+str(No)+')____', 1)
             No += 1
 
         # Generate the four options for each question
         for question in questions:
             synset = list(set(essay.get_word_synsets(question[0])))
             if len(synset) == 0 or len(synset) == 1:
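
One caveat in the answer-picking loop above: ten draws with random.randint plus a duplicate check can end with fewer than ten answers, since repeated draws are simply skipped. If exactly ten distinct answers are wanted, random.sample draws without replacement (a sketch, not part of this commit; the list literal is just example data):

import random

word = ["alpha", "beta", "gamma", "delta", "epsilon", "zeta",
        "eta", "theta", "iota", "kappa", "lambda", "mu"]

# sample() picks k distinct items, so no duplicate check is needed
answers = word if len(word) <= 10 else random.sample(word, 10)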
@@ -109,6 +109,10 @@ essay = Essay()
 
 @app.route('/gocloze', methods=['GET', 'POST'])
 def go_ClozeTest():
+    '''
+    Return the cloze-test page before (GET) or after (POST) the questions are answered
+    :return: the cloze-test page
+    '''
     if request.method == 'GET':
         essay._article_id += 1
         essay.create_clozeTest(essay)
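
A quick way to smoke-test the new route without running a server, assuming main.py exposes the Flask `app` object used above (hypothetical snippet):

from main import app

with app.test_client() as client:
    response = client.get('/gocloze')  # GET builds a fresh cloze test
    print(response.status_code)        # expect 200 if the template renders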