Add dependencies and clean code

nyyu 2018-06-03 09:26:59 +02:00
parent d0869d2f1d
commit abe74b7812
2 changed files with 27 additions and 21 deletions


@@ -7,16 +7,23 @@ from time import sleep
 import requests
 from bs4 import BeautifulSoup
 
-LOL_CHAMPS_DIR = 'C:\\League of Legends\\Config\\Champions\\'
 PATTERN_CHAMPIONGG = re.compile(r'^\s+matchupData\.championData = (.*)$', re.MULTILINE)
+LOL_CHAMPS_DIR = r'C:\\League of Legends\\Config\\Champions\\'
 HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0'}
+ITEMS_TYPE = {
+    'Most Frequent Starters': ['firstItems', 'mostGames'],
+    'Highest Win % Starters': ['firstItems', 'highestWinPercent'],
+    'Most Frequent Core Build': ['items', 'mostGames'],
+    'Highest Win % Core Build': ['items', 'highestWinPercent'],
+}
 
 def getChampData(champ, position):
-    page = requests.get('http://champion.gg/champion/%s/%s?league=' % (champ, position), headers=HEADERS).text
+    page = requests.get('https://champion.gg/champion/%s/%s?league=' % (champ, position), headers=HEADERS).text
     return json.loads(PATTERN_CHAMPIONGG.search(page).group(1))
 
 def getChampsWithPositionsAndPatch():
-    soup = BeautifulSoup(requests.get('http://champion.gg', headers=HEADERS).text, 'html.parser')
+    soup = BeautifulSoup(requests.get('https://champion.gg', headers=HEADERS).text, 'html.parser')
     patch = soup.find('div', class_='analysis-holder').find('strong').text
@@ -24,27 +31,28 @@ def getChampsWithPositionsAndPatch():
     for div in soup.find_all('div', class_='champ-height'):
         champ_name = div.find('span', class_='champion-name').text
         pos = [link['href'].split('/')[-1] for link in div.find_all('a', href=True)[1:]]
-        champs.append({champ_name: pos })
+        champs.append({champ_name: pos})
     return (champs, patch)
 
 def makeItemSet(data, label):
     return {
-        "items": [{ "id": item['id'], "count": 1} for item in data['items']],
-        "type": "%s (%.2f %% - %d games)" % (label, data['winPercent'] * 100, data['games'])
+        "items": [{"id": item['id'], "count": 1} for item in data['items']],
+        "type": "%s (%.2f%% - %d games)" % (label, data['winPercent'] * 100, data['games'])
     }
 
 def makeItemSetFromList(list, label, key, data):
     return {
-        "items": [{ "id": str(id), "count": 1} for id in list],
+        "items": [{"id": str(id), "count": 1} for id in list],
         "type": label % '.'.join([data['skills']['skillInfo'][int(k)-1]['key'] for k in data['skills'][key]['order']])
     }
 
-def writeItemSet(id, pos, ver, data, path):
-    logging.info('Writing item set for %s at %s' % (id, pos))
+def writeItemSet(id, pos, ver, dir):
+    logging.info('Retrieving data for %s at %s' % (id, pos))
+    data = getChampData(id, pos)
     item_set = {
-        "title": "CGG %s %s" % (pos, ver),
+        "title": "CGG %s %s - %.2f%% Winrate" % (pos, ver, data['stats']['winRate'] * 100),
         "type": "custom",
         "map": "any",
         "mode": "any",
@@ -53,19 +61,15 @@ def writeItemSet(id, pos, ver, data, path):
         "blocks": []
     }
 
-    if 'mostGames' in data['firstItems']:
-        item_set['blocks'].append(makeItemSet(data['firstItems']['mostGames'], 'Most Frequent Starters'))
-    if 'highestWinPercent' in data['firstItems']:
-        item_set['blocks'].append(makeItemSet(data['firstItems']['highestWinPercent'], 'Highest Win % Starters'))
-    if 'mostGames' in data['items']:
-        item_set['blocks'].append(makeItemSet(data['items']['mostGames'], 'Most Frequent Core Build'))
-    if 'highestWinPercent' in data['items']:
-        item_set['blocks'].append(makeItemSet(data['items']['highestWinPercent'], 'Highest Win % Core Build'))
+    for label, path in ITEMS_TYPE.items():
+        if path[1] in data[path[0]]:
+            item_set['blocks'].append(makeItemSet(data[path[0]][path[1]], label))
 
     item_set['blocks'].append(makeItemSetFromList([2003, 2004, 2055, 2031, 2032, 2033, 2138, 2140, 2139], "Consumables | Frequent: %s", 'mostGames', data))
     item_set['blocks'].append(makeItemSetFromList([3340, 3364, 3363], "Trinkets | Wins: %s", 'highestWinPercent', data))
 
-    with open('%sCGG_%s_%s.json' % (path, id, pos), 'w', newline='\n') as out:
+    logging.info('Writing item set for %s at %s' % (id, pos))
+    with open('%sCGG_%s_%s.json' % (dir, id, pos), 'w', newline='\n') as out:
         json.dump(item_set, out, indent=4)
 
 def main():
@@ -92,7 +96,7 @@ def main():
            path = '%s%s\\Recommended\\' % (LOL_CHAMPS_DIR, c['id'])
            Path(path).mkdir(parents=True, exist_ok=True)
            for pos in positions:
-               writeItemSet(c['id'], pos, patch, getChampData(c['id'], pos), path)
+               writeItemSet(c['id'], pos, patch, path)
                sleep(.3)
        else:
            logging.error('%s not found in LoL champs' % name)
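The core of the cleanup is the table-driven writeItemSet: each ITEMS_TYPE value is a [section, key] path into the champion.gg JSON (e.g. data['firstItems']['mostGames']), so a single loop replaces the four hard-coded if blocks. Below is a minimal, self-contained sketch of that pattern; sample_data and make_block are hypothetical stand-ins for the scraped data and makeItemSet, not code from this repo.

    # Illustrative sketch only -- sample_data and make_block are made-up stand-ins.
    ITEMS_TYPE = {
        'Most Frequent Starters': ['firstItems', 'mostGames'],
        'Highest Win % Starters': ['firstItems', 'highestWinPercent'],
        'Most Frequent Core Build': ['items', 'mostGames'],
        'Highest Win % Core Build': ['items', 'highestWinPercent'],
    }

    # Pretend scrape result: only firstItems/mostGames is present, so only one block is built.
    sample_data = {
        'firstItems': {'mostGames': {'items': [{'id': 1055}, {'id': 2003}], 'winPercent': 0.52, 'games': 1234}},
        'items': {},
    }

    def make_block(entry, label):
        # Simplified stand-in for makeItemSet(): keep the label and the item ids.
        return {'type': label, 'items': [{'id': item['id'], 'count': 1} for item in entry['items']]}

    blocks = []
    for label, path in ITEMS_TYPE.items():
        if path[1] in sample_data[path[0]]:   # same lookup the new loop performs
            blocks.append(make_block(sample_data[path[0]][path[1]], label))

    print(blocks)  # one 'Most Frequent Starters' block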

requirements.txt (new file)

@@ -0,0 +1,2 @@
+beautifulsoup4==4.6.0
+Requests==2.18.4
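With requirements.txt in place, the usual way to install the pinned dependencies (assuming pip for Python 3 is on the PATH) would be:

    pip install -r requirements.txt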