monitor.py
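"""Monitor pages for new transactions.

Polls every url listed in urls.txt, optionally rotating through proxies
from proxies.txt, and alerts (sound + clipboard) when the newest
transaction link on a page changes. State persists in state.data.
"""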
import hashlib
import pickle
import subprocess
import sys
import time
import urllib.parse
from datetime import datetime
from os import path

import requests
from bs4 import BeautifulSoup
from playsound import playsound, PlaysoundException

def getContent(url, proxies=None):
    """Fetch a url and return its body as text, or '' on failure."""
    content = ''
    try:
        response = requests.get(url, proxies=proxies)
        content = response.content.decode('utf-8')
    except KeyboardInterrupt:
        sys.exit('Exit!')
    except Exception as e:
        # report only the bare exception class name
        print('Unexpected error:', type(e).__name__)
    return content
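
# urls.txt supplies the pages to monitor, one url per line; the address
# below is only a hypothetical example:
#   https://example.com/txs?a=0x1234abcd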
# load input urls to monitor
if not path.exists('urls.txt'):
    sys.exit('urls.txt file cannot be found')
with open('urls.txt', 'r') as f:
    input_urls = [u.strip() for u in f.readlines() if u.strip()]
if not input_urls:
    sys.exit('urls.txt is empty')
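
# proxies.txt optionally supplies proxies to rotate through when a fetch
# fails, one per line, e.g. (hypothetical): http://127.0.0.1:8080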
input_proxies = []
if path.exists('proxies.txt'):
    with open('proxies.txt', 'r') as f:
        input_proxies = [u.strip() for u in f.readlines() if u.strip()]

# load monitored urls state persisted from previous runs
monitored_urls = {}
if path.exists('state.data'):
    with open('state.data', 'rb') as f:
        monitored_urls = pickle.load(f)

proxies = None
input_proxies_index = -1
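
# main loop: poll every url, compare the newest transaction link against
# the saved state, and alert on any change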
while True:
    for url in input_urls:
        now = datetime.now()
        print('%s Checking %s ...' % (now.strftime('%d/%m/%Y %H:%M:%S'), url))
        token = ''
        content = getContent(url, proxies)
        res = BeautifulSoup(content, 'html.parser')
        listUrls = res.find_all('a', attrs={'class': 'hash-tag'})
        if not listUrls:
            # nothing parsed: the request likely failed or was blocked,
            # so rotate to the next proxy before the next attempt
            if input_proxies:
                input_proxies_index = (input_proxies_index + 1) % len(input_proxies)
                proxyIP = input_proxies[input_proxies_index]
                proxies = {
                    'http': proxyIP,
                    'https': proxyIP,
                }
                print('Switching to proxy', proxyIP)
            continue
        topItemUrl = listUrls[0]
        # a node preceding the link marks the entry as a failed transaction
        isFailed = bool(topItemUrl.previous_sibling)
        topItemUrl = urllib.parse.urljoin(url, topItemUrl['href'])
        # state is keyed on a hash of the monitored url
        monitored_url_key = hashlib.md5(url.encode('utf-16')).hexdigest()
        if monitored_url_key not in monitored_urls or monitored_urls[monitored_url_key] != topItemUrl:
            # alert only on a change, not on the first check of a new url
            if monitored_url_key in monitored_urls:
                print('Change Detected!')
                # follow the link only if it is not a failed transaction
                if not isFailed:
                    content = getContent(topItemUrl, proxies)
                    res = BeautifulSoup(content, 'html.parser')
                    last_transaction_action_url = res.find_all('a', attrs={'class': 'd-inline-block'})[-1]['href']
                    token = last_transaction_action_url.split('/')[-1]
                    # copy token to the clipboard; clip.exe (Windows) expects UTF-16 input
                    subprocess.run(['clip.exe'], input=token.encode('utf-16'), check=True)
                    print('Token %s copied to Clipboard' % token)
                else:
                    print('Failed Transaction')
                # play alert sound
                try:
                    playsound('alert.mp3')
                except PlaysoundException:
                    print('Cannot play sound! Check your sound card')
            else:
                print('New Url. First Check!')
            monitored_urls[monitored_url_key] = topItemUrl
            # persist state so a restart does not re-alert on known items
            with open('state.data', 'wb') as f:
                pickle.dump(monitored_urls, f, pickle.HIGHEST_PROTOCOL)
        else:
            print('No Change!')
        # brief pause so a full pass over the urls takes about a second
        time.sleep(1 / (len(input_urls) + 1))