import os
import re
import socket
import time
import urllib
import xml.etree.ElementTree as ETree

from enum import Enum
from ftplib import FTP, CRLF, Error, error_perm
from http.client import RemoteDisconnected
from telnetlib import Telnet
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import (urlopen, HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, build_opener,
                            install_opener, Request)

from app.commons import log, run_task
from app.settings import SettingsType

BQ_FILES_LIST = ("tv", "radio",  # enigma 2
                 "services.xml", "myservices.xml", "bouquets.xml", "ubouquets.xml")  # neutrino

DATA_FILES_LIST = ("lamedb", "lamedb5", "blacklist", "whitelist")

STC_XML_FILE = ("satellites.xml", "terrestrial.xml", "cables.xml")
WEB_TV_XML_FILE = ("webtv.xml",)
PICONS_SUF = (".jpg", ".png")


class DownloadType(Enum):
    ALL = 0
    BOUQUETS = 1
    SATELLITES = 2
    PICONS = 3
    WEBTV = 4
    EPG = 5


class TestException(Exception):
    pass


class HttpApiException(Exception):
    pass


class UtfFTP(FTP):
    """ FTP class wrapper. """

    def retrlines(self, cmd, callback=None):
        """ Small modification of the original method.

            Used to retrieve data in line mode and to skip errors related
            to reading file names in encodings other than UTF-8 or Latin-1.
            Decode errors [UnicodeDecodeError, etc.] are ignored.
        """
        if callback is None:
            callback = log
        self.sendcmd("TYPE A")

        with self.transfercmd(cmd) as conn, conn.makefile("r", encoding=self.encoding, errors="ignore") as fp:
            while True:
                line = fp.readline(self.maxline + 1)
                if len(line) > self.maxline:
                    msg = "UtfFTP [retrlines] error: got more than {} bytes".format(self.maxline)
                    log(msg)
                    raise Error(msg)

                if self.debugging > 2:
                    log("UtfFTP [retrlines] *retr* {}".format(repr(line)))

                if not line:
                    break

                if line[-2:] == CRLF:
                    line = line[:-2]
                elif line[-1:] == "\n":
                    line = line[:-1]

                callback(line)

        return self.voidresp()

    # ***************** Download ******************* #

    def download_files(self, save_path, file_list, callback=None):
        """ Downloads files from the receiver via FTP. """
        for file in filter(lambda s: s.endswith(file_list), self.nlst()):
            self.download_file(file, save_path, callback)

    def download_file(self, name, save_path, callback=None):
        with open(save_path + name, "wb") as f:
            msg = "Downloading file: {}. Status: {}\n"
            try:
                resp = str(self.retrbinary("RETR " + name, f.write))
            except error_perm as e:
                resp = str(e)
                msg = msg.format(name, e)
                log(msg.rstrip())
            else:
                msg = msg.format(name, resp)

            if callback:
                callback(msg)
            else:
                log(msg.rstrip())

            return resp

    def download_dir(self, path, save_path, callback=None):
        """ Downloads a directory from FTP with all its contents.

            Creates a leaf directory and all intermediate ones. This is recursive.
        """
        os.makedirs(os.path.join(save_path, path), exist_ok=True)

        files = []
        self.dir(path, files.append)
        for f in files:
            f_data = f.split()
            f_path = os.path.join(path, " ".join(f_data[8:]))

            if f_data[0][0] == "d":
                try:
                    os.makedirs(os.path.join(save_path, f_path), exist_ok=True)
                except OSError as e:
                    msg = "Download dir error: {}".format(e).rstrip()
                    log(msg)
                    return "500 " + msg
                else:
                    self.download_dir(f_path, save_path, callback)
            else:
                try:
                    self.download_file(f_path, save_path, callback)
                except OSError as e:
                    log("Download dir error: {}".format(e).rstrip())

        resp = "226 Transfer complete."
        msg = "Copy directory {}. Status: {}".format(path, resp)
        log(msg)

        if callback:
            callback(msg)

        return resp

    def download_xml(self, data_path, xml_path, xml_files, callback):
        """ Used to download *.xml files. """
        self.cwd(xml_path)
        self.download_files(data_path, xml_files, callback)

    def download_picons(self, src, dest, callback, files_filter=None):
        try:
            self.cwd(src)
        except error_perm as e:
            callback(str(e))
            return

        for file in filter(picons_filter_function(files_filter), self.nlst()):
            self.download_file(file, dest, callback)

    # ***************** Uploading ******************* #

    def upload_bouquets(self, data_path, remove_unused, callback):
        if remove_unused:
            self.remove_unused_bouquets(callback)
        self.upload_files(data_path, BQ_FILES_LIST, callback)

    def upload_files(self, data_path, file_list, callback):
        for file_name in os.listdir(data_path):
            if file_name in STC_XML_FILE or file_name in WEB_TV_XML_FILE:
                continue
            if file_name.endswith(file_list):
                self.send_file(file_name, data_path, callback)

    def upload_xml(self, data_path, xml_path, xml_files, callback):
        """ Used to transfer *.xml files. """
        self.cwd(xml_path)
        for xml_file in xml_files:
            self.send_file(xml_file, data_path, callback)

    def upload_picons(self, src, dest, callback, files_filter=None):
        try:
            self.cwd(dest)
        except error_perm as e:
            if str(e).startswith("550"):
                self.mkd(dest)  # Created if it does not exist.
                self.cwd(dest)

        for file_name in filter(picons_filter_function(files_filter), os.listdir(src)):
            self.send_file(file_name, src, callback)

    def remove_unused_bouquets(self, callback):
        bq_files = ("userbouquet.", "bouquets.xml", "ubouquets.xml")

        for file in filter(lambda f: f.startswith(bq_files), self.nlst()):
            self.delete_file(file, callback)

    def send_file(self, file_name, path, callback=None):
        """ Opens the file in binary mode and transfers it to the receiver. """
        file_src = path + file_name
        resp = "500"
        if not os.path.isfile(file_src):
            log("Uploading file: '{}'. File not found. Skipping.".format(file_src))
            return resp + " File not found."

        with open(file_src, "rb") as f:
            msg = "Uploading file: {}. Status: {}\n"
            try:
                resp = str(self.storbinary("STOR " + file_name, f))
            except Error as e:
                resp = str(e)
                msg = msg.format(file_name, resp)
                log(msg)
            else:
                msg = msg.format(file_name, resp)

            if callback:
                callback(msg)

        return resp

    def upload_dir(self, path, callback=None):
        """ Uploads a directory to FTP with all its contents.

            Creates a leaf directory and all intermediate ones. This is recursive.
        """
        resp = "200"
        msg = "Uploading directory: {}. Status: {}"
        try:
            files = os.listdir(path)
        except OSError as e:
            log(e)
        else:
            os.chdir(path)
            for f in files:
                file = r"{}{}".format(path, f)
                if os.path.isfile(file):
                    self.send_file(f, path, callback)
                elif os.path.isdir(file):
                    try:
                        self.mkd(f)
                    except Error:
                        pass  # NOP

                    try:
                        self.cwd(f)
                    except Error as e:
                        resp = str(e)
                        log(msg.format(f, resp))
                    else:
                        self.upload_dir(file + "/")

                    self.cwd("..")
                    os.chdir("..")

            if callback:
                callback(msg.format(path, resp))

            return resp

    # ****************** Deletion ******************** #

    def delete_picons(self, callback, dest=None, files_filter=None):
        if dest:
            try:
                self.cwd(dest)
            except Error as e:
                callback(str(e))
                return

        for file in filter(picons_filter_function(files_filter), self.nlst()):
            self.delete_file(file, callback)

    def delete_file(self, file, callback=log):
        msg = "Deleting file: {}. Status: {}\n"
        try:
            resp = self.delete(file)
        except Error as e:
            resp = str(e)
            msg = msg.format(file, resp)
            log(msg)
        else:
            msg = msg.format(file, resp)

        if callback:
            callback(msg)

        return resp

    def delete_dir(self, path, callback=None):
        files = []
        self.dir(path, files.append)
        for f in files:
            f_data = f.split()
            name = " ".join(f_data[8:])
            f_path = path + "/" + name

            if f_data[0][0] == "d":
                self.delete_dir(f_path, callback)
            else:
                self.delete_file(f_path, callback)

        msg = "Remove directory {}. Status: {}\n"
        try:
            resp = self.rmd(path)
        except Error as e:
            msg = msg.format(path, e)
            log(msg)
            return "500"
        else:
            msg = msg.format(path, resp)
            log(msg.rstrip())

        if callback:
            callback(msg)

        return resp

    def rename_file(self, from_name, to_name, callback=None):
        msg = "File rename: {}. Status: {}\n"
        try:
            resp = self.rename(from_name, to_name)
        except Error as e:
            resp = str(e)
            msg = msg.format(from_name, resp)
            log(msg)
        else:
            msg = msg.format(from_name, resp)

        if callback:
            callback(msg)

        return resp
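

# A minimal usage sketch for UtfFTP. The host, credentials and paths below are
# hypothetical; adjust them to your receiver:
#
#     with UtfFTP(host="192.168.1.10", user="root", passwd="") as ftp:
#         ftp.encoding = "utf-8"
#         ftp.cwd("/etc/enigma2")
#         ftp.download_files("/tmp/backup/", BQ_FILES_LIST, print)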


def download_data(*, settings, download_type=DownloadType.ALL, callback=log, files_filter=None):
    with UtfFTP(host=settings.host, user=settings.user, passwd=settings.password) as ftp:
        ftp.encoding = "utf-8"
        callback("FTP OK.\n")
        save_path = settings.data_local_path
        os.makedirs(os.path.dirname(save_path), exist_ok=True)

        # bouquets
        if download_type is DownloadType.ALL or download_type is DownloadType.BOUQUETS:
            ftp.cwd(settings.services_path)
            file_list = BQ_FILES_LIST + DATA_FILES_LIST if download_type is DownloadType.ALL else BQ_FILES_LIST
            ftp.download_files(save_path, file_list, callback)
        # *.xml and webtv
        if download_type in (DownloadType.ALL, DownloadType.SATELLITES):
            ftp.download_xml(save_path, settings.satellites_xml_path, STC_XML_FILE, callback)
        if download_type in (DownloadType.ALL, DownloadType.WEBTV):
            ftp.download_xml(save_path, settings.satellites_xml_path, WEB_TV_XML_FILE, callback)

        if download_type is DownloadType.PICONS:
            picons_path = settings.picons_local_path
            os.makedirs(os.path.dirname(picons_path), exist_ok=True)
            ftp.download_picons(settings.picons_path, picons_path, callback, files_filter)
        # epg.dat
        if download_type is DownloadType.EPG:
            stb_path = settings.services_path
            epg_options = settings.epg_options
            if epg_options:
                stb_path = epg_options.get("epg_dat_stb_path", stb_path)
                save_path = epg_options.get("epg_dat_path", save_path)

            ftp.cwd(stb_path)
            ftp.download_files(save_path, "epg.dat", callback)

        callback("\nDone.\n")


def upload_data(*, settings, download_type=DownloadType.ALL, remove_unused=False,
                callback=log, done_callback=None, use_http=False, files_filter=None):
    s_type = settings.setting_type
    data_path = settings.data_local_path
    host = settings.host
    base_url = "http{}://{}:{}".format("s" if settings.http_use_ssl else "", host, settings.http_port)
    url = "{}/web/".format(base_url)
    tn, ht = None, None  # telnet, http

    try:
        if s_type is SettingsType.ENIGMA_2 and use_http:
            ht = http(settings.user, settings.password, base_url, callback, settings.http_use_ssl)
            next(ht)
            message = ""
            if download_type is DownloadType.BOUQUETS:
                message = "User bouquets will be updated!"
            elif download_type is DownloadType.ALL:
                message = "All user data will be reloaded!"
            elif download_type is DownloadType.SATELLITES:
                message = "Satellites.xml file will be updated!"
            elif download_type is DownloadType.PICONS:
                message = "Picons will be updated!"

            params = urlencode({"text": message, "type": 2, "timeout": 5})
            ht.send((url + "message?{}".format(params), "Sending info message... "))

            if download_type is DownloadType.ALL:
                time.sleep(5)
                ht.send((url + "powerstate?newstate=0", "Toggle Standby "))
                time.sleep(2)
        else:
            if download_type is not DownloadType.PICONS:
                # telnet
                tn = telnet(host=host,
                            user=settings.user,
                            password=settings.password,
                            timeout=settings.telnet_timeout)
                next(tn)
                # terminate enigma or neutrino
                callback("Telnet initialization...\n")
                tn.send("init 4")
                callback("Stopping GUI...\n")

        with UtfFTP(host=host, user=settings.user, passwd=settings.password) as ftp:
            ftp.encoding = "utf-8"
            callback("FTP OK.\n")
            sat_xml_path = settings.satellites_xml_path
            services_path = settings.services_path

            if download_type is DownloadType.SATELLITES:
                ftp.upload_xml(data_path, sat_xml_path, STC_XML_FILE, callback)

            if s_type is SettingsType.NEUTRINO_MP and download_type is DownloadType.WEBTV:
                ftp.upload_xml(data_path, sat_xml_path, WEB_TV_XML_FILE, callback)

            if download_type is DownloadType.BOUQUETS:
                ftp.cwd(services_path)
                ftp.upload_bouquets(data_path, remove_unused, callback)

            if download_type is DownloadType.ALL:
                ftp.upload_xml(data_path, sat_xml_path, STC_XML_FILE, callback)
                if s_type is SettingsType.NEUTRINO_MP:
                    ftp.upload_xml(data_path, sat_xml_path, WEB_TV_XML_FILE, callback)

                ftp.cwd(services_path)
                ftp.upload_bouquets(data_path, remove_unused, callback)
                ftp.upload_files(data_path, DATA_FILES_LIST, callback)

            if download_type is DownloadType.PICONS:
                ftp.upload_picons(settings.picons_local_path, settings.picons_path, callback, files_filter)

            if tn and not use_http:
                # resume enigma or restart neutrino
                tn.send("init 3" if s_type is SettingsType.ENIGMA_2 else "init 6")
                callback("Starting...\n" if s_type is SettingsType.ENIGMA_2 else "Rebooting...\n")
            elif ht and use_http:
                if download_type is DownloadType.BOUQUETS:
                    ht.send((url + "servicelistreload?mode=2", "Reloading Userbouquets."))
                elif download_type is DownloadType.ALL:
                    ht.send((url + "servicelistreload?mode=0", "Reloading lamedb and Userbouquets."))
                    ht.send((url + "powerstate?newstate=4", "Wakeup from Standby."))

            if done_callback is not None:
                done_callback()
    finally:
        if tn:
            tn.close()
        if ht:
            ht.close()
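

# A hedged usage sketch: as with download_data, `settings` is assumed to be a
# profile object from app.settings. With use_http=True the reload commands go
# through the receiver's web interface instead of restarting the GUI via Telnet:
#
#     upload_data(settings=settings,
#                 download_type=DownloadType.BOUQUETS,
#                 use_http=True,
#                 callback=print)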


# ***************** Picons ******************* #

def remove_picons(*, settings, callback, done_callback=None, files_filter=None):
    with UtfFTP(host=settings.host, user=settings.user, passwd=settings.password) as ftp:
        ftp.encoding = "utf-8"
        callback("FTP OK.\n")
        ftp.delete_picons(callback, settings.picons_path, files_filter)
        if done_callback:
            done_callback()


def picons_filter_function(files_filter=None):
    """ Returns a filter predicate: membership in files_filter if given, otherwise a picon suffix check. """
    return lambda f: f in files_filter if files_filter else f.endswith(PICONS_SUF)
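

# Quick behavior sketch for picons_filter_function:
#
#     picons_filter_function()("logo.png")         # -> True  (suffix check)
#     picons_filter_function({"a.png"})("b.png")   # -> False (membership test only)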


def http(user, password, url, callback, use_ssl=False):
    init_auth(user, password, url, use_ssl)
    data = get_post_data(url, password, user)

    while True:
        url, message = yield
        resp = get_response(HttpAPI.Request.TEST, url, data).get("e2statetext", None)
        callback("HTTP: {} {}\n".format(message, "Successful." if resp and message else ""))


def telnet(host, port=23, user="", password="", timeout=5):
    try:
        tn = Telnet(host=host, port=port, timeout=timeout)
    except socket.timeout:
        log("telnet error: socket timeout")
    else:
        time.sleep(1)
        command = yield
        if user != "":
            tn.read_until(b"login: ", timeout)
            tn.write(user.encode("utf-8") + b"\n")
            time.sleep(timeout)
        if password != "":
            tn.read_until(b"Password: ", timeout)
            tn.write(password.encode("utf-8") + b"\n")
            time.sleep(timeout)
        tn.write("{}\r\n".format(command).encode("utf-8"))
        time.sleep(timeout)
        command = yield
        time.sleep(timeout)
        tn.write("{}\r\n".format(command).encode("utf-8"))
        time.sleep(timeout)
        yield
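

# Driving the telnet generator (a sketch; host and credentials are
# hypothetical). The first next() opens the connection; each send() pushes
# one shell command:
#
#     tn = telnet(host="192.168.1.10", user="root", password="", timeout=5)
#     next(tn)            # connect
#     tn.send("init 4")   # log in and stop the GUI
#     ...
#     tn.close()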


# ***************** HTTP API ******************* #

class HttpAPI:
    __MAX_WORKERS = 4

    class Request(Enum):
        ZAP = "zap?sRef="
        INFO = "about"
        SIGNAL = "signal"
        STREAM = "stream.m3u?ref="
        STREAM_TS = "ts.m3u?file="
        STREAM_CURRENT = "streamcurrent.m3u"
        CURRENT = "getcurrent"
        TEST = None
        TOKEN = "session"
        # Player
        PLAY = "mediaplayerplay?file="
        PLAYER_LIST = "mediaplayerlist?path=playlist"
        PLAYER_PLAY = "mediaplayercmd?command=play"
        PLAYER_NEXT = "mediaplayercmd?command=next"
        PLAYER_PREV = "mediaplayercmd?command=previous"
        PLAYER_STOP = "mediaplayercmd?command=stop"
        PLAYER_REMOVE = "mediaplayerremove?file="
        # Remote control
        POWER = "powerstate?newstate="
        REMOTE = "remotecontrol?command="
        VOL = "vol?set=set"
        # EPG
        EPG = "epgservice?sRef="
        # Timer
        TIMER = ""
        TIMER_LIST = "timerlist"
        # Recordings
        RECORDINGS = "movielist?dirname="
        REC_DIRS = "getlocations"
        REC_CURRENT = "getcurrlocation"
        # Screenshot
        GRUB = "grab?format=jpg&"

    class Remote(str, Enum):
        """ Args for the Request.REMOTE command. """
        UP = "103"
        LEFT = "105"
        RIGHT = "106"
        DOWN = "108"
        MENU = "139"
        EXIT = "174"
        OK = "352"
        RED = "398"
        GREEN = "399"
        YELLOW = "400"
        BLUE = "401"

    class Power(str, Enum):
        """ Args for the Request.POWER command. """
        TOGGLE_STANDBY = "0"
        DEEP_STANDBY = "1"
        REBOOT = "2"
        RESTART_GUI = "3"
        WAKEUP = "4"
        STANDBY = "5"

    PARAM_REQUESTS = {Request.REMOTE,
                      Request.POWER,
                      Request.VOL,
                      Request.EPG,
                      Request.TIMER,
                      Request.RECORDINGS}

    STREAM_REQUESTS = {Request.STREAM,
                       Request.STREAM_CURRENT,
                       Request.STREAM_TS}

    def __init__(self, settings):
        from concurrent.futures import ThreadPoolExecutor as PoolExecutor
        self._executor = PoolExecutor(max_workers=self.__MAX_WORKERS)

        self._settings = settings
        self._shutdown = False
        self._session_id = 0
        self._main_url = None
        self._base_url = None
        self._data = None
        self._is_owif = True
        self.init()

    def send(self, req_type, ref, callback=print, ref_prefix=""):
        if self._shutdown:
            return

        url = self._base_url + req_type.value
        data = self._data

        if req_type is self.Request.ZAP or req_type in self.STREAM_REQUESTS:
            url += urllib.parse.quote(ref)
        elif req_type is self.Request.PLAY or req_type is self.Request.PLAYER_REMOVE:
            url += "{}{}".format(ref_prefix, urllib.parse.quote(ref).replace("%3A", "%253A"))
        elif req_type is self.Request.GRUB:
            data = None  # Must be disabled for token-based security.
            url = "{}/{}{}".format(self._main_url, req_type.value, ref)
        elif req_type in self.PARAM_REQUESTS:
            url += ref

        def done_callback(f):
            callback(f.result())

        future = self._executor.submit(get_response, req_type, url, data)
        future.add_done_callback(done_callback)

    @run_task
    def init(self):
        user, password = self._settings.user, self._settings.password
        use_ssl = self._settings.http_use_ssl
        self._main_url = "http{}://{}:{}".format("s" if use_ssl else "", self._settings.host, self._settings.http_port)
        self._base_url = "{}/web/".format(self._main_url)
        init_auth(user, password, self._main_url, use_ssl)
        url = "{}/web/{}".format(self._main_url, self.Request.TOKEN.value)
        s_id = get_session_id(user, password, url)
        if s_id != "0":
            self._data = urllib.parse.urlencode({"user": user, "password": password, "sessionid": s_id}).encode("utf-8")

        self.send(self.Request.INFO, None, self.init_callback)

    def init_callback(self, info):
        if info:
            version = info.get("e2webifversion", "").upper()
            self._is_owif = "OWIF" in version
            version_info = "Web Interface version: {}".format(version) if version else ""
            log("HTTP API initialized... {}".format(version_info))

    @property
    def is_owif(self):
        """ Returns True if the web interface is OpenWebif. """
        return self._is_owif

    @run_task
    def close(self):
        self._shutdown = True
        self._executor.shutdown()
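

# A hedged usage sketch for HttpAPI (assumes a settings profile object like the
# one used by download_data/upload_data; callbacks receive the dicts built by
# get_response):
#
#     api = HttpAPI(settings)
#     api.send(HttpAPI.Request.INFO, None, print)
#     api.send(HttpAPI.Request.REMOTE, HttpAPI.Remote.OK, print)
#     api.close()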


def get_response(req_type, url, data=None):
    try:
        with urlopen(Request(url, data=data), timeout=10) as f:
            if req_type in HttpAPI.STREAM_REQUESTS:
                return {"m3u": f.read().decode("utf-8")}
            elif req_type is HttpAPI.Request.GRUB:
                return {"img_data": f.read()}
            elif req_type is HttpAPI.Request.CURRENT:
                for el in ETree.fromstring(f.read().decode("utf-8")).iter("e2event"):
                    return {el.tag: el.text for el in el.iter()}  # Returns the first [current] event from the list.
            elif req_type is HttpAPI.Request.PLAYER_LIST:
                return [{el.tag: el.text for el in el.iter()} for el in
                        ETree.fromstring(f.read().decode("utf-8")).iter("e2file")]
            elif req_type is HttpAPI.Request.EPG:
                return {"event_list": [{el.tag: el.text for el in el.iter()} for el in
                                       ETree.fromstring(f.read().decode("utf-8")).iter("e2event")]}
            elif req_type is HttpAPI.Request.TIMER_LIST:
                return {"timer_list": [{el.tag: el.text for el in el.iter()} for el in
                                       ETree.fromstring(f.read().decode("utf-8")).iter("e2timer")]}
            elif req_type is HttpAPI.Request.REC_DIRS:
                return {"rec_dirs": [el.text for el in ETree.fromstring(f.read().decode("utf-8")).iter("e2location")]}
            elif req_type is HttpAPI.Request.RECORDINGS:
                return {"recordings": [{el.tag: el.text for el in el.iter()} for el in
                                       ETree.fromstring(f.read().decode("utf-8")).iter("e2movie")]}
            else:
                return {el.tag: el.text for el in ETree.fromstring(f.read().decode("utf-8")).iter()}
    except HTTPError as e:
        if req_type is HttpAPI.Request.TEST:
            raise e
        return {"error_code": e.code}
    except (URLError, RemoteDisconnected, ConnectionResetError) as e:
        if req_type is HttpAPI.Request.TEST:
            raise e
    except ETree.ParseError as e:
        log("Parsing response error: {}".format(e))

    return {"error_code": -1}


def init_auth(user, password, url, use_ssl=False):
    """ Initializes basic authentication. """
    pass_mgr = HTTPPasswordMgrWithDefaultRealm()
    pass_mgr.add_password(None, url, user, password)
    auth_handler = HTTPBasicAuthHandler(pass_mgr)

    if use_ssl:
        import ssl
        from urllib.request import HTTPSHandler

        # Note: certificate verification is deliberately disabled here;
        # receivers commonly use self-signed certificates.
        opener = build_opener(auth_handler, HTTPSHandler(context=ssl._create_unverified_context()))
    else:
        opener = build_opener(auth_handler)

    install_opener(opener)


def get_session_id(user, password, url):
    data = urllib.parse.urlencode(dict(user=user, password=password)).encode("utf-8")
    return get_response(HttpAPI.Request.TOKEN, url, data=data).get("e2sessionid", "0")


def get_post_data(base_url, password, user):
    s_id = get_session_id(user, password, "{}/web/{}".format(base_url, HttpAPI.Request.TOKEN.value))
    data = None
    if s_id != "0":
        data = urllib.parse.urlencode({"user": user, "password": password, "sessionid": s_id}).encode("utf-8")

    return data


# ***************** Connections testing ******************* #

def test_ftp(host, port, user, password, timeout=5):
    try:
        with FTP(host=host, user=user, passwd=password, timeout=timeout) as ftp:
            return ftp.getwelcome()
    except (error_perm, ConnectionRefusedError, OSError) as e:
        raise TestException(e)


def test_http(host, port, user, password, timeout=5, use_ssl=False, skip_message=False):
    params = urlencode({"text": "Connection test", "type": 2, "timeout": timeout})
    params = "statusinfo" if skip_message else "message?{}".format(params)
    base_url = "http{}://{}:{}".format("s" if use_ssl else "", host, port)
    # authentication
    init_auth(user, password, base_url, use_ssl)
    data = get_post_data(base_url, password, user)

    try:
        return get_response(HttpAPI.Request.TEST, "{}/web/{}".format(base_url, params), data).get("e2statetext", "")
    except (RemoteDisconnected, URLError, HTTPError) as e:
        raise TestException(e)


def test_telnet(host, port, user, password, timeout=5):
    try:
        gen = telnet_test(host, port, user, password, timeout)
        res = next(gen)
        msg = str(res, encoding="utf8").strip()
        log(msg)
        next(gen)
        if re.search("password", msg, re.IGNORECASE):
            raise TestException(msg)
        return msg
    except (socket.timeout, OSError) as e:
        raise TestException(e)


def telnet_test(host, port, user, password, timeout):
    tn = Telnet(host=host, port=port, timeout=timeout)
    time.sleep(1)
    tn.read_until(b"login: ", timeout=2)
    tn.write(user.encode("utf-8") + b"\r")
    time.sleep(timeout)
    tn.read_until(b"Password: ", timeout=2)
    tn.write(password.encode("utf-8") + b"\r")
    time.sleep(timeout)
    yield tn.read_very_eager()
    tn.close()
    yield
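

# A hedged sketch of the test helpers (host and ports are hypothetical; each
# helper raises TestException on failure):
#
#     print(test_ftp("192.168.1.10", 21, "root", ""))
#     print(test_http("192.168.1.10", 80, "root", "", skip_message=True))
#     print(test_telnet("192.168.1.10", 23, "root", ""))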


if __name__ == "__main__":
    pass