Skip to content
This repository has been archived by the owner on Dec 4, 2023. It is now read-only.

Commit

Permalink
Update
Browse files Browse the repository at this point in the history
-Add multi folder/Team Drive search
-Improve search
-Improve stop duplicate
-Other minor changes

Signed-off-by: anas <[email protected]>
  • Loading branch information
anasty17 committed Sep 4, 2021
1 parent 92340bc commit cd7349c
Show file tree
Hide file tree
Showing 11 changed files with 219 additions and 96 deletions.
82 changes: 66 additions & 16 deletions bot/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import random
import string
import subprocess
import requests

import aria2p
import qbittorrentapi as qba
Expand Down Expand Up @@ -35,9 +36,13 @@

CONFIG_FILE_URL = os.environ.get('CONFIG_FILE_URL', None)
if CONFIG_FILE_URL is not None:
out = subprocess.run(["wget", "-q", "-O", "config.env", CONFIG_FILE_URL])
if out.returncode != 0:
logging.error(out)
res = requests.get(CONFIG_FILE_URL)
if res.status_code == 200:
with open('config.env', 'wb') as f:
f.truncate(0)
f.write(res.content)
else:
logging.error(res.status_code)

load_dotenv('config.env')

Expand All @@ -48,7 +53,9 @@
subprocess.run(["qbittorrent-nox", "-d", "--profile=."])

Interval = []

DRIVES_NAMES = []
DRIVES_IDS = []
INDEX_URLS = []

def getConfig(name: str):
    """Fetch a required configuration value from the environment.

    Raises KeyError when the variable is unset; callers wrap calls in
    try/except KeyError to treat the setting as absent.
    """
    value = os.environ[name]
    return value
Expand All @@ -60,9 +67,9 @@ def mktable():
sql = "CREATE TABLE users (uid bigint, sudo boolean DEFAULT FALSE);"
cur.execute(sql)
conn.commit()
LOGGER.info("Table Created!")
logging.info("Table Created!")
except Error as e:
LOGGER.error(e)
logging.error(e)
exit(1)

try:
Expand All @@ -88,7 +95,7 @@ def get_client() -> qba.TorrentsAPIMixIn:
#qb_client.application.set_preferences({"disk_cache":64, "incomplete_files_ext":True, "max_connec":3000, "max_connec_per_torrent":300, "async_io_threads":8, "preallocate_all":True, "upnp":True, "dl_limit":-1, "up_limit":-1, "dht":True, "pex":True, "lsd":True, "encryption":0, "queueing_enabled":True, "max_active_downloads":15, "max_active_torrents":50, "dont_count_slow_torrents":True, "bittorrent_protocol":0, "recheck_completed_torrents":True, "enable_multi_connections_from_same_ip":True, "slow_torrent_dl_rate_threshold":100,"slow_torrent_inactive_timer":600})
return qb_client
except qba.LoginFailed as e:
LOGGER.error(str(e))
logging.error(str(e))
return None


Expand Down Expand Up @@ -222,8 +229,12 @@ def get_client() -> qba.TorrentsAPIMixIn:
INDEX_URL = getConfig('INDEX_URL')
if len(INDEX_URL) == 0:
INDEX_URL = None
INDEX_URLS.append(None)
else:
INDEX_URLS.append(INDEX_URL)
except KeyError:
INDEX_URL = None
INDEX_URLS.append(None)
try:
TORRENT_DIRECT_LIMIT = getConfig('TORRENT_DIRECT_LIMIT')
if len(TORRENT_DIRECT_LIMIT) == 0:
Expand Down Expand Up @@ -332,31 +343,70 @@ def get_client() -> qba.TorrentsAPIMixIn:
if len(SERVER_PORT) == 0:
SERVER_PORT = None
except KeyError:
logging.warning('SERVER_PORT not provided!')
if IS_VPS:
logging.warning('SERVER_PORT not provided!')
SERVER_PORT = None
try:
TOKEN_PICKLE_URL = getConfig('TOKEN_PICKLE_URL')
if len(TOKEN_PICKLE_URL) == 0:
TOKEN_PICKLE_URL = None
else:
out = subprocess.run(["wget", "-q", "-O", "token.pickle", TOKEN_PICKLE_URL])
if out.returncode != 0:
logging.error(out)
res = requests.get(TOKEN_PICKLE_URL)
if res.status_code == 200:
with open('token.pickle', 'wb') as f:
f.truncate(0)
f.write(res.content)
else:
logging.error(res.status_code)
raise KeyError
except KeyError:
TOKEN_PICKLE_URL = None
pass
try:
ACCOUNTS_ZIP_URL = getConfig('ACCOUNTS_ZIP_URL')
if len(ACCOUNTS_ZIP_URL) == 0:
ACCOUNTS_ZIP_URL = None
else:
out = subprocess.run(["wget", "-q", "-O", "accounts.zip", ACCOUNTS_ZIP_URL])
if out.returncode != 0:
logging.error(out)
res = requests.get(ACCOUNTS_ZIP_URL)
if res.status_code == 200:
with open('accounts.zip', 'wb') as f:
f.truncate(0)
f.write(res.content)
else:
logging.error(res.status_code)
raise KeyError
subprocess.run(["unzip", "-q", "-o", "accounts.zip"])
os.remove("accounts.zip")
except KeyError:
ACCOUNTS_ZIP_URL = None
pass
# Optionally fetch the multi-search list file ("drive_folder") from a URL.
# MULTI_SEARCH_URL should point to a text file listing extra Drive
# folders/Team Drives to search; the file is parsed where 'drive_folder'
# is read later during startup.
try:
    MULTI_SEARCH_URL = getConfig('MULTI_SEARCH_URL')
    if len(MULTI_SEARCH_URL) == 0:
        MULTI_SEARCH_URL = None
    else:
        res = requests.get(MULTI_SEARCH_URL)
        if res.status_code == 200:
            # 'wb' mode already truncates on open; truncate(0) is
            # redundant but harmless.
            with open('drive_folder', 'wb') as f:
                f.truncate(0)
                f.write(res.content)
        else:
            # Download failed: log the HTTP status and reuse the shared
            # KeyError path so the setting is treated as absent.
            logging.error(res.status_code)
            raise KeyError
except KeyError:
    pass

# Seed the search targets with the primary drive, then append any extra
# drives listed in 'drive_folder'. Each non-empty line is expected to be
# whitespace-separated as: NAME DRIVE_ID [INDEX_URL], where underscores
# in NAME are rendered as spaces and INDEX_URL is optional.
DRIVES_NAMES.append("Main")
DRIVES_IDS.append(parent_id)
if os.path.exists('drive_folder'):
    with open('drive_folder', 'r+') as f:
        lines = f.readlines()
        for line in lines:
            temp = line.strip().split()
            DRIVES_NAMES.append(temp[0].replace("_", " "))
            DRIVES_IDS.append(temp[1])
            try:
                INDEX_URLS.append(temp[2])
            except IndexError as e:
                # No index URL for this drive; keep lists aligned by index.
                INDEX_URLS.append(None)

updater = tg.Updater(token=BOT_TOKEN)
bot = updater.bot
Expand Down
7 changes: 4 additions & 3 deletions bot/helper/mirror_utils/download_utils/aria2_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def __init__(self):
@new_thread
def __onDownloadStarted(self, api, gid):
if STOP_DUPLICATE or TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None:
sleep(2)
sleep(1)
dl = getDownloadByGid(gid)
download = aria2.get_download(gid)
if STOP_DUPLICATE and dl is not None:
Expand All @@ -27,14 +27,15 @@ def __onDownloadStarted(self, api, gid):
if dl.getListener().extract:
smsg = None
else:
gdrive = GoogleDriveHelper(None)
smsg, button = gdrive.drive_list(sname)
gdrive = GoogleDriveHelper()
smsg, button = gdrive.drive_list(sname, True)
if smsg:
dl.getListener().onDownloadError('File/Folder already available in Drive.\n\n')
aria2.remove([download], force=True)
sendMarkup("Here are the search results:", dl.getListener().bot, dl.getListener().update, button)
return
if (TORRENT_DIRECT_LIMIT is not None or TAR_UNZIP_LIMIT is not None) and dl is not None:
sleep(1)
size = aria2.get_download(gid).total_length
if dl.getListener().isTar or dl.getListener().extract:
is_tar_ext = True
Expand Down
2 changes: 1 addition & 1 deletion bot/helper/mirror_utils/download_utils/mega_downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ def add_download(mega_link: str, path: str, listener):
smsg = None
else:
gd = GoogleDriveHelper()
smsg, button = gd.drive_list(mname)
smsg, button = gd.drive_list(mname, True)
if smsg:
msg1 = "File/Folder is already available in Drive.\nHere are the search results:"
sendMarkup(msg1, listener.bot, listener.update, button)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def add_download(self, message, path, filename):
smsg = None
else:
gd = GoogleDriveHelper()
smsg, button = gd.drive_list(name)
smsg, button = gd.drive_list(name, True)
if smsg:
sendMarkup("File/Folder is already available in Drive.\nHere are the search results:", self.__listener.bot, self.__listener.update, button)
return
Expand Down
149 changes: 86 additions & 63 deletions bot/helper/mirror_utils/upload_utils/gdriveTools.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
from bot.helper.telegram_helper import button_build
from telegraph import Telegraph
from bot import parent_id, DOWNLOAD_DIR, IS_TEAM_DRIVE, INDEX_URL, \
USE_SERVICE_ACCOUNTS, telegraph_token, BUTTON_FOUR_NAME, BUTTON_FOUR_URL, BUTTON_FIVE_NAME, BUTTON_FIVE_URL, BUTTON_SIX_NAME, BUTTON_SIX_URL, SHORTENER, SHORTENER_API, VIEW_LINK
USE_SERVICE_ACCOUNTS, telegraph_token, BUTTON_FOUR_NAME, BUTTON_FOUR_URL, BUTTON_FIVE_NAME, BUTTON_FIVE_URL, BUTTON_SIX_NAME, BUTTON_SIX_URL, SHORTENER, SHORTENER_API, VIEW_LINK, DRIVES_NAMES, DRIVES_IDS, INDEX_URLS
from bot.helper.ext_utils.bot_utils import get_readable_file_size, setInterval
from bot.helper.ext_utils.fs_utils import get_mime_type, get_path_size
from bot.helper.ext_utils.shortenurl import short_url
Expand Down Expand Up @@ -372,10 +372,11 @@ def getFilesByFolderId(self,folder_id):
q=q,
spaces='drive',
pageSize=200,
fields='nextPageToken, files(id, name, mimeType,size)', corpora='allDrives', orderBy='folder, name',
fields='nextPageToken, files(id, name, mimeType,size)',
corpora='allDrives',
orderBy='folder, name',
pageToken=page_token).execute()
for file in response.get('files', []):
files.append(file)
files.extend(response.get('files', []))
page_token = response.get('nextPageToken', None)
if page_token is None:
break
Expand Down Expand Up @@ -593,85 +594,107 @@ def edit_telegraph(self):
return


def escapes(self, str):
chars = ['\\', "'", '"', r'\a', r'\b', r'\f', r'\n', r'\r', r'\t']
for char in chars:
str = str.replace(char, '\\'+char)
return str
def escapes(self, str):
    """Sanitize *str* for use inside a Drive search query.

    Backslashes and quotes (and, nominally, common escape sequences)
    are replaced with spaces so they cannot break the query string.
    """
    # NOTE(review): '\\' is replaced first, so the two-character entries
    # such as r'\a' can never match afterwards; they are kept only to
    # mirror the original replacement list (behavior is unchanged).
    cleaned = str
    for unsafe in ('\\', "'", '"', r'\a', r'\b', r'\f', r'\n', r'\r', r'\s', r'\t'):
        cleaned = cleaned.replace(unsafe, ' ')
    return cleaned


def drive_list(self, fileName):
msg = ""
fileName = self.escapes(str(fileName))
def drive_query(self, parent_id, fileName):
# Create Search Query for API request.
query = f"'{parent_id}' in parents and (name contains '{fileName}')"
if self.stopDup:
query = f"'{parent_id}' in parents and name = '{fileName}' and "
else:
query = f"'{parent_id}' in parents and "
fileName = fileName.split(' ')
for name in fileName:
if name != '':
query += f"name contains '{name}' and "
query += "trashed = false"
response = self.__service.files().list(supportsTeamDrives=True,
includeTeamDriveItems=True,
q=query,
spaces='drive',
pageSize=200,
fields='files(id, name, mimeType, size)',
orderBy='name asc').execute()
return response


def drive_list(self, fileName, stopDup=False):
self.stopDup = stopDup
msg = ""
if not stopDup:
fileName = self.escapes(str(fileName))
content_count = 0
if not response["files"]:
return '', ''

msg += f'<h4>{len(response["files"])} Results: {fileName}</h4><br><br>'
for file in response.get('files', []):
if file.get('mimeType') == "application/vnd.google-apps.folder": # Detect Whether Current Entity is a Folder or File.
furl = f"https://drive.google.com/drive/folders/{file.get('id')}"
msg += f"📁 <code>{file.get('name')}<br>(folder)</code><br>"
if SHORTENER is not None and SHORTENER_API is not None:
sfurl = short_url(furl)
msg += f"<b><a href={sfurl}>Drive Link</a></b>"
else:
msg += f"<b><a href={furl}>Drive Link</a></b>"
if INDEX_URL is not None:
url_path = requests.utils.quote(f'{file.get("name")}')
url = f'{INDEX_URL}/{url_path}/'
all_contents_count = 0
Title = False
for index, parent_id in enumerate(DRIVES_IDS):
response = self.drive_query(parent_id, fileName)
if not response["files"]:
continue
if not Title:
msg += f'<h4>Search Result For: {fileName}</h4><br><br>'
Title = True
if len(DRIVES_NAMES) > 1 and DRIVES_NAMES[index] is not None:
msg += f"╾────────────╼<br><b>{DRIVES_NAMES[index]}</b><br>╾────────────╼<br>"
for file in response.get('files', []):
if file.get('mimeType') == "application/vnd.google-apps.folder":
furl = f"https://drive.google.com/drive/folders/{file.get('id')}"
msg += f"📁 <code>{file.get('name')}<br>(folder)</code><br>"
if SHORTENER is not None and SHORTENER_API is not None:
siurl = short_url(url)
msg += f' <b>| <a href="{siurl}">Index Link</a></b>'
sfurl = short_url(furl)
msg += f"<b><a href={sfurl}>Drive Link</a></b>"
else:
msg += f' <b>| <a href="{url}">Index Link</a></b>'
elif file.get('mimeType') == 'application/vnd.google-apps.shortcut':
msg += f"⁍<a href='https://drive.google.com/drive/folders/{file.get('id')}'>{file.get('name')}" \
f"</a> (shortcut)"
# Excluded index link as indexes cant download or open these shortcuts
else:
furl = f"https://drive.google.com/uc?id={file.get('id')}&export=download"
msg += f"📄 <code>{file.get('name')}<br>({get_readable_file_size(int(file.get('size')))})</code><br>"
if SHORTENER is not None and SHORTENER_API is not None:
sfurl = short_url(furl)
msg += f"<b><a href={sfurl}>Drive Link</a></b>"
msg += f"<b><a href={furl}>Drive Link</a></b>"
if INDEX_URL[index] is not None:
url_path = requests.utils.quote(f'{file.get("name")}')
url = f'{INDEX_URLS[index]}/{url_path}/'
if SHORTENER is not None and SHORTENER_API is not None:
siurl = short_url(url)
msg += f' <b>| <a href="{siurl}">Index Link</a></b>'
else:
msg += f' <b>| <a href="{url}">Index Link</a></b>'
elif file.get('mimeType') == 'application/vnd.google-apps.shortcut':
msg += f"⁍<a href='https://drive.google.com/drive/folders/{file.get('id')}'>{file.get('name')}" \
f"</a> (shortcut)"
# Excluded index link as indexes cant download or open these shortcuts
else:
msg += f"<b><a href={furl}>Drive Link</a></b>"
if INDEX_URL is not None:
url_path = requests.utils.quote(f'{file.get("name")}')
url = f'{INDEX_URL}/{url_path}'
urls = f'{INDEX_URL}/{url_path}?a=view'
furl = f"https://drive.google.com/uc?id={file.get('id')}&export=download"
msg += f"📄 <code>{file.get('name')}<br>({get_readable_file_size(int(file.get('size')))})</code><br>"
if SHORTENER is not None and SHORTENER_API is not None:
siurl = short_url(url)
msg += f' <b>| <a href="{siurl}">Index Link</a></b>'
if VIEW_LINK:
siurls = short_url(urls)
msg += f' <b>| <a href="{siurls}">View Link</a></b>'
sfurl = short_url(furl)
msg += f"<b><a href={sfurl}>Drive Link</a></b>"
else:
msg += f' <b>| <a href="{url}">Index Link</a></b>'
if VIEW_LINK:
msg += f' <b>| <a href="{urls}">View Link</a></b>'
msg += '<br><br>'
content_count += 1
if content_count == TELEGRAPHLIMIT :
self.telegraph_content.append(msg)
msg = ""
content_count = 0
msg += f"<b><a href={furl}>Drive Link</a></b>"
if INDEX_URL[index] is not None:
url_path = requests.utils.quote(f'{file.get("name")}')
url = f'{INDEX_URLS[index]}/{url_path}'
urls = f'{INDEX_URLS[index]}/{url_path}?a=view'
if SHORTENER is not None and SHORTENER_API is not None:
siurl = short_url(url)
msg += f' <b>| <a href="{siurl}">Index Link</a></b>'
if VIEW_LINK:
siurls = short_url(urls)
msg += f' <b>| <a href="{siurls}">View Link</a></b>'
else:
msg += f' <b>| <a href="{url}">Index Link</a></b>'
if VIEW_LINK:
msg += f' <b>| <a href="{urls}">View Link</a></b>'
msg += '<br><br>'
content_count += 1
all_contents_count += 1
if content_count == TELEGRAPHLIMIT :
self.telegraph_content.append(msg)
msg = ""
content_count = 0

if msg != '':
self.telegraph_content.append(msg)

if len(self.telegraph_content) == 0:
return "No Result Found ❌", None
return "", None

for content in self.telegraph_content :
self.path.append(Telegraph(access_token=telegraph_token).create_page(
Expand All @@ -685,7 +708,7 @@ def drive_list(self, fileName):
if self.num_of_path > 1:
self.edit_telegraph()

msg = f"<b>Found <code>{len(response['files'])}</code> results for <code>{fileName}</code></b>"
msg = f"<b>Found <code>{all_contents_count}</code> results for <code>{fileName}</code></b>"
buttons = button_build.ButtonMaker()
buttons.buildbutton("🔎 VIEW", f"https://telegra.ph/{self.path[0]}")

Expand Down
Loading

0 comments on commit cd7349c

Please sign in to comment.