add AUR support :D
This commit is contained in:
161
pamac/aur.py
Normal file
161
pamac/aur.py
Normal file
@@ -0,0 +1,161 @@
|
||||
#! /usr/bin/python3
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import os
|
||||
import urllib
|
||||
import Namcap
|
||||
import requests
|
||||
import tarfile
|
||||
|
||||
# i18n
# Bind the 'pamac' gettext domain so _() returns translated UI strings.
import gettext
import locale
locale.bindtextdomain('pamac', '/usr/share/locale')
gettext.bindtextdomain('pamac', '/usr/share/locale')
gettext.textdomain('pamac')
_ = gettext.gettext

# Base URL of the AUR website; RPC queries go through rpc.php.
# NOTE(review): plain http — the AUR is served over https; confirm before relying on this.
aur_url = 'http://aur.archlinux.org'
rpc_url = aur_url + '/rpc.php'
# Scratch directory where source tarballs are downloaded and extracted.
srcpkgdir = '/tmp/pamac'
|
||||
|
||||
class AURPkg():
	"""Package description built from AUR data.

	Accepts either a dict from the AUR RPC interface (capitalized keys
	such as 'Name', 'Version', 'Description') or a Namcap PKGBUILD parse
	result (lowercase keys such as 'name', 'version', 'orig_depends')
	and exposes the small subset of the pyalpm package interface that
	the transaction code relies on.
	"""

	def __init__(self, pkginfo):
		# Fake database object so pkg.db.name reads 'AUR' like a repo name.
		self.db = FakeDB()
		# Installed/archive sizes are unknown for AUR packages.
		self.isize = None
		self.size = None
		self.download_size = 0
		keys = pkginfo.keys()
		# Fix: always define url/tarpath.  The original left these
		# attributes unset when the keys were missing (PKGBUILD-parsed
		# packages), which raised AttributeError on later access.
		self.url = pkginfo['URL'] if 'URL' in keys else None
		self.tarpath = pkginfo['URLPath'] if 'URLPath' in keys else None
		# RPC results use capitalized keys, PKGBUILD parses lowercase ones.
		self.name = pkginfo['name'] if 'name' in keys else pkginfo['Name']
		self.version = pkginfo['version'] if 'version' in keys else pkginfo['Version']
		self.desc = pkginfo['desc'] if 'desc' in keys else pkginfo['Description']
		if 'licenses' in keys:
			self.licenses = pkginfo['licenses']
		elif 'License' in keys:
			# RPC exposes a single license string; normalize to a list.
			self.licenses = [pkginfo['License']]
		else:
			self.licenses = []
		# Only PKGBUILD parses carry sources and (make)depends.
		self.source = pkginfo['source'] if 'source' in keys else []
		self.depends = pkginfo['orig_depends'] if 'orig_depends' in keys else []
		self.makedepends = pkginfo['orig_makedepends'] if 'orig_makedepends' in keys else []

	def __repr__(self):
		"""Display as name-version, e.g. 'pamac-1.0-1'."""
		return '{}-{}'.format(self.name, self.version)
|
||||
|
||||
class FakeDB():
	"""Minimal stand-in for a pyalpm database object.

	Only exposes a ``name`` attribute so AUR packages can be told apart
	from repository packages wherever the code inspects ``pkg.db.name``.
	"""

	def __init__(self):
		self.name = 'AUR'
|
||||
|
||||
def get_pkgs(pkgbuild_path):
	"""Parse a PKGBUILD with Namcap and return a list of AURPkg objects.

	A split PKGBUILD yields one AURPkg per subpackage; a normal one
	yields a single-element list.
	"""
	parsed = Namcap.package.load_from_pkgbuild(pkgbuild_path)
	if parsed.is_split:
		return [AURPkg(sub_infos) for sub_infos in parsed.subpackages]
	return [AURPkg(parsed)]
|
||||
|
||||
def search(args):
	"""Run an AUR RPC 'search' query for *args*.

	Returns a list of AURPkg built from the results; on any network or
	HTTP error the exception is printed and an empty list is returned.
	"""
	params = {'type':'search', 'arg':args}
	try:
		response = requests.get(rpc_url, params = params)
		response.raise_for_status()
	except Exception as e:
		print(e)
		return []
	return [AURPkg(result) for result in response.json()['results']]
|
||||
|
||||
def infos(pkgname):
	"""Run an AUR RPC 'info' query for a single package.

	Returns an AURPkg on success, or None when the request fails or
	the package is unknown to the AUR.
	"""
	spec = {'type':'info', 'arg':pkgname}
	try:
		r = requests.get(rpc_url, params = spec)
		r.raise_for_status()
	except Exception as e:
		print(e)
		# Fix: the original returned [] here but None on the not-found
		# path below; normalize both failure modes to None (still falsy
		# for callers that test the result truthiness).
		return None
	result = r.json()['results']
	if result:
		return AURPkg(result)
	print('failed to get infos about {} from AUR'.format(pkgname))
	return None
|
||||
|
||||
def get_extract_tarball(pkg):
	"""Download pkg's source tarball from the AUR and extract it.

	Returns the path of the extracted source directory
	(srcpkgdir/pkg.name), or None on any download/write/extract
	failure (the error is printed in each case).
	"""
	try:
		r = requests.get(aur_url + pkg.tarpath)
		r.raise_for_status()
	except Exception as e:
		print(e)
		return None
	if not os.path.exists(srcpkgdir):
		os.makedirs(srcpkgdir)
	tarpath = os.path.join(srcpkgdir, os.path.basename(pkg.tarpath))
	try:
		with open(tarpath, 'wb') as f:
			f.write(r.content)
	except Exception as e:
		print(e)
		return None
	try:
		# Fix: use a context manager — the original never closed the
		# tarfile handle.
		# NOTE(review): the archive comes from the network and
		# extractall() does not guard against path-traversal ('../')
		# members; consider validating member names.
		with tarfile.open(tarpath) as tar:
			tar.extractall(path = srcpkgdir)
	except Exception as e:
		print(e)
		return None
	return os.path.join(srcpkgdir, pkg.name)
|
@@ -2,18 +2,33 @@
|
||||
# -*- coding:utf-8 -*-
|
||||
|
||||
import pyalpm
|
||||
from gi.repository import Gtk, GObject
|
||||
import subprocess
|
||||
import os
|
||||
import fnmatch
|
||||
#import requests
|
||||
#from ftplib import FTP
|
||||
#from urllib.parse import urlparse
|
||||
import dbus
|
||||
from gi.repository import Gtk
|
||||
from dbus.mainloop.glib import DBusGMainLoop
|
||||
|
||||
from pamac import config, common
|
||||
from pamac import config, common, aur
|
||||
|
||||
to_remove = set()
|
||||
to_add = set()
|
||||
to_update = set()
|
||||
to_load = set()
|
||||
to_build = []
|
||||
cancel_download = False
|
||||
build_proc = None
|
||||
make_depends = set()
|
||||
base_devel = ('autoconf', 'automake', 'binutils', 'bison', 'fakeroot',
|
||||
'file', 'findutils', 'flex', 'gawk', 'gcc', 'gettext',
|
||||
'grep', 'groff', 'gzip', 'libtool', 'm4', 'make', 'patch',
|
||||
'pkg-config', 'sed', 'sudo', 'texinfo', 'util-linux', 'which')
|
||||
build_depends = set()
|
||||
handle = None
|
||||
syncdbs =None
|
||||
syncdbs = None
|
||||
localdb = None
|
||||
|
||||
# i18n
|
||||
@@ -46,8 +61,8 @@ progress_label = interface.get_object('progresslabel2')
|
||||
action_icon = interface.get_object('action_icon')
|
||||
ProgressCancelButton = interface.get_object('ProgressCancelButton')
|
||||
ProgressCloseButton = interface.get_object('ProgressCloseButton')
|
||||
progress_textview = interface.get_object('progress_textview')
|
||||
progress_expander = interface.get_object('progress_expander')
|
||||
progress_textview = interface.get_object('progress_textview')
|
||||
|
||||
progress_buffer = progress_textview.get_buffer()
|
||||
|
||||
@@ -95,6 +110,16 @@ def config_dbus_signals():
|
||||
bus.add_signal_receiver(log_error, dbus_interface = "org.manjaro.pamac", signal_name = "EmitLogError")
|
||||
bus.add_signal_receiver(log_warning, dbus_interface = "org.manjaro.pamac", signal_name = "EmitLogWarning")
|
||||
|
||||
def write_to_buffer(fd, condition):
	"""GObject IO watch callback: mirror one line of build output.

	Reads a line from the makepkg pipe, echoes it to stdout and appends
	it to the progress text buffer, pulsing the progress bar.
	"""
	if condition == GObject.IO_IN: # if there's something interesting to read
		line = fd.readline().decode(encoding='UTF-8')
		print(line.rstrip('\n'))
		progress_buffer.insert_at_cursor(line)
		progress_bar.pulse()
		return True # FUNDAMENTAL, otherwise the callback isn't recalled
	else:
		return False # Raised an error: exit
|
||||
|
||||
def action_handler(action):
	"""Show *action* as the current step in the progress dialog label."""
	progress_label.set_text(action)
|
||||
|
||||
@@ -148,7 +173,7 @@ def log_warning(msg):
|
||||
def choose_provides(data):
|
||||
virtual_dep = str(data[1])
|
||||
providers = data[0]
|
||||
choose_label.set_markup(_('<b>{pkgname} is provided by {number} packages.\nPlease choose the one(s) you want to install:</b>').format(pkgname = virtual_dep, number = str(len(providers))))
|
||||
choose_label.set_markup('<b>{}</b>'.format(_('{pkgname} is provided by {number} packages.\nPlease choose those you would like to install:').format(pkgname = virtual_dep, number = str(len(providers)))))
|
||||
choose_list.clear()
|
||||
for name in providers:
|
||||
choose_list.append([False, str(name)])
|
||||
@@ -195,11 +220,11 @@ def get_syncpkg(name):
|
||||
if pkg:
|
||||
return pkg
|
||||
|
||||
def refresh(force_update):
|
||||
progress_label.set_text(_('Refreshing')+'...')
|
||||
action_icon.set_from_file('/usr/share/pamac/icons/24x24/status/refresh-cache.png')
|
||||
progress_bar.set_text('')
|
||||
progress_bar.set_fraction(0)
|
||||
def refresh(force_update = False):
|
||||
action_handler(_('Refreshing')+'...')
|
||||
icon_handler('/usr/share/pamac/icons/24x24/status/refresh-cache.png')
|
||||
target_handler('')
|
||||
percent_handler(0)
|
||||
ProgressCancelButton.set_visible(True)
|
||||
ProgressCloseButton.set_visible(False)
|
||||
while Gtk.events_pending():
|
||||
@@ -209,30 +234,121 @@ def refresh(force_update):
|
||||
def init_transaction(**options):
|
||||
return Init(dbus.Dictionary(options, signature='sb'))
|
||||
|
||||
def check_to_build():
	"""Resolve dependencies for the AUR packages queued in to_build.

	Walks to_build (which may grow while iterating: AUR dependencies
	are appended so they get checked too), downloading each package's
	tarball and parsing its PKGBUILD.  Repo-satisfiable makedepends go
	into make_depends, repo-satisfiable depends into build_depends
	(both later merged into to_add); AUR-only depends are queued for
	building before their dependent.  Returns an error string
	(empty on success).
	"""
	global to_build
	global to_add
	global make_depends
	global build_depends
	already_checked = set()
	build_order = []
	i = 0
	error = ''
	while i < len(to_build):
		pkg = to_build[i]
		# if current pkg is not in build_order add it at the end of the list
		if not pkg.name in build_order:
			build_order.append(pkg.name)
		# download and extract tarball from AUR
		srcdir = aur.get_extract_tarball(pkg)
		if srcdir:
			# get PKGBUILD and parse it to create a new pkg object with makedeps and deps
			new_pkgs = aur.get_pkgs(srcdir + '/PKGBUILD')
			for new_pkg in new_pkgs:
				print('checking', new_pkg.name)
				# check if some makedeps must be installed
				for makedepend in new_pkg.makedepends:
					if not makedepend in already_checked:
						if not pyalpm.find_satisfier(localdb.pkgcache, makedepend):
							print('found make dep:', makedepend)
							# look for a repository package satisfying it
							for db in syncdbs:
								provider = pyalpm.find_satisfier(db.pkgcache, makedepend)
								if provider:
									break
							if provider:
								make_depends.add(provider.name)
								already_checked.add(makedepend)
				# check if some deps must be installed or built
				for depend in new_pkg.depends:
					if not depend in already_checked:
						if not pyalpm.find_satisfier(localdb.pkgcache, depend):
							print('found dep:', depend)
							for db in syncdbs:
								provider = pyalpm.find_satisfier(db.pkgcache, depend)
								if provider:
									break
							if provider:
								# current dep needs to be installed
								build_depends.add(provider.name)
								already_checked.add(depend)
							else:
								# current dep needs to be built
								if not depend in build_order:
									# get infos about it
									dep_pkg = aur.infos(depend)
									if dep_pkg:
										# add it in to_build so it will be checked
										to_build.append(dep_pkg)
										# add it in build_order before pkg
										index = build_order.index(pkg.name)
										build_order.insert(index, dep_pkg.name)
									else:
										if error:
											error += '\n'
										error += _('{pkgname} depends on {dependname} but it is not installable').format(pkgname = pkg.name, dependname = depend)
		else:
			if error:
				error += '\n'
			error += _('Failed to get {pkgname} archive from AUR').format(pkgname = pkg.name)
		i += 1
	if error:
		return error
	# add pkgname in make_depends and build_depends in to_add
	for name in make_depends:
		to_add.add(name)
	for name in build_depends:
		to_add.add(name)
	# reorder to_build following build_order
	to_build.sort(key = lambda pkg: build_order.index(pkg.name))
	print('order:', build_order)
	print('to build:',to_build)
	print('makedeps:',make_depends)
	print('builddeps:',build_depends)
	return error
|
||||
|
||||
def run():
|
||||
if to_add | to_remove | to_load:
|
||||
if to_add or to_remove or to_load or to_build:
|
||||
error = ''
|
||||
trans_flags = {'cascade' : True}
|
||||
error += init_transaction(**trans_flags)
|
||||
if to_build:
|
||||
# check if packages in to_build have deps or makedeps which need to be install first
|
||||
error += check_to_build()
|
||||
if not error:
|
||||
for name in to_add:
|
||||
error += Add(name)
|
||||
for name in to_remove:
|
||||
error += Remove(name)
|
||||
for path in to_load:
|
||||
error += Load(path)
|
||||
if to_add or to_remove or to_load:
|
||||
trans_flags = {'cascade' : True}
|
||||
error += init_transaction(**trans_flags)
|
||||
if not error:
|
||||
for name in to_add:
|
||||
error += Add(name)
|
||||
for name in to_remove:
|
||||
error += Remove(name)
|
||||
for path in to_load:
|
||||
error += Load(path)
|
||||
if not error:
|
||||
error += prepare(**trans_flags)
|
||||
if not error:
|
||||
error += prepare(False, **trans_flags)
|
||||
set_transaction_sum()
|
||||
ConfDialog.show_all()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
if error:
|
||||
Release()
|
||||
return(error)
|
||||
else:
|
||||
return (_('Nothing to do'))
|
||||
|
||||
def prepare(show_updates, **trans_flags):
|
||||
global to_add
|
||||
def prepare(**trans_flags):
|
||||
error = ''
|
||||
ret = Prepare()
|
||||
# ret type is a(ass) so [([''], '')]
|
||||
if ret[0][0]: # providers are emitted
|
||||
Release()
|
||||
for item in ret:
|
||||
@@ -251,36 +367,173 @@ def prepare(show_updates, **trans_flags):
|
||||
error = str(ret[0][1])
|
||||
elif ret[0][1]: # an error is emitted
|
||||
error = str(ret[0][1])
|
||||
if not error:
|
||||
set_transaction_sum(show_updates)
|
||||
if show_updates:
|
||||
ConfDialog.show_all()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
else:
|
||||
if len(transaction_sum) != 0:
|
||||
ConfDialog.show_all()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
else:
|
||||
finalize()
|
||||
return(error)
|
||||
|
||||
def finalize():
|
||||
global progress_buffer
|
||||
progress_label.set_text(_('Preparing')+'...')
|
||||
action_icon.set_from_file('/usr/share/pamac/icons/24x24/status/package-setup.png')
|
||||
progress_bar.set_text('')
|
||||
progress_bar.set_fraction(0)
|
||||
progress_buffer.delete(progress_buffer.get_start_iter(), progress_buffer.get_end_iter())
|
||||
def check_finished_build(data):
	"""GObject timeout callback polling the running makepkg process.

	*data* is a (path, pkg) tuple: the build directory and the AURPkg
	being built.  Returns True while the build is still running (so the
	timeout fires again) and False once it has been handled.
	"""
	global to_build
	global build_proc
	path = data[0]
	pkg = data[1]
	if build_proc.poll() is None:
		# makepkg still running: keep polling
		return True
	else:
		built = []
		# parse again PKGBUILD to have new pkg objects in case a pkgver()
		# function was used, so pkgver was changed during the build process
		new_pkgs = aur.get_pkgs(path + '/PKGBUILD')
		# find built packages
		for new_pkg in new_pkgs:
			for item in os.listdir(path):
				if os.path.isfile(os.path.join(path, item)):
					if fnmatch.fnmatch(item, '{}-{}-*.pkg.tar*'.format(new_pkg.name, new_pkg.version)):
						built.append(os.path.join(path, item))
						break
		if built:
			print('successfully built:', built)
			build_proc = None
			# drop the built package from the queue (iterate over a copy)
			to_build_pkgs = to_build.copy()
			for to_build_pkg in to_build_pkgs:
				if pkg.name == to_build_pkg.name:
					to_build.remove(pkg)
			# install built packages
			error = ''
			error += init_transaction()
			if not error:
				for pkg_path in built:
					error += Load(pkg_path)
				if not error:
					error += prepare()
					if not error:
						if To_Remove():
							# removals need user confirmation
							set_transaction_sum()
							ConfDialog.show_all()
							while Gtk.events_pending():
								Gtk.main_iteration()
						else:
							finalize()
			if error:
				Release()
		return False
|
||||
|
||||
def download(url_list, path):
|
||||
def write_file(chunk):
|
||||
nonlocal transferred
|
||||
nonlocal f
|
||||
if cancel_download:
|
||||
if ftp:
|
||||
ftp.quit()
|
||||
raise Exception('Download cancelled')
|
||||
return
|
||||
f.write(chunk)
|
||||
transferred += len(chunk)
|
||||
if total_size > 0:
|
||||
percent = round(transferred/total_size, 2)
|
||||
percent_handler(percent)
|
||||
if transferred <= total_size:
|
||||
target = '{transferred}/{size}'.format(transferred = common.format_size(transferred), size = common.format_size(total_size))
|
||||
else:
|
||||
target = ''
|
||||
target_handler(target)
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
|
||||
global cancel_download
|
||||
cancel_download = False
|
||||
ftp = None
|
||||
total_size = 0
|
||||
transferred = 0
|
||||
icon_handler('/usr/share/pamac/icons/24x24/status/package-download.png')
|
||||
ProgressCancelButton.set_visible(True)
|
||||
ProgressCloseButton.set_visible(False)
|
||||
try:
|
||||
Commit()
|
||||
except dbus.exceptions.DBusException as e:
|
||||
Release()
|
||||
parsed_urls = []
|
||||
for url in url_list:
|
||||
url_components = urlparse(url)
|
||||
if url_components.scheme:
|
||||
parsed_urls.append(url_components)
|
||||
print(parsed_urls)
|
||||
for url_components in parsed_urls:
|
||||
if url_components.scheme == 'http':
|
||||
total_size += int(requests.get(url).headers['Content-Length'])
|
||||
elif url_components.scheme == 'ftp':
|
||||
ftp = FTP(url_components.netloc)
|
||||
ftp.login('anonymous', '')
|
||||
total_size += int(ftp.size(url_components.path))
|
||||
print(total_size)
|
||||
for url_components in parsed_urls:
|
||||
filename = url_components.path.split('/')[-1]
|
||||
print(filename)
|
||||
action = _('Downloading {pkgname}').format(pkgname = filename)+'...'
|
||||
action_long = action+'\n'
|
||||
action_handler(action)
|
||||
action_long_handler(action_long)
|
||||
ProgressWindow.show()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
with open(os.path.join(path, filename), 'wb') as f:
|
||||
if url_components.scheme == 'http':
|
||||
try:
|
||||
r = requests.get(url, stream = True)
|
||||
for chunk in r.iter_content(1024):
|
||||
if cancel_download:
|
||||
raise Exception('Download cancelled')
|
||||
break
|
||||
else:
|
||||
write_file(chunk)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
cancel_download = False
|
||||
elif url_components.scheme == 'ftp':
|
||||
try:
|
||||
ftp = FTP(url_components.netloc)
|
||||
ftp.login('anonymous', '')
|
||||
ftp.retrbinary('RETR '+url_components.path, write_file, blocksize=1024)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
cancel_download = False
|
||||
|
||||
def build_next():
	"""Start building the first package queued in to_build.

	Launches ``makepkg -cf`` in the package's extracted source
	directory, wires its stdout into the progress text view and
	schedules check_finished_build() to poll for completion.
	"""
	global build_proc
	pkg = to_build[0]
	path = os.path.join(aur.srcpkgdir, pkg.name)
	new_pkgs = aur.get_pkgs(path + '/PKGBUILD')
	# sources are identical for split packages
	# (not complete) download(new_pkgs[0].source, path)
	icon_handler('/usr/share/pamac/icons/24x24/status/package-setup.png')
	target_handler('')
	action = _('Building {pkgname}').format(pkgname = pkg.name)+'...'
	action_handler(action)
	action_long_handler(action+'\n')
	ProgressCancelButton.set_visible(True)
	ProgressCloseButton.set_visible(False)
	progress_expander.set_expanded(True)
	ProgressWindow.show()
	while Gtk.events_pending():
		Gtk.main_iteration()
	# stderr merged into stdout so all output reaches write_to_buffer
	build_proc = subprocess.Popen(["makepkg", "-cf"], cwd = path, stdout = subprocess.PIPE, stderr=subprocess.STDOUT)
	GObject.io_add_watch(build_proc.stdout, GObject.IO_IN, write_to_buffer)
	GObject.timeout_add(500, check_finished_build, (path, pkg))
|
||||
|
||||
def finalize():
	"""Commit the prepared alpm transaction, or start the AUR builds.

	If the prepared transaction actually adds or removes packages,
	commit it over dbus while keeping the GTK UI responsive; otherwise,
	if AUR packages are queued, kick off the first build (subsequent
	ones are chained by check_finished_build).
	"""
	if To_Add() or To_Remove():
		global progress_buffer
		action_handler(_('Preparing')+'...')
		icon_handler('/usr/share/pamac/icons/24x24/status/package-setup.png')
		target_handler('')
		percent_handler(0)
		# clear previous transaction output
		progress_buffer.delete(progress_buffer.get_start_iter(), progress_buffer.get_end_iter())
		ProgressCancelButton.set_visible(True)
		ProgressCloseButton.set_visible(False)
		try:
			Commit()
		except dbus.exceptions.DBusException as e:
			# commit refused (e.g. authentication failed): drop the lock
			Release()
		while Gtk.events_pending():
			Gtk.main_iteration()
	elif to_build:
		# packages in to_build have no deps or makedeps
		# so we build and install the first one
		# the next ones will be built by the caller
		build_next()
|
||||
|
||||
def get_updates():
|
||||
do_syncfirst = False
|
||||
@@ -309,16 +562,25 @@ def get_updates():
|
||||
return do_syncfirst, list_first
|
||||
result = []
|
||||
for pkg in localdb.pkgcache:
|
||||
candidate = pyalpm.sync_newversion(pkg, syncdbs)
|
||||
if candidate:
|
||||
if not candidate.name in _ignorepkgs:
|
||||
if not pkg.name in _ignorepkgs:
|
||||
candidate = pyalpm.sync_newversion(pkg, syncdbs)
|
||||
if candidate:
|
||||
result.append(candidate)
|
||||
else:
|
||||
if not get_syncpkg(pkg.name):
|
||||
aur_pkg = aur.infos(pkg.name)
|
||||
if aur_pkg:
|
||||
comp = pyalpm.vercmp(aur_pkg.version, pkg.version)
|
||||
if comp == 1:
|
||||
result.append(aur_pkg)
|
||||
return do_syncfirst, result
|
||||
|
||||
def get_transaction_sum():
|
||||
transaction_dict = {'to_remove': [], 'to_install': [], 'to_update': [], 'to_reinstall': [], 'to_downgrade': []}
|
||||
to_remove = sorted(To_Remove())
|
||||
for name, version in to_remove:
|
||||
transaction_dict = {'to_remove': [], 'to_build': [], 'to_install': [], 'to_update': [], 'to_reinstall': [], 'to_downgrade': []}
|
||||
for pkg in to_build:
|
||||
transaction_dict['to_build'].append(pkg.name+' '+pkg.version)
|
||||
_to_remove = sorted(To_Remove())
|
||||
for name, version in _to_remove:
|
||||
transaction_dict['to_remove'].append(name+' '+version)
|
||||
others = sorted(To_Add())
|
||||
for name, version, dsize in others:
|
||||
@@ -333,82 +595,123 @@ def get_transaction_sum():
|
||||
transaction_dict['to_downgrade'].append((name+' '+version, dsize))
|
||||
else:
|
||||
transaction_dict['to_install'].append((name+' '+version, dsize))
|
||||
#~ if transaction_dict['to_install']:
|
||||
#~ print('To install:', [name for name, size in transaction_dict['to_install']])
|
||||
#~ if transaction_dict['to_reinstall']:
|
||||
#~ print('To reinstall:', [name for name, size in transaction_dict['to_reinstall']])
|
||||
#~ if transaction_dict['to_downgrade']:
|
||||
#~ print('To downgrade:', [name for name, size in transaction_dict['to_downgrade']])
|
||||
#~ if transaction_dict['to_remove']:
|
||||
#~ print('To remove:', [name for name in transaction_dict['to_remove']])
|
||||
#~ if transaction_dict['to_update']:
|
||||
#~ print('To update:', [name for name, size in transaction_dict['to_update']])
|
||||
if transaction_dict['to_build']:
|
||||
print('To build:', [name for name in transaction_dict['to_build']])
|
||||
if transaction_dict['to_install']:
|
||||
print('To install:', [name for name, size in transaction_dict['to_install']])
|
||||
if transaction_dict['to_reinstall']:
|
||||
print('To reinstall:', [name for name, size in transaction_dict['to_reinstall']])
|
||||
if transaction_dict['to_downgrade']:
|
||||
print('To downgrade:', [name for name, size in transaction_dict['to_downgrade']])
|
||||
if transaction_dict['to_remove']:
|
||||
print('To remove:', [name for name in transaction_dict['to_remove']])
|
||||
if transaction_dict['to_update']:
|
||||
print('To update:', [name for name, size in transaction_dict['to_update']])
|
||||
return transaction_dict
|
||||
|
||||
def set_transaction_sum(show_updates):
|
||||
def set_transaction_sum(show_updates = True):
|
||||
dsize = 0
|
||||
transaction_sum.clear()
|
||||
transaction_dict = get_transaction_sum()
|
||||
sum_top_label.set_markup(_('<big><b>Transaction Summary</b></big>'))
|
||||
if transaction_dict['to_install']:
|
||||
transaction_sum.append([_('To install')+':', transaction_dict['to_install'][0][0]])
|
||||
sum_top_label.set_markup('<big><b>{}</b></big>'.format(_('Transaction Summary')))
|
||||
if transaction_dict['to_remove']:
|
||||
transaction_sum.append([_('To remove')+':', transaction_dict['to_remove'][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_install']):
|
||||
transaction_sum.append([' ', transaction_dict['to_install'][i][0]])
|
||||
i += 1
|
||||
if transaction_dict['to_reinstall']:
|
||||
transaction_sum.append([_('To reinstall')+':', transaction_dict['to_reinstall'][0][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_reinstall']):
|
||||
transaction_sum.append([' ', transaction_dict['to_reinstall'][i][0]])
|
||||
while i < len(transaction_dict['to_remove']):
|
||||
transaction_sum.append(['', transaction_dict['to_remove'][i]])
|
||||
i += 1
|
||||
if transaction_dict['to_downgrade']:
|
||||
transaction_sum.append([_('To downgrade')+':', transaction_dict['to_downgrade'][0][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_downgrade']):
|
||||
transaction_sum.append([' ', transaction_dict['to_downgrade'][i][0]])
|
||||
transaction_sum.append(['', transaction_dict['to_downgrade'][i][0]])
|
||||
dsize += transaction_dict['to_downgrade'][i][1]
|
||||
i += 1
|
||||
if transaction_dict['to_remove']:
|
||||
transaction_sum.append([_('To remove')+':', transaction_dict['to_remove'][0]])
|
||||
if transaction_dict['to_build']:
|
||||
transaction_sum.append([_('To build')+':', transaction_dict['to_build'][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_remove']):
|
||||
transaction_sum.append([' ', transaction_dict['to_remove'][i]])
|
||||
while i < len(transaction_dict['to_build']):
|
||||
transaction_sum.append(['', transaction_dict['to_build'][i]])
|
||||
i += 1
|
||||
if transaction_dict['to_install']:
|
||||
transaction_sum.append([_('To install')+':', transaction_dict['to_install'][0][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_install']):
|
||||
transaction_sum.append(['', transaction_dict['to_install'][i][0]])
|
||||
dsize += transaction_dict['to_install'][i][1]
|
||||
i += 1
|
||||
if transaction_dict['to_reinstall']:
|
||||
transaction_sum.append([_('To reinstall')+':', transaction_dict['to_reinstall'][0][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_reinstall']):
|
||||
transaction_sum.append(['', transaction_dict['to_reinstall'][i][0]])
|
||||
dsize += transaction_dict['to_reinstall'][i][1]
|
||||
i += 1
|
||||
if show_updates:
|
||||
if transaction_dict['to_update']:
|
||||
transaction_sum.append([_('To update')+':', transaction_dict['to_update'][0][0]])
|
||||
i = 1
|
||||
while i < len(transaction_dict['to_update']):
|
||||
transaction_sum.append([' ', transaction_dict['to_update'][i][0]])
|
||||
transaction_sum.append(['', transaction_dict['to_update'][i][0]])
|
||||
dsize += transaction_dict['to_update'][i][1]
|
||||
i += 1
|
||||
dsize = 0
|
||||
for nameversion, size in transaction_dict['to_install'] + transaction_dict['to_update'] + transaction_dict['to_reinstall'] + transaction_dict['to_downgrade']:
|
||||
dsize += size
|
||||
if dsize == 0:
|
||||
sum_bottom_label.set_markup('')
|
||||
else:
|
||||
sum_bottom_label.set_markup(_('<b>Total download size: </b>')+common.format_size(dsize))
|
||||
sum_bottom_label.set_markup('<b>{} {}</b>'.format(_('Total download size:'), common.format_size(dsize)))
|
||||
|
||||
def sysupgrade(show_updates):
|
||||
def sysupgrade(show_updates = True):
|
||||
global to_update
|
||||
global to_add
|
||||
global to_remove
|
||||
do_syncfirst, updates = get_updates()
|
||||
if updates:
|
||||
to_update = set([pkg.name for pkg in updates])
|
||||
to_update.clear()
|
||||
to_add.clear()
|
||||
to_remove.clear()
|
||||
for pkg in updates:
|
||||
if pkg.db.name == 'AUR':
|
||||
to_build.append(pkg)
|
||||
else:
|
||||
to_update.add(pkg.name)
|
||||
error = ''
|
||||
if do_syncfirst:
|
||||
error += init_transaction()
|
||||
if not error:
|
||||
for name in to_update:
|
||||
error += Add(name)
|
||||
if not error:
|
||||
error += prepare()
|
||||
else:
|
||||
error += init_transaction()
|
||||
if not error:
|
||||
error += Sysupgrade()
|
||||
if to_build:
|
||||
# check if packages in to_build have deps or makedeps which need to be install first
|
||||
# grab errors differently here to not break regular updates
|
||||
_error = check_to_build()
|
||||
if to_update or to_add:
|
||||
error += init_transaction()
|
||||
if not error:
|
||||
if to_update:
|
||||
error += Sysupgrade()
|
||||
_error = ''
|
||||
for name in to_add:
|
||||
_error += Add(name)
|
||||
if _error:
|
||||
print(_error)
|
||||
if not error:
|
||||
error += prepare()
|
||||
if not error:
|
||||
error += prepare(show_updates)
|
||||
set_transaction_sum(show_updates = show_updates)
|
||||
if show_updates:
|
||||
ConfDialog.show_all()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
else:
|
||||
if len(transaction_sum) != 0:
|
||||
ConfDialog.show_all()
|
||||
while Gtk.events_pending():
|
||||
Gtk.main_iteration()
|
||||
else:
|
||||
finalize()
|
||||
if error:
|
||||
Release()
|
||||
return error
|
||||
|
Reference in New Issue
Block a user