pamac-classic/src/daemon.vala

2086 lines
65 KiB
Vala
Raw Normal View History

2014-10-22 13:44:02 -03:00
/*
* pamac-vala
*
2016-02-22 09:47:40 -03:00
* Copyright (C) 2014-2016 Guillaume Benoit <guillaume@manjaro.org>
2014-10-22 13:44:02 -03:00
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// i18n
2014-10-26 08:30:04 -03:00
const string GETTEXT_PACKAGE = "pamac";
2014-10-22 13:44:02 -03:00
2014-10-30 10:44:09 -03:00
Pamac.Daemon pamac_daemon;
2014-10-22 13:44:02 -03:00
MainLoop loop;
2015-08-20 10:11:18 -03:00
public delegate void AlpmActionDelegate ();

/**
 * Wrapper queued on the daemon's single-threaded pool: one AlpmAction
 * corresponds to one alpm operation (refresh, commit, ...).
 */
[Compact]
public class AlpmAction {
    // Borrowed reference: the delegate's owner must outlive this action.
    public unowned AlpmActionDelegate action_delegate;

    public AlpmAction (AlpmActionDelegate action_delegate) {
        this.action_delegate = action_delegate;
    }

    // Executed on the pool thread when the action is dequeued.
    public void run () {
        action_delegate ();
    }
}
2015-08-20 10:11:18 -03:00
2016-04-14 13:19:20 -03:00
// Comparator for Alpm.List sort/diff: orders packages alphabetically by name.
private int alpm_pkg_compare_name (Alpm.Package pkg_a, Alpm.Package pkg_b) {
return strcmp (pkg_a.name, pkg_b.name);
}
// Needle of the search currently being sorted (set by search_all_dbs just
// before calling Alpm.List.sort with this comparator).
private string global_search_string;

/**
 * Relevance comparator for search results: exact match first, then
 * "<needle>-" prefix, then plain prefix, then substring, then alphabetical.
 *
 * The original returned 0 ("equal") whenever pkg_a matched a tier, which
 * only orders correctly under a stable sort; returning -1/1 symmetrically
 * (and falling through when both sides match the same tier) makes the
 * ordering well-defined for any sort implementation.
 */
private int alpm_pkg_sort_search_by_relevance (Alpm.Package pkg_a, Alpm.Package pkg_b) {
    if (global_search_string != null) {
        // display exact match first
        if (pkg_a.name == global_search_string) {
            return -1;
        }
        if (pkg_b.name == global_search_string) {
            return 1;
        }
        string dash_prefix = global_search_string + "-";
        bool a_match = pkg_a.name.has_prefix (dash_prefix);
        bool b_match = pkg_b.name.has_prefix (dash_prefix);
        if (a_match != b_match) {
            return a_match ? -1 : 1;
        }
        a_match = pkg_a.name.has_prefix (global_search_string);
        b_match = pkg_b.name.has_prefix (global_search_string);
        if (a_match != b_match) {
            return a_match ? -1 : 1;
        }
        a_match = pkg_a.name.contains (global_search_string);
        b_match = pkg_b.name.contains (global_search_string);
        if (a_match != b_match) {
            return a_match ? -1 : 1;
        }
    }
    // same relevance tier: fall back to alphabetical order
    return strcmp (pkg_a.name, pkg_b.name);
}
2014-10-30 10:44:09 -03:00
namespace Pamac {
[DBus (name = "org.manjaro.pamac")]
public class Daemon: Object {
// Parsed /etc/pacman.conf; source of every new alpm handle.
private AlpmConfig alpm_config;
2016-04-14 13:19:20 -03:00
// Current libalpm handle; recreated by refresh_handle ().
private Alpm.Handle? alpm_handle;
2014-10-30 10:44:09 -03:00
// Used to block the alpm thread until the client has chosen a provider.
public Cond provider_cond;
public Mutex provider_mutex;
public int? choosen_provider;
2016-02-02 05:28:07 -03:00
// Flag carried from start_refresh () to the pool thread.
private bool force_refresh;
2015-08-20 10:11:18 -03:00
private ThreadPool<AlpmAction> thread_pool;
private Mutex databases_lock_mutex;
private Json.Array aur_updates_results;
2016-04-14 13:19:20 -03:00
// Memoised AUR RPC results, keyed by search string / package name.
private HashTable<string, Json.Array> aur_search_results;
private HashTable<string, Json.Object> aur_infos;
2015-03-18 10:53:45 -03:00
// intern_lock: we hold the db lock; extern_lock: another pacman process does.
private bool intern_lock;
private bool extern_lock;
private GLib.File lockfile;
2016-02-02 05:28:07 -03:00
private ErrorInfos current_error;
public Timer timer;
2016-02-22 09:47:40 -03:00
public Cancellable cancellable;
2016-04-14 13:19:20 -03:00
// Build-order accumulator filled by set_aur_dep_list ().
private GLib.List<string> aur_dep_list;
2014-10-30 10:44:09 -03:00
2014-12-03 12:02:14 -03:00
// DBus signals relaying alpm callbacks and job completion to clients.
public signal void emit_event (uint primary_event, uint secondary_event, string[] details);
2014-10-30 10:44:09 -03:00
public signal void emit_providers (string depend, string[] providers);
2016-02-02 05:28:07 -03:00
public signal void emit_progress (uint progress, string pkgname, uint percent, uint n_targets, uint current_target);
2014-10-30 10:44:09 -03:00
public signal void emit_download (string filename, uint64 xfered, uint64 total);
public signal void emit_totaldownload (uint64 total);
public signal void emit_log (uint level, string msg);
2015-03-04 11:55:36 -03:00
public signal void set_pkgreason_finished ();
2016-02-02 05:28:07 -03:00
public signal void refresh_finished (bool success);
public signal void get_updates_finished (Updates updates);
public signal void trans_prepare_finished (bool success);
public signal void trans_commit_finished (bool success);
2015-08-20 10:11:18 -03:00
public signal void get_authorization_finished (bool authorized);
2016-02-02 05:28:07 -03:00
public signal void write_pamac_config_finished (bool recurse, uint64 refresh_period, bool no_update_hide_icon,
2015-08-24 11:13:18 -03:00
bool enable_aur, bool search_aur, bool check_aur_updates,
2015-08-20 10:11:18 -03:00
bool no_confirm_build);
public signal void write_alpm_config_finished (bool checkspace);
public signal void write_mirrors_config_finished (string choosen_country, string choosen_generation_method);
public signal void generate_mirrors_list_data (string line);
public signal void generate_mirrors_list_finished ();
2014-10-30 10:44:09 -03:00
// NOTE(review): real initialisation work in a GObject constructor instead of
// Object (...) + construct {} — kept as-is since the daemon is only ever
// created once from main.
public Daemon () {
alpm_config = new AlpmConfig ("/etc/pacman.conf");
databases_lock_mutex = Mutex ();
aur_updates_results = new Json.Array ();
2016-04-14 13:19:20 -03:00
aur_search_results = new HashTable<string, Json.Array> (str_hash, str_equal);
aur_infos = new HashTable<string, Json.Object> (str_hash, str_equal);
2016-02-02 05:28:07 -03:00
timer = new Timer ();
2015-03-18 10:53:45 -03:00
intern_lock = false;
extern_lock = false;
2015-03-04 11:55:36 -03:00
// create the first alpm handle, then poll the db lock file every 500 ms
refresh_handle ();
Timeout.add (500, check_pacman_running);
2015-08-20 10:11:18 -03:00
create_thread_pool ();
2016-02-22 09:47:40 -03:00
cancellable = new Cancellable ();
2015-08-20 10:11:18 -03:00
}
2016-02-02 05:28:07 -03:00
/**
 * Imports proxy-related variables (and the user agent) from the client
 * environment into the daemon process so downloads go through the
 * user's proxy. Any other variable in the table is ignored.
 */
public void set_environment_variables (HashTable<string,string> variables) {
    string[] forwarded_keys = { "HTTP_USER_AGENT",
                                "http_proxy",
                                "https_proxy",
                                "ftp_proxy",
                                "socks_proxy",
                                "no_proxy" };
    foreach (unowned string key in forwarded_keys) {
        unowned string val;
        if (variables.lookup_extended (key, null, out val)) {
            // overwrite any value already set in the daemon environment
            Environment.set_variable (key, val, true);
        }
    }
}
// DBus getter: details of the last error recorded by an alpm action.
public ErrorInfos get_current_error () {
return current_error;
}
2015-08-20 10:11:18 -03:00
// Creates the single-threaded pool that serialises all alpm actions, so
// database access never happens concurrently.
private void create_thread_pool () {
// create a thread pool which will run alpm action one after one
try {
thread_pool = new ThreadPool<AlpmAction>.with_owned_data (
// call alpm_action.run () on thread start
(alpm_action) => {
alpm_action.run ();
},
// only one thread created so alpm action will run one after one
1,
// no exclusive thread
false
2015-08-20 10:11:18 -03:00
);
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
2014-10-30 10:44:09 -03:00
}
2014-10-22 13:44:02 -03:00
2014-12-03 12:02:14 -03:00
// (Re)creates the libalpm handle from the current pacman configuration and
// reinstalls all callbacks. Must be called after any on-disk db change so
// the handle sees fresh data.
private void refresh_handle () {
2016-04-14 13:19:20 -03:00
alpm_handle = alpm_config.get_handle ();
if (alpm_handle == null) {
2016-02-02 05:28:07 -03:00
current_error = ErrorInfos () {
2016-03-01 11:43:51 -03:00
message = _("Failed to initialize alpm library")
2016-02-02 05:28:07 -03:00
};
// signal failure to any client waiting on a transaction
trans_commit_finished (false);
2014-10-30 10:44:09 -03:00
} else {
2016-04-14 13:19:20 -03:00
// wire alpm's C callbacks to the daemon's cb_* functions
alpm_handle.eventcb = (Alpm.EventCallBack) cb_event;
alpm_handle.progresscb = (Alpm.ProgressCallBack) cb_progress;
alpm_handle.questioncb = (Alpm.QuestionCallBack) cb_question;
alpm_handle.fetchcb = (Alpm.FetchCallBack) cb_fetch;
alpm_handle.totaldlcb = (Alpm.TotalDownloadCallBack) cb_totaldownload;
alpm_handle.logcb = (Alpm.LogCallBack) cb_log;
// path of alpm's db.lck, polled by check_pacman_running ()
lockfile = GLib.File.new_for_path (alpm_handle.lockfile);
2014-10-22 13:44:02 -03:00
}
}
2015-03-11 15:42:04 -03:00
/**
 * 500 ms poll: tracks the appearance/disappearance of alpm's db.lck
 * created by other pacman processes. When an external lock is released
 * the handle is refreshed to pick up any database changes.
 */
private bool check_pacman_running () {
    if (extern_lock) {
        if (!lockfile.query_exists ()) {
            // the external process released the lock: reload databases
            extern_lock = false;
            refresh_handle ();
        }
    } else if (lockfile.query_exists () && !intern_lock) {
        // a lock file appeared and it is not ours
        extern_lock = true;
    }
    // keep the timeout source alive
    return true;
}
// Asks polkit whether the DBus caller may perform org.manjaro.pamac.commit,
// allowing interactive authentication. Returns false on any error.
private async bool check_authorization (GLib.BusName sender) {
// capture the coroutine's resume point; the polkit callback re-schedules
// it on the main loop once the answer is known
SourceFunc callback = check_authorization.callback;
2015-08-20 10:11:18 -03:00
bool authorized = false;
try {
Polkit.Authority authority = Polkit.Authority.get_sync ();
Polkit.Subject subject = Polkit.SystemBusName.new (sender);
authority.check_authorization.begin (
subject,
"org.manjaro.pamac.commit",
null,
Polkit.CheckAuthorizationFlags.ALLOW_USER_INTERACTION,
null,
(obj, res) => {
try {
var result = authority.check_authorization.end (res);
authorized = result.get_is_authorized ();
} catch (GLib.Error e) {
stderr.printf ("%s\n", e.message);
}
// resume the suspended coroutine below
Idle.add ((owned) callback);
2015-08-20 10:11:18 -03:00
}
);
2016-02-02 05:28:07 -03:00
// suspend until the polkit callback fires
yield;
2015-08-20 10:11:18 -03:00
} catch (GLib.Error e) {
stderr.printf ("%s\n", e.message);
}
return authorized;
}
/**
 * Fire-and-forget authorization check; the result is delivered to the
 * client through the get_authorization_finished signal.
 */
public void start_get_authorization (GLib.BusName sender) {
    check_authorization.begin (sender, (obj, res) => {
        get_authorization_finished (check_authorization.end (res));
    });
}
2015-03-04 11:55:36 -03:00
/**
 * Writes /etc/pamac.conf with the given values when the caller is
 * authorized by polkit, then reports the effective (possibly unchanged)
 * configuration via write_pamac_config_finished.
 */
public void start_write_pamac_config (HashTable<string,Variant> new_pamac_conf, GLib.BusName sender) {
    check_authorization.begin (sender, (obj, res) => {
        var pamac_config = new Pamac.Config ("/etc/pamac.conf");
        if (check_authorization.end (res)) {
            pamac_config.write (new_pamac_conf);
            pamac_config.reload ();
        }
        // always answer, even when the write was refused
        write_pamac_config_finished (pamac_config.recurse,
                                     pamac_config.refresh_period,
                                     pamac_config.no_update_hide_icon,
                                     pamac_config.enable_aur,
                                     pamac_config.search_aur,
                                     pamac_config.check_aur_updates,
                                     pamac_config.no_confirm_build);
    });
}
2015-03-04 11:55:36 -03:00
/**
 * Writes /etc/pacman.conf with the given values when the caller is
 * authorized, reloads the configuration and the alpm handle, and reports
 * the effective checkspace flag via write_alpm_config_finished.
 */
public void start_write_alpm_config (HashTable<string,Variant> new_alpm_conf, GLib.BusName sender) {
    check_authorization.begin (sender, (obj, res) => {
        if (check_authorization.end (res)) {
            alpm_config.write (new_alpm_conf);
            alpm_config.reload ();
            // new config only takes effect through a fresh handle
            refresh_handle ();
        }
        write_alpm_config_finished ((alpm_handle.checkspace == 1));
    });
}
// Runs `pacman-mirrors -g` (regenerates the mirror list), streaming its
// merged stdout/stderr line by line to the client, then reloads the pacman
// configuration and the alpm handle. Runs on the thread pool.
private void generate_mirrors_list () {
try {
var process = new Subprocess.newv (
{"pacman-mirrors", "-g"},
SubprocessFlags.STDOUT_PIPE | SubprocessFlags.STDERR_MERGE);
var dis = new DataInputStream (process.get_stdout_pipe ());
string? line;
// reading until EOF also waits for the child to finish its output
while ((line = dis.read_line ()) != null) {
generate_mirrors_list_data (line);
}
} catch (Error e) {
stderr.printf ("Error: %s\n", e.message);
}
// the mirror list changed on disk: pick it up
alpm_config.reload ();
refresh_handle ();
generate_mirrors_list_finished ();
}
2015-08-20 10:11:18 -03:00
// Queues generate_mirrors_list () on the single-threaded alpm pool.
public void start_generate_mirrors_list () {
try {
thread_pool.add (new AlpmAction (generate_mirrors_list));
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
}
2015-03-04 11:55:36 -03:00
/**
 * Writes /etc/pacman-mirrors.conf with the given values when the caller
 * is authorized, then reports the effective country/generation method via
 * write_mirrors_config_finished.
 */
public void start_write_mirrors_config (HashTable<string,Variant> new_mirrors_conf, GLib.BusName sender) {
    var mirrors_config = new MirrorsConfig ("/etc/pacman-mirrors.conf");
    check_authorization.begin (sender, (obj, res) => {
        if (check_authorization.end (res)) {
            mirrors_config.write (new_mirrors_conf);
            mirrors_config.reload ();
        }
        // always answer, even when the write was refused
        write_mirrors_config_finished (mirrors_config.choosen_country, mirrors_config.choosen_generation_method);
    });
}
2015-03-04 11:55:36 -03:00
// Changes the install reason (explicit/dependency) of an installed package
// after polkit authorization; completion is signalled via
// set_pkgreason_finished whether or not anything changed.
public void start_set_pkgreason (string pkgname, uint reason, GLib.BusName sender) {
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
2016-04-14 13:19:20 -03:00
unowned Alpm.Package? pkg = alpm_handle.localdb.get_pkg (pkgname);
if (pkg != null) {
// writes to the local db; refresh the handle to see the change
pkg.reason = (Alpm.Package.Reason) reason;
refresh_handle ();
}
}
set_pkgreason_finished ();
});
}
2015-08-20 10:11:18 -03:00
// Synchronizes all configured sync databases (pacman -Sy[y]); runs on the
// thread pool. Success/failure is reported via refresh_finished, and the
// intern_lock flag brackets the whole operation.
private void refresh () {
2016-05-13 10:44:10 -03:00
write_log_file ("synchronizing package lists");
2015-08-20 10:11:18 -03:00
intern_lock = true;
2016-02-02 05:28:07 -03:00
current_error = ErrorInfos ();
int force = (force_refresh) ? 1 : 0;
uint success = 0;
2016-02-22 09:47:40 -03:00
cancellable.reset ();
2016-04-14 13:19:20 -03:00
unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
while (syncdbs != null) {
unowned Alpm.DB db = syncdbs.data;
2016-02-22 09:47:40 -03:00
// cancellation is checked between databases, not mid-download
if (cancellable.is_cancelled ()) {
refresh_handle ();
refresh_finished (false);
intern_lock = false;
return;
}
2016-02-02 05:28:07 -03:00
// db.update: <0 error, 0 updated, 1 already up to date
if (db.update (force) >= 0) {
2015-08-20 10:11:18 -03:00
success++;
}
2016-04-14 13:19:20 -03:00
syncdbs.next ();
2014-10-22 13:44:02 -03:00
}
2016-02-02 05:28:07 -03:00
// databases changed on disk: rebuild the handle before reporting
refresh_handle ();
2015-08-20 10:11:18 -03:00
// We should always succeed if at least one DB was upgraded - we may possibly
// fail later with unresolved deps, but that should be rare, and would be expected
if (success == 0) {
2016-04-14 13:19:20 -03:00
Alpm.Errno errno = alpm_handle.errno ();
2016-03-01 11:43:51 -03:00
current_error.errno = (uint) errno;
2016-02-02 05:28:07 -03:00
current_error.message = _("Failed to synchronize any databases");
2016-08-24 12:19:14 -03:00
if (errno != 0) {
current_error.details = { Alpm.strerror (errno) };
}
2016-02-02 05:28:07 -03:00
refresh_finished (false);
} else {
refresh_finished (true);
2015-08-20 10:11:18 -03:00
}
intern_lock = false;
2014-10-22 13:44:02 -03:00
}
2016-02-02 05:28:07 -03:00
/**
 * Queues a database refresh on the alpm thread pool.
 * @param force when true, databases are re-downloaded even if up to date
 */
public void start_refresh (bool force) {
    // remember the flag for the pool thread, then queue the job
    force_refresh = force;
    try {
        thread_pool.add (new AlpmAction (refresh));
    } catch (ThreadError e) {
        stderr.printf ("Thread Error %s\n", e.message);
    }
}
2016-04-14 13:19:20 -03:00
// Whether pacman's CheckSpace option is enabled (alpm stores it as an int
// flag). The redundant `? true : false` of the original is dropped.
public bool get_checkspace () {
    return alpm_handle.checkspace == 1;
}
// Path of alpm's db.lck for the current handle.
public string get_lockfile () {
return alpm_handle.lockfile;
}
/**
 * Returns the package names currently ignored by the handle (IgnorePkg).
 */
public string[] get_ignorepkgs () {
    string[] result = {};
    // walk alpm's singly-linked list of ignored names
    for (unowned Alpm.List<unowned string> iter = alpm_handle.ignorepkgs; iter != null; iter.next ()) {
        result += iter.data;
    }
    return result;
}
/**
 * Adds pkgname to the handle's ignore list (effective until the handle
 * is recreated; does not edit pacman.conf).
 */
public void add_ignorepkg (string pkgname) {
    alpm_handle.add_ignorepkg (pkgname);
}
/**
 * Removes pkgname from the handle's ignore list (in-memory only).
 */
public void remove_ignorepkg (string pkgname) {
    alpm_handle.remove_ignorepkg (pkgname);
}
// True when pkgname is listed in HoldPkg and must not be removed as a
// dependency. Collapsed the if/return-true/return-false into a single
// boolean expression.
public bool should_hold (string pkgname) {
    return alpm_config.get_holdpkgs ().find_custom (pkgname, strcmp) != null;
}
/**
 * Install reason of an installed package as a uint
 * (Alpm.Package.Reason value); 0 when the package is not installed.
 */
public uint get_pkg_reason (string pkgname) {
    unowned Alpm.Package? pkg = alpm_handle.localdb.get_pkg (pkgname);
    if (pkg == null) {
        return 0;
    }
    return pkg.reason;
}
/**
 * Origin (Alpm.Package.From value) of a package, preferring the installed
 * copy over a sync-database one; 0 when the package is unknown.
 */
public uint get_pkg_origin (string pkgname) {
    unowned Alpm.Package? pkg = alpm_handle.localdb.get_pkg (pkgname);
    if (pkg == null) {
        // not installed: try the sync databases
        pkg = get_syncpkg (pkgname);
    }
    if (pkg == null) {
        return 0;
    }
    return pkg.origin;
}
/**
 * Converts an alpm package into the DBus-transferable AlpmPackage struct.
 * A null input yields an all-empty struct, used by the public getters as
 * a "not found" marker.
 */
private AlpmPackage initialise_pkg_struct (Alpm.Package? alpm_pkg) {
    if (alpm_pkg == null) {
        return AlpmPackage () {
            name = "",
            version = "",
            desc = "",
            repo = ""
        };
    }
    string repo_name = "";
    if (alpm_pkg.origin == Alpm.Package.From.SYNCDB) {
        repo_name = alpm_pkg.db.name;
    } else if (alpm_pkg.origin == Alpm.Package.From.LOCALDB) {
        // an installed package does not record its repository:
        // look its name up in the sync databases
        unowned Alpm.Package? sync_pkg = get_syncpkg (alpm_pkg.name);
        if (sync_pkg != null) {
            repo_name = sync_pkg.db.name;
        }
    }
    return AlpmPackage () {
        name = alpm_pkg.name,
        version = alpm_pkg.version,
        // desc can be null
        desc = alpm_pkg.desc ?? "",
        repo = (owned) repo_name,
        size = alpm_pkg.isize,
        origin = (uint) alpm_pkg.origin
    };
}
/**
 * All packages currently installed (the whole local db).
 */
public async AlpmPackage[] get_installed_pkgs () {
    AlpmPackage[] pkgs = {};
    for (unowned Alpm.List<unowned Alpm.Package> iter = alpm_handle.localdb.pkgcache; iter != null; iter.next ()) {
        pkgs += initialise_pkg_struct (iter.data);
    }
    return pkgs;
}
/**
 * Installed packages that exist in no sync database (typically AUR or
 * locally built packages).
 */
public async AlpmPackage[] get_foreign_pkgs () {
    AlpmPackage[] pkgs = {};
    unowned Alpm.List<unowned Alpm.Package> pkgcache = alpm_handle.localdb.pkgcache;
    while (pkgcache != null) {
        unowned Alpm.Package local_pkg = pkgcache.data;
        // get_syncpkg performs the same first-match scan over all sync dbs
        // the original inlined here
        if (get_syncpkg (local_pkg.name) == null) {
            pkgs += initialise_pkg_struct (local_pkg);
        }
        pkgcache.next ();
    }
    return pkgs;
}
// Installed packages that were pulled in as dependencies but are no longer
// required or optionally wanted by anything.
public async AlpmPackage[] get_orphans () {
AlpmPackage[] pkgs = {};
unowned Alpm.List<unowned Alpm.Package> pkgcache = alpm_handle.localdb.pkgcache;
while (pkgcache != null) {
unowned Alpm.Package alpm_pkg = pkgcache.data;
if (alpm_pkg.reason == Alpm.Package.Reason.DEPEND) {
// compute_requiredby/optionalfor return owned string lists that must
// be freed with free_inner; the non-empty branches free them, the
// empty-list cases have nothing to free
Alpm.List<string> requiredby = alpm_pkg.compute_requiredby ();
if (requiredby.length == 0) {
Alpm.List<string> optionalfor = alpm_pkg.compute_optionalfor ();
if (optionalfor.length == 0) {
pkgs += initialise_pkg_struct (alpm_pkg);
} else {
optionalfor.free_inner (GLib.free);
}
} else {
requiredby.free_inner (GLib.free);
}
}
pkgcache.next ();
}
return pkgs;
}
// Installed package by exact name; empty struct when not installed.
public AlpmPackage get_installed_pkg (string pkgname) {
return initialise_pkg_struct (alpm_handle.localdb.get_pkg (pkgname));
}
// First installed package satisfying a dependency string (e.g. "foo>=1.2");
// empty struct when nothing satisfies it.
public AlpmPackage find_installed_satisfier (string depstring) {
return initialise_pkg_struct (Alpm.find_satisfier (alpm_handle.localdb.pkgcache, depstring));
}
/**
 * Exact-name lookup across the sync databases, in configured repo order;
 * the first database that has the package wins. Null when not found.
 */
private unowned Alpm.Package? get_syncpkg (string name) {
    unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
    while (syncdbs != null) {
        unowned Alpm.Package? pkg = syncdbs.data.get_pkg (name);
        if (pkg != null) {
            return pkg;
        }
        syncdbs.next ();
    }
    return null;
}
// Sync-database package by exact name; empty struct when not found.
public AlpmPackage get_sync_pkg (string pkgname) {
return initialise_pkg_struct (get_syncpkg (pkgname));
}
2016-05-13 10:44:10 -03:00
/**
 * First package in any sync database satisfying a dependency string,
 * scanning repos in configured order. Null when nothing satisfies it.
 */
private unowned Alpm.Package? find_dbs_satisfier (string depstring) {
    unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
    while (syncdbs != null) {
        unowned Alpm.Package? pkg = Alpm.find_satisfier (syncdbs.data.pkgcache, depstring);
        if (pkg != null) {
            return pkg;
        }
        syncdbs.next ();
    }
    return null;
}
2016-04-14 13:19:20 -03:00
/**
 * DBus wrapper: resolves a dependency string against the sync databases
 * and returns it as a transferable struct (empty when unsatisfied).
 */
public AlpmPackage find_sync_satisfier (string depstring) {
    return initialise_pkg_struct (find_dbs_satisfier (depstring));
}
// Searches local and sync databases for all space-separated terms of
// search_string, deduplicates by package name and sorts by relevance.
private Alpm.List<unowned Alpm.Package> search_all_dbs (string search_string) {
// each whitespace-separated word becomes a search needle
Alpm.List<unowned string> needles = null;
string[] splitted = search_string.split (" ");
foreach (unowned string part in splitted) {
needles.add (part);
}
Alpm.List<unowned Alpm.Package> result = alpm_handle.localdb.search (needles);
Alpm.List<unowned Alpm.Package> syncpkgs = null;
unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
while (syncdbs != null) {
unowned Alpm.DB db = syncdbs.data;
if (syncpkgs.length == 0) {
syncpkgs = db.search (needles);
} else {
// diff drops packages already collected from earlier repos,
// so the first repo providing a name wins
syncpkgs.join (db.search (needles).diff (syncpkgs, (Alpm.List.CompareFunc) alpm_pkg_compare_name));
}
syncdbs.next ();
}
// likewise, installed packages shadow their sync-db copies
result.join (syncpkgs.diff (result, (Alpm.List.CompareFunc) alpm_pkg_compare_name));
// use custom sort function
global_search_string = search_string;
result.sort (result.length, (Alpm.List.CompareFunc) alpm_pkg_sort_search_by_relevance);
2016-04-14 13:19:20 -03:00
return result;
}
/**
 * DBus search entry point: relevance-sorted matches from the local db and
 * all sync databases, as transferable structs.
 */
public async AlpmPackage[] search_pkgs (string search_string) {
    AlpmPackage[] result = {};
    Alpm.List<unowned Alpm.Package> alpm_pkgs = search_all_dbs (search_string);
    for (unowned Alpm.List<unowned Alpm.Package> iter = alpm_pkgs; iter != null; iter.next ()) {
        result += initialise_pkg_struct (iter.data);
    }
    return result;
}
// Converts one AUR RPC result object into the transferable AURPackage
// struct. The original called get_null_member ("Description"), which logs
// a critical and misbehaves when the member is absent entirely; using
// get_member + is_null handles both "missing" and "json null" safely.
AURPackage initialise_aur_struct (Json.Object json_object) {
    // Description can be null in AUR RPC results
    string desc = "";
    unowned Json.Node? desc_node = json_object.get_member ("Description");
    if (desc_node != null && !desc_node.is_null ()) {
        desc = desc_node.get_string ();
    }
    return AURPackage () {
        name = json_object.get_string_member ("Name"),
        version = json_object.get_string_member ("Version"),
        desc = (owned) desc,
        popularity = json_object.get_double_member ("Popularity")
    };
}
/**
 * Searches the AUR for search_string, memoising RPC results per search
 * string for the daemon's lifetime. Packages that also exist in official
 * repositories are filtered out.
 */
public async AURPackage[] search_in_aur (string search_string) {
    if (!aur_search_results.contains (search_string)) {
        Json.Array pkgs = yield AUR.search (search_string.split (" "));
        aur_search_results.insert (search_string, pkgs);
    }
    AURPackage[] result = {};
    Json.Array aur_pkgs = aur_search_results.get (search_string);
    aur_pkgs.foreach_element ((array, index, node) => {
        Json.Object aur_pkg = node.get_object ();
        // remove results which exist in repos
        if (get_syncpkg (aur_pkg.get_string_member ("Name")) == null) {
            // reuse aur_pkg instead of extracting the object a second time
            result += initialise_aur_struct (aur_pkg);
        }
    });
    return result;
}
// Fetches (and memoises) the full AUR RPC info record for pkgname and maps
// it onto the transferable AURPackageDetails struct. All fields default to
// empty/zero when the package is unknown to the AUR.
public async AURPackageDetails get_aur_details (string pkgname) {
string name = "";
string version = "";
string desc = "";
double popularity = 0;
string packagebase = "";
string url = "";
string maintainer = "";
int64 firstsubmitted = 0;
int64 lastmodified = 0;
int64 outofdate = 0;
int64 numvotes = 0;
string[] licenses = {};
string[] depends = {};
string[] makedepends = {};
string[] checkdepends = {};
string[] optdepends = {};
string[] provides = {};
string[] replaces = {};
string[] conflicts = {};
var details = AURPackageDetails ();
// one RPC round trip per package name, cached for the daemon lifetime
if (!aur_infos.contains (pkgname)) {
Json.Array results = yield AUR.multiinfo ({pkgname});
if (results.get_length () > 0) {
aur_infos.insert (pkgname, results.get_object_element (0));
}
}
unowned Json.Object? json_object = aur_infos.lookup (pkgname);
if (json_object != null) {
// name
name = json_object.get_string_member ("Name");
// version
version = json_object.get_string_member ("Version");
// desc can be null
if (!json_object.get_null_member ("Description")) {
2016-04-16 04:43:14 -03:00
desc = json_object.get_string_member ("Description");
2016-04-14 13:19:20 -03:00
}
popularity = json_object.get_double_member ("Popularity");
// packagebase
packagebase = json_object.get_string_member ("PackageBase");
// url can be null
// NOTE(review): members checked with node.is_null () are assumed always
// present in the RPC answer (get_member would return null if absent and
// is_null () would then crash) — verify against the AUR RPC schema
unowned Json.Node? node = json_object.get_member ("URL");
if (!node.is_null ()) {
2016-04-16 04:43:14 -03:00
url = node.get_string ();
2016-04-14 13:19:20 -03:00
}
// maintainer can be null
node = json_object.get_member ("Maintainer");
if (!node.is_null ()) {
maintainer = node.get_string ();
}
// firstsubmitted
firstsubmitted = json_object.get_int_member ("FirstSubmitted");
// lastmodified
lastmodified = json_object.get_int_member ("LastModified");
// outofdate can be null
node = json_object.get_member ("OutOfDate");
if (!node.is_null ()) {
outofdate = node.get_int ();
}
//numvotes
numvotes = json_object.get_int_member ("NumVotes");
// licenses
node = json_object.get_member ("License");
if (!node.is_null ()) {
node.get_array ().foreach_element ((array, index, _node) => {
licenses += _node.get_string ();
});
} else {
licenses += _("Unknown");
}
// depends
// NOTE(review): the array members below are checked for absence
// (node != null) but not for a json null value — inconsistent with the
// is_null () checks above; confirm which members can be json null
node = json_object.get_member ("Depends");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
depends += _node.get_string ();
});
}
// optdepends
node = json_object.get_member ("OptDepends");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
optdepends += _node.get_string ();
});
}
// makedepends
node = json_object.get_member ("MakeDepends");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
makedepends += _node.get_string ();
});
}
// checkdepends
node = json_object.get_member ("CheckDepends");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
checkdepends += _node.get_string ();
});
}
// provides
node = json_object.get_member ("Provides");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
provides += _node.get_string ();
});
}
// replaces
node = json_object.get_member ("Replaces");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
replaces += _node.get_string ();
});
}
// conflicts
node = json_object.get_member ("Conflicts");
if (node != null) {
node.get_array ().foreach_element ((array, index, _node) => {
conflicts += _node.get_string ();
});
}
}
// move the collected values into the struct without copying
details.name = (owned) name;
details.version = (owned) version ;
details.desc = (owned) desc;
details.popularity = popularity;
details.packagebase = (owned) packagebase;
details.url = (owned) url;
details.maintainer = (owned) maintainer ;
details.firstsubmitted = firstsubmitted;
details.lastmodified = lastmodified;
details.outofdate = outofdate;
details.numvotes = numvotes;
details.licenses = (owned) licenses;
details.depends = (owned) depends;
details.optdepends = (owned) optdepends;
details.checkdepends = (owned) checkdepends;
details.makedepends = (owned) makedepends;
details.provides = (owned) provides;
details.replaces = (owned) replaces;
details.conflicts = (owned) conflicts;
return details;
}
/**
 * Computes the ordered list of AUR packages to build for pkgname,
 * dependencies before dependents. Empty when resolution fails.
 */
public async string[] get_aur_build_list (string pkgname) {
    string[] results = {};
    // fresh accumulator for this resolution run
    aur_dep_list = new GLib.List<string> ();
    if (yield set_aur_dep_list (pkgname)) {
        foreach (unowned string name in aur_dep_list) {
            results += name;
        }
    }
    return results;
}
/**
 * Recursively fills aur_dep_list with the AUR build order for pkgname.
 * Dependencies already satisfiable from the local db or a sync db are
 * skipped; pure AUR dependencies are resolved depth-first so they end up
 * before their dependents. Returns false as soon as a name is unknown to
 * the AUR.
 *
 * Fixes: the original indexed element 0 of a possibly empty multiinfo
 * result; the three identical dependency loops are factored into
 * check_aur_dep_group.
 */
private async bool set_aur_dep_list (string pkgname) {
    Json.Array results = yield AUR.multiinfo ({pkgname});
    if (results.get_length () == 0) {
        stdout.printf ("can't find %s in AUR\n", pkgname);
        return false;
    }
    Json.Object json_object = results.get_object_element (0);
    if (json_object == null) {
        stdout.printf ("can't find %s in AUR\n", pkgname);
        return false;
    }
    // add aur pkg to global list or move it to the end of the list
    unowned GLib.List<string> element = aur_dep_list.find_custom (pkgname, strcmp);
    if (element != null) {
        aur_dep_list.delete_link (element);
    }
    aur_dep_list.append (pkgname);
    // resolve each dependency group; stop at the first failure
    bool success = yield check_aur_dep_group (json_object, "MakeDepends");
    if (success) {
        success = yield check_aur_dep_group (json_object, "Depends");
    }
    if (success) {
        success = yield check_aur_dep_group (json_object, "CheckDepends");
    }
    return success;
}

// Helper: walks one dependency array ("Depends"/"MakeDepends"/"CheckDepends")
// of an AUR info object and recurses into set_aur_dep_list for every entry
// that no alpm database can satisfy. Returns false on the first failure.
private async bool check_aur_dep_group (Json.Object json_object, string member) {
    unowned Json.Node? node = json_object.get_member (member);
    if (node == null) {
        return true;
    }
    GLib.List<unowned Json.Node> list = node.get_array ().get_elements ();
    foreach (unowned Json.Node? _node in list) {
        unowned string depstring = _node.get_string ();
        if (Alpm.find_satisfier (alpm_handle.localdb.pkgcache, depstring) == null
                && find_dbs_satisfier (depstring) == null) {
            bool ok = yield set_aur_dep_list (depstring);
            if (!ok) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Names of the configured sync repositories, in pacman.conf order.
 */
public string[] get_repos_names () {
    string[] repos_names = {};
    for (unowned Alpm.List<unowned Alpm.DB> iter = alpm_handle.syncdbs; iter != null; iter.next ()) {
        repos_names += iter.data.name;
    }
    return repos_names;
}
/**
 * All packages of one sync repository. Installed packages are reported
 * from the local db so version/size reflect the installation.
 */
public async AlpmPackage[] get_repo_pkgs (string repo) {
    AlpmPackage[] pkgs = {};
    unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
    while (syncdbs != null) {
        unowned Alpm.DB db = syncdbs.data;
        if (db.name == repo) {
            unowned Alpm.List<unowned Alpm.Package> pkgcache = db.pkgcache;
            while (pkgcache != null) {
                unowned Alpm.Package sync_pkg = pkgcache.data;
                unowned Alpm.Package? local_pkg = alpm_handle.localdb.get_pkg (sync_pkg.name);
                if (local_pkg != null) {
                    pkgs += initialise_pkg_struct (local_pkg);
                } else {
                    pkgs += initialise_pkg_struct (sync_pkg);
                }
                pkgcache.next ();
            }
            // repo names are unique: no need to scan further
            break;
        }
        syncdbs.next ();
    }
    return pkgs;
}
/**
 * Union of package-group names across the local db and all sync dbs,
 * without duplicates.
 */
public string[] get_groups_names () {
    string[] groups_names = {};
    unowned Alpm.List<unowned Alpm.Group> groupcache = alpm_handle.localdb.groupcache;
    while (groupcache != null) {
        unowned Alpm.Group group = groupcache.data;
        if (!(group.name in groups_names)) {
            groups_names += group.name;
        }
        groupcache.next ();
    }
    unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
    while (syncdbs != null) {
        // reuse the same cursor over each sync db's group cache
        groupcache = syncdbs.data.groupcache;
        while (groupcache != null) {
            unowned Alpm.Group group = groupcache.data;
            if (!(group.name in groups_names)) {
                groups_names += group.name;
            }
            groupcache.next ();
        }
        syncdbs.next ();
    }
    return groups_names;
}
// Collects the members of a package group from the local db and every sync
// db, deduplicated by package name (local copies win since they are added
// first).
private Alpm.List<unowned Alpm.Package> group_pkgs (string group_name) {
Alpm.List<unowned Alpm.Package> result = null;
unowned Alpm.Group? grp = alpm_handle.localdb.get_group (group_name);
if (grp != null) {
unowned Alpm.List<unowned Alpm.Package> packages = grp.packages;
while (packages != null) {
unowned Alpm.Package pkg = packages.data;
result.add (pkg);
packages.next ();
}
}
unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
while (syncdbs != null) {
unowned Alpm.DB db = syncdbs.data;
grp = db.get_group (group_name);
if (grp != null) {
unowned Alpm.List<unowned Alpm.Package> packages = grp.packages;
while (packages != null) {
unowned Alpm.Package pkg = packages.data;
// O(n^2) membership check; group sizes are small in practice
if (result.find (pkg, (Alpm.List.CompareFunc) alpm_pkg_compare_name) == null) {
result.add (pkg);
}
packages.next ();
}
}
syncdbs.next ();
}
return result;
}
/**
 * DBus wrapper: members of a package group as transferable structs.
 */
public async AlpmPackage[] get_group_pkgs (string groupname) {
    AlpmPackage[] pkgs = {};
    Alpm.List<unowned Alpm.Package> alpm_pkgs = group_pkgs (groupname);
    for (unowned Alpm.List<unowned Alpm.Package> iter = alpm_pkgs; iter != null; iter.next ()) {
        pkgs += initialise_pkg_struct (iter.data);
    }
    return pkgs;
}
/**
 * Optional dependencies of pkgname that no installed package currently
 * satisfies. The package itself may come from the local db or a sync db.
 */
public string[] get_pkg_uninstalled_optdeps (string pkgname) {
    string[] optdeps = {};
    unowned Alpm.Package? alpm_pkg = alpm_handle.localdb.get_pkg (pkgname);
    if (alpm_pkg == null) {
        alpm_pkg = get_syncpkg (pkgname);
    }
    if (alpm_pkg == null) {
        return optdeps;
    }
    unowned Alpm.List<unowned Alpm.Depend> optdepends = alpm_pkg.optdepends;
    while (optdepends != null) {
        unowned Alpm.Depend optdep = optdepends.data;
        // only the bare name is checked, ignoring any version constraint
        if (Alpm.find_satisfier (alpm_handle.localdb.pkgcache, optdep.name) == null) {
            optdeps += optdep.compute_string ();
        }
        optdepends.next ();
    }
    return optdeps;
}
// Collect the full details of a package, looking first in the local
// database and falling back to the sync databases.
// Fields that do not apply (e.g. installdate for a sync pkg) stay "".
public AlpmPackageDetails get_pkg_details (string pkgname) {
	var details = AlpmPackageDetails ();
	string name = "";
	string version = "";
	string desc = "";
	string url = "";
	string repo = "";
	string has_signature = "";
	string reason = "";
	string packager = "";
	string builddate = "";
	string installdate = "";
	string[] groups = {};
	string[] backups = {};
	string[] files = {};
	string[] licenses = {};
	string[] depends = {};
	string[] optdepends = {};
	string[] requiredby = {};
	string[] optionalfor = {};
	string[] provides = {};
	string[] replaces = {};
	string[] conflicts = {};
	unowned Alpm.Package? alpm_pkg = alpm_handle.localdb.get_pkg (pkgname);
	if (alpm_pkg == null) {
		alpm_pkg = get_syncpkg (pkgname);
	}
	if (alpm_pkg != null) {
		name = alpm_pkg.name;
		version = alpm_pkg.version;
		// desc, url and packager may be null
		desc = alpm_pkg.desc ?? "";
		url = alpm_pkg.url ?? "";
		packager = alpm_pkg.packager ?? "";
		details.origin = (uint) alpm_pkg.origin;
		// groups
		unowned Alpm.List list = alpm_pkg.groups;
		while (list != null) {
			groups += ((Alpm.List<unowned string>) list).data;
			list.next ();
		}
		// licenses
		list = alpm_pkg.licenses;
		while (list != null) {
			licenses += ((Alpm.List<unowned string>) list).data;
			list.next ();
		}
		// build date, formatted in the local timezone
		GLib.Time time = GLib.Time.local ((time_t) alpm_pkg.builddate);
		builddate = time.format ("%a %d %b %Y %X %Z");
		if (alpm_pkg.origin == Alpm.Package.From.LOCALDB) {
			// repo: the sync db the installed pkg came from, if any
			unowned Alpm.Package? sync_pkg = get_syncpkg (alpm_pkg.name);
			if (sync_pkg != null) {
				repo = sync_pkg.db.name;
			}
			// install reason
			if (alpm_pkg.reason == Alpm.Package.Reason.EXPLICIT) {
				reason = _("Explicitly installed");
			} else if (alpm_pkg.reason == Alpm.Package.Reason.DEPEND) {
				reason = _("Installed as a dependency for another package");
			} else {
				reason = _("Unknown");
			}
			// install date
			time = GLib.Time.local ((time_t) alpm_pkg.installdate);
			installdate = time.format ("%a %d %b %Y %X %Z");
			// backup files, stored without the leading slash
			list = alpm_pkg.backups;
			while (list != null) {
				backups += "/" + ((Alpm.List<unowned Alpm.Backup>) list).data.name;
				list.next ();
			}
			// requiredby: computed list owns its strings
			Alpm.List<string> pkg_requiredby = alpm_pkg.compute_requiredby ();
			list = pkg_requiredby;
			while (list != null) {
				requiredby += ((Alpm.List<unowned string>) list).data;
				list.next ();
			}
			pkg_requiredby.free_inner (GLib.free);
			// optionalfor: computed list owns its strings
			Alpm.List<string> pkg_optionalfor = alpm_pkg.compute_optionalfor ();
			list = pkg_optionalfor;
			while (list != null) {
				optionalfor += ((Alpm.List<unowned string>) list).data;
				list.next ();
			}
			pkg_optionalfor.free_inner (GLib.free);
			// file list, skipping directories (their names end with "/")
			unowned Alpm.FileList filelist = alpm_pkg.files;
			Alpm.File* file_ptr = filelist.files;
			for (size_t i = 0; i < filelist.count; i++, file_ptr++) {
				if (!file_ptr->name.has_suffix ("/")) {
					files += "/" + file_ptr->name;
				}
			}
		} else if (alpm_pkg.origin == Alpm.Package.From.SYNCDB) {
			repo = alpm_pkg.db.name;
			has_signature = alpm_pkg.base64_sig != null ? _("Yes") : _("No");
		}
		// depends
		list = alpm_pkg.depends;
		while (list != null) {
			depends += ((Alpm.List<unowned Alpm.Depend>) list).data.compute_string ();
			list.next ();
		}
		// optdepends
		list = alpm_pkg.optdepends;
		while (list != null) {
			optdepends += ((Alpm.List<unowned Alpm.Depend>) list).data.compute_string ();
			list.next ();
		}
		// provides
		list = alpm_pkg.provides;
		while (list != null) {
			provides += ((Alpm.List<unowned Alpm.Depend>) list).data.compute_string ();
			list.next ();
		}
		// replaces
		list = alpm_pkg.replaces;
		while (list != null) {
			replaces += ((Alpm.List<unowned Alpm.Depend>) list).data.compute_string ();
			list.next ();
		}
		// conflicts
		list = alpm_pkg.conflicts;
		while (list != null) {
			conflicts += ((Alpm.List<unowned Alpm.Depend>) list).data.compute_string ();
			list.next ();
		}
	}
	details.name = (owned) name;
	details.version = (owned) version;
	details.desc = (owned) desc;
	details.repo = (owned) repo;
	details.url = (owned) url;
	details.packager = (owned) packager;
	details.builddate = (owned) builddate;
	details.installdate = (owned) installdate;
	details.reason = (owned) reason;
	details.has_signature = (owned) has_signature;
	details.licenses = (owned) licenses;
	details.depends = (owned) depends;
	details.optdepends = (owned) optdepends;
	details.requiredby = (owned) requiredby;
	details.optionalfor = (owned) optionalfor;
	details.provides = (owned) provides;
	details.replaces = (owned) replaces;
	details.conflicts = (owned) conflicts;
	details.groups = (owned) groups;
	details.backups = (owned) backups;
	details.files = (owned) files;
	return details;
}
2016-02-02 05:28:07 -03:00
// Compute available updates and emit get_updates_finished.
// Syncfirst packages with an update are reported alone (is_syncfirst);
// otherwise repo updates are collected and, when requested, AUR updates
// are fetched asynchronously for packages absent from every sync db.
public void start_get_updates (bool check_aur_updates) {
	UpdateInfos[] updates_infos = {};
	unowned Alpm.Package? pkg = null;
	unowned Alpm.Package? candidate = null;
	foreach (unowned string name in alpm_config.get_syncfirsts ()) {
		pkg = Alpm.find_satisfier (alpm_handle.localdb.pkgcache, name);
		if (pkg == null) {
			continue;
		}
		candidate = pkg.sync_newversion (alpm_handle.syncdbs);
		if (candidate != null) {
			var infos = UpdateInfos () {
				name = candidate.name,
				old_version = pkg.version,
				new_version = candidate.version,
				repo = candidate.db.name,
				download_size = candidate.download_size
			};
			updates_infos += (owned) infos;
		}
	}
	if (updates_infos.length != 0) {
		// syncfirst updates take precedence over everything else
		var updates = Updates () {
			is_syncfirst = true,
			repos_updates = (owned) updates_infos,
			aur_updates = {}
		};
		get_updates_finished (updates);
		return;
	}
	string[] local_pkgs = {};
	unowned Alpm.List<unowned Alpm.Package> pkgcache = alpm_handle.localdb.pkgcache;
	while (pkgcache != null) {
		unowned Alpm.Package installed_pkg = pkgcache.data;
		// skip packages in IgnorePkg or IgnoreGroup
		if (alpm_handle.should_ignore (installed_pkg) == 0) {
			candidate = installed_pkg.sync_newversion (alpm_handle.syncdbs);
			if (candidate != null) {
				var infos = UpdateInfos () {
					name = candidate.name,
					old_version = installed_pkg.version,
					new_version = candidate.version,
					repo = candidate.db.name,
					download_size = candidate.download_size
				};
				updates_infos += (owned) infos;
			} else if (check_aur_updates && (aur_updates_results.get_length () == 0)) {
				// no repo update: remember the pkg if no sync db provides it,
				// it is then a candidate for an AUR lookup
				unowned Alpm.List<unowned Alpm.DB> syncdbs = alpm_handle.syncdbs;
				while (syncdbs != null) {
					unowned Alpm.DB db = syncdbs.data;
					pkg = Alpm.find_satisfier (db.pkgcache, installed_pkg.name);
					if (pkg != null) {
						break;
					}
					syncdbs.next ();
				}
				if (pkg == null) {
					local_pkgs += installed_pkg.name;
				}
			}
		}
		pkgcache.next ();
	}
	if (check_aur_updates) {
		if (aur_updates_results.get_length () == 0) {
			// AUR results are fetched once and cached for later calls
			AUR.multiinfo.begin (local_pkgs, (obj, res) => {
				aur_updates_results = AUR.multiinfo.end (res);
				var updates = Updates () {
					is_syncfirst = false,
					repos_updates = (owned) updates_infos,
					aur_updates = get_aur_updates_infos ()
				};
				get_updates_finished (updates);
			});
		} else {
			var updates = Updates () {
				is_syncfirst = false,
				repos_updates = (owned) updates_infos,
				aur_updates = get_aur_updates_infos ()
			};
			get_updates_finished (updates);
		}
	} else {
		var updates = Updates () {
			is_syncfirst = false,
			repos_updates = (owned) updates_infos,
			aur_updates = {}
		};
		get_updates_finished (updates);
	}
}
2016-04-14 13:19:20 -03:00
// Build the list of AUR updates from the cached multiinfo results,
// comparing each AUR version against the locally installed one.
// Returns only packages whose AUR version is strictly newer.
private UpdateInfos[] get_aur_updates_infos () {
	UpdateInfos[] aur_updates_infos = {};
	aur_updates_results.foreach_element ((array, index, node) => {
		unowned Json.Object pkg_info = node.get_object ();
		unowned string name = pkg_info.get_string_member ("Name");
		unowned string new_version = pkg_info.get_string_member ("Version");
		// the pkg may have been removed since the AUR request was issued:
		// guard against a null lookup instead of crashing
		unowned Alpm.Package? local_pkg = alpm_handle.localdb.get_pkg (name);
		if (local_pkg == null) {
			return;
		}
		unowned string old_version = local_pkg.version;
		if (Alpm.pkg_vercmp (new_version, old_version) == 1) {
			var infos = UpdateInfos () {
				name = name,
				old_version = old_version,
				new_version = new_version,
				repo = ""
			};
			aur_updates_infos += (owned) infos;
		}
	});
	return aur_updates_infos;
}
2016-02-02 05:28:07 -03:00
// Initialize an alpm transaction with the given flags.
// On success intern_lock is set; on failure current_error is filled
// and false is returned.
public bool trans_init (Alpm.TransFlag transflags) {
	current_error = ErrorInfos ();
	cancellable.reset ();
	if (alpm_handle.trans_init (transflags) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to init transaction");
		if (errno != 0) {
			current_error.details = { Alpm.strerror (errno) };
		}
		return false;
	}
	intern_lock = true;
	return true;
}
2016-02-02 05:28:07 -03:00
// Mark the whole system for upgrade in the current transaction.
// enable_downgrade also allows replacing a pkg by an older sync version.
public bool trans_sysupgrade (bool enable_downgrade) {
	current_error = ErrorInfos ();
	if (alpm_handle.trans_sysupgrade (enable_downgrade ? 1 : 0) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to prepare transaction");
		if (errno != 0) {
			current_error.details = { Alpm.strerror (errno) };
		}
		return false;
	}
	return true;
}
2014-10-30 10:44:09 -03:00
2016-02-02 05:28:07 -03:00
// Add one sync package to the current transaction.
// Duplicate or ignored targets are silently accepted; any other
// error fills current_error and returns false.
private bool trans_add_pkg_real (Alpm.Package pkg) {
	current_error = ErrorInfos ();
	if (alpm_handle.trans_add_pkg (pkg) == 0) {
		return true;
	}
	Alpm.Errno errno = alpm_handle.errno ();
	if (errno == Alpm.Errno.TRANS_DUP_TARGET || errno == Alpm.Errno.PKG_IGNORED) {
		// just skip duplicate or ignored targets
		return true;
	}
	current_error.errno = (uint) errno;
	current_error.message = _("Failed to prepare transaction");
	if (errno != 0) {
		current_error.details = { "%s: %s".printf (pkg.name, Alpm.strerror (errno)) };
	}
	return false;
}
2016-02-02 05:28:07 -03:00
// Add a package (by name) to the current transaction.
// Manjaro specific: when a kernel or kernel-module package is added,
// the matching modules/kernels already installed are added too so that
// every installed kernel keeps the same module set.
public bool trans_add_pkg (string pkgname) {
	current_error = ErrorInfos ();
	unowned Alpm.Package? pkg = get_syncpkg (pkgname);
	if (pkg == null) {
		current_error.message = _("Failed to prepare transaction");
		current_error.details = { _("target not found: %s").printf (pkgname) };
		return false;
	}
	bool success = trans_add_pkg_real (pkg);
	if (success && (("linux31" in pkg.name) || ("linux4" in pkg.name))) {
		// collect installed kernel names and module suffixes
		string[] installed_kernels = {};
		string[] installed_modules = {};
		unowned Alpm.List<unowned Alpm.Package> pkgcache = alpm_handle.localdb.pkgcache;
		while (pkgcache != null) {
			unowned Alpm.Package local_pkg = pkgcache.data;
			if (("linux31" in local_pkg.name) || ("linux4" in local_pkg.name)) {
				string[] local_pkg_splitted = local_pkg.name.split ("-", 2);
				if (!(local_pkg_splitted[0] in installed_kernels)) {
					installed_kernels += local_pkg_splitted[0];
				}
				if (local_pkg_splitted.length == 2) {
					if (!(local_pkg_splitted[1] in installed_modules)) {
						installed_modules += local_pkg_splitted[1];
					}
				}
			}
			pkgcache.next ();
		}
		string[] splitted = pkg.name.split ("-", 2);
		if (splitted.length == 2) {
			// we are adding a module:
			// add the same module for every other installed kernel
			foreach (unowned string installed_kernel in installed_kernels) {
				string module = installed_kernel + "-" + splitted[1];
				unowned Alpm.Package? module_pkg = get_syncpkg (module);
				if (module_pkg != null) {
					trans_add_pkg_real (module_pkg);
				}
			}
		} else if (splitted.length == 1) {
			// we are adding a kernel:
			// add all modules already installed for other kernels
			foreach (unowned string installed_module in installed_modules) {
				string module = splitted[0] + "-" + installed_module;
				unowned Alpm.Package? module_pkg = get_syncpkg (module);
				if (module_pkg != null) {
					trans_add_pkg_real (module_pkg);
				}
			}
		}
	}
	return success;
}
2014-10-22 13:44:02 -03:00
2016-02-02 05:28:07 -03:00
// Load a package file (tarball) and add it to the current transaction.
// The loaded package is owned by the transaction on success; on a failed
// trans_add_pkg it must be freed here.
public bool trans_load_pkg (string pkgpath) {
	current_error = ErrorInfos ();
	Alpm.Package* pkg;
	if (alpm_handle.load_tarball (pkgpath, 1, alpm_handle.localfilesiglevel, out pkg) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to prepare transaction");
		if (errno != 0) {
			current_error.details = { "%s: %s".printf (pkgpath, Alpm.strerror (errno)) };
		}
		return false;
	}
	if (alpm_handle.trans_add_pkg (pkg) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to prepare transaction");
		if (errno != 0) {
			current_error.details = { "%s: %s".printf (pkg->name, Alpm.strerror (errno)) };
		}
		// free the package because it will not be used
		delete pkg;
		return false;
	}
	return true;
}
2014-10-30 10:44:09 -03:00
2016-02-02 05:28:07 -03:00
// Mark an installed package (by name) for removal in the current
// transaction. Fills current_error and returns false on failure.
public bool trans_remove_pkg (string pkgname) {
	current_error = ErrorInfos ();
	unowned Alpm.Package? pkg = alpm_handle.localdb.get_pkg (pkgname);
	if (pkg == null) {
		current_error.message = _("Failed to prepare transaction");
		current_error.details = { _("target not found: %s").printf (pkgname) };
		return false;
	}
	if (alpm_handle.trans_remove_pkg (pkg) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to prepare transaction");
		if (errno != 0) {
			current_error.details = { "%s: %s".printf (pkg.name, Alpm.strerror (errno)) };
		}
		return false;
	}
	return true;
}
2015-08-20 10:11:18 -03:00
// Resolve the current transaction (dependency solving) and emit
// trans_prepare_finished. On failure the per-errno error data list is
// turned into human readable details; each error item is owned by us
// and must be deleted after use. Also refuses to remove HoldPkg targets.
private void trans_prepare () {
	current_error = ErrorInfos ();
	string[] details = {};
	Alpm.List err_data;
	if (alpm_handle.trans_prepare (out err_data) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		current_error.message = _("Failed to prepare transaction");
		switch (errno) {
			case 0:
				break;
			case Alpm.Errno.PKG_INVALID_ARCH:
				details += Alpm.strerror (errno) + ":";
				unowned Alpm.List<string*> name_list = err_data;
				while (name_list != null) {
					string* pkgname = name_list.data;
					details += _("package %s does not have a valid architecture").printf (pkgname);
					delete pkgname;
					name_list.next ();
				}
				break;
			case Alpm.Errno.UNSATISFIED_DEPS:
				details += Alpm.strerror (errno) + ":";
				unowned Alpm.List<Alpm.DepMissing*> miss_list = err_data;
				while (miss_list != null) {
					Alpm.DepMissing* miss = miss_list.data;
					details += _("%s: requires %s").printf (miss->target, miss->depend.compute_string ());
					delete miss;
					miss_list.next ();
				}
				break;
			case Alpm.Errno.CONFLICTING_DEPS:
				details += Alpm.strerror (errno) + ":";
				unowned Alpm.List<Alpm.Conflict*> conflict_list = err_data;
				while (conflict_list != null) {
					Alpm.Conflict* conflict = conflict_list.data;
					string conflict_detail = _("%s and %s are in conflict").printf (conflict->package1, conflict->package2);
					// only print the reason if it contains new information
					if (conflict->reason.mod != Alpm.Depend.Mode.ANY) {
						conflict_detail += " (%s)".printf (conflict->reason.compute_string ());
					}
					details += (owned) conflict_detail;
					delete conflict;
					conflict_list.next ();
				}
				break;
			default:
				details += Alpm.strerror (errno);
				break;
		}
		current_error.details = (owned) details;
		trans_release ();
		trans_prepare_finished (false);
		return;
	}
	// search for HoldPkg entries in the removal list
	bool found_locked_pkg = false;
	unowned Alpm.List<unowned Alpm.Package> to_remove = alpm_handle.trans_to_remove ();
	while (to_remove != null) {
		unowned Alpm.Package pkg = to_remove.data;
		if (alpm_config.get_holdpkgs ().find_custom (pkg.name, strcmp) != null) {
			details += _("%s needs to be removed but it is a locked package").printf (pkg.name);
			found_locked_pkg = true;
			break;
		}
		to_remove.next ();
	}
	if (found_locked_pkg) {
		current_error.message = _("Failed to prepare transaction");
		current_error.details = (owned) details;
		trans_release ();
		trans_prepare_finished (false);
	} else {
		trans_prepare_finished (true);
	}
}
// Run trans_prepare on the alpm worker thread pool.
public void start_trans_prepare () {
	try {
		thread_pool.add (new AlpmAction (trans_prepare));
	} catch (ThreadError e) {
		stderr.printf ("Thread Error %s\n", e.message);
	}
}
2014-10-30 10:44:09 -03:00
// Client answer to a SELECT_PROVIDER question: store the chosen index
// and wake up the alpm thread blocked in cb_question.
public void choose_provider (int provider) {
	provider_mutex.lock ();
	choosen_provider = provider;
	provider_cond.signal ();
	provider_mutex.unlock ();
}
2014-10-22 13:44:02 -03:00
2016-04-14 13:19:20 -03:00
// Summarize the prepared transaction: classify each target into
// install/upgrade/downgrade/reinstall/remove by comparing versions
// against the local database.
public TransactionSummary get_transaction_summary () {
	UpdateInfos[] to_install = {};
	UpdateInfos[] to_upgrade = {};
	UpdateInfos[] to_downgrade = {};
	UpdateInfos[] to_reinstall = {};
	UpdateInfos[] to_remove = {};
	unowned Alpm.List<unowned Alpm.Package> pkgs_to_add = alpm_handle.trans_to_add ();
	while (pkgs_to_add != null) {
		unowned Alpm.Package trans_pkg = pkgs_to_add.data;
		unowned Alpm.Package? local_pkg = alpm_handle.localdb.get_pkg (trans_pkg.name);
		var infos = UpdateInfos () {
			name = trans_pkg.name,
			old_version = local_pkg != null ? local_pkg.version : "",
			new_version = trans_pkg.version,
			// if pkg was loaded from a file, pkg.db is null
			repo = trans_pkg.db != null ? trans_pkg.db.name : "",
			download_size = trans_pkg.download_size
		};
		if (local_pkg == null) {
			to_install += (owned) infos;
		} else {
			int cmp = Alpm.pkg_vercmp (trans_pkg.version, local_pkg.version);
			if (cmp == 1) {
				to_upgrade += (owned) infos;
			} else if (cmp == 0) {
				to_reinstall += (owned) infos;
			} else {
				to_downgrade += (owned) infos;
			}
		}
		pkgs_to_add.next ();
	}
	unowned Alpm.List<unowned Alpm.Package> pkgs_to_remove = alpm_handle.trans_to_remove ();
	while (pkgs_to_remove != null) {
		unowned Alpm.Package trans_pkg = pkgs_to_remove.data;
		var infos = UpdateInfos () {
			name = trans_pkg.name,
			old_version = trans_pkg.version,
			new_version = "",
			// guard against a null db, consistently with the add loop
			repo = trans_pkg.db != null ? trans_pkg.db.name : ""
		};
		to_remove += (owned) infos;
		pkgs_to_remove.next ();
	}
	var summary = TransactionSummary () {
		to_install = (owned) to_install,
		to_upgrade = (owned) to_upgrade,
		to_downgrade = (owned) to_downgrade,
		to_reinstall = (owned) to_reinstall,
		to_remove = (owned) to_remove
	};
	return summary;
}
2015-08-20 10:11:18 -03:00
// Commit the prepared transaction and emit trans_commit_finished.
// On failure the per-errno error data list is turned into human
// readable details; each item is owned by us and deleted after use.
// The handle is always released and refreshed afterwards.
private void trans_commit () {
	current_error = ErrorInfos ();
	bool success = true;
	Alpm.List err_data;
	if (alpm_handle.trans_commit (out err_data) == -1) {
		Alpm.Errno errno = alpm_handle.errno ();
		current_error.errno = (uint) errno;
		// cancelling the download returns an EXTERNAL_DOWNLOAD error
		if (errno == Alpm.Errno.EXTERNAL_DOWNLOAD && cancellable.is_cancelled ()) {
			trans_release ();
			refresh_handle ();
			trans_commit_finished (false);
			return;
		}
		current_error.message = _("Failed to commit transaction");
		string[] details = {};
		switch (errno) {
			case 0:
				break;
			case Alpm.Errno.FILE_CONFLICTS:
				details += Alpm.strerror (errno) + ":";
				unowned Alpm.List<Alpm.FileConflict*> conflict_list = err_data;
				while (conflict_list != null) {
					Alpm.FileConflict* conflict = conflict_list.data;
					switch (conflict->type) {
						case Alpm.FileConflict.Type.TARGET:
							details += _("%s exists in both %s and %s").printf (conflict->file, conflict->target, conflict->ctarget);
							break;
						case Alpm.FileConflict.Type.FILESYSTEM:
							details += _("%s: %s already exists in filesystem").printf (conflict->target, conflict->file);
							break;
					}
					delete conflict;
					conflict_list.next ();
				}
				break;
			case Alpm.Errno.PKG_INVALID:
			case Alpm.Errno.PKG_INVALID_CHECKSUM:
			case Alpm.Errno.PKG_INVALID_SIG:
			case Alpm.Errno.DLT_INVALID:
				details += Alpm.strerror (errno) + ":";
				unowned Alpm.List<string*> file_list = err_data;
				while (file_list != null) {
					string* filename = file_list.data;
					details += _("%s is invalid or corrupted").printf (filename);
					delete filename;
					file_list.next ();
				}
				break;
			default:
				details += Alpm.strerror (errno);
				break;
		}
		current_error.details = (owned) details;
		success = false;
	}
	trans_release ();
	refresh_handle ();
	trans_commit_finished (success);
}
// Check the caller's polkit authorization, then run trans_commit on
// the worker thread pool; on refusal release everything and report
// an authentication failure.
public void start_trans_commit (GLib.BusName sender) {
	check_authorization.begin (sender, (obj, res) => {
		if (check_authorization.end (res)) {
			try {
				thread_pool.add (new AlpmAction (trans_commit));
			} catch (ThreadError e) {
				stderr.printf ("Thread Error %s\n", e.message);
			}
		} else {
			current_error = ErrorInfos () {
				message = _("Authentication failed")
			};
			trans_release ();
			refresh_handle ();
			trans_commit_finished (false);
		}
	});
}
2016-02-02 05:28:07 -03:00
// Release the current alpm transaction and drop our internal lock flag.
public void trans_release () {
	alpm_handle.trans_release ();
	intern_lock = false;
}
2014-10-22 13:44:02 -03:00
2016-02-02 05:28:07 -03:00
[DBus (no_reply = true)]
// Cancel the running operation: interrupt a committing transaction if
// there is one (it then ends the normal way), otherwise cancel any
// pending download via the cancellable.
public void trans_cancel () {
	if (alpm_handle.trans_interrupt () != 0) {
		cancellable.cancel ();
	}
}
2014-10-22 13:44:02 -03:00
2015-01-05 17:06:18 -03:00
[DBus (no_reply = true)]
// Stop the daemon, but never while a worker thread may still hold
// the databases locked.
public void quit () {
	if (thread_pool.get_num_threads () == 0) {
		alpm_handle.unlock ();
		loop.quit ();
	}
}
// End of Daemon Object
2014-10-22 13:44:02 -03:00
}
}
// Append a timestamped [PAMAC] entry to the pacman log file.
// Failures are only reported on stderr (best effort logging).
private void write_log_file (string event) {
	var now = new DateTime.now_local ();
	string log = "%s [PAMAC] %s\n".printf (now.format ("[%Y-%m-%d %H:%M]"), event);
	var file = GLib.File.new_for_path ("/var/log/pacman.log");
	try {
		var stream = new DataOutputStream (file.append_to (FileCreateFlags.NONE));
		stream.put_string (log);
	} catch (GLib.Error e) {
		stderr.printf ("%s\n", e.message);
	}
}
2016-02-02 05:28:07 -03:00
// alpm event callback: translate the event payload into a flat string
// array and forward it to clients through pamac_daemon.emit_event.
// secondary_type carries the sub-kind (hook phase or package operation).
private void cb_event (Alpm.Event.Data data) {
	string[] details = {};
	uint secondary_type = 0;
	switch (data.type) {
		case Alpm.Event.Type.HOOK_START:
			switch (data.hook_when) {
				case Alpm.HookWhen.PRE_TRANSACTION:
					secondary_type = (uint) Alpm.HookWhen.PRE_TRANSACTION;
					break;
				case Alpm.HookWhen.POST_TRANSACTION:
					secondary_type = (uint) Alpm.HookWhen.POST_TRANSACTION;
					break;
				default:
					break;
			}
			break;
		case Alpm.Event.Type.HOOK_RUN_START:
			details += data.hook_run_name;
			// the hook description may be missing
			details += data.hook_run_desc ?? "";
			details += data.hook_run_position.to_string ();
			details += data.hook_run_total.to_string ();
			break;
		case Alpm.Event.Type.PACKAGE_OPERATION_START:
			switch (data.package_operation_operation) {
				case Alpm.Package.Operation.REMOVE:
					details += data.package_operation_oldpkg.name;
					details += data.package_operation_oldpkg.version;
					secondary_type = (uint) Alpm.Package.Operation.REMOVE;
					break;
				case Alpm.Package.Operation.INSTALL:
					details += data.package_operation_newpkg.name;
					details += data.package_operation_newpkg.version;
					secondary_type = (uint) Alpm.Package.Operation.INSTALL;
					break;
				case Alpm.Package.Operation.REINSTALL:
					details += data.package_operation_newpkg.name;
					details += data.package_operation_newpkg.version;
					secondary_type = (uint) Alpm.Package.Operation.REINSTALL;
					break;
				case Alpm.Package.Operation.UPGRADE:
					details += data.package_operation_oldpkg.name;
					details += data.package_operation_oldpkg.version;
					details += data.package_operation_newpkg.version;
					secondary_type = (uint) Alpm.Package.Operation.UPGRADE;
					break;
				case Alpm.Package.Operation.DOWNGRADE:
					details += data.package_operation_oldpkg.name;
					details += data.package_operation_oldpkg.version;
					details += data.package_operation_newpkg.version;
					secondary_type = (uint) Alpm.Package.Operation.DOWNGRADE;
					break;
				default:
					break;
			}
			break;
		case Alpm.Event.Type.DELTA_PATCH_START:
			details += data.delta_patch_delta.to;
			details += data.delta_patch_delta.delta;
			break;
		case Alpm.Event.Type.SCRIPTLET_INFO:
			details += data.scriptlet_info_line;
			break;
		case Alpm.Event.Type.PKGDOWNLOAD_START:
			// do not emit event when download is cancelled
			if (pamac_daemon.cancellable.is_cancelled ()) {
				return;
			}
			details += data.pkgdownload_file;
			break;
		case Alpm.Event.Type.OPTDEP_REMOVAL:
			details += data.optdep_removal_pkg.name;
			details += data.optdep_removal_optdep.compute_string ();
			break;
		case Alpm.Event.Type.DATABASE_MISSING:
			details += data.database_missing_dbname;
			break;
		case Alpm.Event.Type.PACNEW_CREATED:
			details += data.pacnew_created_file;
			break;
		case Alpm.Event.Type.PACSAVE_CREATED:
			details += data.pacsave_created_file;
			break;
		default:
			break;
	}
	pamac_daemon.emit_event ((uint) data.type, secondary_type, details);
}
2016-02-02 05:28:07 -03:00
// alpm question callback: answer libalpm's interactive questions with
// the daemon's policy. SELECT_PROVIDER is the only question forwarded
// to the client; this thread then blocks until choose_provider answers.
private void cb_question (Alpm.Question.Data data) {
	switch (data.type) {
		case Alpm.Question.Type.INSTALL_IGNOREPKG:
			// never install a package from IgnorePkg/IgnoreGroup
			data.install_ignorepkg_install = 0;
			break;
		case Alpm.Question.Type.REPLACE_PKG:
			// auto-remove conflicts in case of replaces
			data.replace_replace = 1;
			break;
		case Alpm.Question.Type.CONFLICT_PKG:
			// auto-remove conflicts
			data.conflict_remove = 1;
			break;
		case Alpm.Question.Type.REMOVE_PKGS:
			// do not upgrade packages which have unresolvable dependencies
			data.remove_pkgs_skip = 1;
			break;
		case Alpm.Question.Type.SELECT_PROVIDER:
			string depend_str = data.select_provider_depend.compute_string ();
			string[] providers_str = {};
			unowned Alpm.List<unowned Alpm.Package> providers_list = data.select_provider_providers;
			while (providers_list != null) {
				unowned Alpm.Package pkg = providers_list.data;
				providers_str += pkg.name;
				providers_list.next ();
			}
			// ask the client and block until choose_provider signals back
			pamac_daemon.provider_cond = Cond ();
			pamac_daemon.provider_mutex = Mutex ();
			pamac_daemon.choosen_provider = null;
			pamac_daemon.emit_providers (depend_str, providers_str);
			pamac_daemon.provider_mutex.lock ();
			while (pamac_daemon.choosen_provider == null) {
				pamac_daemon.provider_cond.wait (pamac_daemon.provider_mutex);
			}
			data.select_provider_use_index = pamac_daemon.choosen_provider;
			pamac_daemon.provider_mutex.unlock ();
			break;
		case Alpm.Question.Type.CORRUPTED_PKG:
			// auto-remove corrupted pkgs in cache
			data.corrupted_remove = 1;
			break;
		case Alpm.Question.Type.IMPORT_KEY:
			if (data.import_key_key.revoked == 1) {
				// do not import a revoked key
				data.import_key_import = 0;
			} else {
				// automatically import non-revoked keys
				data.import_key_import = 1;
			}
			break;
		default:
			data.any_answer = 0;
			break;
	}
}
2016-02-02 05:28:07 -03:00
// ALPM progress callback: relay progress events to DBus clients,
// throttled so intermediate updates are emitted at most every 0.5s.
// The 0% and 100% events of each step are always forwarded.
private void cb_progress (Alpm.Progress progress, string pkgname, int percent, uint n_targets, uint current_target) {
	if (percent == 0) {
		// first event of this step: forward it and arm the throttle timer
		pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
		pamac_daemon.timer.start ();
		return;
	}
	if (percent == 100) {
		// final event: always forward, then stop the throttle timer
		pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
		pamac_daemon.timer.stop ();
		return;
	}
	if (pamac_daemon.timer.elapsed () < 0.5) {
		// too soon since the last forwarded update — drop this one
		return;
	}
	pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
	pamac_daemon.timer.start ();
}
// Byte count forwarded on the previous cb_download invocation; 0 marks the
// start of a new transfer (reset in cb_fetch before each perform ()).
private uint64 prevprogress;

// Curl XFERINFOFUNCTION callback: relays download progress to DBus clients,
// throttled to one emission per 0.5s except for the first and last events.
// `data` is the file basename installed via XFERINFODATA in cb_fetch.
// Returning non-zero makes curl abort the transfer (ABORTED_BY_CALLBACK).
private int cb_download (void* data, uint64 dltotal, uint64 dlnow, uint64 ultotal, uint64 ulnow) {
	if (unlikely (pamac_daemon.cancellable.is_cancelled ())) {
		// user cancelled: tell curl to abort this transfer
		return 1;
	}
	string filename = (string) data;
	if (unlikely (dlnow == 0 || dltotal == 0 || prevprogress == dltotal)) {
		// nothing transferred yet, total size still unknown, or the
		// completed transfer was already reported — nothing to emit
		return 0;
	} else if (unlikely (prevprogress == 0)) {
		// first real progress event: announce the file at 0 bytes before
		// the current position, then arm the throttle timer
		pamac_daemon.emit_download (filename, 0, dltotal);
		pamac_daemon.emit_download (filename, dlnow, dltotal);
		pamac_daemon.timer.start ();
	} else if (unlikely (dlnow == dltotal)) {
		// transfer complete: always forward the final state
		pamac_daemon.emit_download (filename, dlnow, dltotal);
		pamac_daemon.timer.stop ();
	} else if (likely (pamac_daemon.timer.elapsed () < 0.5)) {
		// throttle intermediate updates; note prevprogress is deliberately
		// left untouched here so the branches above still see the last
		// *emitted* position, not the last observed one
		return 0;
	} else {
		pamac_daemon.emit_download (filename, dlnow, dltotal);
		pamac_daemon.timer.start ();
	}
	prevprogress = dlnow;
	return 0;
}
// Curl-based fetch callback for ALPM (replaces the built-in downloader).
// Downloads `fileurl` into `localpath`, writing to a ".part" temp file so
// interrupted package downloads can be resumed later. `force` != 0 discards
// any partial data and re-downloads from scratch.
// Returns 0 on success, 1 when the existing local file is already up to
// date, and -1 on error or cancellation.
private int cb_fetch (string fileurl, string localpath, int force) {
	if (pamac_daemon.cancellable.is_cancelled ()) {
		return -1;
	}
	Curl.Easy curl;
	curl = new Curl.Easy ();
	char error_buffer[Curl.ERROR_SIZE];
	var url = GLib.File.new_for_uri (fileurl);
	var destfile = GLib.File.new_for_path (localpath + url.get_basename ());
	var tempfile = GLib.File.new_for_path (destfile.get_path () + ".part");
	curl.reset ();
	curl.setopt (Curl.Option.URL, fileurl);
	curl.setopt (Curl.Option.FAILONERROR, 1L);
	curl.setopt (Curl.Option.ERRORBUFFER, error_buffer);
	curl.setopt (Curl.Option.CONNECTTIMEOUT, 30L);
	// ask the server for the remote file's modification time (FILETIME)
	curl.setopt (Curl.Option.FILETIME, 1L);
	curl.setopt (Curl.Option.NOPROGRESS, 0L);
	curl.setopt (Curl.Option.FOLLOWLOCATION, 1L);
	// cb_download receives the basename as its `data` argument
	curl.setopt (Curl.Option.XFERINFOFUNCTION, cb_download);
	curl.setopt (Curl.Option.XFERINFODATA, (void*) url.get_basename ());
	// abort transfers stalled below 1 byte/s for 30 seconds
	curl.setopt (Curl.Option.LOW_SPEED_LIMIT, 1L);
	curl.setopt (Curl.Option.LOW_SPEED_TIME, 30L);
	curl.setopt (Curl.Option.NETRC, Curl.NetRCOption.OPTIONAL);
	curl.setopt (Curl.Option.HTTPAUTH, Curl.CURLAUTH_ANY);
	// keep partial package archives for resuming later; anything else
	// (databases, signatures) is cheap to re-fetch and is discarded
	bool remove_partial_download = true;
	if (fileurl.contains (".pkg.tar.") && !fileurl.has_suffix (".sig")) {
		remove_partial_download = false;
	}
	string open_mode = "wb";
	prevprogress = 0;
	try {
		if (force == 0) {
			if (destfile.query_exists ()) {
				// start from scratch only download if our local is out of date.
				curl.setopt (Curl.Option.TIMECONDITION, Curl.TimeCond.IFMODSINCE);
				FileInfo info = destfile.query_info ("time::modified", 0);
				TimeVal time = info.get_modification_time ();
				curl.setopt (Curl.Option.TIMEVALUE, time.tv_sec);
			} else if (tempfile.query_exists ()) {
				// a previous partial download exists, resume from end of file.
				FileInfo info = tempfile.query_info ("standard::size", 0);
				int64 size = info.get_size ();
				curl.setopt (Curl.Option.RESUME_FROM_LARGE, size);
				open_mode = "ab";
			}
		} else {
			// forced download: discard any stale partial data
			if (tempfile.query_exists ()) {
				tempfile.delete ();
			}
		}
	} catch (GLib.Error e) {
		stderr.printf ("Error: %s\n", e.message);
	}
	Posix.FILE localf = Posix.FILE.open (tempfile.get_path (), open_mode);
	if (localf == null) {
		stdout.printf ("could not open file %s\n", tempfile.get_path ());
		return -1;
	}
	curl.setopt (Curl.Option.WRITEDATA, localf);
	// perform transfer
	Curl.Code err = curl.perform ();
	// disconnect relationships from the curl handle for things that might go out
	// of scope, but could still be touched on connection teardown. This really
	// only applies to FTP transfers.
	// (error_buffer keeps whatever perform () wrote into it and is still
	// readable below — only future writes by curl are disabled here)
	curl.setopt (Curl.Option.NOPROGRESS, 1L);
	curl.setopt (Curl.Option.ERRORBUFFER, null);
	int ret;
	// was it a success?
	switch (err) {
		case Curl.Code.OK:
			long timecond, remote_time = -1;
			double remote_size, bytes_dl;
			unowned string effective_url;
			// retrieve info about the state of the transfer
			curl.getinfo (Curl.Info.FILETIME, out remote_time);
			curl.getinfo (Curl.Info.CONTENT_LENGTH_DOWNLOAD, out remote_size);
			curl.getinfo (Curl.Info.SIZE_DOWNLOAD, out bytes_dl);
			curl.getinfo (Curl.Info.CONDITION_UNMET, out timecond);
			curl.getinfo (Curl.Info.EFFECTIVE_URL, out effective_url);
			if (timecond == 1 && bytes_dl == 0) {
				// time condition was met and we didn't download anything. we need to
				// clean up the 0 byte .part file that's left behind.
				try {
					if (tempfile.query_exists ()) {
						tempfile.delete ();
					}
				} catch (GLib.Error e) {
					stderr.printf ("Error: %s\n", e.message);
				}
				ret = 1;
			}
			// remote_size isn't necessarily the full size of the file, just what the
			// server reported as remaining to download. compare it to what curl reported
			// as actually being transferred during curl_easy_perform ()
			else if (remote_size != -1 && bytes_dl != -1 && bytes_dl != remote_size) {
				pamac_daemon.emit_log ((uint) Alpm.LogLevel.ERROR,
										_("%s appears to be truncated: %jd/%jd bytes\n").printf (
											fileurl, bytes_dl, remote_size));
				if (remove_partial_download) {
					try {
						if (tempfile.query_exists ()) {
							tempfile.delete ();
						}
					} catch (GLib.Error e) {
						stderr.printf ("Error: %s\n", e.message);
					}
				}
				ret = -1;
			} else {
				// complete, intact download: move the temp file into place
				try {
					tempfile.move (destfile, FileCopyFlags.OVERWRITE);
				} catch (GLib.Error e) {
					stderr.printf ("Error: %s\n", e.message);
				}
				ret = 0;
			}
			break;
		case Curl.Code.ABORTED_BY_CALLBACK:
			// cancelled from cb_download; keep resumable package data,
			// drop everything else
			if (remove_partial_download) {
				try {
					if (tempfile.query_exists ()) {
						tempfile.delete ();
					}
				} catch (GLib.Error e) {
					stderr.printf ("Error: %s\n", e.message);
				}
			}
			ret = -1;
			break;
		default:
			// other cases are errors
			try {
				if (tempfile.query_exists ()) {
					if (remove_partial_download) {
						tempfile.delete ();
					} else {
						// delete zero length downloads
						FileInfo info = tempfile.query_info ("standard::size", 0);
						int64 size = info.get_size ();
						if (size == 0) {
							tempfile.delete ();
						}
					}
				}
			} catch (GLib.Error e) {
				stderr.printf ("Error: %s\n", e.message);
			}
			// do not report error for missing sig with db
			if (!fileurl.has_suffix ("db.sig")) {
				// NOTE(review): assumes a scheme://host/... URI so that
				// split("/")[2] yields the hostname — holds for mirror URLs
				string hostname = url.get_uri ().split("/")[2];
				pamac_daemon.emit_log ((uint) Alpm.LogLevel.ERROR,
										_("failed retrieving file '%s' from %s : %s\n").printf (
											url.get_basename (), hostname, error_buffer));
			}
			ret = -1;
			break;
	}
	return ret;
}
// ALPM total-download callback: forward the total number of bytes to be
// downloaded for the whole transaction straight to DBus clients.
private void cb_totaldownload (uint64 total) {
	pamac_daemon.emit_totaldownload (total);
}
// ALPM log callback: format the message and forward it to DBus clients.
// Only warnings and errors are forwarded; everything is dropped while a
// download cancellation is in progress, since aborting transfers produces
// spurious error output.
private void cb_log (Alpm.LogLevel level, string fmt, va_list args) {
	// do not log errors when download is cancelled
	if (pamac_daemon.cancellable.is_cancelled ()) {
		return;
	}
	// keep only warning- and error-level messages
	if ((level & (Alpm.LogLevel.ERROR | Alpm.LogLevel.WARNING)) == 0) {
		return;
	}
	string? log = fmt.vprintf (args);
	if (log != null) {
		pamac_daemon.emit_log ((uint) level, log);
	}
}
// Invoked once the system-bus connection is up: create the daemon
// singleton and export it on the pamac object path. Failing to register
// the object makes the service useless, so the main loop is stopped.
void on_bus_acquired (DBusConnection conn) {
	pamac_daemon = new Pamac.Daemon ();
	try {
		conn.register_object ("/org/manjaro/pamac", pamac_daemon);
	} catch (IOError e) {
		stderr.printf ("Could not register service\n");
		loop.quit ();
	}
}
// Daemon entry point: set up i18n and curl, request the well-known DBus
// name (which triggers on_bus_acquired from the main loop), then run the
// loop until the name is lost or registration fails.
void main () {
	// i18n
	Intl.setlocale (LocaleCategory.ALL, "");
	Intl.textdomain (GETTEXT_PACKAGE);
	// global curl state must be ready before any fetch callback runs;
	// bus callbacks only fire once the main loop below is running
	Curl.global_init (Curl.GLOBAL_SSL);
	Bus.own_name (BusType.SYSTEM,
				"org.manjaro.pamac",
				BusNameOwnerFlags.NONE,
				on_bus_acquired,
				null,
				() => {
					// name could not be acquired (or was lost): shut down
					stderr.printf ("Could not acquire name\n");
					loop.quit ();
				});
	loop = new MainLoop ();
	loop.run ();
	Curl.global_cleanup ();
}