pamac-classic/src/daemon.vala


/*
* pamac-vala
*
* Copyright (C) 2014-2016 Guillaume Benoit <guillaume@manjaro.org>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// i18n
const string GETTEXT_PACKAGE = "pamac";
Pamac.Daemon pamac_daemon;
MainLoop loop;
public delegate void AlpmActionDelegate ();
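// AlpmAction wraps an AlpmActionDelegate so that pending alpm operations
// can be queued as owned data in the daemon's single-threaded ThreadPool.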
[Compact]
public class AlpmAction {
public unowned AlpmActionDelegate action_delegate;
public AlpmAction (AlpmActionDelegate action_delegate) {
this.action_delegate = action_delegate;
}
public void run () {
action_delegate ();
}
}
namespace Pamac {
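// Daemon is exported on the system bus as org.manjaro.pamac.
// Long-running libalpm operations are queued in a one-thread ThreadPool so
// they execute sequentially, and their results are reported back to clients
// through the signals declared below.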
[DBus (name = "org.manjaro.pamac")]
public class Daemon: Object {
private AlpmConfig alpm_config;
public Cond provider_cond;
public Mutex provider_mutex;
public int? choosen_provider;
private bool force_refresh;
private ThreadPool<AlpmAction> thread_pool;
private Mutex databases_lock_mutex;
private Json.Array aur_updates_results;
private bool intern_lock;
private bool extern_lock;
private GLib.File lockfile;
private ErrorInfos current_error;
public Timer timer;
public Cancellable cancellable;
public Curl.Easy curl;
public signal void emit_event (uint primary_event, uint secondary_event, string[] details);
public signal void emit_providers (string depend, string[] providers);
public signal void emit_progress (uint progress, string pkgname, uint percent, uint n_targets, uint current_target);
public signal void emit_download (string filename, uint64 xfered, uint64 total);
public signal void emit_totaldownload (uint64 total);
public signal void emit_log (uint level, string msg);
public signal void set_pkgreason_finished ();
public signal void refresh_finished (bool success);
public signal void get_updates_finished (Updates updates);
public signal void trans_prepare_finished (bool success);
public signal void trans_commit_finished (bool success);
public signal void get_authorization_finished (bool authorized);
public signal void write_pamac_config_finished (bool recurse, uint64 refresh_period, bool no_update_hide_icon,
bool enable_aur, bool search_aur, bool check_aur_updates,
bool no_confirm_build);
public signal void write_alpm_config_finished (bool checkspace);
public signal void write_mirrors_config_finished (string choosen_country, string choosen_generation_method);
public signal void generate_mirrors_list_data (string line);
public signal void generate_mirrors_list_finished ();
public Daemon () {
alpm_config = new AlpmConfig ("/etc/pacman.conf");
databases_lock_mutex = Mutex ();
aur_updates_results = new Json.Array ();
timer = new Timer ();
intern_lock = false;
extern_lock = false;
refresh_handle ();
Timeout.add (500, check_pacman_running);
create_thread_pool ();
cancellable = new Cancellable ();
Curl.global_init (Curl.GLOBAL_SSL);
}
~Daemon () {
Curl.global_cleanup ();
}
public void set_environment_variables (HashTable<string,string> variables) {
string[] keys = { "HTTP_USER_AGENT",
"http_proxy",
"https_proxy",
"ftp_proxy",
"socks_proxy",
"no_proxy" };
foreach (unowned string key in keys) {
unowned string val;
if (variables.lookup_extended (key, null, out val)) {
Environment.set_variable (key, val, true);
}
}
}
public ErrorInfos get_current_error () {
return current_error;
}
private void create_thread_pool () {
// create a thread pool which will run alpm actions one after another
try {
thread_pool = new ThreadPool<AlpmAction>.with_owned_data (
// call alpm_action.run () on thread start
(alpm_action) => {
alpm_action.run ();
},
// only one thread is created so alpm actions run one after another
1,
// exclusive thread
true
);
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
}
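// Rebuild the libalpm handle from the alpm configuration and re-register the
// event, progress, question, fetch, total-download and log callbacks.
// Called whenever a finished operation may have invalidated the handle.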
private void refresh_handle () {
alpm_config.set_handle ();
if (alpm_config.handle == null) {
current_error = ErrorInfos () {
message = _("Failed to initialize alpm library"),
details = {}
};
trans_commit_finished (false);
} else {
alpm_config.handle.eventcb = (Alpm.EventCallBack) cb_event;
alpm_config.handle.progresscb = (Alpm.ProgressCallBack) cb_progress;
alpm_config.handle.questioncb = (Alpm.QuestionCallBack) cb_question;
alpm_config.handle.fetchcb = (Alpm.FetchCallBack) cb_fetch;
alpm_config.handle.totaldlcb = (Alpm.TotalDownloadCallBack) cb_totaldownload;
alpm_config.handle.logcb = (Alpm.LogCallBack) cb_log;
lockfile = GLib.File.new_for_path (alpm_config.handle.lockfile);
}
}
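// Polled every 500 ms: watch pacman's lock file so the daemon notices when
// an external process takes or releases the databases.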
private bool check_pacman_running () {
if (extern_lock) {
if (!lockfile.query_exists ()) {
extern_lock = false;
refresh_handle ();
}
} else {
if (lockfile.query_exists ()) {
if (!intern_lock) {
extern_lock = true;
}
}
}
return true;
}
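// Ask polkit whether the D-Bus caller is authorized for the
// org.manjaro.pamac.commit action.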
private async bool check_authorization (GLib.BusName sender) {
SourceFunc callback = check_authorization.callback;
bool authorized = false;
try {
Polkit.Authority authority = Polkit.Authority.get_sync ();
Polkit.Subject subject = Polkit.SystemBusName.new (sender);
authority.check_authorization.begin (
subject,
"org.manjaro.pamac.commit",
null,
Polkit.CheckAuthorizationFlags.ALLOW_USER_INTERACTION,
null,
(obj, res) => {
try {
var result = authority.check_authorization.end (res);
authorized = result.get_is_authorized ();
} catch (GLib.Error e) {
stderr.printf ("%s\n", e.message);
}
Idle.add ((owned) callback);
}
);
yield;
} catch (GLib.Error e) {
stderr.printf ("%s\n", e.message);
}
return authorized;
}
public void start_get_authorization (GLib.BusName sender) {
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
get_authorization_finished (authorized);
});
}
public void start_write_pamac_config (HashTable<string,Variant> new_pamac_conf, GLib.BusName sender) {
var pamac_config = new Pamac.Config ("/etc/pamac.conf");
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
pamac_config.write (new_pamac_conf);
pamac_config.reload ();
}
write_pamac_config_finished (pamac_config.recurse, pamac_config.refresh_period, pamac_config.no_update_hide_icon,
pamac_config.enable_aur, pamac_config.search_aur, pamac_config.check_aur_updates,
pamac_config.no_confirm_build);
});
}
public void start_write_alpm_config (HashTable<string,Variant> new_alpm_conf, GLib.BusName sender) {
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
alpm_config.write (new_alpm_conf);
alpm_config.reload ();
refresh_handle ();
}
write_alpm_config_finished ((alpm_config.checkspace == 1));
});
}
private bool process_line (IOChannel channel, IOCondition condition, string stream_name) {
if (condition == IOCondition.HUP) {
return false;
}
try {
string line;
channel.read_line (out line, null, null);
generate_mirrors_list_data (line);
} catch (IOChannelError e) {
stderr.printf ("%s: IOChannelError: %s\n", stream_name, e.message);
return false;
} catch (ConvertError e) {
stderr.printf ("%s: ConvertError: %s\n", stream_name, e.message);
return false;
}
return true;
}
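// Spawn "pacman-mirrors -g" and forward its output line by line through the
// generate_mirrors_list_data signal, then reload the configuration and handle.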
public void start_generate_mirrors_list () {
int standard_output;
int standard_error;
Pid child_pid;
try {
Process.spawn_async_with_pipes (null,
{"pacman-mirrors", "-g"},
null,
SpawnFlags.SEARCH_PATH | SpawnFlags.DO_NOT_REAP_CHILD,
null,
out child_pid,
null,
out standard_output,
out standard_error);
// stdout
IOChannel output = new IOChannel.unix_new (standard_output);
output.add_watch (IOCondition.IN | IOCondition.HUP, (channel, condition) => {
return process_line (channel, condition, "stdout");
});
// stderr
IOChannel error = new IOChannel.unix_new (standard_error);
error.add_watch (IOCondition.IN | IOCondition.HUP, (channel, condition) => {
return process_line (channel, condition, "stderr");
});
ChildWatch.add (child_pid, (pid, status) => {
// Triggered when the child indicated by child_pid exits
Process.close_pid (pid);
alpm_config.reload ();
refresh_handle ();
generate_mirrors_list_finished ();
});
} catch (SpawnError e) {
generate_mirrors_list_finished ();
stdout.printf ("SpawnError: %s\n", e.message);
}
}
public void start_write_mirrors_config (HashTable<string,Variant> new_mirrors_conf, GLib.BusName sender) {
var mirrors_config = new MirrorsConfig ("/etc/pacman-mirrors.conf");
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
mirrors_config.write (new_mirrors_conf);
mirrors_config.reload ();
}
write_mirrors_config_finished (mirrors_config.choosen_country, mirrors_config.choosen_generation_method);
});
}
public void start_set_pkgreason (string pkgname, uint reason, GLib.BusName sender) {
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
unowned Alpm.Package? pkg = alpm_config.handle.localdb.get_pkg (pkgname);
if (pkg != null) {
pkg.reason = (Alpm.Package.Reason) reason;
refresh_handle ();
}
}
set_pkgreason_finished ();
});
}
public PackageInfos get_installed_pkg (string pkgname) {
unowned Alpm.Package? pkg = alpm_config.handle.localdb.get_pkg (pkgname);
if (pkg == null) {
return PackageInfos () {
name = "",
version = "",
db_name = "",
download_size = 0
};
}
return PackageInfos () {
name = pkg.name,
version = pkg.version,
db_name = pkg.db.name,
download_size = pkg.download_size
};
}
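// Worker-thread action: update every sync database.
// The refresh is considered successful if at least one database could be
// synchronized.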
private void refresh () {
intern_lock = true;
current_error = ErrorInfos ();
int force = (force_refresh) ? 1 : 0;
uint success = 0;
cancellable.reset ();
foreach (var db in alpm_config.handle.syncdbs) {
if (cancellable.is_cancelled ()) {
refresh_handle ();
refresh_finished (false);
intern_lock = false;
return;
}
if (db.update (force) >= 0) {
success++;
}
}
refresh_handle ();
// We should always succeed if at least one DB was upgraded - we may possibly
// fail later with unresolved deps, but that should be rare, and would be expected
if (success == 0) {
current_error.message = _("Failed to synchronize any databases");
current_error.details = { Alpm.strerror (alpm_config.handle.errno ()) };
refresh_finished (false);
} else {
refresh_finished (true);
}
intern_lock = false;
}
public void start_refresh (bool force) {
force_refresh = force;
try {
thread_pool.add (new AlpmAction (refresh));
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
}
public void add_ignorepkg (string pkgname) {
alpm_config.handle.add_ignorepkg (pkgname);
}
public void remove_ignorepkg (string pkgname) {
alpm_config.handle.remove_ignorepkg (pkgname);
}
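// Compute available updates. Packages listed in SyncFirst are reported on
// their own first; otherwise repository updates are collected and, when
// requested, AUR updates are looked up with AUR.multiinfo for installed
// packages not found in any sync database.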
public void start_get_updates (bool check_aur_updates) {
PackageInfos[] updates_infos = {};
unowned Alpm.Package? pkg = null;
unowned Alpm.Package? candidate = null;
foreach (var name in alpm_config.syncfirsts) {
pkg = Alpm.find_satisfier (alpm_config.handle.localdb.pkgcache, name);
if (pkg != null) {
candidate = pkg.sync_newversion (alpm_config.handle.syncdbs);
if (candidate != null) {
var infos = PackageInfos () {
name = candidate.name,
version = candidate.version,
db_name = candidate.db.name,
download_size = candidate.download_size
};
updates_infos += (owned) infos;
}
}
}
if (updates_infos.length != 0) {
var updates = Updates () {
is_syncfirst = true,
repos_updates = (owned) updates_infos,
aur_updates = {}
};
get_updates_finished (updates);
return;
} else {
string[] local_pkgs = {};
foreach (var installed_pkg in alpm_config.handle.localdb.pkgcache) {
// check if installed_pkg is in IgnorePkg or IgnoreGroup
if (alpm_config.handle.should_ignore (installed_pkg) == 0) {
candidate = installed_pkg.sync_newversion (alpm_config.handle.syncdbs);
if (candidate != null) {
var infos = PackageInfos () {
name = candidate.name,
version = candidate.version,
db_name = candidate.db.name,
download_size = candidate.download_size
};
updates_infos += (owned) infos;
} else {
if (check_aur_updates) {
// check if installed_pkg is a local pkg
foreach (var db in alpm_config.handle.syncdbs) {
pkg = Alpm.find_satisfier (db.pkgcache, installed_pkg.name);
if (pkg != null) {
break;
}
}
if (pkg == null) {
local_pkgs += installed_pkg.name;
}
}
}
}
}
PackageInfos[] aur_updates_infos = {};
if (check_aur_updates) {
// get aur updates
if (aur_updates_results.get_length () == 0) {
aur_updates_results = AUR.multiinfo (local_pkgs);
}
aur_updates_results.foreach_element ((array, index, node) => {
unowned Json.Object pkg_info = node.get_object ();
string version = pkg_info.get_string_member ("Version");
string name = pkg_info.get_string_member ("Name");
int cmp = Alpm.pkg_vercmp (version, alpm_config.handle.localdb.get_pkg (name).version);
if (cmp == 1) {
var infos = PackageInfos () {
name = name,
version = version,
db_name = "AUR",
download_size = 0
};
aur_updates_infos += (owned) infos;
}
});
}
var updates = Updates () {
is_syncfirst = false,
repos_updates = (owned) updates_infos,
aur_updates = (owned) aur_updates_infos
};
get_updates_finished (updates);
}
}
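// Initialize a libalpm transaction with the given flags and take the
// internal databases lock.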
public bool trans_init (Alpm.TransFlag transflags) {
current_error = ErrorInfos ();
cancellable.reset ();
if (alpm_config.handle.trans_init (transflags) == -1) {
current_error.message = _("Failed to init transaction");
current_error.details = { Alpm.strerror (alpm_config.handle.errno ()) };
return false;
} else {
intern_lock = true;
}
return true;
}
public bool trans_sysupgrade (bool enable_downgrade) {
current_error = ErrorInfos ();
if (alpm_config.handle.trans_sysupgrade ((enable_downgrade) ? 1 : 0) == -1) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { Alpm.strerror (alpm_config.handle.errno ()) };
return false;
}
return true;
}
private bool trans_add_pkg_real (Alpm.Package pkg) {
current_error = ErrorInfos ();
if (alpm_config.handle.trans_add_pkg (pkg) == -1) {
Alpm.Errno errno = alpm_config.handle.errno ();
if (errno == Alpm.Errno.TRANS_DUP_TARGET || errno == Alpm.Errno.PKG_IGNORED) {
// just skip duplicate or ignored targets
return true;
} else {
current_error.message = _("Failed to prepare transaction");
current_error.details = { "%s: %s".printf (pkg.name, Alpm.strerror (errno)) };
return false;
}
}
return true;
}
private unowned Alpm.Package? get_sync_pkg (string pkgname) {
unowned Alpm.Package? pkg = null;
foreach (var db in alpm_config.handle.syncdbs) {
pkg = db.get_pkg (pkgname);
if (pkg != null) {
break;
}
}
return pkg;
}
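// Add a package to the transaction. For kernel packages and kernel modules
// (names matched by the "linux31"/"linux4" prefixes), the corresponding
// modules or kernels for the other installed kernel versions are added too.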
public bool trans_add_pkg (string pkgname) {
current_error = ErrorInfos ();
unowned Alpm.Package? pkg = get_sync_pkg (pkgname);
if (pkg == null) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { _("target not found: %s").printf (pkgname) };
return false;
} else {
bool success = trans_add_pkg_real (pkg);
if (success) {
if (("linux31" in pkg.name) || ("linux4" in pkg.name)) {
string[] installed_kernels = {};
string[] installed_modules = {};
foreach (var local_pkg in alpm_config.handle.localdb.pkgcache) {
if (("linux31" in local_pkg.name) || ("linux4" in local_pkg.name)) {
string[] local_pkg_splitted = local_pkg.name.split ("-", 2);
if ((local_pkg_splitted[0] in installed_kernels) == false) {
installed_kernels += local_pkg_splitted[0];
}
if (local_pkg_splitted.length == 2) {
if ((local_pkg_splitted[1] in installed_modules) == false) {
installed_modules += local_pkg_splitted[1];
}
}
}
}
string[] splitted = pkg.name.split ("-", 2);
if (splitted.length == 2) {
// we are adding a module
// add the same module for other installed kernels
foreach (unowned string installed_kernel in installed_kernels) {
string module = installed_kernel + "-" + splitted[1];
unowned Alpm.Package? module_pkg = get_sync_pkg (module);
if (module_pkg != null) {
trans_add_pkg_real (module_pkg);
}
}
} else if (splitted.length == 1) {
// we are adding a kernel
// add all installed modules for other kernels
foreach (unowned string installed_module in installed_modules) {
string module = splitted[0] + "-" + installed_module;
unowned Alpm.Package? module_pkg = get_sync_pkg (module);
if (module_pkg != null) {
trans_add_pkg_real (module_pkg);
}
}
}
}
}
return success;
}
}
public bool trans_load_pkg (string pkgpath) {
current_error = ErrorInfos ();
Alpm.Package* pkg = alpm_config.handle.load_file (pkgpath, 1, alpm_config.handle.localfilesiglevel);
if (pkg == null) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { "%s: %s".printf (pkgpath, Alpm.strerror (alpm_config.handle.errno ())) };
return false;
} else if (alpm_config.handle.trans_add_pkg (pkg) == -1) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { "%s: %s".printf (pkg->name, Alpm.strerror (alpm_config.handle.errno ())) };
// free the package because it will not be used
delete pkg;
return false;
}
return true;
}
public bool trans_remove_pkg (string pkgname) {
current_error = ErrorInfos ();
unowned Alpm.Package? pkg = alpm_config.handle.localdb.get_pkg (pkgname);
if (pkg == null) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { _("target not found: %s").printf (pkgname) };
return false;
} else if (alpm_config.handle.trans_remove_pkg (pkg) == -1) {
current_error.message = _("Failed to prepare transaction");
current_error.details = { "%s: %s".printf (pkg.name, Alpm.strerror (alpm_config.handle.errno ())) };
return false;
}
return true;
}
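// Worker-thread action: resolve the transaction. libalpm error data is
// translated into readable detail strings, and the transaction is refused
// when a package listed in HoldPkg would have to be removed.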
private void trans_prepare () {
current_error = ErrorInfos ();
string[] details = {};
Alpm.List<void*> err_data;
if (alpm_config.handle.trans_prepare (out err_data) == -1) {
Alpm.Errno errno = alpm_config.handle.errno ();
current_error.message = _("Failed to prepare transaction");
string detail = Alpm.strerror (errno);
switch (errno) {
case Alpm.Errno.PKG_INVALID_ARCH:
detail += ":";
details += (owned) detail;
foreach (void* i in err_data) {
string* pkgname = i;
details += _("package %s does not have a valid architecture").printf (pkgname);
delete pkgname;
}
break;
case Alpm.Errno.UNSATISFIED_DEPS:
detail += ":";
details += (owned) detail;
foreach (void* i in err_data) {
Alpm.DepMissing* miss = i;
details += _("%s: requires %s").printf (miss->target, miss->depend.compute_string ());
delete miss;
}
break;
case Alpm.Errno.CONFLICTING_DEPS:
detail += ":";
details += (owned) detail;
foreach (void* i in err_data) {
Alpm.Conflict* conflict = i;
string conflict_detail = _("%s and %s are in conflict").printf (conflict->package1, conflict->package2);
// only print reason if it contains new information
if (conflict->reason.mod != Alpm.Depend.Mode.ANY) {
conflict_detail += " (%s)".printf (conflict->reason.compute_string ());
}
details += (owned) conflict_detail;
delete conflict;
}
break;
default:
details += (owned) detail;
break;
}
current_error.details = (owned) details;
trans_release ();
trans_prepare_finished (false);
} else {
// Search for holdpkg in target list
bool found_locked_pkg = false;
foreach (var pkg in alpm_config.handle.trans_to_remove ()) {
if (alpm_config.holdpkgs.find_custom (pkg.name, strcmp) != null) {
details += _("%s needs to be removed but it is a locked package").printf (pkg.name);
found_locked_pkg = true;
break;
}
}
if (found_locked_pkg) {
current_error.message = _("Failed to prepare transaction");
current_error.details = (owned) details;
trans_release ();
trans_prepare_finished (false);
} else {
trans_prepare_finished (true);
}
}
}
public void start_trans_prepare () {
try {
thread_pool.add (new AlpmAction (trans_prepare));
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
}
public void choose_provider (int provider) {
provider_mutex.lock ();
choosen_provider = provider;
provider_cond.signal ();
provider_mutex.unlock ();
}
public PackageInfos[] trans_to_add () {
PackageInfos[] to_add = {};
foreach (var pkg in alpm_config.handle.trans_to_add ()) {
var infos = PackageInfos () {
name = pkg.name,
version = pkg.version,
// if pkg was loaded from a file, pkg.db is null
db_name = pkg.db != null ? pkg.db.name : "",
download_size = pkg.download_size
};
to_add += (owned) infos;
}
return to_add;
}
public PackageInfos[] trans_to_remove () {
PackageInfos[] to_remove = {};
foreach (var pkg in alpm_config.handle.trans_to_remove ()) {
var infos = PackageInfos () {
name = pkg.name,
version = pkg.version,
db_name = pkg.db.name,
download_size = pkg.download_size
};
to_remove += (owned) infos;
}
return to_remove;
}
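// Worker-thread action: commit the prepared transaction. Commit errors
// (file conflicts, invalid or corrupted packages, ...) are turned into
// detail strings; a download cancelled by the user shows up as an
// EXTERNAL_DOWNLOAD error and is reported as a plain failure.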
private void trans_commit () {
current_error = ErrorInfos ();
bool success = true;
Alpm.List<void*> err_data;
if (alpm_config.handle.trans_commit (out err_data) == -1) {
Alpm.Errno errno = alpm_config.handle.errno ();
// cancelling the download returns an EXTERNAL_DOWNLOAD error
if (errno == Alpm.Errno.EXTERNAL_DOWNLOAD && cancellable.is_cancelled ()) {
trans_release ();
refresh_handle ();
trans_commit_finished (false);
return;
}
current_error.message = _("Failed to commit transaction");
string detail = Alpm.strerror (errno);
string[] details = {};
switch (errno) {
case Alpm.Errno.FILE_CONFLICTS:
detail += ":";
details += (owned) detail;
//TransFlag flags = alpm_config.handle.trans_get_flags ();
//if ((flags & TransFlag.FORCE) != 0) {
//details += _("unable to %s directory-file conflicts").printf ("--force");
//}
foreach (void* i in err_data) {
Alpm.FileConflict* conflict = i;
switch (conflict->type) {
case Alpm.FileConflict.Type.TARGET:
details += _("%s exists in both %s and %s").printf (conflict->file, conflict->target, conflict->ctarget);
break;
case Alpm.FileConflict.Type.FILESYSTEM:
details += _("%s: %s already exists in filesystem").printf (conflict->target, conflict->file);
break;
}
delete conflict;
}
break;
case Alpm.Errno.PKG_INVALID:
case Alpm.Errno.PKG_INVALID_CHECKSUM:
case Alpm.Errno.PKG_INVALID_SIG:
case Alpm.Errno.DLT_INVALID:
detail += ":";
details += (owned) detail;
foreach (void* i in err_data) {
string* filename = i;
details += _("%s is invalid or corrupted").printf (filename);
delete filename;
}
break;
default:
details += (owned) detail;
break;
}
current_error.details = (owned) details;
success = false;
}
trans_release ();
refresh_handle ();
trans_commit_finished (success);
}
public void start_trans_commit (GLib.BusName sender) {
check_authorization.begin (sender, (obj, res) => {
bool authorized = check_authorization.end (res);
if (authorized) {
try {
thread_pool.add (new AlpmAction (trans_commit));
} catch (ThreadError e) {
stderr.printf ("Thread Error %s\n", e.message);
}
} else {
current_error = ErrorInfos () {
message = _("Authentication failed"),
details = {}
};
trans_release ();
refresh_handle ();
trans_commit_finished (false);
}
});
}
public void trans_release () {
alpm_config.handle.trans_release ();
intern_lock = false;
}
[DBus (no_reply = true)]
public void trans_cancel () {
if (alpm_config.handle.trans_interrupt () == 0) {
// a transaction is being interrupted
// it will end normally
return;
}
cancellable.cancel ();
}
[DBus (no_reply = true)]
public void quit () {
// to be sure not to quit with locked databases,
// the following call waits for all tasks in the queue
// to be processed before returning
ThreadPool.free ((owned) thread_pool, false, true);
alpm_config.handle.unlock ();
loop.quit ();
}
// End of Daemon Object
}
}
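// Append a timestamped entry to /var/log/pamac.log.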
private void write_log_file (string event) {
var now = new DateTime.now_local ();
string log = "%s %s".printf (now.format ("[%Y-%m-%d %H:%M]"), event);
var file = GLib.File.new_for_path ("/var/log/pamac.log");
try {
// creating a DataOutputStream to the file
var dos = new DataOutputStream (file.append_to (FileCreateFlags.NONE));
// writing a short string to the stream
dos.put_string (log);
} catch (GLib.Error e) {
stderr.printf ("%s\n", e.message);
}
}
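// libalpm event callback: flatten the event into a (type, secondary type,
// details[]) triple and forward it to clients through emit_event.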
private void cb_event (Alpm.Event.Data data) {
string[] details = {};
uint secondary_type = 0;
switch (data.type) {
case Alpm.Event.Type.HOOK_START:
switch (data.hook_when) {
case Alpm.HookWhen.PRE_TRANSACTION:
secondary_type = (uint) Alpm.HookWhen.PRE_TRANSACTION;
break;
case Alpm.HookWhen.POST_TRANSACTION:
secondary_type = (uint) Alpm.HookWhen.POST_TRANSACTION;
break;
default:
break;
}
break;
case Alpm.Event.Type.HOOK_RUN_START:
details += data.hook_run_name;
details += data.hook_run_desc ?? "";
details += data.hook_run_position.to_string ();
details += data.hook_run_total.to_string ();
break;
case Alpm.Event.Type.PACKAGE_OPERATION_START:
switch (data.package_operation_operation) {
case Alpm.Package.Operation.REMOVE:
details += data.package_operation_oldpkg.name;
details += data.package_operation_oldpkg.version;
secondary_type = (uint) Alpm.Package.Operation.REMOVE;
break;
case Alpm.Package.Operation.INSTALL:
details += data.package_operation_newpkg.name;
details += data.package_operation_newpkg.version;
secondary_type = (uint) Alpm.Package.Operation.INSTALL;
break;
case Alpm.Package.Operation.REINSTALL:
details += data.package_operation_newpkg.name;
details += data.package_operation_newpkg.version;
secondary_type = (uint) Alpm.Package.Operation.REINSTALL;
break;
case Alpm.Package.Operation.UPGRADE:
details += data.package_operation_oldpkg.name;
details += data.package_operation_oldpkg.version;
details += data.package_operation_newpkg.version;
secondary_type = (uint) Alpm.Package.Operation.UPGRADE;
break;
case Alpm.Package.Operation.DOWNGRADE:
details += data.package_operation_oldpkg.name;
details += data.package_operation_oldpkg.version;
details += data.package_operation_newpkg.version;
secondary_type = (uint) Alpm.Package.Operation.DOWNGRADE;
break;
default:
break;
}
break;
case Alpm.Event.Type.PACKAGE_OPERATION_DONE:
switch (data.package_operation_operation) {
case Alpm.Package.Operation.INSTALL:
string log = "Installed %s (%s)\n".printf (data.package_operation_newpkg.name, data.package_operation_newpkg.version);
write_log_file (log);
break;
case Alpm.Package.Operation.REMOVE:
string log = "Removed %s (%s)\n".printf (data.package_operation_oldpkg.name, data.package_operation_oldpkg.version);
write_log_file (log);
break;
case Alpm.Package.Operation.REINSTALL:
string log = "Reinstalled %s (%s)\n".printf (data.package_operation_newpkg.name, data.package_operation_newpkg.version);
write_log_file (log);
break;
case Alpm.Package.Operation.UPGRADE:
string log = "Upgraded %s (%s -> %s)\n".printf (data.package_operation_oldpkg.name, data.package_operation_oldpkg.version, data.package_operation_newpkg.version);
write_log_file (log);
break;
case Alpm.Package.Operation.DOWNGRADE:
string log = "Downgraded %s (%s -> %s)\n".printf (data.package_operation_oldpkg.name, data.package_operation_oldpkg.version, data.package_operation_newpkg.version);
write_log_file (log);
break;
}
break;
case Alpm.Event.Type.DELTA_PATCH_START:
details += data.delta_patch_delta.to;
details += data.delta_patch_delta.delta;
break;
case Alpm.Event.Type.SCRIPTLET_INFO:
details += data.scriptlet_info_line;
write_log_file (data.scriptlet_info_line);
break;
case Alpm.Event.Type.PKGDOWNLOAD_START:
details += data.pkgdownload_file;
break;
case Alpm.Event.Type.OPTDEP_REMOVAL:
details += data.optdep_removal_pkg.name;
details += data.optdep_removal_optdep.compute_string ();
break;
case Alpm.Event.Type.DATABASE_MISSING:
details += data.database_missing_dbname;
break;
case Alpm.Event.Type.PACNEW_CREATED:
details += data.pacnew_created_file;
break;
case Alpm.Event.Type.PACSAVE_CREATED:
details += data.pacsave_created_file;
break;
default:
break;
}
pamac_daemon.emit_event ((uint) data.type, secondary_type, details);
}
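// libalpm question callback: answer non-interactively, except for provider
// selection, which blocks on a condition variable until a client calls
// choose_provider over D-Bus.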
private void cb_question (Alpm.Question.Data data) {
switch (data.type) {
case Alpm.Question.Type.INSTALL_IGNOREPKG:
// Do not install package in IgnorePkg/IgnoreGroup
data.install_ignorepkg_install = 0;
break;
case Alpm.Question.Type.REPLACE_PKG:
// Auto-remove conflicts in case of replaces
data.replace_replace = 1;
break;
case Alpm.Question.Type.CONFLICT_PKG:
// Auto-remove conflicts
data.conflict_remove = 1;
break;
case Alpm.Question.Type.REMOVE_PKGS:
// Do not upgrade packages which have unresolvable dependencies
data.remove_pkgs_skip = 1;
break;
case Alpm.Question.Type.SELECT_PROVIDER:
string depend_str = data.select_provider_depend.compute_string ();
string[] providers_str = {};
foreach (unowned Alpm.Package pkg in data.select_provider_providers) {
providers_str += pkg.name;
}
pamac_daemon.provider_cond = Cond ();
pamac_daemon.provider_mutex = Mutex ();
pamac_daemon.choosen_provider = null;
pamac_daemon.emit_providers (depend_str, providers_str);
pamac_daemon.provider_mutex.lock ();
while (pamac_daemon.choosen_provider == null) {
pamac_daemon.provider_cond.wait (pamac_daemon.provider_mutex);
}
data.select_provider_use_index = pamac_daemon.choosen_provider;
pamac_daemon.provider_mutex.unlock ();
break;
case Alpm.Question.Type.CORRUPTED_PKG:
// Auto-remove corrupted pkgs in cache
data.corrupted_remove = 1;
break;
case Alpm.Question.Type.IMPORT_KEY:
if (data.import_key_key.revoked == 1) {
// Do not import a revoked key
data.import_key_import = 0;
} else {
// Automatically import a key that is not revoked
data.import_key_import = 1;
}
break;
default:
data.any_answer = 0;
break;
}
}
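// libalpm progress callback: always forward 0% and 100%, and throttle
// intermediate updates to roughly one every half second using the timer.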
private void cb_progress (Alpm.Progress progress, string pkgname, int percent, uint n_targets, uint current_target) {
if (percent == 0) {
pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
pamac_daemon.timer.start ();
} else if (percent == 100) {
pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
pamac_daemon.timer.stop ();
} else if (pamac_daemon.timer.elapsed () < 0.5) {
return;
} else {
pamac_daemon.emit_progress ((uint) progress, pkgname, (uint) percent, n_targets, current_target);
pamac_daemon.timer.start ();
}
}
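// curl progress callback used by cb_fetch: throttled like cb_progress, and
// returning 1 aborts the transfer when the user has cancelled.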
private uint64 prevprogress;
private int cb_download (void* data, uint64 dltotal, uint64 dlnow, uint64 ultotal, uint64 ulnow) {
if (unlikely (pamac_daemon.cancellable.is_cancelled ())) {
return 1;
}
string filename = (string) data;
if (unlikely (dltotal == 0 || prevprogress == dltotal)) {
return 0;
} else if (unlikely (dlnow == 0)) {
pamac_daemon.emit_download (filename, dlnow, dltotal);
pamac_daemon.timer.start ();
} else if (unlikely (dlnow == dltotal)) {
pamac_daemon.emit_download (filename, dlnow, dltotal);
pamac_daemon.timer.stop ();
} else if (likely (pamac_daemon.timer.elapsed () < 0.5)) {
return 0;
} else {
pamac_daemon.emit_download (filename, dlnow, dltotal);
pamac_daemon.timer.start ();
}
//~ // avoid displaying progress for redirects with a body
//~ if (respcode >= 300) {
//~ return 0;
//~ }
prevprogress = dlnow;
return 0;
}
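// libalpm fetch callback implemented with libcurl, apparently modelled on
// pacman's own download code: it honours If-Modified-Since for files that
// already exist, resumes partial ".part" downloads and cleans them up on
// errors.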
private int cb_fetch (string fileurl, string localpath, int force) {
if (pamac_daemon.cancellable.is_cancelled ()) {
return -1;
}
if (pamac_daemon.curl == null) {
pamac_daemon.curl = new Curl.Easy ();
}
char error_buffer[Curl.ERROR_SIZE];
var url = GLib.File.new_for_uri (fileurl);
var destfile = GLib.File.new_for_path (localpath + url.get_basename ());
var tempfile = GLib.File.new_for_path (destfile.get_path () + ".part");
pamac_daemon.curl.reset ();
pamac_daemon.curl.setopt (Curl.Option.URL, fileurl);
pamac_daemon.curl.setopt (Curl.Option.FAILONERROR, 1L);
pamac_daemon.curl.setopt (Curl.Option.ERRORBUFFER, error_buffer);
pamac_daemon.curl.setopt (Curl.Option.CONNECTTIMEOUT, 30L);
pamac_daemon.curl.setopt (Curl.Option.FILETIME, 1L);
pamac_daemon.curl.setopt (Curl.Option.NOPROGRESS, 0L);
pamac_daemon.curl.setopt (Curl.Option.FOLLOWLOCATION, 1L);
pamac_daemon.curl.setopt (Curl.Option.XFERINFOFUNCTION, cb_download);
pamac_daemon.curl.setopt (Curl.Option.XFERINFODATA, (void*) url.get_basename ());
pamac_daemon.curl.setopt (Curl.Option.LOW_SPEED_LIMIT, 1L);
pamac_daemon.curl.setopt (Curl.Option.LOW_SPEED_TIME, 30L);
pamac_daemon.curl.setopt (Curl.Option.NETRC, Curl.NetRCOption.OPTIONAL);
pamac_daemon.curl.setopt (Curl.Option.HTTPAUTH, Curl.CURLAUTH_ANY);
bool remove_partial_download = true;
if (fileurl.contains (".pkg.tar.") && !fileurl.has_suffix (".sig")) {
remove_partial_download = false;
}
string open_mode = "wb";
prevprogress = 0;
try {
if (force == 0) {
if (destfile.query_exists ()) {
// the file already exists: only download it if the local copy is out of date.
pamac_daemon.curl.setopt (Curl.Option.TIMECONDITION, Curl.TimeCond.IFMODSINCE);
FileInfo info = destfile.query_info ("time::modified", 0);
TimeVal time = info.get_modification_time ();
pamac_daemon.curl.setopt (Curl.Option.TIMEVALUE, time.tv_sec);
} else if (tempfile.query_exists ()) {
// a previous partial download exists, resume from end of file.
FileInfo info = tempfile.query_info ("standard::size", 0);
int64 size = info.get_size ();
pamac_daemon.curl.setopt (Curl.Option.RESUME_FROM_LARGE, size);
open_mode = "ab";
}
} else {
if (tempfile.query_exists ()) {
tempfile.delete ();
}
}
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
Posix.FILE localf = Posix.FILE.open (tempfile.get_path (), open_mode);
if (localf == null) {
stdout.printf ("could not open file %s\n", tempfile.get_path ());
return -1;
}
pamac_daemon.curl.setopt (Curl.Option.WRITEDATA, localf);
// perform transfer
Curl.Code err = pamac_daemon.curl.perform ();
// disconnect relationships from the curl handle for things that might go out
// of scope, but could still be touched on connection teardown. This really
// only applies to FTP transfers.
pamac_daemon.curl.setopt (Curl.Option.NOPROGRESS, 1L);
pamac_daemon.curl.setopt (Curl.Option.ERRORBUFFER, null);
int ret;
// was it a success?
switch (err) {
case Curl.Code.OK:
long timecond, remote_time = -1;
double remote_size, bytes_dl;
unowned string effective_url;
// retrieve info about the state of the transfer
pamac_daemon.curl.getinfo (Curl.Info.FILETIME, out remote_time);
pamac_daemon.curl.getinfo (Curl.Info.CONTENT_LENGTH_DOWNLOAD, out remote_size);
pamac_daemon.curl.getinfo (Curl.Info.SIZE_DOWNLOAD, out bytes_dl);
pamac_daemon.curl.getinfo (Curl.Info.CONDITION_UNMET, out timecond);
pamac_daemon.curl.getinfo (Curl.Info.EFFECTIVE_URL, out effective_url);
if (timecond == 1 && bytes_dl == 0) {
// time condition was met and we didn't download anything. we need to
// clean up the 0 byte .part file that's left behind.
try {
if (tempfile.query_exists ()) {
tempfile.delete ();
}
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
ret = 1;
}
// remote_size isn't necessarily the full size of the file, just what the
// server reported as remaining to download. compare it to what curl reported
// as actually being transferred during curl_easy_perform ()
else if (remote_size != -1 && bytes_dl != -1 && bytes_dl != remote_size) {
pamac_daemon.emit_log ((uint) Alpm.LogLevel.ERROR,
_("%s appears to be truncated: %jd/%jd bytes\n").printf (
fileurl, bytes_dl, remote_size));
if (remove_partial_download) {
try {
if (tempfile.query_exists ()) {
tempfile.delete ();
}
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
}
ret = -1;
} else {
try {
tempfile.move (destfile, FileCopyFlags.OVERWRITE);
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
ret = 0;
}
break;
case Curl.Code.ABORTED_BY_CALLBACK:
if (remove_partial_download) {
try {
if (tempfile.query_exists ()) {
tempfile.delete ();
}
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
}
ret = -1;
break;
default:
// other cases are errors
try {
if (tempfile.query_exists ()) {
if (remove_partial_download) {
tempfile.delete ();
} else {
// delete zero length downloads
FileInfo info = tempfile.query_info ("standard::size", 0);
int64 size = info.get_size ();
if (size == 0) {
tempfile.delete ();
}
}
}
} catch (GLib.Error e) {
stderr.printf ("Error: %s\n", e.message);
}
// do not report an error for a missing database signature file
if (!fileurl.has_suffix ("db.sig")) {
string hostname = url.get_uri ().split("/")[2];
pamac_daemon.emit_log ((uint) Alpm.LogLevel.ERROR,
_("failed retrieving file '%s' from %s : %s\n").printf (
url.get_basename (), hostname, error_buffer));
}
ret = -1;
break;
}
return ret;
}
private void cb_totaldownload (uint64 total) {
pamac_daemon.emit_totaldownload (total);
}
private void cb_log (Alpm.LogLevel level, string fmt, va_list args) {
// do not log errors when download is cancelled
if (pamac_daemon.cancellable.is_cancelled ()) {
return;
}
Alpm.LogLevel logmask = Alpm.LogLevel.ERROR | Alpm.LogLevel.WARNING;
if ((level & logmask) == 0) {
return;
}
string? log = null;
log = fmt.vprintf (args);
if (log != null) {
pamac_daemon.emit_log ((uint) level, log);
}
}
void on_bus_acquired (DBusConnection conn) {
pamac_daemon = new Pamac.Daemon ();
try {
conn.register_object ("/org/manjaro/pamac", pamac_daemon);
}
catch (IOError e) {
stderr.printf ("Could not register service\n");
loop.quit ();
}
}
void main () {
// i18n
Intl.setlocale (LocaleCategory.ALL, "");
Intl.textdomain (GETTEXT_PACKAGE);
Bus.own_name (BusType.SYSTEM,
"org.manjaro.pamac",
BusNameOwnerFlags.NONE,
on_bus_acquired,
null,
() => {
stderr.printf ("Could not acquire name\n");
loop.quit ();
});
loop = new MainLoop ();
loop.run ();
}