Ubuntu – Bacula: running concurrent jobs from one Linux box

backupbaculaUbuntu

Hi all, I have a strange problem. According to the Bacula documentation, I can run two or more concurrent jobs if I set "Maximum Concurrent Jobs" to more than 1.

This works fine for jobs from different servers. But when I have two jobs that run from the same Linux server, the second job waits until the first job has finished. The jobs have the same priority (10). Every job has a separate pool, volume, and storage device.

Bacula-dir and Bacula-sd running on different linux servers.

OS Ubuntu 14.04,
Bacula version 5.2.6

report from bconsole

Running Jobs:
Console connected at 03-Apr-16 09:12
 JobId Level   Name                       Status
======================================================================
  4094 Full    arkive03_Share.2016-04-02_22.00.00_06 is running
  4106 Full    BackupCatalog.2016-04-02_23.10.00_19 is waiting for higher priority jobs to finish
  4112 Full    arkive03EtcBackup.2016-04-03_06.00.00_25 is waiting on max Client jobs
====

bacula-dir.conf

Director {                            # define myself
  Name = bacula.tumo.lab-dir
  DIRport = 9101                # where we listen for UA connections
  QueryFile = "/etc/bacula/scripts/query.sql"
  WorkingDirectory = "/var/lib/bacula"
  PidDirectory = "/var/run/bacula"
  Maximum Concurrent Jobs = 10
  Password = "WDT0OAXCx57U"         # Console password
  Messages = Daemon
  DirAddress = bacula.tumo.lab
}

bacula-fd.conf

FileDaemon {                          # this is me
  Name = arkive03.tumo.lab-fd
  FDport = 9102                  # where we listen for the director
  WorkingDirectory = /var/lib/bacula
  Pid Directory = /var/run/bacula
  Maximum Concurrent Jobs = 20
  FDAddress = 10.44.20.137
}

bacula-sd.conf

Storage {                             # definition of myself
  Name = arkive03.tumo.lab-sd
  SDPort = 9103                  # Director's port      
  WorkingDirectory = "/var/lib/bacula"
  Pid Directory = "/var/run/bacula"
  Maximum Concurrent Jobs = 20
  SDAddress = 10.44.20.137
}

Device {
  Name = Arkive03_other               # device for arkive03EtcBackup
  Media Type = File
  Archive Device = /local/bacula/backup/other
  LabelMedia = yes;                   # lets Bacula label unlabeled media
  Random Access = Yes;
  AutomaticMount = yes;               # when device opened, read it
  RemovableMedia = no;
  AlwaysOpen = no;
}

Device {
  Name = Arkive03_Share               # device for arkive03_Share       
  Media Type = File
  Archive Device = /local/bacula/backup/Share
  LabelMedia = yes;                   # lets Bacula label unlabeled media
  Random Access = Yes;
  AutomaticMount = yes;               # when device opened, read it
  RemovableMedia = no;
  AlwaysOpen = no;
}

I tried to add "Maximum Concurrent Jobs" to the device section, but it didn't help.

pools.conf

Pool {
  Name = File                         # pool for arkive03EtcBackup
  Pool Type = Backup
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Action On Purge = Truncate
  Volume Retention = 21 days         # 21 days
  Maximum Volume Bytes = 10G          # Limit Volume size to something reasonable
  Maximum Volumes = 100               # Limit number of Volumes in Pool
  Label Format = "Vol-"
}

Pool {
  Name = ark_share                    # pool for arkive03_Share
  Pool Type = Backup
  Recycle = yes                       # Bacula can automatically recycle Volumes
  AutoPrune = yes                     # Prune expired volumes
  Action On Purge = Truncate
  Volume Retention = 21 days         # 21 days
  Maximum Volume Bytes = 50G          # Limit Volume size to something reasonable
  Maximum Volumes = 400               # Limit number of Volumes in Pool
  Label Format = "Ark_share-"
}

jobdef.conf

JobDefs {
  Name = "ark_Share"
  Type = Backup
  Level = Incremental
  Client = arkive03.tumo.lab-fd
  Storage = Arkive03_Share
  Messages = Standard
  Pool = ark_share
  Priority = 10
  Write Bootstrap = "/var/lib/bacula/arkive03_share.bsr"
}

JobDefs {
  Name = "EtcBackup"
  Type = Backup
  Level = Incremental
  Schedule = "Dayly"
  Storage = Arkive03_other
  Messages = Standard
  Pool = File
  Priority = 10
  Write Bootstrap = "/var/lib/bacula/etc.bsr"
}

client arkive03.conf

Client {
  Name = arkive03.tumo.lab-fd
  Address = 10.44.20.137
  FDPort = 9102
  Catalog = MyCatalog
  Password = "WDT0OAXCx57U"          # password for FileDaemon
  File Retention = 30 days            # 30 days
  Job Retention = 6 months            # six months
  AutoPrune = yes                     # Prune expired Jobs/Files
}

Job {
  Name = "arkive03_Share"
  Schedule = "arkbackup"
  FileSet = "Share"
  JobDefs = "ark_Share"
  Client = "arkive03.tumo.lab-fd"
}

Job {
  Name = "arkive03EtcBackup"
  JobDefs = "EtcBackup"
  FileSet = "etc"
  Client = "arkive03.tumo.lab-fd"
}

I do not know what to do.
My "share" is 10 TB and "etc" is 4 MB, and I have to wait for Bacula to finish backing up 10 TB before it starts backing up 4 MB. It's crazy.

Best Answer

Adding "Maximum Concurrent Jobs" both to the Storage definition in storages.conf on the Bacula Director and to the Device definition in bacula-sd.conf resolved this issue.

storages.conf on bacula director

Storage {
  Name = Arkive03_other
  Address = arkive03.tumo.lab                # N.B. Use a fully qualified name here
  SDPort = 9103
  Password = "SomePassword"
  Device = Arkive03_other
  Media Type = File
  Maximum Concurrent Jobs = 5
}

bacula-sd.conf

Device {
  Name = Arkive03_other
  Media Type = File
  Archive Device = /local/bacula/backup/other
  LabelMedia = yes;                   # lets Bacula label unlabeled media
  Random Access = Yes;
  AutomaticMount = yes;               # when device opened, read it
  RemovableMedia = no;
  AlwaysOpen = no;
  Maximum Concurrent Jobs = 5
}
Related Topic