
Invalid memory address crash when running a backup #403


Description

@lyze237

Hey!

I'm trying to set up the project with Docker Swarm, but whenever a backup runs I get a nil pointer dereference:

 time=2024-04-15T08:45:58.629Z level=INFO msg="Successfully scheduled backup from environment with expression 47 * * * *"
 time=2024-04-15T08:47:00.000Z level=INFO msg="Now running script on schedule 47 * * * *"
 time=2024-04-15T08:47:00.219Z level=ERROR msg="Unexpected error running schedule 47 * * * *: runtime error: invalid memory address or nil pointer dereference" error="main.runScript.func1: unexpected panic running script: runtime error: invalid memory address or nil pointer dereference"
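For context on how this kind of message usually comes about: it is a Go runtime panic, and my guess (an assumption on my side, not taken from your code) is that an optional pointer field on a swarm service gets dereferenced without a nil check. Both services in my stack are deployed with mode: global, and in the Docker Go SDK Spec.Mode.Replicated is only populated for replicated services, so a small sketch like the following reproduces the exact same panic when the commented-out line is enabled:

package main

import (
	"context"
	"fmt"

	"github.com/docker/docker/api/types"
	"github.com/docker/docker/client"
)

func main() {
	cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}

	services, err := cli.ServiceList(context.Background(), types.ServiceListOptions{})
	if err != nil {
		panic(err)
	}

	for _, service := range services {
		// Unsafe: Spec.Mode.Replicated is nil for services deployed with
		// "mode: global", so this dereference panics with
		// "invalid memory address or nil pointer dereference":
		//   replicas := *service.Spec.Mode.Replicated.Replicas

		// Safe: check the pointer chain before dereferencing.
		if service.Spec.Mode.Replicated != nil && service.Spec.Mode.Replicated.Replicas != nil {
			fmt.Printf("%s: %d replicas\n", service.Spec.Name, *service.Spec.Mode.Replicated.Replicas)
		} else {
			fmt.Printf("%s: global mode, no replica count\n", service.Spec.Name)
		}
	}
}

If that guess is right, guarding the pointer chain (or handling global-mode services explicitly) would avoid the panic; again, this is only a hypothesis based on my configuration, not on your source.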

Output of docker stack ps:

ID             NAME                                        IMAGE                               NODE                DESIRED STATE   CURRENT STATE            ERROR     PORTS
cibr7veryup0   influx_backup.qfgvk3lkntbcstbbs7b6chb1y     offen/docker-volume-backup:latest   docker-management   Running         Running 10 minutes ago
vj9kj0oaxsyo   influx_influxdb.qfgvk3lkntbcstbbs7b6chb1y   influxdb:1.8                        docker-management   Running         Running 10 minutes ago

docker stack file:

version: '3.3'

services:
  backup:
    image: offen/docker-volume-backup:latest
    restart: always
    environment:
      BACKUP_RETENTION_DAYS: "7"
      BACKUP_CRON_EXPRESSION: "52 * * * *"
      BACKUP_PRUNING_LEEWAY: "5s"
      SSH_HOST_NAME: 192.168.122.186
      SSH_PORT: 22
      SSH_USER: lyze
      SSH_PASSWORD: pw
      SSH_REMOTE_PATH: /home/lyze/backups
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - influx-data:/backup/influx-data:ro
    deploy:
      mode: global
      placement:
        constraints:
          - node.role == manager

  influxdb:
    image: influxdb:1.8
    volumes:
      - influx-data:/var/lib/influxdb
    networks:
      - net
    deploy:
      labels:
        - docker-volume-backup.stop-during-backup=true
      resources:
        limits:
          cpus: '0.60'
          memory: 512M
        reservations:
          cpus: '0.30'
          memory: 128M
      mode: global
      placement:
        constraints:
          - node.role == manager

networks:
  net:
    driver: overlay

volumes:
  influx-data:
    driver: local

This also happens with the compose file from the tests folder in your repo, slightly adjusted so that the backup container runs on the manager node:

sudo docker exec fa29d2fa55f0 backup
time=2024-04-15T08:59:03.382Z level=ERROR msg="Fatal error running command: runtime error: invalid memory address or nil pointer dereference" error="main.(*command).runAsCommand: error running script: main.runScript.func1: unexpected panic running script: runtime error: invalid memory address or nil pointer dereference"
ID             NAME                   IMAGE                                      NODE                DESIRED STATE   CURRENT STATE           ERROR     PORTS
md9x5l4f6iys   backup-test_backup.1   offen/docker-volume-backup:latest          docker-management   Running         Running 3 minutes ago
ccjk1hq5k1xt   backup-test_minio.1    minio/minio:RELEASE.2020-08-04T23-10-51Z   docker-default      Running         Running 3 minutes ago
a78w8tdg7jaj   backup-test_offen.1    offen/offen:latest                         docker-default      Running         Running 3 minutes ago
azeu0we83lb7   backup-test_offen.2    offen/offen:latest                         docker-vpn          Running         Running 3 minutes ago
wf2r8uoic7zs   backup-test_pg.1       postgres:14-alpine                         docker-default      Running         Running 3 minutes ago
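As a side note on the error string itself: "main.runScript.func1: unexpected panic running script: ..." looks like a deferred recover() that converts the panic into an ordinary error, which would explain why the log only shows the message and not a stack trace. A minimal sketch of that pattern, with purely illustrative names (not your actual code):

package main

import "fmt"

// runScript mimics a runner that executes a script and converts any panic
// (including runtime errors such as nil pointer dereferences) into a normal
// error via a deferred recover, so the caller only sees a log message.
func runScript(script func() error) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("runScript: unexpected panic running script: %v", r)
		}
	}()
	return script()
}

func main() {
	var ptr *int
	err := runScript(func() error {
		_ = *ptr // nil dereference panics inside the script
		return nil
	})
	fmt.Println(err)
	// Output: runScript: unexpected panic running script: runtime error: invalid memory address or nil pointer dereference
}

If the real runner works like this, also logging the recovered stack (for example via runtime/debug.Stack()) would make issues like this one much easier to pin down.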
The adjusted stack file:

version: '3.8'

services:
  minio:
    image: minio/minio:RELEASE.2020-08-04T23-10-51Z
    deploy:
      restart_policy:
        condition: on-failure
    environment:
      MINIO_ROOT_USER: test
      MINIO_ROOT_PASSWORD: test
      MINIO_ACCESS_KEY: test
      MINIO_SECRET_KEY: GMusLtUmILge2by+z890kQ
    entrypoint: /bin/ash -c 'mkdir -p /data/backup && minio server /data'
    volumes:
      - backup_data:/data

  backup:
    image: offen/docker-volume-backup:latest
    depends_on:
      - minio
    deploy:
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.role == manager
    environment:
      AWS_ACCESS_KEY_ID: test
      AWS_SECRET_ACCESS_KEY: GMusLtUmILge2by+z890kQ
      AWS_ENDPOINT: minio:9000
      AWS_ENDPOINT_PROTO: http
      AWS_S3_BUCKET_NAME: backup
      BACKUP_FILENAME: test.tar.gz
      BACKUP_CRON_EXPRESSION: 0 0 5 31 2 ?
      BACKUP_RETENTION_DAYS: 7
      BACKUP_PRUNING_LEEWAY: 5s
    volumes:
      - pg_data:/backup/pg_data:ro
      - /var/run/docker.sock:/var/run/docker.sock

  offen:
    image: offen/offen:latest
    labels:
      - docker-volume-backup.stop-during-backup=true
    healthcheck:
      disable: true
    deploy:
      replicas: 2
      restart_policy:
        condition: on-failure

  pg:
    image: postgres:14-alpine
    environment:
      POSTGRES_PASSWORD: example
    labels:
      - docker-volume-backup.stop-during-backup=true
    volumes:
      - pg_data:/var/lib/postgresql/data
    deploy:
      restart_policy:
        condition: on-failure

volumes:
  backup_data:
    name: backup_data
  pg_data:
    name: pg_data

Expected behavior
The backup shouldn't crash.

Version:

  • Image Version: v2.39.0
  • Docker Version: Docker version 26.0.1, build d260a54

Additional context
I have 3 VMs running in a basic swarm network; they all run Ubuntu Server 22.04:

lyze@docker-management:~/stacks$ sudo docker node ls
ID                            HOSTNAME            STATUS    AVAILABILITY   MANAGER STATUS   ENGINE VERSION
603pnevt1ayxlgxoruya6uia0     docker-default      Ready     Active                          26.0.1
qfgvk3lkntbcstbbs7b6chb1y *   docker-management   Ready     Active         Leader           26.0.1
tw74duuij6dsun881i2n3lodl     docker-vpn          Ready     Active                          26.0.1
