Comments (10)

Slach commented on July 21, 2024

Will release soon.

Slach commented on July 21, 2024

Could you share the output of
clickhouse-backup print-config
?
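
For anyone following along, a minimal sketch of capturing that output for sharing (the file path is just an example; redact credentials before posting, as was done below for credentials_json_encoded):

# Dump the effective configuration to a file so it can be attached to the issue;
# scrub any secrets (keys, encoded credentials) before sharing.
clickhouse-backup print-config > /tmp/clickhouse-backup-effective-config.yaml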

ciprianfocsaneanu commented on July 21, 2024

general:
    remote_storage: gcs
    max_file_size: 0
    backups_to_keep_local: 0
    backups_to_keep_remote: 0
    log_level: debug
    allow_empty_backups: true
    download_concurrency: 6
    upload_concurrency: 6
    upload_max_bytes_per_second: 0
    download_max_bytes_per_second: 0
    object_disk_server_side_copy_concurrency: 32
    use_resumable_state: true
    restore_schema_on_cluster: ""
    upload_by_part: true
    download_by_part: true
    restore_database_mapping: {}
    retries_on_failure: 3
    retries_pause: 30s
    watch_interval: 1h
    full_interval: 24h
    watch_backup_name_template: shard{shard}-{type}-{time:20060102150405}
    sharded_operation_mode: ""
    cpu_nice_priority: 15
    io_nice_priority: idle
    rbac_backup_always: true
    rbac_conflict_resolution: recreate
    retriesduration: 30s
    watchduration: 1h0m0s
    fullduration: 24h0m0s
clickhouse:
    username: default
    password: ""
    host: localhost
    port: 9000
    disk_mapping: {}
    skip_tables:
        - system.*
        - INFORMATION_SCHEMA.*
        - information_schema.*
        - _temporary_and_external_tables.*
    skip_table_engines: []
    timeout: 30m
    freeze_by_part: false
    freeze_by_part_where: ""
    use_embedded_backup_restore: false
    embedded_backup_disk: ""
    embedded_backup_threads: 0
    embedded_restore_threads: 0
    backup_mutations: true
    restore_as_attach: false
    check_parts_columns: true
    secure: false
    skip_verify: false
    sync_replicated_tables: false
    log_sql_queries: true
    config_dir: /etc/clickhouse-server/
    restart_command: exec:systemctl restart clickhouse-server
    ignore_not_exists_error_during_freeze: true
    check_replicas_before_attach: true
    tls_key: ""
    tls_cert: ""
    tls_ca: ""
    max_connections: 6
    debug: false
s3:
    access_key: ""
    secret_key: ""
    bucket: ""
    endpoint: ""
    region: us-east-1
    acl: private
    assume_role_arn: ""
    force_path_style: false
    path: ""
    object_disk_path: ""
    disable_ssl: false
    compression_level: 1
    compression_format: tar
    sse: ""
    sse_kms_key_id: ""
    sse_customer_algorithm: ""
    sse_customer_key: ""
    sse_customer_key_md5: ""
    sse_kms_encryption_context: ""
    disable_cert_verification: false
    use_custom_storage_class: false
    storage_class: STANDARD
    custom_storage_class_map: {}
    concurrency: 7
    part_size: 0
    max_parts_count: 4000
    allow_multipart_download: false
    object_labels: {}
    request_payer: ""
    check_sum_algorithm: ""
    debug: false
gcs:
    credentials_file: ""
    credentials_json: ""
    credentials_json_encoded: REMOVED THIS
    embedded_access_key: ""
    embedded_secret_key: ""
    skip_credentials: false
    bucket: dev-master-rep-clickhouse-backup-disk
    path: backups/{cluster}/{shard}
    object_disk_path: object_disks/{cluster}/{shard}/{disk}
    compression_level: 1
    compression_format: tar
    debug: false
    force_http: false
    endpoint: ""
    storage_class: STANDARD
    object_labels: {}
    custom_storage_class_map: {}
    client_pool_size: 18
    chunk_size: 0
cos:
    url: ""
    timeout: 2m
    secret_id: ""
    secret_key: ""
    path: ""
    compression_format: tar
    compression_level: 1
    debug: false
api:
    listen: 0.0.0.0:7171
    enable_metrics: true
    enable_pprof: false
    username: ""
    password: ""
    secure: false
    certificate_file: ""
    private_key_file: ""
    ca_cert_file: ""
    ca_key_file: ""
    create_integration_tables: true
    integration_tables_host: ""
    allow_parallel: false
    complete_resumable_after_restart: true
ftp:
    address: ""
    timeout: 2m
    username: ""
    password: ""
    tls: false
    skip_tls_verify: false
    path: ""
    object_disk_path: ""
    compression_format: tar
    compression_level: 1
    concurrency: 18
    debug: false
sftp:
    address: ""
    port: 22
    username: ""
    password: ""
    key: ""
    path: ""
    object_disk_path: ""
    compression_format: tar
    compression_level: 1
    concurrency: 18
    debug: false
azblob:
    endpoint_schema: https
    endpoint_suffix: core.windows.net
    account_name: ""
    account_key: ""
    sas: ""
    use_managed_identity: false
    container: ""
    path: ""
    object_disk_path: ""
    compression_level: 1
    compression_format: tar
    sse_key: ""
    buffer_size: 0
    buffer_count: 3
    max_parts_count: 256
    timeout: 4h
    debug: false
custom:
    upload_command: ""
    download_command: ""
    list_command: ""
    delete_command: ""
    command_timeout: 4h
    commandtimeoutduration: 4h0m0s

Slach commented on July 21, 2024

Which Linux user do you use to execute clickhouse-backup restore_remote? root, or another regular user?

ciprianfocsaneanu commented on July 21, 2024

clickhouse user

Slach commented on July 21, 2024

This is weird; if a local backup is already present,
then the download should fail with a different error.

Could you share the output of
ls -la /var/lib/clickhouse/backup/t_1/shadow/reporting/containers_local/default/202405_0_905_215_816/
and
clickhouse-backup list

ciprianfocsaneanu commented on July 21, 2024

I removed that t_1 backup, but I can reproduce this with any backup when I try to download one that already exists locally.
So basically I did:

clickhouse-backup create_remote test_backup
clickhouse-backup download test_backup

and got this error:

2024/05/22 11:49:42.561089 error one of Download go-routine return error: one of downloadTableData go-routine return error: handling file: /202405_22_22_0/checksums.txt: open /var/lib/clickhouse/backup/test_backup/shadow/reporting/clusters_managed_resources_local/default/202405_22_22_0/checksums.txt: permission denied

clickhouse-backup list returns:

2024/05/22 11:50:12.535732  info SELECT value FROM `system`.`build_options` where name='VERSION_INTEGER' logger=clickhouse
2024/05/22 11:50:12.538440  info SELECT countIf(name='type') AS is_disk_type_present, countIf(name='object_storage_type') AS is_object_storage_type_present, countIf(name='free_space') AS is_free_space_present, countIf(name='disks') AS is_storage_policy_present FROM system.columns WHERE database='system' AND table IN ('disks','storage_policies')  logger=clickhouse
2024/05/22 11:50:12.542165  info SELECT d.path, any(d.name) AS name, any(lower(if(d.type='ObjectStorage',d.object_storage_type,d.type))) AS type, min(d.free_space) AS free_space, groupUniqArray(s.policy_name) AS storage_policies FROM system.disks AS d  LEFT JOIN (SELECT policy_name, arrayJoin(disks) AS disk FROM system.storage_policies) AS s ON s.disk = d.name GROUP BY d.path logger=clickhouse
2024/05/22 11:50:12.546967  info SELECT count() AS is_macros_exists FROM system.tables WHERE database='system' AND name='macros'  SETTINGS empty_result_for_aggregation_by_empty_set=0 logger=clickhouse
2024/05/22 11:50:12.551599  info SELECT macro, substitution FROM system.macros logger=clickhouse
2024/05/22 11:50:12.554142  info SELECT count() AS is_macros_exists FROM system.tables WHERE database='system' AND name='macros'  SETTINGS empty_result_for_aggregation_by_empty_set=0 logger=clickhouse
2024/05/22 11:50:12.556705  info SELECT macro, substitution FROM system.macros logger=clickhouse
2024/05/22 11:50:12.559553 debug /tmp/.clickhouse-backup-metadata.cache.GCS load 1 elements logger=gcs
2024/05/22 11:50:12.685566 debug /tmp/.clickhouse-backup-metadata.cache.GCS save 1 elements logger=gcs
test_backup   809.91KiB   22/05/2024 11:47:57   local       regular
test_backup   968.66KiB   22/05/2024 11:47:58   remote      tar, regular
2024/05/22 11:50:12.685883  info clickhouse connection closed logger=clickhouse

and

$ ls -la /var/lib/clickhouse/backup/test_backup/shadow/reporting/clusters_managed_resources_local/default/202405_22_22_0/
total 48
drwxr-s---    2 clickhou clickhou      4096 May 22 11:47 .
drwxr-s---    5 clickhou clickhou      4096 May 22 11:47 ..
-r--r-----    2 clickhou clickhou       333 May 22 11:12 checksums.txt
-r--r-----    2 clickhou clickhou      1021 May 22 11:12 columns.txt
-r--r-----    2 clickhou clickhou         1 May 22 11:12 count.txt
-r--r-----    2 clickhou clickhou      1079 May 22 11:12 data.bin
-r--r-----    2 clickhou clickhou       134 May 22 11:12 data.cmrk3
-r--r-----    2 clickhou clickhou        10 May 22 11:12 default_compression_codec.txt
-r--r-----    2 clickhou clickhou        16 May 22 11:12 minmax_timestamp.idx
-r--r-----    2 clickhou clickhou         4 May 22 11:12 partition.dat
-r--r-----    2 clickhou clickhou        93 May 22 11:12 primary.cidx
-r--r-----    2 clickhou clickhou      2242 May 22 11:12 serialization.json

Slach commented on July 21, 2024

Looks weird; the error should be different.
Try:

clickhouse-backup create_remote test_backup
USE_RESUMABLE_STATE=false clickhouse-backup download test_backup

As a workaround, use:

general:
  backups_to_keep_local: -1

This will delete the local backup after it has been uploaded to remote storage.
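
The same workaround can also be passed on the command line rather than in the config file, since clickhouse-backup accepts upper-case environment-variable overrides for config keys (as with USE_RESUMABLE_STATE above); a minimal sketch, assuming BACKUPS_TO_KEEP_LOCAL maps to general.backups_to_keep_local:

# Assumed env-var override: keep no local copy once the upload to remote storage succeeds.
BACKUPS_TO_KEEP_LOCAL=-1 clickhouse-backup create_remote test_backup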

ciprianfocsaneanu commented on July 21, 2024

It works with USE_RESUMABLE_STATE=false. Thanks for the bug fix as well! I'll update to the latest version.
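
For reference, the same behaviour can be made persistent in the config file instead of setting the environment variable on every run; a minimal sketch using the general.use_resumable_state key visible in the print-config output above (only the relevant key is shown):

general:
    use_resumable_state: false   # equivalent to running with USE_RESUMABLE_STATE=false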

ciprianfocsaneanu commented on July 21, 2024

Will you also release version 2.5.10? I saw it added to the changelog, but the latest released version is 2.5.9.
